Mirror of https://git.yoctoproject.org/poky, synced 2026-01-30 13:28:43 +01:00

Compare commits (42 commits):

b29bd836ad
72eafe23fc
5cc700daae
c25d4ba28c
045b982378
f99f7e354f
82925e8d7f
55e159c873
eddcc607c9
b82067c8e7
b861de3d39
10e9286aba
e0964ab86c
8f7bba3812
3a92d518b4
9fad50948c
43a1d901f5
3ca17dfc31
d7919b22fe
e8bb07c601
8626d103f7
0b02870404
7ed5c2cebc
12e3b599ca
76f6ab09ca
a68bc3912a
1411386a6b
9cb8f533fe
f2bc66b102
5dfee8d314
c1ee270d2c
80ca44d44c
1218b39f24
c846e5848e
d2ce340b34
5f72e5d0f2
1a651898a8
0e1d3d91d6
935b598736
1a6a15e568
c7edcc0316
6d80967bcf
28  .gitignore (vendored)

@@ -1,27 +1,13 @@
*.pyc
*.pyo
/*.patch
/build*/
pyshtables.py
pstage/
scripts/oe-git-proxy-socks
build/conf/local.conf
build/tmp/
scripts/poky-git-proxy-socks
sources/
meta-*/
!meta-skeleton
!meta-selftest
hob-image-*.bb
meta-darwin
meta-maemo
meta-prvt*
poky-autobuilder*
*.swp
*.orig
*.rej
*~
!meta-poky
!meta-yocto
!meta-yocto-bsp
!meta-yocto-imported
documentation/user-manual/user-manual.html
documentation/user-manual/user-manual.pdf
documentation/user-manual/user-manual.tgz
pull-*/
bitbake/lib/toaster/contrib/tts/backlog.txt
bitbake/lib/toaster/contrib/tts/log/*
bitbake/lib/toaster/contrib/tts/.cache/*

@@ -1,2 +0,0 @@
# Template settings
TEMPLATECONF=${TEMPLATECONF:-meta-poky/conf}
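As an aside (not part of the diff above), this TEMPLATECONF value is what oe-init-build-env uses, via scripts/oe-setup-builddir, to pick the template local.conf and bblayers.conf when it creates a new build directory. A minimal sketch of relying on the default versus overriding it; "meta-mylayer" is a hypothetical layer shipping its own conf templates:

    $ source oe-init-build-env build
    $ TEMPLATECONF=meta-mylayer/conf source oe-init-build-env build-mylayer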
13  LICENSE

@@ -1,14 +1,11 @@
Different components of OpenEmbedded are under different licenses (a mix
of MIT and GPLv2). Please see:
Different components of Poky are under different licenses (a mix of
MIT and GPLv2). Please see:

meta/COPYING.GPLv2 (GPLv2)
bitbake/COPYING (GPLv2)
meta/COPYING.MIT (MIT)
meta-selftest/COPYING.MIT (MIT)
meta-skeleton/COPYING.MIT (MIT)
meta-extras/COPYING.MIT (MIT)

All metadata is MIT licensed unless otherwise stated. Source code
included in tree for individual recipes is under the LICENSE stated in
the associated recipe (.bb file) unless otherwise stated.
which cover the components in those subdirectories.

License information for any other files is either explicitly stated
or defaults to GPL version 2.
63  README

@@ -1,58 +1,15 @@

Poky
====

Poky is an integration of various components to form a complete prepackaged
build system and development environment. It features support for building
customised embedded device style images. There are reference demo images
featuring an X11/Matchbox/GTK themed UI called Sato. The system supports
cross-architecture application development using QEMU emulation and a
standalone toolchain and SDK with IDE integration.
Poky platform builder is a combined cross build system and development
environment. It features support for building X11/Matchbox/GTK based
filesystem images for various embedded devices and boards. It also
supports cross-architecture application development using QEMU emulation
and a standalone toolchain and SDK with IDE integration.

Poky has an extensive handbook, the source of which is contained in
the handbook directory. For compiled HTML or pdf versions of this,
see the Poky website http://pokylinux.org.

Additional information on the specifics of hardware that Poky supports
is available in README.hardware. Further hardware support can easily be added
in the form of layers which extend the system's capabilities in a modular way.

As an integration layer Poky consists of several upstream projects such as
BitBake, OpenEmbedded-Core, Yocto documentation and various sources of information
e.g. for the hardware support. Poky is in turn a component of the Yocto Project.

The Yocto Project has extensive documentation about the system including a
reference manual which can be found at:
http://yoctoproject.org/documentation

OpenEmbedded-Core is a layer containing the core metadata for current versions
of OpenEmbedded. It is distro-less (can build a functional image with
DISTRO = "nodistro") and contains only emulated machine support.

For information about OpenEmbedded, see the OpenEmbedded website:
http://www.openembedded.org/

Where to Send Patches
=====================

As Poky is an integration repository (built using a tool called combo-layer),
patches against the various components should be sent to their respective
upstreams:

bitbake:
    Git repository: http://git.openembedded.org/bitbake/
    Mailing list: bitbake-devel@lists.openembedded.org

documentation:
    Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/yocto-docs/
    Mailing list: yocto@yoctoproject.org

meta-poky, meta-yocto-bsp:
    Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/meta-yocto(-bsp)
    Mailing list: poky@yoctoproject.org

Everything else should be sent to the OpenEmbedded Core mailing list. If in
doubt, check the oe-core git repository for the content you intend to modify.
Before sending, be sure the patches apply cleanly to the current oe-core git
repository.

    Git repository: http://git.openembedded.org/openembedded-core/
    Mailing list: openembedded-core@lists.openembedded.org

Note: The scripts directory should be treated with extra care as it is a mix of
oe-core and poky-specific files.
is available in README.hardware.
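As an illustration only (this is not part of the README), a patch against oe-core content would typically be posted to the list above with git, assuming a single commit on top of the current openembedded-core master and a working git send-email mail setup:

    $ git format-patch -1
    $ git send-email --to=openembedded-core@lists.openembedded.org 0001-*.patch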
656  README.hardware

@@ -1,500 +1,436 @@

Poky Hardware README
====================
Poky Hardware Reference Guide
=============================

This file gives details about using Poky with the reference machines
supported out of the box. A full list of supported reference target machines
can be found by looking in the following directories:
This file gives details about using Poky with different hardware reference
boards and consumer devices. A full list of target machines can be found by
looking in the meta/conf/machine/ directory. If in doubt about using Poky with
your hardware, consult the documentation for your board/device. To discuss
support for further hardware reference boards/devices please contact OpenedHand.

    meta/conf/machine/
    meta-yocto-bsp/conf/machine/

If you are in doubt about using Poky/OpenEmbedded with your hardware, consult
the documentation for your board/device.

Support for additional devices is normally added by creating BSP layers - for
more information please see the Yocto Board Support Package (BSP) Developer's
Guide - documentation source is in documentation/bspguide or download the PDF
from:

    http://yoctoproject.org/documentation

Support for physical reference hardware has now been split out into a
meta-yocto-bsp layer which can be removed separately from other layers if not
needed.
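For illustration (not part of either version of this file), which BSP layers a build uses is controlled by conf/bblayers.conf in the build directory; a minimal sketch, where the /path/to prefixes and meta-somevendor-bsp are placeholders:

    BBLAYERS ?= " \
      /path/to/poky/meta \
      /path/to/poky/meta-poky \
      /path/to/poky/meta-yocto-bsp \
      /path/to/meta-somevendor-bsp \
      "

Removing the meta-yocto-bsp entry from this list is what drops the reference board support described above.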
QEMU Emulation Targets
======================

To simplify development, the build system supports building images to
work with the QEMU emulator in system emulation mode. Several architectures
are currently supported:

  * ARM (qemuarm)
  * x86 (qemux86)
  * x86-64 (qemux86-64)
  * PowerPC (qemuppc)
  * MIPS (qemumips)

Use of the QEMU images is covered in the Yocto Project Reference Manual.
The appropriate MACHINE variable value corresponding to the target is given
in brackets.
QEMU Emulation Images (qemuarm and qemux86)
===========================================

To simplify development Poky supports building images to work with the QEMU
emulator in system emulation mode. Two architectures are currently supported,
ARM (via qemuarm) and x86 (via qemux86). Use of the QEMU images is covered
in the Poky Handbook.
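As a worked example (not taken from either version of this file), a QEMU image is built and run by picking one of the MACHINE values listed above, assuming the build environment has already been initialised with oe-init-build-env:

    $ MACHINE=qemux86 bitbake core-image-minimal
    $ runqemu qemux86

MACHINE can equally be set once in conf/local.conf rather than on the command line.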
Hardware Reference Boards
=========================

The following boards are supported by the meta-yocto-bsp layer:
The following boards are supported by Poky:

  * Texas Instruments Beaglebone (beaglebone)
  * Freescale MPC8315E-RDB (mpc8315e-rdb)
  * Compulab CM-X270 (cm-x270)
  * Compulab EM-X270 (em-x270)
  * FreeScale iMX31ADS (mx31ads)
  * Marvell PXA3xx Zylonite (zylonite)
  * Logic iMX31 Lite Kit (mx31litekit)
  * Phytec phyCORE-iMX31 (mx31phy)

For more information see the board's section below. The appropriate MACHINE
variable value corresponding to the board is given in brackets.

Reference Board Maintenance
===========================

Send pull requests, patches, comments or questions about meta-yocto-bsps to poky@yoctoproject.org

Maintainers: Kevin Hao <kexin.hao@windriver.com>
             Bruce Ashfield <bruce.ashfield@windriver.com>
For more information see the board's section below. The Poky MACHINE setting
corresponding to the board is given in brackets.

Consumer Devices
================

The following consumer devices are supported by the meta-yocto-bsp layer:
The following consumer devices are supported by Poky:

  * Intel x86 based PCs and devices (genericx86)
  * Ubiquiti Networks EdgeRouter Lite (edgerouter)
  * FIC Neo1973 GTA01 smartphone (fic-gta01)
  * HTC Universal (htcuniversal)
  * Nokia 770/N800/N810 Internet Tablets (nokia770 and nokia800)
  * Sharp Zaurus SL-C7x0 series (c7x0)
  * Sharp Zaurus SL-C1000 (akita)
  * Sharp Zaurus SL-C3x00 series (spitz)

For more information see the device's section below. The appropriate MACHINE
variable value corresponding to the device is given in brackets.
For more information see the board's section below. The Poky MACHINE setting
corresponding to the board is given in brackets.

Poky Boot CD (bootcdx86)
========================

The Poky boot CD iso images are designed as a demonstration of the Poky
environment and to show the versatile image formats Poky can generate. It will
run on Pentium2 or greater PC style computers. The iso image can be
burnt to CD and then booted from.

Specific Hardware Documentation
===============================

Hardware Reference Boards
=========================

Intel x86 based PCs and devices (genericx86)
============================================
Compulab CM-X270 (cm-x270)
==========================

The genericx86 MACHINE is tested on the following platforms:
The bootloader on this board doesn't support writing jffs2 images directly to
NAND and normally uses a proprietary kernel flash driver. To allow the use of
jffs2 images, a two stage updating procedure is needed. Firstly, an initramfs
is booted which contains mtd utilities and this is then used to write the main
filesystem.

Intel Xeon/Core i-Series:
  + Intel Romley Server: Sandy Bridge Xeon processor, C600 PCH (Patsburg), (Canoe Pass CRB)
  + Intel Romley Server: Ivy Bridge Xeon processor, C600 PCH (Patsburg), (Intel SDP S2R3)
  + Intel Crystal Forest Server: Sandy Bridge Xeon processor, DH89xx PCH (Cave Creek), (Stargo CRB)
  + Intel Chief River Mobile: Ivy Bridge Mobile processor, QM77 PCH (Panther Point-M), (Emerald Lake II CRB, Sabino Canyon CRB)
  + Intel Huron River Mobile: Sandy Bridge processor, QM67 PCH (Cougar Point), (Emerald Lake CRB, EVOC EC7-1817LNAR board)
  + Intel Calpella Platform: Core i7 processor, QM57 PCH (Ibex Peak-M), (Red Fort CRB, Emerson MATXM CORE-411-B)
  + Intel Nehalem/Westmere-EP Server: Xeon 56xx/55xx processors, 5520 chipset, ICH10R IOH (82801), (Hanlan Creek CRB)
  + Intel Nehalem Workstation: Xeon 56xx/55xx processors, System SC5650SCWS (Greencity CRB)
  + Intel Picket Post Server: Xeon 56xx/55xx processors (Jasper Forest), 3420 chipset (Ibex Peak), (Osage CRB)
  + Intel Storage Platform: Sandy Bridge Xeon processor, C600 PCH (Patsburg), (Oak Creek Canyon CRB)
  + Intel Shark Bay Client Platform: Haswell processor, LynxPoint PCH, (Walnut Canyon CRB, Lava Canyon CRB, Basking Ridge CRB, Flathead Creek CRB)
  + Intel Shark Bay Ultrabook Platform: Haswell ULT processor, Lynx Point-LP PCH, (WhiteTip Mountain 1 CRB)
It is assumed the board is connected to a network where a TFTP server is
available and that a serial terminal is available to communicate with the
bootloader (38400, 8N1). If a DHCP server is available the device will use it
to obtain an IP address. If not, run:

Intel Atom platforms:
  + Intel embedded Menlow: Intel Atom Z510/530 CPU, System Controller Hub US15W (Portwell NANO-8044)
  + Intel Luna Pier: Intel Atom N4xx/D5xx series CPU (aka: Pineview-D & -M), 82801HM I/O Hub (ICH8M), (Advantech AIMB-212, Moon Creek CRB)
  + Intel Queens Bay platform: Intel Atom E6xx CPU (aka: Tunnel Creek), Topcliff EG20T I/O Hub (Emerson NITX-315, Crown Bay CRB, Minnow Board)
  + Intel Fish River Island platform: Intel Atom E6xx CPU (aka: Tunnel Creek), Topcliff EG20T I/O Hub (Kontron KM2M806)
  + Intel Cedar Trail platform: Intel Atom N2000 & D2000 series CPU (aka: Cedarview), NM10 Express Chipset (Norco kit BIS-6630, Cedar Rock CRB)

    ARMmon > setip dhcp off
    ARMmon > setip ip 192.168.1.203
    ARMmon > setip mask 255.255.255.0

and is likely to work on many unlisted Atom/Core/Xeon based devices. The MACHINE
type supports ethernet, wifi, sound, and Intel/vesa graphics by default in
addition to common PC input devices, busses, and so on.
To reflash the kernel:

Depending on the device, it can boot from a traditional hard-disk, a USB device,
or over the network. Writing generated images to physical media is
straightforward with a caveat for USB devices. The following examples assume the
target boot device is /dev/sdb; be sure to verify this and use the correct
device as the following commands are run as root and are not reversible.

    ARMmon > download kernel tftp zimage 192.168.1.202
    ARMmon > flash kernel

USB Device:
  1. Build a live image. This image type consists of a simple filesystem
     without a partition table, which is suitable for USB keys, and with the
     default setup for the genericx86 machine, this image type is built
     automatically for any image you build. For example:
where zimage is the name of the kernel on the TFTP server and its IP address is
192.168.1.202. The names of the files must be all lowercase.

       $ bitbake core-image-minimal
To reflash the initrd/initramfs:

  2. Use the "dd" utility to write the image to the raw block device. For
     example:

    ARMmon > download ramdisk tftp diskimage 192.168.1.202
    ARMmon > flash ramdisk

       # dd if=core-image-minimal-genericx86.hddimg of=/dev/sdb
where diskimage is the name of the initramfs image (a cpio.gz file).

If the device fails to boot with "Boot error" displayed, or apparently
stops just after the SYSLINUX version banner, it is likely the BIOS cannot
understand the physical layout of the disk (or rather it expects a
particular layout and cannot handle anything else). There are two possible
solutions to this problem:
To boot the initramfs:

  1. Change the BIOS USB Device setting to HDD mode. The label will vary by
     device, but the idea is to force BIOS to read the Cylinder/Head/Sector
     geometry from the device.

    ARMmon > ramdisk on
    ARMmon > bootos "console=ttyS0,38400 rdinit=/sbin/init"

  2. Without such an option, the BIOS generally boots the device in USB-ZIP
     mode. To write an image to a USB device that will be bootable in
     USB-ZIP mode, carry out the following actions:
To reflash the main image, log in to the system as user "root", then run:

     a. Determine the geometry of your USB device using fdisk:

       # ifconfig eth0 192.168.1.203
       # tftp -g -r mainimage 192.168.1.202
       # flash_eraseall /dev/mtd1
       # nandwrite /dev/mtd1 mainimage

       # fdisk /dev/sdb
       Command (m for help): p
which configures the network interface with the IP address 192.168.1.203,
downloads the "mainimage" file from the TFTP server at 192.168.1.202, erases
the flash and then writes the new image to the flash.

       Disk /dev/sdb: 4011 MB, 4011491328 bytes
       124 heads, 62 sectors/track, 1019 cylinders, total 7834944 sectors
       ...
The main image can then be booted with:

       Command (m for help): q

    ARMmon > bootos "console=ttyS0,38400 root=/dev/mtdblock1 rootfstype=jffs2"

     b. Configure the USB device for USB-ZIP mode:

       # mkdiskimage -4 /dev/sdb 1019 124 62
Note that the initramfs image is built by poky in a slightly different mode to
normal since it uses uclibc. To generate this use a command like:

     Where 1019, 124 and 62 are the cylinder, head and sectors/track counts
     as reported by fdisk (substitute the values reported for your device).
     When the operation has finished and the access LED (if any) on the
     device stops flashing, remove and reinsert the device to allow the
     kernel to detect the new partition layout.

    IMAGE_FSTYPES=cpio.gz MACHINE=cm-x270 POKYLIBC=uclibc bitbake poky-image-minimal-mtdutils

     c. Copy the contents of the image to the USB-ZIP mode device:

       # mkdir /tmp/image
       # mkdir /tmp/usbkey
       # mount -o loop core-image-minimal-genericx86.hddimg /tmp/image
       # mount /dev/sdb4 /tmp/usbkey
       # cp -rf /tmp/image/* /tmp/usbkey

Compulab EM-X270 (em-x270)
==========================

     d. Install the syslinux boot loader:
Fetch the "Linux - kernel and run-time image (Angstrom)" ZIP file from the
Compulab website. Inside the images directory of this ZIP file is another ZIP
file called 'LiveDisk.zip'. Extract this over a cleanly formatted vfat USB flash
drive. Replace the 'em_x270.img' file with the 'updater-em-x270.ext2' file.

       # syslinux /dev/sdb4
Insert this USB disk into the supplied adapter and connect this to the
board. Whilst holding down the suspend button press the reset button. The
board will now boot off the USB key and into a version of Angstrom. On the
desktop is an icon labelled "Updater". Run this program to launch the updater
that will flash the Poky kernel and rootfs to the board.

     e. Unmount everything:

       # umount /tmp/image
       # umount /tmp/usbkey

FreeScale iMX31ADS (mx31ads)
============================

Install the boot device in the target board and configure the BIOS to boot
from it.
The correct serial port is the top-most female connector to the right of the
ethernet socket.

For more details on the USB-ZIP scenario, see the syslinux documentation:
http://git.kernel.org/?p=boot/syslinux/syslinux.git;a=blob_plain;f=doc/usbkey.txt;hb=HEAD
For uploading data to RedBoot we are going to use tftp. In this example we
assume that the tftpserver is on 192.168.9.1 and the board is on 192.168.9.2.

To set the IP address, run:

Texas Instruments Beaglebone (beaglebone)
=========================================

    ip_address -l 192.168.9.2/24 -h 192.168.9.1

The Beaglebone is an ARM Cortex-A8 development board with USB, Ethernet, 2D/3D
accelerated graphics, audio, serial, JTAG, and SD/MMC. The Black adds a faster
CPU, more RAM, eMMC flash and a micro HDMI port. The beaglebone MACHINE is
tested on the following platforms:
To download a kernel called "zimage" from the TFTP server, run:

  o Beaglebone Black A6
  o Beaglebone A6 (the original "White" model)

    load -r -b 0x100000 zimage

The Beaglebone Black has eMMC, while the White does not. Pressing the USER/BOOT
button when powering on will temporarily change the boot order. But for the sake
of simplicity, these instructions assume you have erased the eMMC on the Black,
so its boot behavior matches that of the White and boots off of SD card. To do
this, issue the following commands from the u-boot prompt:
To write the kernel to flash run:

    # mmc dev 1
    # mmc erase 0 512

    fis create kernel

To further tailor these instructions for your board, please refer to the
documentation at http://www.beagleboard.org/bone and http://www.beagleboard.org/black
To download a rootfs jffs2 image "rootfs" from the TFTP server, run:

From a Linux system with access to the image files perform the following steps
as root, replacing mmcblk0* with the SD card device on your machine (such as sdc
if used via a usb card reader):

    load -r -b 0x100000 rootfs

  1. Partition and format an SD card:

       # fdisk -lu /dev/mmcblk0
To write the root filesystem to flash run:

       Disk /dev/mmcblk0: 3951 MB, 3951034368 bytes
       255 heads, 63 sectors/track, 480 cylinders, total 7716864 sectors
       Units = sectors of 1 * 512 = 512 bytes

    fis create root

       Device Boot      Start       End    Blocks  Id  System
       /dev/mmcblk0p1 *     63    144584     72261   c  Win95 FAT32 (LBA)
       /dev/mmcblk0p2   144585    465884    160650  83  Linux
To load and boot a kernel and rootfs from flash:

       # mkfs.vfat -F 16 -n "boot" /dev/mmcblk0p1
       # mke2fs -j -L "root" /dev/mmcblk0p2

    fis load kernel
    exec -b 0x100000 -l 0x200000 -c "noinitrd console=ttymxc0,115200 root=/dev/mtdblock2 rootfstype=jffs2 init=linuxrc ip=none"

     The following assumes the SD card partitions 1 and 2 are mounted at
     /media/boot and /media/root respectively. Removing the card and reinserting
     it will do just that on most modern Linux desktop environments.
To load and boot a kernel from a TFTP server with the rootfs over NFS:

     The files referenced below are made available after the build in
     build/tmp/deploy/images.

    load -r -b 0x100000 zimage
    exec -b 0x100000 -l 0x200000 -c "noinitrd console=ttymxc0,115200 root=/dev/nfs nfsroot=192.168.9.1:/mnt/nfsmx31 rw ip=192.168.9.2::192.168.9.1:255.255.255.0"

  2. Install the boot loaders

       # cp MLO-beaglebone /media/boot/MLO
       # cp u-boot-beaglebone.img /media/boot/u-boot.img
The instructions above are for using the (default) NOR flash on the board,
there is also 128M of NAND flash. It is possible to install Poky to the NAND
flash which gives more space for the rootfs and instructions for using this are
given below. To switch to the NAND flash:

  3. Install the root filesystem

       # tar x -C /media/root -f core-image-$IMAGE_TYPE-beaglebone.tar.bz2

    factive NAND

  4. If using core-image-base or core-image-sato images, the SD card is ready
     and rootfs already contains the kernel, modules and device tree (DTB)
     files necessary to be booted with U-boot's default configuration, so
     skip directly to step 8.
     For core-image-minimal, proceed through next steps.
This will then restart RedBoot using the NAND rather than the NOR. If you
have not used the NAND before then it is unlikely that there will be a
partition table yet. You can get the list of partitions with 'fis list'.

  5. If using core-image-minimal rootfs, install the modules

       # tar x -C /media/root -f modules-beaglebone.tgz
If this shows no partitions then you can create them with:

  6. If using core-image-minimal rootfs, install the kernel zImage into /boot
     directory of rootfs

       # cp zImage-beaglebone.bin /media/root/boot/zImage

    fis init

  7. If using core-image-minimal rootfs, also install device tree (DTB) files
     into /boot directory of rootfs

       # cp zImage-am335x-bone.dtb /media/root/boot/am335x-bone.dtb
       # cp zImage-am335x-boneblack.dtb /media/root/boot/am335x-boneblack.dtb
The output of 'fis list' should now show:

  8. Unmount the SD partitions, insert the SD card into the Beaglebone, and
     boot the Beaglebone

    Name              FLASH addr   Mem addr     Length      Entry point
    RedBoot           0xE0000000   0xE0000000   0x00040000  0x00000000
    FIS directory     0xE7FF4000   0xE7FF4000   0x00003000  0x00000000
    RedBoot config    0xE7FF7000   0xE7FF7000   0x00001000  0x00000000

Partitions for the kernel and rootfs need to be created:

Freescale MPC8315E-RDB (mpc8315e-rdb)
=====================================

    fis create -l 0x1A0000 -e 0x00100000 kernel
    fis create -l 0x5000000 -e 0x00100000 root

The MPC8315 PowerPC reference platform (MPC8315E-RDB) is aimed at hardware and
software development of network attached storage (NAS) and digital media server
applications. The MPC8315E-RDB features the PowerQUICC II Pro processor, which
includes a built-in security accelerator.
You may now use the instructions above for flashing. However it is important
to note that the erase block size for the NAND is different to the NOR so the
JFFS erase size will need to be changed to 0x4000. Standard images are built
for NOR and you will need to build custom images for NAND.

(Note: you may find it easier to order MPC8315E-RDBA; this appears to be the
same board in an enclosure with accessories. In any case it is fully
compatible with the instructions given here.)
You will also need to update the kernel command line to use the correct root
filesystem. This should be '/dev/mtdblock7' if you adhere to the partitioning
scheme shown above. If this fails then you can double-check against the output
from the kernel when it evaluates the available mtd partitions.

Setup instructions
------------------

You will need the following:
* NFS root setup on your workstation
* TFTP server installed on your workstation
* Straight-thru 9-conductor serial cable (DB9, M/F) connected from your
  PC to UART1
* Ethernet connected to the first ethernet port on the board

Marvell PXA3xx Zylonite (zylonite)
==================================

--- Preparation ---
These instructions assume the Zylonite is connected to a machine running a TFTP
server at address 192.168.123.5 and that a serial link (38400 8N1) is available
to access the blob bootloader. The kernel is on the TFTP server as
"zylonite-kernel" and the root filesystem jffs2 file is "zylonite-rootfs" and
the images are to be saved in NAND flash.

Note: if you have altered your board's ethernet MAC address(es) from the
defaults, or you need to do so because you want multiple boards on the same
network, then you will need to change the values in the dts file (patch
linux/arch/powerpc/boot/dts/mpc8315erdb.dts within the kernel source). If
you have left them at the factory default then you shouldn't need to do
anything here.
The following commands set up blob:

--- Booting from NFS root ---

    blob> setip client 192.168.123.4
    blob> setip server 192.168.123.5

Load the kernel and dtb (device tree blob), and boot the system as follows:
To flash the kernel:

  1. Get the kernel (uImage-mpc8315e-rdb.bin) and dtb (uImage-mpc8315e-rdb.dtb)
     files from the tmp/deploy directory, and make them available on your TFTP
     server.

    blob> tftp zylonite-kernel
    blob> nandwrite -j 0x80800000 0x60000 0x200000

  2. Connect the board's first serial port to your workstation and then start up
     your favourite serial terminal so that you will be able to interact with
     the serial console. If you don't have a favourite, picocom is suggested:
To flash the rootfs:

       $ picocom /dev/ttyUSB0 -b 115200

    blob> tftp zylonite-rootfs
    blob> nanderase -j 0x260000 0x5000000
    blob> nandwrite -j 0x80800000 0x260000 <length>

  3. Power up or reset the board and press a key on the terminal when prompted
     to get to the U-Boot command line
(where <length> is the rootfs size which will be printed by the tftp step)

  4. Set up the environment in U-Boot:
To boot the board:

       => setenv ipaddr <board ip>
       => setenv serverip <tftp server ip>
       => setenv bootargs root=/dev/nfs rw nfsroot=<nfsroot ip>:<rootfs path> ip=<board ip>:<server ip>:<gateway ip>:255.255.255.0:mpc8315e:eth0:off console=ttyS0,115200

    blob> nkernel
    blob> boot

  5. Download the kernel and dtb, and boot:

       => tftp 1000000 uImage-mpc8315e-rdb.bin
       => tftp 2000000 uImage-mpc8315e-rdb.dtb
       => bootm 1000000 - 2000000

Logic iMX31 Lite Kit (mx31litekit)
==================================

--- Booting from JFFS2 root ---
The easiest method to boot this board is to take an MMC/SD card and format
the first partition as ext2, then extract the poky image onto this as root.
Assuming the board is network connected, a TFTP server is available at
192.168.1.33 and a serial terminal is available (115200 8N1), the following
commands will boot a kernel called "mx31kern" from the TFTP server:

  1. First boot the board with NFS root.

    losh> ifconfig sm0 192.168.1.203 255.255.255.0 192.168.1.33
    losh> load raw 0x80100000 0x200000 /tftp/192.168.1.33:mx31kern
    losh> exec 0x80100000 -

  2. Erase the MTD partition which will be used as root:

       $ flash_eraseall /dev/mtd3

Phytec phyCORE-iMX31 (mx31phy)
==============================

  3. Copy the JFFS2 image to the MTD partition:
Support for this board is currently being developed. Experimental jffs2
images and a suitable kernel are available and are known to work with the
board.

       $ flashcp core-image-minimal-mpc8315e-rdb.jffs2 /dev/mtd3

  4. Then reboot the board and set up the environment in U-Boot:

Consumer Devices
================

       => setenv bootargs root=/dev/mtdblock3 rootfstype=jffs2 console=ttyS0,115200

FIC Neo1973 GTA01 smartphone (fic-gta01)
========================================

To install Poky on a GTA01 smartphone you will need the "dfu-util" tool,
which you can build with the "bitbake dfu-util-native" command.

Ubiquiti Networks EdgeRouter Lite (edgerouter)
==============================================
Flashing requires these steps:

The EdgeRouter Lite is part of the EdgeMax series. It is a MIPS64 router
(based on the Cavium Octeon processor) with 512MB of RAM, which uses an
internal USB pendrive for storage.

  1. Power down the device.
  2. Connect the device to the host machine via USB.
  3. Hold AUX key and press Power key. There should be a bootmenu
     on screen.
  4. Run "dfu-util -l" to check if the phone is visible on the USB bus.
     The output should look like this:

Setup instructions
------------------

    dfu-util - (C) 2007 by OpenMoko Inc.
    This program is Free Software and has ABSOLUTELY NO WARRANTY

You will need the following:
* RJ45 -> serial ("rollover") cable connected from your PC to the CONSOLE
  port on the device
* Ethernet connected to the first ethernet port on the board

    Found Runtime: [0x1457:0x5119] devnum=19, cfg=0, intf=2, alt=0, name="USB Device Firmware Upgrade"

If using NFS as part of the setup process, you will also need:
* NFS root setup on your workstation
* TFTP server installed on your workstation (if fetching the kernel from
  TFTP, see below).

  5. Flash the kernel with "dfu-util -a kernel -D uImage-2.6.21.6-moko11-r2-fic-gta01.bin"
  6. Flash rootfs with "dfu-util -a rootfs -D <image>", where <image> is the
     jffs2 image file to use as the root filesystem
     (e.g. ./tmp/deploy/images/poky-image-sato-fic-gta01.jffs2)

--- Preparation ---

Build an image (e.g. core-image-minimal) using "edgerouter" as the MACHINE.
The following instructions are based on core-image-minimal; other image
targets are similar.
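As a concrete sketch of that build step (assuming the environment has been initialised with oe-init-build-env):

    $ MACHINE=edgerouter bitbake core-image-minimal

The resulting kernel (vmlinux) and root filesystem tarball are then taken from tmp/deploy/images/edgerouter/ in the steps below.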
HTC Universal (htcuniversal)
============================

--- Booting from NFS root / kernel via TFTP ---
Note: HTC Universal support is highly experimental.

Load the kernel, and boot the system as follows:
On the HTC Universal, entirely replacing the Windows installation is not
supported, instead Poky is booted from an MMC/SD card from Windows. Once Poky
has booted, Windows is no longer in memory or active but when power is removed,
the user will be returned to windows and will need to return to Linux from
there.

  1. Get the kernel (vmlinux) file from the tmp/deploy/images/edgerouter
     directory, and make it available on your TFTP server.
Once an MMC/SD card is available it is suggested it is split into two partitions,
one for a program called HaRET which lets you boot Linux from within Windows
and the second for the rootfs. The HaRET partition should be the first partition
on the card and be vfat formatted. It doesn't need to be large, just enough for
HaRET and a kernel (say 5MB max). The rootfs should be ext2 and is usually the
second partition. The first partition should be vfat so Windows recognises it;
if it doesn't, it has been known to reformat cards.

  2. Connect the board's first serial port to your workstation and then start up
     your favourite serial terminal so that you will be able to interact with
     the serial console. If you don't have a favourite, picocom is suggested:
On the first partition you need three files:

       $ picocom /dev/ttyS0 -b 115200

  * a HaRET binary (version 0.5.1 works well and a working version
    should be part of the last Poky release)
  * a kernel renamed to "zImage"
  * a default.txt which contains:

  3. Power up or reset the board and press a key on the terminal when prompted
     to get to the U-Boot command line

    set kernel "zImage"
    set mtype "855"
    set cmdline "root=/dev/mmcblk0p2 rw console=ttyS0,115200n8 console=tty0 rootdelay=5 fbcon=rotate:1"
    boot2

  4. Set up the environment in U-Boot:
On the second partition the root file system is extracted as root. A different
partition layout or other kernel options can be changed in the default.txt file.

       => setenv ipaddr <board ip>
       => setenv serverip <tftp server ip>
When inserted into the device, Windows should see the card and let you browse
its contents using File Explorer. Running the HaRET binary will present a dialog
box (maybe after messages warning about running unsigned binaries) where you
select OK and you should then see Poky boot. Kernel messages can be seen by
adding psplash=false to the kernel commandline.

  5. Download the kernel and boot:

       => tftp tftp $loadaddr vmlinux
       => bootoctlinux $loadaddr coremask=0x3 root=/dev/nfs rw nfsroot=<nfsroot ip>:<rootfs path> ip=<board ip>:<server ip>:<gateway ip>:<netmask>:edgerouter:eth0:off mtdparts=phys_mapped_flash:512k(boot0),512k(boot1),64k@3072k(eeprom)

Nokia 770/N800/N810 Internet Tablets (nokia770 and nokia800)
============================================================

--- Booting from USB root ---
Note: Nokia tablet support is highly experimental.

To boot from the USB disk, you either need to remove it from the edgerouter
box and populate it from another computer, or use a previously booted NFS
image and populate from the edgerouter itself.
The Nokia internet tablet devices are OMAP based tablet formfactor devices
with large screens (800x480), wifi and touchscreen.

Type 1: Mounted USB disk
------------------------
To flash images to these devices you need the "flasher" utility which can be
downloaded from http://tablets-dev.nokia.com/d3.php?f=flasher-3.0. This
utility needs to be run as root and the usb filesystem needs to be mounted
although most distributions will have done this for you. Once you have this
follow these steps:

To boot from the USB disk there are two available partitions on the factory
USB storage. The rest of this guide assumes that these partitions are left
intact. If you change the partition scheme, you must update your boot method
appropriately.

  1. Power down the device.
  2. Connect the device to the host machine via USB
     (connecting power to the device doesn't hurt either).
  3. Run "flasher -i"
  4. Power on the device.
  5. The program should give an indication it's found
     a tablet device. If not, recheck the cables, make sure you're
     root and usbfs/usbdevfs is mounted.
  6. Run "flasher -r <image> -k <kernel> -f", where <image> is the
     jffs2 image file to use as the root filesystem
     (e.g. ./tmp/deploy/images/poky-image-sato-nokia800.jffs2)
     and <kernel> is the kernel to use
     (e.g. ./tmp/deploy/images/zImage-nokia800.bin).
  7. Run "flasher -R" to reboot the device.
  8. The device should boot into Poky.

The standard partitions are:
The nokia800 images and kernel will run on both the N800 and N810.

  - 1: vfat partition containing factory kernels
  - 2: ext3 partition for the root filesystem.

You can place the kernel on either partition 1, or partition 2, but the rootfs
must go on partition 2 (due to its size).

Sharp Zaurus SL-C7x0 series (c7x0)
==================================

Note: If you place the kernel on the ext3 partition, you must re-create the
ext3 filesystem, since the factory u-boot can only handle 128 byte inodes and
cannot read the partition otherwise.
The Sharp Zaurus c7x0 series (SL-C700, SL-C750, SL-C760, SL-C860, SL-7500)
are PXA25x based handheld PDAs with VGA screens. To install Poky images on
these devices follow these steps:

Steps:
  1. Obtain an SD/MMC or CF card with a vfat or ext2 filesystem.
  2. Copy a jffs2 image file (e.g. poky-image-sato-c7x0.jffs2) onto the
     card as "initrd.bin":

  1. Remove the USB disk from the edgerouter and insert it into a computer
     that has access to your build artifacts.

       $ cp ./tmp/deploy/images/poky-image-sato-c7x0.jffs2 /path/to/my-cf-card/initrd.bin

  2. Copy the kernel image to the USB storage (assuming discovered as 'sdb' on
     the development machine):
  3. Copy a Linux kernel file (zImage-c7x0.bin) onto the card as
     "zImage.bin":

     2a) if booting from vfat

       # mount /dev/sdb1 /mnt
       # cp tmp/deploy/images/edgerouter/vmlinux /mnt
       # umount /mnt

       $ cp ./tmp/deploy/images/zImage-c7x0.bin /path/to/my-cf-card/zImage.bin

     2b) if booting from ext3
  4. Copy an updater script (updater.sh.c7x0) onto the card
     as "updater.sh":

       # mkfs.ext3 -I 128 /dev/sdb2
       # mount /dev/sdb2 /mnt
       # mkdir /mnt/boot
       # cp tmp/deploy/images/edgerouter/vmlinux /mnt/boot
       # umount /mnt

       $ cp ./tmp/deploy/images/updater.sh.c7x0 /path/to/my-cf-card/updater.sh

  3. Extract the rootfs to the USB storage ext3 partition
  5. Power down the Zaurus.
  6. Hold "OK" key and power on the device. An update menu should appear
     (in Japanese).
  7. Choose "Update" (item 4).
  8. The next screen will ask for the source, choose the appropriate
     card (CF or SD).
  9. Make sure AC power is connected.
  10. The next screen asks for confirmation, choose "Yes" (the left button).
  11. The update process will start, flash the files on the card onto
      the device and the device will then reboot into Poky.

       # mount /dev/sdb2 /mnt
       # tar -xvjpf core-image-minimal-XXX.tar.bz2 -C /mnt
       # umount /mnt

  4. Reboot the board and press a key on the terminal when prompted to get to the U-Boot
     command line:

Sharp Zaurus SL-C1000 (akita)
=============================

  5. Load the kernel and boot:
The Sharp Zaurus SL-C1000 is a PXA270 based device otherwise similar to the
c7x0. To install Poky images on this device follow the instructions for
the c7x0 but replace "c7x0" with "akita" where appropriate.

     5a) vfat boot

       => fatload usb 0:1 $loadaddr vmlinux

Sharp Zaurus SL-C3x00 series (spitz)
====================================

     5b) ext3 boot
The Sharp Zaurus SL-C3x00 devices are PXA270 based devices similar
to akita but with an internal microdrive. The installation procedure
assumes a standard microdrive based device where the root (first)
partition has been enlarged to fit the image (at least 100MB,
400MB for the SDK).

       => ext2load usb 0:2 $loadaddr boot/vmlinux

       => bootoctlinux $loadaddr coremask=0x3 root=/dev/sda2 rw rootwait mtdparts=phys_mapped_flash:512k(boot0),512k(boot1),64k@3072k(eeprom)

The procedure is the same as for the c7x0 and akita models with the
following differences:

Type 2: NFS
-----------
  1. Instead of a jffs2 image you need to copy a compressed tarball of the
     root filesystem (e.g. poky-image-sato-spitz.tar.gz) onto the
     card as "hdimage1.tgz":

Note: If you place the kernel on the ext3 partition, you must re-create the
ext3 filesystem, since the factory u-boot can only handle 128 byte inodes and
cannot read the partition otherwise.

       $ cp ./tmp/deploy/images/poky-image-sato-spitz.tar.gz /path/to/my-cf-card/hdimage1.tgz

These boot instructions assume that you have recreated the ext3 filesystem with
128 byte inodes, you have an updated uboot or you are running an image capable
of making the filesystem on the board itself.
  2. You additionally need to copy a special tar utility (gnu-tar) onto
     the card as "gnu-tar":

       $ cp ./tmp/deploy/images/gnu-tar /path/to/my-cf-card/gnu-tar

  1. Boot from NFS root

  2. Mount the USB disk partition 2 and then extract the contents of
     tmp/deploy/core-image-XXXX.tar.bz2 into it.

Before starting, copy core-image-minimal-xxx.tar.bz2 and vmlinux into
rootfs path on your workstation.

and then,

       # mount /dev/sda2 /media/sda2
       # tar -xvjpf core-image-minimal-XXX.tar.bz2 -C /media/sda2
       # cp vmlinux /media/sda2/boot/vmlinux
       # umount /media/sda2
       # reboot

  3. Reboot the board and press a key on the terminal when prompted to get to the U-Boot
     command line:

       # reboot

  4. Load the kernel and boot:

       => ext2load usb 0:2 $loadaddr boot/vmlinux
       => bootoctlinux $loadaddr coremask=0x3 root=/dev/sda2 rw rootwait mtdparts=phys_mapped_flash:512k(boot0),512k(boot1),64k@3072k(eeprom)
10  bitbake-dev/AUTHORS (new file)

@@ -0,0 +1,10 @@
Tim Ansell <mithro@mithis.net>
Phil Blundell <pb@handhelds.org>
Seb Frankengul <seb@frankengul.org>
Holger Freyther <zecke@handhelds.org>
Marcin Juszkiewicz <marcin@haerwu.biz>
Chris Larson <kergoth@handhelds.org>
Ulrich Luckas <luckas@musoft.de>
Mickey Lauer <mickey@Vanille.de>
Richard Purdie <rpurdie@rpsys.net>
Holger Schurig <holgerschurig@gmx.de>
315
bitbake-dev/ChangeLog
Normal file
315
bitbake-dev/ChangeLog
Normal file
@@ -0,0 +1,315 @@
|
||||
Changes in Bitbake 1.9.x:
|
||||
- Add PE (Package Epoch) support from Philipp Zabel (pH5)
|
||||
- Treat python functions the same as shell functions for logging
|
||||
- Use TMPDIR/anonfunc as a __anonfunc temp directory (T)
|
||||
- Catch truncated cache file errors
|
||||
- Allow operations other than assignment on flag variables
|
||||
- Add code to handle inter-task dependencies
|
||||
- Fix cache errors when generation dotGraphs
|
||||
- Make sure __inherit_cache is updated before calling include() (from Michael Krelin)
|
||||
- Fix bug when target was in ASSUME_PROVIDED (#2236)
|
||||
- Raise ParseError for filenames with multiple underscores instead of infinitely looping (#2062)
|
||||
- Fix invalid regexp in BBMASK error handling (missing import) (#1124)
|
||||
- Promote certain warnings from debug to note 2 level
|
||||
- Update manual
|
||||
- Correctly redirect stdin when forking
|
||||
- If parsing errors are found, exit, too many users miss the errors
|
||||
- Remove supriours PREFERRED_PROVIDER warnings
|
||||
- svn fetcher: Add _buildsvncommand function
|
||||
- Improve certain error messages
|
||||
- Rewrite svn fetcher to make adding extra operations easier
|
||||
as part of future SRCDATE="now" fixes
|
||||
(requires new FETCHCMD_svn definition in bitbake.conf)
|
||||
- Change SVNDIR layout to be more unique (fixes #2644 and #2624)
|
||||
- Add ConfigParsed Event after configuration parsing is complete
|
||||
- Add SRCREV support for svn fetcher
|
||||
- data.emit_var() - only call getVar if we need the variable
|
||||
- Stop generating the A variable (seems to be legacy code)
|
||||
- Make sure intertask depends get processed correcting in recursive depends
|
||||
- Add pn-PN to overrides when evaluating PREFERRED_VERSION
|
||||
- Improve the progress indicator by skipping tasks that have
|
||||
already run before starting the build rather than during it
|
||||
- Add profiling option (-P)
|
||||
- Add BB_SRCREV_POLICY variable (clear or cache) to control SRCREV cache
|
||||
- Add SRCREV_FORMAT support
|
||||
- Fix local fetcher's localpath return values
|
||||
- Apply OVERRIDES before performing immediate expansions
|
||||
- Allow the -b -e option combination to take regular expressions
|
||||
- Fix handling of variables with expansion in the name using _append/_prepend
|
||||
e.g. RRECOMMENDS_${PN}_append_xyz = "abc"
|
||||
- Add plain message function to bb.msg
|
||||
- Sort the list of providers before processing so dependency problems are
|
||||
reproducible rather than effectively random
|
||||
- Fix/improve bitbake -s output
|
||||
- Add locking for fetchers so only one tries to fetch a given file at a given time
|
||||
- Fix int(0)/None confusion in runqueue.py which causes random gaps in dependency chains
|
||||
- Expand data in addtasks
|
||||
- Print the list of missing DEPENDS,RDEPENDS for the "No buildable providers available for required...."
|
||||
error message.
|
||||
- Rework add_task to be more efficient (6% speedup, 7% number of function calls reduction)
|
||||
- Sort digraph output to make builds more reproducible
|
||||
- Split expandKeys into two for loops to benefit from the expand_cache (12% speedup)
|
||||
- runqueue.py: Fix idepends handling to avoid dependency errors
|
||||
- Clear the terminal TOSTOP flag if set (and warn the user)
|
||||
- Fix regression from r653 and make SRCDATE/CVSDATE work for packages again
|
||||
- Fix a bug in bb.decodeurl where http://some.where.com/somefile.tgz decoded to host="" (#1530)
|
||||
- Warn about malformed PREFERRED_PROVIDERS (#1072)
|
||||
- Add support for BB_NICE_LEVEL option (#1627)
|
||||
- Psyco is used only on x86 as there is no support for other architectures.
|
||||
- Sort initial providers list by default preference (#1145, #2024)
|
||||
- Improve provider sorting so prefered versions have preference over latest versions (#768)
|
||||
- Detect builds of tasks with overlapping providers and warn (will become a fatal error) (#1359)
|
||||
- Add MULTI_PROVIDER_WHITELIST variable to allow known safe multiple providers to be listed
|
||||
- Handle paths in svn fetcher module parameter
|
||||
- Support the syntax "export VARIABLE"
|
||||
- Add bzr fetcher
|
||||
- Add support for cleaning directories before a task in the form:
|
||||
do_taskname[cleandirs] = "dir"
|
||||
- bzr fetcher tweaks from Robert Schuster (#2913)
|
||||
- Add mercurial (hg) fetcher from Robert Schuster (#2913)
|
||||
- Don't add duplicates to BBPATH
|
||||
- Fix preferred_version return values (providers.py)
|
||||
- Fix 'depends' flag splitting
|
||||
- Fix unexport handling (#3135)
|
||||
- Add bb.copyfile function similar to bb.movefile (and improve movefile error reporting)
|
||||
- Allow multiple options for deptask flag
|
||||
- Use git-fetch instead of git-pull removing any need for merges when
|
||||
fetching (we don't care about the index). Fixes fetch errors.
|
||||
- Add BB_GENERATE_MIRROR_TARBALLS option, set to 0 to make git fetches
|
||||
faster at the expense of not creating mirror tarballs.
|
||||
- SRCREV handling updates, improvements and fixes from Poky
|
||||
- Add bb.utils.lockfile() and bb.utils.unlockfile() from Poky
|
||||
- Add support for task selfstamp and lockfiles flags
|
||||
- Disable task number acceleration since it can allow the tasks to run
|
||||
out of sequence
|
||||
- Improve runqueue code comments
|
||||
- Add task scheduler abstraction and some example schedulers
|
||||
- Improve circular dependency chain debugging code and user feedback
|
||||
- Don't give a stacktrace for invalid tasks, have a user friendly message (#3431)
|
||||
- Add support for "-e target" (#3432)
|
||||
- Fix shell showdata command (#3259)
|
||||
- Fix shell data updating problems (#1880)
|
||||
- Properly raise errors for invalid source URI protocols
|
||||
- Change the wget fetcher failure handling to avoid lockfile problems
|
||||
- Add support for branches in git fetcher (Otavio Salvador, Michael Lauer)
|
||||
- Make taskdata and runqueue errors more user friendly
|
||||
- Add norecurse and fullpath options to cvs fetcher
|
||||
- Fix exit code for build failures in --continue mode
|
||||
- Fix git branch tags fetching
|
||||
- Change parseConfigurationFile so it works on real data, not a copy
|
||||
- Handle 'base' inherit and all other INHERITs from parseConfigurationFile
|
||||
instead of BBHandler
|
||||
- Fix getVarFlags bug in data_smart
|
||||
- Optmise cache handling by more quickly detecting an invalid cache, only
|
||||
saving the cache when its changed, moving the cache validity check into
|
||||
the parsing loop and factoring some getVar calls outside a for loop
|
||||
- Cooker: Remove a debug message from the parsing loop to lower overhead
|
||||
- Convert build.py exec_task to use getVarFlags
|
||||
- Update shell to use cooker.buildFile
|
||||
- Add StampUpdate event
|
||||
- Convert -b option to use taskdata/runqueue
|
||||
- Remove digraph and switch to new stamp checking code. exec_task no longer
|
||||
honours dependencies
|
||||
- Make fetcher timestamp updating non-fatal when permissions don't allow
|
||||
updates
|
||||
- Add BB_SCHEDULER variable/option ("completion" or "speed") controlling the way bitbake schedules tasks
- Add BB_STAMP_POLICY variable/option ("perfile" or "full") controlling how extensively stamps are looked at for validity
- When handling build target failures make sure idepends are checked and failed where needed. Fixes --continue mode crashes.
- Fix -f (force) in conjunction with -b
- Fix problems with recrdeptask handling where some idepends weren't handled correctly.
- Handle exit codes correctly (from pH5)
- Work around refs/HEAD issues with git over http (#3410)
- Add proxy support to the CVS fetcher (from Cyril Chemparathy)
- Improve runfetchcmd so errors are seen and various GIT variables are exported
- Add ability for fetchers to check URL validity without downloading
- Improve runtime PREFERRED_PROVIDERS warning message
- Add BB_STAMP_WHITELIST option which contains a list of stamps to ignore when checking stamp dependencies and using a BB_STAMP_POLICY of "whitelist"
- No longer weight providers on the basis of a package being "already staged". This leads to builds being non-deterministic.
- Flush stdout/stderr before forking to fix duplicate console output
- Make sure recrdeps tasks include all inter-task dependencies of a given fn
- Add bb.runqueue.check_stamp_fn() for use by packaged-staging
- Add PERSISTENT_DIR to store the PersistData in a persistent directory != the cache dir.
- Add md5 and sha256 checksum generation functions to utils.py
- Correctly handle '-' characters in class names (#2958)
- Make sure expandKeys has been called on the data dictionary before running tasks
- Correctly add a task override in the form task-TASKNAME.
- Revert the '-' character fix in class names since it breaks things
- When a regexp fails to compile for PACKAGES_DYNAMIC, print a more useful error (#4444)
- Allow checking out CVS by date and time: just add HHmm to the SRCDATE.
- Move the prunedir function to utils.py and add an explode_dep_versions function
- Raise an exception if SRCREV == 'INVALID'
- Fix hg fetcher username/password handling and fix a crash
- Fix PACKAGES_DYNAMIC handling of packages with '++' in the name
- Rename __depends to __base_depends after configuration parsing so we don't recheck the validity of the config files time after time
- Add better environment variable handling. By default bitbake will now only pass certain whitelisted variables into the data store. If BB_PRESERVE_ENV is set, bitbake will use all variables from the environment. If BB_ENV_WHITELIST is set, that whitelist will be used instead of the internal bitbake one. Alternatively, BB_ENV_EXTRAWHITE can be used to extend the internal whitelist. (See the sketch after this list.)
- Perforce fetcher fix to use commandline options instead of being overridden by the environment
- bb.utils.prunedir can cope with symlinks to directories without exceptions
- Use @rev when doing an svn checkout
- Add osc fetcher (from Joshua Lock in Poky)
- When SRCREV autorevisioning for a recipe is in use, don't cache the recipe
- Add tryaltconfigs option to control whether bitbake tries using alternative providers to fulfil failed dependencies. It defaults to off, changing the default since this behaviour confuses many users and isn't often useful.
- Improve lock file function error handling
- Add username handling to the git fetcher (Robert Bragg)
- Add support for HTTP_PROXY and HTTP_PROXY_IGNORE variables to the wget fetcher
- Export more variables to the fetcher commands to allow ssh checkouts and checkouts through proxies to work better. (from Poky)
- Also allow user and pswd options in SRC_URIs globally (from Poky)
- Improve proxy handling when using mirrors (from Poky)
- Add bb.utils.prune_suffix function
- Fix hg checkouts of specific revisions (from Poky)
- Fix wget fetching of URLs with parameters specified (from Poky)
- Add username handling to the git fetcher (from Poky)
- Set the HOME environment variable when running fetcher commands (from Poky)
- Make sure allowed variables inherited from the environment are exported again (from Poky)
- When running a stage task in bbshell, run populate_staging, not the stage task (from Poky)

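The environment handling described in the list above can be pictured with a short sketch. This is illustrative only, not bitbake's actual implementation: BB_PRESERVE_ENV, BB_ENV_WHITELIST and BB_ENV_EXTRAWHITE are the variables documented above, while the default list and the filter_environment helper are invented for the example.

    import os

    # Hypothetical default whitelist; the real list is internal to bitbake.
    DEFAULT_WHITELIST = ["PATH", "HOME", "LANG", "http_proxy", "ftp_proxy"]

    def filter_environment(environ):
        """Return only the variables allowed into the bitbake datastore."""
        if "BB_PRESERVE_ENV" in environ:
            return dict(environ)                           # keep everything
        if "BB_ENV_WHITELIST" in environ:
            allowed = environ["BB_ENV_WHITELIST"].split()  # replaces the default list
        else:
            allowed = DEFAULT_WHITELIST + environ.get("BB_ENV_EXTRAWHITE", "").split()
        return dict((k, v) for k, v in environ.items() if k in allowed)

    safe_env = filter_environment(os.environ)
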
Changes in Bitbake 1.8.0:
- Release 1.7.x as a stable series

Changes in BitBake 1.7.x:
- Major updates of the dependency handling and execution of tasks. Code from bin/bitbake replaced with runqueue.py and taskdata.py
- New task execution code supports multithreading with a simplistic threading algorithm controlled by BB_NUMBER_THREADS (see the sketch after this list)
- Change of the SVN Fetcher to keep the checkout around, courtesy of Paul Sokolovsky (#1367)
- PATH fix to bbimage (#1108)
- Allow debug domains to be specified on the commandline (-l)
- Allow 'interactive' tasks
- Logging message improvements
- Drop now unneeded BUILD_ALL_DEPS variable
- Add support for wildcards to -b option
- Major overhaul of the fetchers making a large amount of code common, including mirroring code
- Fetchers now touch md5 stamps upon access (to show activity)
- Fix -f force option when used without -b (long-standing bug)
- Add expand_cache to data_cache.py, caching expanded data (speedup)
- Allow version field in DEPENDS (ignored for now)
- Add abort flag support to the shell
- Make inherit fail if the class doesn't exist (#1478)
- Fix data.emit_env() to expand keynames as well as values
- Add ssh fetcher
- Add perforce fetcher
- Make PREFERRED_PROVIDER_foobar default to foobar if available
- Share the parser's mtime_cache, reducing the number of stat syscalls
- Compile all anonfuncs at once! *** Anonfuncs must now use common spacing format ***
- Memorise the list of handlers in __BBHANDLERS and tasks in __BBTASKS. This removes 2 million function calls, resulting in a 5-10% speedup
- Add manpage
- Update generateDotGraph to use taskData/runQueue, improving accuracy and also adding a task dependency graph
- Fix/standardise on GPLv2 licence
- Move most functionality from bin/bitbake to cooker.py and split into separate functions
- CVS fetcher: Added support for non-default port
- Add BBINCLUDELOGS_LINES, the number of lines to read from any logfile
- Drop shebangs from lib/bb scripts

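The BB_NUMBER_THREADS item above can be illustrated with a toy scheduler. This is a sketch of the idea only, not bitbake's runqueue: the dependency table and task names are invented, and real bitbake forks task processes rather than looping like this.

    # Toy illustration: never have more than max_running tasks in flight, and
    # only start a task once all of its parents have finished.
    deps = {
        'a_do_fetch': [], 'a_do_compile': ['a_do_fetch'],
        'b_do_fetch': [], 'b_do_compile': ['b_do_fetch'],
    }
    max_running = 2               # the role BB_NUMBER_THREADS plays
    finished, running = set(), []

    while len(finished) < len(deps):
        for task, parents in deps.items():
            ready = all(p in finished for p in parents)
            busy = task in finished or task in running
            if ready and not busy and len(running) < max_running:
                running.append(task)          # bitbake would fork the task here
        task = running.pop(0)                 # pretend the oldest task completes
        finished.add(task)
        print("completed %s" % task)
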
Changes in Bitbake 1.6.0:
- Better msg handling
- COW dict implementation from Tim Ansell (mithro) leading to better performance
- Speed up of -s

Changes in Bitbake 1.4.4:
- SRCDATE handling, courtesy of Justin Patrin
- #1017 fix to work with rm_work

Changes in BitBake 1.4.2:
- Send logs to oe.pastebin.com instead of pastebin.com, fixes #856
- Copy the internal bitbake data before building the dependency graph. This fixes nano not having a virtual/libc dependency
- Allow multiple TARBALL_STASH entries
- Cache: check if the directory exists before changing into it
- Speed up git cloning by not doing a checkout
- Allow spaces in filenames (.conf, .bb, .bbclass)

Changes in BitBake 1.4.0:
- Fix to check both RDEPENDS and RDEPENDS_${PN}
- Fix a RDEPENDS parsing bug in utils:explode_deps()
- Update git fetcher behaviour to match git changes
- ASSUME_PROVIDED allowed to include runtime packages
- Git fetcher cleanup and efficiency improvements
- Change the format of the cache
- Update usermanual to document the Fetchers
- Major changes to caching with a new strategy giving a major performance increase when reparsing with few data changes

Changes in BitBake 1.3.3:
- Create a new Fetcher module to ease the development of new Fetchers. Issue #438 fixed by rpurdie@openedhand.com
- Make the Subversion fetcher honor the SRC Date (CVSDATE). Issue #555 fixed by chris@openedhand.com
- Expand PREFERRED_PROVIDER properly. Issue #436 fixed by rprudie@openedhand.com
- Typo fix for Issue #531 by Philipp Zabel for the BitBake Shell
- Introduce a new special variable SRCDATE as a generic name to replace CVSDATE.
- Introduce a new keyword 'required'. In contrast to 'include', parsing will fail if a file to be included cannot be found.
- Remove hardcoding of the STAMP directory. Patch courtesy of Philipp Zabel
- Track the RDEPENDS of each package (rpurdie@openedhand.com)
- Introduce BUILD_ALL_DEPS to build all RDEPENDS. E.g. this is used by the OpenEmbedded Meta Packages. (rpurdie@openedhand.com)

Changes in BitBake 1.3.2:
- Reintegration of make.py into BitBake
- bbread is gone, use bitbake -e
- Lots of shell updates and bugfixes
- Introduction of the .= and =. operators
- Sort variables, keys and groups in bitdoc
- Fix regression in the handling of BBCOLLECTIONS
- Update the bitbake usermanual

Changes in BitBake 1.3.0:
- Add bitbake interactive shell (bitbake -i)
- Refactor bitbake utility in OO style
- Kill default arguments in methods in the bb.data module
- Kill default arguments in methods in the bb.fetch module
- The http/https/ftp fetcher will fail if the file to be downloaded was not found in DL_DIR (this is needed to avoid unpacking the sourceforge mirror page)
- Switch to a COW-like data instance for persistent and non-persisting mode (called data_smart.py)
- Changed the callback of bb.make.collect_bbfiles to carry additional parameters
- Drastically reduced the amount of needed RAM by not holding each data instance in memory when using a cache/persistent storage

Changes in BitBake 1.2.1:
The 1.2.1 release is meant as an intermediate release to lay the ground for more radical changes. The most notable changes are:

- Do not hardcode {}; use bb.data.init() instead if you want to get an instance of a data class
- bb.data.init() is a factory and the old bb.data methods are delegates
- Do not use deepcopy; use bb.data.createCopy() instead.
- Removed default arguments in bb.fetch

207
bitbake-dev/bin/bitbake
Executable file
@@ -0,0 +1,207 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import sys, os, getopt, re, time, optparse, xmlrpclib
sys.path.insert(0,os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
import bb
from bb import cooker
from bb import daemonize
from bb import ui
from bb.ui import uievent

__version__ = "1.9.0"

if sys.hexversion < 0x020600F0:
    print "Sorry, python 2.6 or later is required for this version of bitbake"
    sys.exit(1)

#============================================================================#
# BBOptions
#============================================================================#
class BBConfiguration( object ):
    """
    Manages build options and configurations for one run
    """
    def __init__( self, options ):
        for key, val in options.__dict__.items():
            setattr( self, key, val )


def print_exception(exc, value, tb):
    """
    Print the exception to stderr, only showing the traceback if bitbake
    debugging is enabled.
    """
    if not bb.msg.debug_level['default']:
        tb = None

    sys.__excepthook__(exc, value, tb)


#============================================================================#
# main
#============================================================================#

def main():
    return_value = 0
    pythonver = sys.version_info
    if pythonver[0] < 2 or (pythonver[0] == 2 and pythonver[1] < 5):
        print "Sorry, bitbake needs python 2.5 or later."
        sys.exit(1)

    parser = optparse.OptionParser( version = "BitBake Build Tool Core version %s, %%prog version %s" % ( bb.__version__, __version__ ),
        usage = """%prog [options] [package ...]

Executes the specified task (default is 'build') for a given set of BitBake files.
It expects that BBFILES is defined, which is a space separated list of files to
be executed. BBFILES does support wildcards.
Default BBFILES are the .bb files in the current directory.""" )

    parser.add_option( "-b", "--buildfile", help = "execute the task against this .bb file, rather than a package from BBFILES.",
               action = "store", dest = "buildfile", default = None )

    parser.add_option( "-k", "--continue", help = "continue as much as possible after an error. While the target that failed, and those that depend on it, cannot be remade, the other dependencies of these targets can be processed all the same.",
               action = "store_false", dest = "abort", default = True )

    parser.add_option( "-a", "--tryaltconfigs", help = "continue with builds by trying to use alternative providers where possible.",
               action = "store_true", dest = "tryaltconfigs", default = False )

    parser.add_option( "-f", "--force", help = "force run of specified cmd, regardless of stamp status",
               action = "store_true", dest = "force", default = False )

    parser.add_option( "-i", "--interactive", help = "drop into the interactive mode also called the BitBake shell.",
               action = "store_true", dest = "interactive", default = False )

    parser.add_option( "-c", "--cmd", help = "Specify task to execute. Note that this only executes the specified task for the providee and the packages it depends on, i.e. 'compile' does not implicitly call stage for the dependencies (IOW: use only if you know what you are doing). Depending on the base.bbclass a listtasks tasks is defined and will show available tasks",
               action = "store", dest = "cmd" )

    parser.add_option( "-r", "--read", help = "read the specified file before bitbake.conf",
               action = "append", dest = "file", default = [] )

    parser.add_option( "-v", "--verbose", help = "output more chit-chat to the terminal",
               action = "store_true", dest = "verbose", default = False )

    parser.add_option( "-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
               action = "count", dest="debug", default = 0)

    parser.add_option( "-n", "--dry-run", help = "don't execute, just go through the motions",
               action = "store_true", dest = "dry_run", default = False )

    parser.add_option( "-p", "--parse-only", help = "quit after parsing the BB files (developers only)",
               action = "store_true", dest = "parse_only", default = False )

    parser.add_option( "-d", "--disable-psyco", help = "disable using the psyco just-in-time compiler (not recommended)",
               action = "store_true", dest = "disable_psyco", default = False )

    parser.add_option( "-s", "--show-versions", help = "show current and preferred versions of all packages",
               action = "store_true", dest = "show_versions", default = False )

    parser.add_option( "-e", "--environment", help = "show the global or per-package environment (this is what used to be bbread)",
               action = "store_true", dest = "show_environment", default = False )

    parser.add_option( "-g", "--graphviz", help = "emit the dependency trees of the specified packages in the dot syntax",
               action = "store_true", dest = "dot_graph", default = False )

    parser.add_option( "-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
               action = "append", dest = "extra_assume_provided", default = [] )

    parser.add_option( "-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
               action = "append", dest = "debug_domains", default = [] )

    parser.add_option( "-P", "--profile", help = "profile the command and print a report",
               action = "store_true", dest = "profile", default = False )

    parser.add_option( "-u", "--ui", help = "userinterface to use",
               action = "store", dest = "ui")

    parser.add_option( "", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not",
               action = "store_true", dest = "revisions_changed", default = False )

    options, args = parser.parse_args(sys.argv)

    configuration = BBConfiguration(options)
    configuration.pkgs_to_build = []
    configuration.pkgs_to_build.extend(args[1:])

    cooker = bb.cooker.BBCooker(configuration)

    # Clear away any spurious environment variables. But don't wipe the
    # environment totally. This is necessary to ensure the correct operation
    # of the UIs (e.g. for DISPLAY, etc.)
    bb.utils.clean_environment()

    cooker.parseCommandLine()
    host = cooker.server.host
    port = cooker.server.port

    # Save a logfile for cooker into the current working directory. When the
    # server is daemonized this logfile will be truncated.
    cooker_logfile = os.path.join (os.getcwd(), "cooker.log")

    daemonize.createDaemon(cooker.serve, cooker_logfile)
    del cooker

    # Setup a connection to the server (cooker)
    server = xmlrpclib.Server("http://%s:%s" % (host, port), allow_none=True)
    # Setup an event receiving queue
    eventHandler = uievent.BBUIEventQueue(server)

    # Launch the UI
    if configuration.ui:
        ui = configuration.ui
    else:
        ui = "knotty"

    try:
        # Dynamically load the UI based on the ui name. Although we
        # suggest a fixed set this allows you to have flexibility in which
        # ones are available.
        exec "from bb.ui import " + ui
        exec "return_value = " + ui + ".init(server, eventHandler)"
    except ImportError:
        print "FATAL: Invalid user interface '%s' specified. " % ui
        print "Valid interfaces are 'ncurses', 'depexp' or the default, 'knotty'."
    except Exception, e:
        print "FATAL: Unable to start to '%s' UI: %s." % (configuration.ui, e.message)
    finally:
        # Don't wait for server indefinitely
        import socket
        socket.setdefaulttimeout(2)
        try:
            eventHandler.system_quit()
        except:
            pass
        try:
            server.terminateServer()
        except:
            pass
    return return_value

if __name__ == "__main__":
    print """WARNING, WARNING, WARNING
This is a Bitbake from the Unstable/Development 1.9 Branch. This software is a work in progress and should only be used by Bitbake developers/testers"""

    ret = main()
    sys.exit(ret)

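Since the script above loads the interface with exec "from bb.ui import " + ui and then calls <ui>.init(server, eventHandler), a user interface is simply a module under bb/ui/ that exposes an init() entry point. The sketch below is a hypothetical minimal UI, not one of the shipped ones; it assumes the event queue offers a waitEvent(timeout) call like the stock knotty UI of this era, which should be treated as an unverified assumption.

    # Hypothetical bb/ui/simple.py: print every event until interrupted.
    import bb.event

    def init(server, eventHandler):
        try:
            while True:
                event = eventHandler.waitEvent(0.25)   # assumed API: poll for the next event
                if event is None:
                    continue
                print("%s: %s" % (bb.event.getName(event), event))
        except KeyboardInterrupt:
            pass
        return 0
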
534
bitbake-dev/bin/bitdoc
Executable file
@@ -0,0 +1,534 @@
|
||||
#!/usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# Copyright (C) 2005 Holger Hans Peter Freyther
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import optparse, os, sys
|
||||
|
||||
# bitbake
|
||||
sys.path.append(os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
|
||||
import bb
|
||||
import bb.parse
|
||||
from string import split, join
|
||||
|
||||
__version__ = "0.0.2"
|
||||
|
||||
class HTMLFormatter:
|
||||
"""
|
||||
Simple class to help generate some sort of HTML files. It is a
rather inferior solution compared to docbook, gtkdoc or doxygen, but it
should work for now.
We have a global introduction site (index.html) and then one site for
the list of keys (alphabetically sorted) and one for the list of groups,
one site for each key with links to the relations and groups.
|
||||
|
||||
index.html
|
||||
all_keys.html
|
||||
all_groups.html
|
||||
groupNAME.html
|
||||
keyNAME.html
|
||||
"""
|
||||
|
||||
def replace(self, text, *pairs):
|
||||
"""
|
||||
From pydoc... almost identical at least
|
||||
"""
|
||||
while pairs:
|
||||
(a,b) = pairs[0]
|
||||
text = join(split(text, a), b)
|
||||
pairs = pairs[1:]
|
||||
return text
|
||||
def escape(self, text):
|
||||
"""
|
||||
Escape string to be conform HTML
|
||||
"""
|
||||
return self.replace(text,
|
||||
('&', '&'),
|
||||
('<', '<' ),
|
||||
('>', '>' ) )
|
||||
def createNavigator(self):
|
||||
"""
|
||||
Create the navigator
|
||||
"""
|
||||
return """<table class="navigation" width="100%" summary="Navigation header" cellpadding="2" cellspacing="2">
|
||||
<tr valign="middle">
|
||||
<td><a accesskey="g" href="index.html">Home</a></td>
|
||||
<td><a accesskey="n" href="all_groups.html">Groups</a></td>
|
||||
<td><a accesskey="u" href="all_keys.html">Keys</a></td>
|
||||
</tr></table>
|
||||
"""
|
||||
|
||||
def relatedKeys(self, item):
|
||||
"""
|
||||
Create HTML to link to foreign keys
|
||||
"""
|
||||
|
||||
if len(item.related()) == 0:
|
||||
return ""
|
||||
|
||||
txt = "<p><b>See also:</b><br>"
|
||||
txts = []
|
||||
for it in item.related():
|
||||
txts.append("""<a href="key%(it)s.html">%(it)s</a>""" % vars() )
|
||||
|
||||
return txt + ",".join(txts)
|
||||
|
||||
def groups(self,item):
|
||||
"""
|
||||
Create HTML to link to related groups
|
||||
"""
|
||||
|
||||
if len(item.groups()) == 0:
|
||||
return ""
|
||||
|
||||
|
||||
txt = "<p><b>See also:</b><br>"
|
||||
txts = []
|
||||
for group in item.groups():
|
||||
txts.append( """<a href="group%s.html">%s</a> """ % (group,group) )
|
||||
|
||||
return txt + ",".join(txts)
|
||||
|
||||
|
||||
def createKeySite(self,item):
|
||||
"""
|
||||
Create a site for a key. It contains the header/navigator, a heading,
|
||||
the description, links to related keys and to the groups.
|
||||
"""
|
||||
|
||||
return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
|
||||
<html><head><title>Key %s</title></head>
|
||||
<link rel="stylesheet" href="style.css" type="text/css">
|
||||
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
|
||||
%s
|
||||
<h2><span class="refentrytitle">%s</span></h2>
|
||||
|
||||
<div class="refsynopsisdiv">
|
||||
<h2>Synopsis</h2>
|
||||
<p>
|
||||
%s
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div class="refsynopsisdiv">
|
||||
<h2>Related Keys</h2>
|
||||
<p>
|
||||
%s
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div class="refsynopsisdiv">
|
||||
<h2>Groups</h2>
|
||||
<p>
|
||||
%s
|
||||
</p>
|
||||
</div>
|
||||
|
||||
|
||||
</body>
|
||||
""" % (item.name(), self.createNavigator(), item.name(),
|
||||
self.escape(item.description()), self.relatedKeys(item), self.groups(item))
|
||||
|
||||
def createGroupsSite(self, doc):
|
||||
"""
|
||||
Create the Group Overview site
|
||||
"""
|
||||
|
||||
groups = ""
|
||||
sorted_groups = doc.groups()
|
||||
sorted_groups.sort()
|
||||
for group in sorted_groups:
|
||||
groups += """<a href="group%s.html">%s</a><br>""" % (group, group)
|
||||
|
||||
return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
|
||||
<html><head><title>Group overview</title></head>
|
||||
<link rel="stylesheet" href="style.css" type="text/css">
|
||||
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
|
||||
%s
|
||||
<h2>Available Groups</h2>
|
||||
%s
|
||||
</body>
|
||||
""" % (self.createNavigator(), groups)
|
||||
|
||||
def createIndex(self):
|
||||
"""
|
||||
Create the index file
|
||||
"""
|
||||
|
||||
return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
|
||||
<html><head><title>Bitbake Documentation</title></head>
|
||||
<link rel="stylesheet" href="style.css" type="text/css">
|
||||
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
|
||||
%s
|
||||
<h2>Documentation Entrance</h2>
|
||||
<a href="all_groups.html">All available groups</a><br>
|
||||
<a href="all_keys.html">All available keys</a><br>
|
||||
</body>
|
||||
""" % self.createNavigator()
|
||||
|
||||
def createKeysSite(self, doc):
|
||||
"""
|
||||
Create an overview of all available keys
|
||||
"""
|
||||
keys = ""
|
||||
sorted_keys = doc.doc_keys()
|
||||
sorted_keys.sort()
|
||||
for key in sorted_keys:
|
||||
keys += """<a href="key%s.html">%s</a><br>""" % (key, key)
|
||||
|
||||
return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
|
||||
<html><head><title>Key overview</title></head>
|
||||
<link rel="stylesheet" href="style.css" type="text/css">
|
||||
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
|
||||
%s
|
||||
<h2>Available Keys</h2>
|
||||
%s
|
||||
</body>
|
||||
""" % (self.createNavigator(), keys)
|
||||
|
||||
def createGroupSite(self, gr, items, _description = None):
|
||||
"""
|
||||
Create a site for a group:
|
||||
Group the name of the group, items contain the name of the keys
|
||||
inside this group
|
||||
"""
|
||||
groups = ""
|
||||
description = ""
|
||||
|
||||
# create a section with the group descriptions
|
||||
if _description:
|
||||
description += "<h2>Description of Group %s</h2>" % gr
|
||||
description += _description
|
||||
|
||||
items.sort(lambda x,y:cmp(x.name(),y.name()))
|
||||
for group in items:
|
||||
groups += """<a href="key%s.html">%s</a><br>""" % (group.name(), group.name())
|
||||
|
||||
return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
|
||||
<html><head><title>Group %s</title></head>
|
||||
<link rel="stylesheet" href="style.css" type="text/css">
|
||||
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
|
||||
%s
|
||||
%s
|
||||
<div class="refsynopsisdiv">
|
||||
<h2>Keys in Group %s</h2>
|
||||
<pre class="synopsis">
|
||||
%s
|
||||
</pre>
|
||||
</div>
|
||||
</body>
|
||||
""" % (gr, self.createNavigator(), description, gr, groups)
|
||||
|
||||
|
||||
|
||||
def createCSS(self):
|
||||
"""
|
||||
Create the CSS file
|
||||
"""
|
||||
return """.synopsis, .classsynopsis
|
||||
{
|
||||
background: #eeeeee;
|
||||
border: solid 1px #aaaaaa;
|
||||
padding: 0.5em;
|
||||
}
|
||||
.programlisting
|
||||
{
|
||||
background: #eeeeff;
|
||||
border: solid 1px #aaaaff;
|
||||
padding: 0.5em;
|
||||
}
|
||||
.variablelist
|
||||
{
|
||||
padding: 4px;
|
||||
margin-left: 3em;
|
||||
}
|
||||
.variablelist td:first-child
|
||||
{
|
||||
vertical-align: top;
|
||||
}
|
||||
table.navigation
|
||||
{
|
||||
background: #ffeeee;
|
||||
border: solid 1px #ffaaaa;
|
||||
margin-top: 0.5em;
|
||||
margin-bottom: 0.5em;
|
||||
}
|
||||
.navigation a
|
||||
{
|
||||
color: #770000;
|
||||
}
|
||||
.navigation a:visited
|
||||
{
|
||||
color: #550000;
|
||||
}
|
||||
.navigation .title
|
||||
{
|
||||
font-size: 200%;
|
||||
}
|
||||
div.refnamediv
|
||||
{
|
||||
margin-top: 2em;
|
||||
}
|
||||
div.gallery-float
|
||||
{
|
||||
float: left;
|
||||
padding: 10px;
|
||||
}
|
||||
div.gallery-float img
|
||||
{
|
||||
border-style: none;
|
||||
}
|
||||
div.gallery-spacer
|
||||
{
|
||||
clear: both;
|
||||
}
|
||||
a
|
||||
{
|
||||
text-decoration: none;
|
||||
}
|
||||
a:hover
|
||||
{
|
||||
text-decoration: underline;
|
||||
color: #FF0000;
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
|
||||
class DocumentationItem:
|
||||
"""
|
||||
A class to hold information about a configuration
|
||||
item. It contains the key name, description, a list of related names,
|
||||
and the group this item is contained in.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._groups = []
|
||||
self._related = []
|
||||
self._name = ""
|
||||
self._desc = ""
|
||||
|
||||
def groups(self):
|
||||
return self._groups
|
||||
|
||||
def name(self):
|
||||
return self._name
|
||||
|
||||
def description(self):
|
||||
return self._desc
|
||||
|
||||
def related(self):
|
||||
return self._related
|
||||
|
||||
def setName(self, name):
|
||||
self._name = name
|
||||
|
||||
def setDescription(self, desc):
|
||||
self._desc = desc
|
||||
|
||||
def addGroup(self, group):
|
||||
self._groups.append(group)
|
||||
|
||||
def addRelation(self,relation):
|
||||
self._related.append(relation)
|
||||
|
||||
def sort(self):
|
||||
self._related.sort()
|
||||
self._groups.sort()
|
||||
|
||||
|
||||
class Documentation:
|
||||
"""
|
||||
Holds the documentation... with mappings from key to items...
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.__keys = {}
|
||||
self.__groups = {}
|
||||
|
||||
def insert_doc_item(self, item):
|
||||
"""
|
||||
Insert the Doc Item into the internal list
|
||||
of representation
|
||||
"""
|
||||
item.sort()
|
||||
self.__keys[item.name()] = item
|
||||
|
||||
for group in item.groups():
|
||||
if not group in self.__groups:
|
||||
self.__groups[group] = []
|
||||
self.__groups[group].append(item)
|
||||
self.__groups[group].sort()
|
||||
|
||||
|
||||
def doc_item(self, key):
|
||||
"""
|
||||
Return the DocumentationInstance describing the key
|
||||
"""
|
||||
try:
|
||||
return self.__keys[key]
|
||||
except KeyError:
|
||||
return None
|
||||
|
||||
def doc_keys(self):
|
||||
"""
|
||||
Return the documented KEYS (names)
|
||||
"""
|
||||
return self.__keys.keys()
|
||||
|
||||
def groups(self):
|
||||
"""
|
||||
Return the names of available groups
|
||||
"""
|
||||
return self.__groups.keys()
|
||||
|
||||
def group_content(self,group_name):
|
||||
"""
|
||||
Return a list of keys/names that are in a specific
group, or the empty list
|
||||
"""
|
||||
try:
|
||||
return self.__groups[group_name]
|
||||
except KeyError:
|
||||
return []
|
||||
|
||||
|
||||
def parse_cmdline(args):
|
||||
"""
|
||||
Parse the command line and return the result as an n-tuple
|
||||
"""
|
||||
|
||||
parser = optparse.OptionParser( version = "Bitbake Documentation Tool Core version %s, %%prog version %s" % (bb.__version__,__version__))
|
||||
usage = """%prog [options]
|
||||
|
||||
Create a set of HTML pages (documentation) for a bitbake.conf file.
|
||||
"""
|
||||
|
||||
# Add the needed options
|
||||
parser.add_option( "-c", "--config", help = "Use the specified configuration file as source",
|
||||
action = "store", dest = "config", default = os.path.join("conf", "documentation.conf") )
|
||||
|
||||
parser.add_option( "-o", "--output", help = "Output directory for html files",
|
||||
action = "store", dest = "output", default = "html/" )
|
||||
|
||||
parser.add_option( "-D", "--debug", help = "Increase the debug level",
|
||||
action = "count", dest = "debug", default = 0 )
|
||||
|
||||
parser.add_option( "-v","--verbose", help = "output more chit-chat to the terminal",
|
||||
action = "store_true", dest = "verbose", default = False )
|
||||
|
||||
options, args = parser.parse_args( sys.argv )
|
||||
|
||||
if options.debug:
|
||||
bb.msg.set_debug_level(options.debug)
|
||||
|
||||
return options.config, options.output
|
||||
|
||||
def main():
|
||||
"""
|
||||
The main Method
|
||||
"""
|
||||
|
||||
(config_file,output_dir) = parse_cmdline( sys.argv )
|
||||
|
||||
# right to let us load the file now
|
||||
try:
|
||||
documentation = bb.parse.handle( config_file, bb.data.init() )
|
||||
except IOError:
|
||||
bb.fatal( "Unable to open %s" % config_file )
|
||||
except bb.parse.ParseError:
|
||||
bb.fatal( "Unable to parse %s" % config_file )
|
||||
|
||||
if isinstance(documentation, dict):
|
||||
documentation = documentation[""]
|
||||
|
||||
# Assuming we've the file loaded now, we will initialize the 'tree'
|
||||
doc = Documentation()
|
||||
|
||||
# defined states
|
||||
state_begin = 0
|
||||
state_see = 1
|
||||
state_group = 2
|
||||
|
||||
for key in bb.data.keys(documentation):
|
||||
data = bb.data.getVarFlag(key, "doc", documentation)
|
||||
if not data:
|
||||
continue
|
||||
|
||||
# The Documentation now starts
|
||||
doc_ins = DocumentationItem()
|
||||
doc_ins.setName(key)
|
||||
|
||||
|
||||
tokens = data.split(' ')
|
||||
state = state_begin
|
||||
string= ""
|
||||
for token in tokens:
|
||||
token = token.strip(',')
|
||||
|
||||
if not state == state_see and token == "@see":
|
||||
state = state_see
|
||||
continue
|
||||
elif not state == state_group and token == "@group":
|
||||
state = state_group
|
||||
continue
|
||||
|
||||
if state == state_begin:
|
||||
string += " %s" % token
|
||||
elif state == state_see:
|
||||
doc_ins.addRelation(token)
|
||||
elif state == state_group:
|
||||
doc_ins.addGroup(token)
|
||||
|
||||
# set the description
|
||||
doc_ins.setDescription(string)
|
||||
doc.insert_doc_item(doc_ins)
|
||||
|
||||
# let us create the HTML now
|
||||
bb.mkdirhier(output_dir)
|
||||
os.chdir(output_dir)
|
||||
|
||||
# Let us create the sites now. We do it in the following order
|
||||
# Start with the index.html. It will point to sites explaining all
|
||||
# keys and groups
|
||||
html_slave = HTMLFormatter()
|
||||
|
||||
f = file('style.css', 'w')
|
||||
print >> f, html_slave.createCSS()
|
||||
|
||||
f = file('index.html', 'w')
|
||||
print >> f, html_slave.createIndex()
|
||||
|
||||
f = file('all_groups.html', 'w')
|
||||
print >> f, html_slave.createGroupsSite(doc)
|
||||
|
||||
f = file('all_keys.html', 'w')
|
||||
print >> f, html_slave.createKeysSite(doc)
|
||||
|
||||
# now for each group create the site
|
||||
for group in doc.groups():
|
||||
f = file('group%s.html' % group, 'w')
|
||||
print >> f, html_slave.createGroupSite(group, doc.group_content(group))
|
||||
|
||||
# now for the keys
|
||||
for key in doc.doc_keys():
|
||||
f = file('key%s.html' % doc.doc_item(key).name(), 'w')
|
||||
print >> f, html_slave.createKeySite(doc.doc_item(key))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
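bitdoc reads the 'doc' varflag of each variable in documentation.conf and, as in main() above, splits it into a description plus @see and @group references. The sketch below shows the flag format with an invented variable description; it is a standalone restatement of the tokenizer above rather than a call into bitbake.

    # Example 'doc' flag text and a standalone version of the tokenizer above.
    doc_flag = "The download directory for fetched sources @group fetcher @see SRCDATE"

    description, related, groups = [], [], []
    target = description
    for token in doc_flag.split(' '):
        token = token.strip(',')
        if token == "@see":
            target = related
        elif token == "@group":
            target = groups
        else:
            target.append(token)

    print(" ".join(description))   # The download directory for fetched sources
    print(related)                 # ['SRCDATE']
    print(groups)                  # ['fetcher']
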
318
bitbake-dev/lib/bb/COW.py
Normal file
@@ -0,0 +1,318 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# This is a copy on write dictionary and set which abuses classes to try and be nice and fast.
|
||||
#
|
||||
# Copyright (C) 2006 Tim Amsell
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
#Please Note:
|
||||
# Be careful when using mutable types (i.e. dicts and lists) - operations involving these are SLOW.
|
||||
# Assign a file to __warn__ to get warnings about slow operations.
|
||||
#
|
||||
|
||||
import copy
|
||||
import types
|
||||
types.ImmutableTypes = tuple([ \
|
||||
types.BooleanType, \
|
||||
types.ComplexType, \
|
||||
types.FloatType, \
|
||||
types.IntType, \
|
||||
types.LongType, \
|
||||
types.NoneType, \
|
||||
types.TupleType, \
|
||||
frozenset] + \
|
||||
list(types.StringTypes))
|
||||
|
||||
MUTABLE = "__mutable__"
|
||||
|
||||
class COWMeta(type):
|
||||
pass
|
||||
|
||||
class COWDictMeta(COWMeta):
|
||||
__warn__ = False
|
||||
__hasmutable__ = False
|
||||
__marker__ = tuple()
|
||||
|
||||
def __str__(cls):
|
||||
# FIXME: I have magic numbers!
|
||||
return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
|
||||
__repr__ = __str__
|
||||
|
||||
def cow(cls):
|
||||
class C(cls):
|
||||
__count__ = cls.__count__ + 1
|
||||
return C
|
||||
copy = cow
|
||||
__call__ = cow
|
||||
|
||||
def __setitem__(cls, key, value):
|
||||
if not isinstance(value, types.ImmutableTypes):
|
||||
if not isinstance(value, COWMeta):
|
||||
cls.__hasmutable__ = True
|
||||
key += MUTABLE
|
||||
setattr(cls, key, value)
|
||||
|
||||
def __getmutable__(cls, key, readonly=False):
|
||||
nkey = key + MUTABLE
|
||||
try:
|
||||
return cls.__dict__[nkey]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
value = getattr(cls, nkey)
|
||||
if readonly:
|
||||
return value
|
||||
|
||||
if not cls.__warn__ is False and not isinstance(value, COWMeta):
|
||||
print >> cls.__warn__, "Warning: Doing a copy because %s is a mutable type." % key
|
||||
try:
|
||||
value = value.copy()
|
||||
except AttributeError, e:
|
||||
value = copy.copy(value)
|
||||
setattr(cls, nkey, value)
|
||||
return value
|
||||
|
||||
__getmarker__ = []
|
||||
def __getreadonly__(cls, key, default=__getmarker__):
|
||||
"""\
|
||||
Get a value (even if mutable) which you promise not to change.
|
||||
"""
|
||||
return cls.__getitem__(key, default, True)
|
||||
|
||||
def __getitem__(cls, key, default=__getmarker__, readonly=False):
|
||||
try:
|
||||
try:
|
||||
value = getattr(cls, key)
|
||||
except AttributeError:
|
||||
value = cls.__getmutable__(key, readonly)
|
||||
|
||||
# This is for values which have been deleted
|
||||
if value is cls.__marker__:
|
||||
raise AttributeError("key %s does not exist." % key)
|
||||
|
||||
return value
|
||||
except AttributeError, e:
|
||||
if not default is cls.__getmarker__:
|
||||
return default
|
||||
|
||||
raise KeyError(str(e))
|
||||
|
||||
def __delitem__(cls, key):
|
||||
cls.__setitem__(key, cls.__marker__)
|
||||
|
||||
def __revertitem__(cls, key):
|
||||
if not cls.__dict__.has_key(key):
|
||||
key += MUTABLE
|
||||
delattr(cls, key)
|
||||
|
||||
def has_key(cls, key):
|
||||
value = cls.__getreadonly__(key, cls.__marker__)
|
||||
if value is cls.__marker__:
|
||||
return False
|
||||
return True
|
||||
|
||||
def iter(cls, type, readonly=False):
|
||||
for key in dir(cls):
|
||||
if key.startswith("__"):
|
||||
continue
|
||||
|
||||
if key.endswith(MUTABLE):
|
||||
key = key[:-len(MUTABLE)]
|
||||
|
||||
if type == "keys":
|
||||
yield key
|
||||
|
||||
try:
|
||||
if readonly:
|
||||
value = cls.__getreadonly__(key)
|
||||
else:
|
||||
value = cls[key]
|
||||
except KeyError:
|
||||
continue
|
||||
|
||||
if type == "values":
|
||||
yield value
|
||||
if type == "items":
|
||||
yield (key, value)
|
||||
raise StopIteration()
|
||||
|
||||
def iterkeys(cls):
|
||||
return cls.iter("keys")
|
||||
def itervalues(cls, readonly=False):
|
||||
if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
|
||||
print >> cls.__warn__, "Warning: If you aren't going to change any of the values call with True."
|
||||
return cls.iter("values", readonly)
|
||||
def iteritems(cls, readonly=False):
|
||||
if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
|
||||
print >> cls.__warn__, "Warning: If you aren't going to change any of the values call with True."
|
||||
return cls.iter("items", readonly)
|
||||
|
||||
class COWSetMeta(COWDictMeta):
|
||||
def __str__(cls):
|
||||
# FIXME: I have magic numbers!
|
||||
return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) -3)
|
||||
__repr__ = __str__
|
||||
|
||||
def cow(cls):
|
||||
class C(cls):
|
||||
__count__ = cls.__count__ + 1
|
||||
return C
|
||||
|
||||
def add(cls, value):
|
||||
COWDictMeta.__setitem__(cls, repr(hash(value)), value)
|
||||
|
||||
def remove(cls, value):
|
||||
COWDictMeta.__delitem__(cls, repr(hash(value)))
|
||||
|
||||
def __in__(cls, value):
|
||||
return COWDictMeta.has_key(repr(hash(value)))
|
||||
|
||||
def iterkeys(cls):
|
||||
raise TypeError("sets don't have keys")
|
||||
|
||||
def iteritems(cls):
|
||||
raise TypeError("sets don't have 'items'")
|
||||
|
||||
# These are the actual classes you use!
|
||||
class COWDictBase(object):
|
||||
__metaclass__ = COWDictMeta
|
||||
__count__ = 0
|
||||
|
||||
class COWSetBase(object):
|
||||
__metaclass__ = COWSetMeta
|
||||
__count__ = 0
|
||||
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
COWDictBase.__warn__ = sys.stderr
|
||||
a = COWDictBase()
|
||||
print "a", a
|
||||
|
||||
a['a'] = 'a'
|
||||
a['b'] = 'b'
|
||||
a['dict'] = {}
|
||||
|
||||
b = a.copy()
|
||||
print "b", b
|
||||
b['c'] = 'b'
|
||||
|
||||
print
|
||||
|
||||
print "a", a
|
||||
for x in a.iteritems():
|
||||
print x
|
||||
print "--"
|
||||
print "b", b
|
||||
for x in b.iteritems():
|
||||
print x
|
||||
print
|
||||
|
||||
b['dict']['a'] = 'b'
|
||||
b['a'] = 'c'
|
||||
|
||||
print "a", a
|
||||
for x in a.iteritems():
|
||||
print x
|
||||
print "--"
|
||||
print "b", b
|
||||
for x in b.iteritems():
|
||||
print x
|
||||
print
|
||||
|
||||
try:
|
||||
b['dict2']
|
||||
except KeyError, e:
|
||||
print "Okay!"
|
||||
|
||||
a['set'] = COWSetBase()
|
||||
a['set'].add("o1")
|
||||
a['set'].add("o1")
|
||||
a['set'].add("o2")
|
||||
|
||||
print "a", a
|
||||
for x in a['set'].itervalues():
|
||||
print x
|
||||
print "--"
|
||||
print "b", b
|
||||
for x in b['set'].itervalues():
|
||||
print x
|
||||
print
|
||||
|
||||
b['set'].add('o3')
|
||||
|
||||
print "a", a
|
||||
for x in a['set'].itervalues():
|
||||
print x
|
||||
print "--"
|
||||
print "b", b
|
||||
for x in b['set'].itervalues():
|
||||
print x
|
||||
print
|
||||
|
||||
a['set2'] = set()
|
||||
a['set2'].add("o1")
|
||||
a['set2'].add("o1")
|
||||
a['set2'].add("o2")
|
||||
|
||||
print "a", a
|
||||
for x in a.iteritems():
|
||||
print x
|
||||
print "--"
|
||||
print "b", b
|
||||
for x in b.iteritems(readonly=True):
|
||||
print x
|
||||
print
|
||||
|
||||
del b['b']
|
||||
try:
|
||||
print b['b']
|
||||
except KeyError:
|
||||
print "Yay! deleted key raises error"
|
||||
|
||||
if b.has_key('b'):
|
||||
print "Boo!"
|
||||
else:
|
||||
print "Yay - has_key with delete works!"
|
||||
|
||||
print "a", a
|
||||
for x in a.iteritems():
|
||||
print x
|
||||
print "--"
|
||||
print "b", b
|
||||
for x in b.iteritems(readonly=True):
|
||||
print x
|
||||
print
|
||||
|
||||
b.__revertitem__('b')
|
||||
|
||||
print "a", a
|
||||
for x in a.iteritems():
|
||||
print x
|
||||
print "--"
|
||||
print "b", b
|
||||
for x in b.iteritems(readonly=True):
|
||||
print x
|
||||
print
|
||||
|
||||
b.__revertitem__('dict')
|
||||
print "a", a
|
||||
for x in a.iteritems():
|
||||
print x
|
||||
print "--"
|
||||
print "b", b
|
||||
for x in b.iteritems(readonly=True):
|
||||
print x
|
||||
print
|
||||
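The __main__ demo above already exercises most of COW.py; the point worth calling out separately is the read-only access path for mutable values. The snippet below is a minimal sketch (Python 2, like the module itself), assumes bitbake-dev/lib is on sys.path, and uses only names defined in the file above.

    from bb import COW

    base = COW.COWDictBase.copy()
    base['settings'] = {'opt': 1}              # mutable value, stored copy-on-write

    child = base.copy()                        # cheap: just another class level
    print(child.__getreadonly__('settings'))   # served from the parent, no copy made
    child['settings']['opt'] = 2               # normal access copies the dict first
    print(base['settings'])                    # parent still sees {'opt': 1}
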
1134
bitbake-dev/lib/bb/__init__.py
Normal file
File diff suppressed because it is too large
394
bitbake-dev/lib/bb/build.py
Normal file
@@ -0,0 +1,394 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake 'Build' implementation
|
||||
#
|
||||
# Core code for function execution and task handling in the
|
||||
# BitBake build tools.
|
||||
#
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
#
|
||||
# Based on Gentoo's portage.py.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
#Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||
|
||||
from bb import data, event, mkdirhier, utils
|
||||
import bb, os, sys
|
||||
|
||||
# When we execute a python function we'd like certain things
|
||||
# in all namespaces, hence we add them to __builtins__
|
||||
# If we do not do this and use the exec globals, they will
|
||||
# not be available to subfunctions.
|
||||
__builtins__['bb'] = bb
|
||||
__builtins__['os'] = os
|
||||
|
||||
# events
|
||||
class FuncFailed(Exception):
|
||||
"""
|
||||
Executed function failed
|
||||
First parameter is a message
Second parameter is a logfile (optional)
|
||||
"""
|
||||
|
||||
class EventException(Exception):
|
||||
"""Exception which is associated with an Event."""
|
||||
|
||||
def __init__(self, msg, event):
|
||||
self.args = msg, event
|
||||
|
||||
class TaskBase(event.Event):
|
||||
"""Base class for task events"""
|
||||
|
||||
def __init__(self, t, d ):
|
||||
self._task = t
|
||||
self._package = bb.data.getVar("PF", d, 1)
|
||||
event.Event.__init__(self, d)
|
||||
self._message = "package %s: task %s: %s" % (bb.data.getVar("PF", d, 1), t, bb.event.getName(self)[4:])
|
||||
|
||||
def getTask(self):
|
||||
return self._task
|
||||
|
||||
def setTask(self, task):
|
||||
self._task = task
|
||||
|
||||
task = property(getTask, setTask, None, "task property")
|
||||
|
||||
class TaskStarted(TaskBase):
|
||||
"""Task execution started"""
|
||||
|
||||
class TaskSucceeded(TaskBase):
|
||||
"""Task execution completed"""
|
||||
|
||||
class TaskFailed(TaskBase):
|
||||
"""Task execution failed"""
|
||||
def __init__(self, msg, logfile, t, d ):
|
||||
self.logfile = logfile
|
||||
self.msg = msg
|
||||
TaskBase.__init__(self, t, d)
|
||||
|
||||
class InvalidTask(TaskBase):
|
||||
"""Invalid Task"""
|
||||
|
||||
# functions
|
||||
|
||||
def exec_func(func, d, dirs = None):
|
||||
"""Execute an BB 'function'"""
|
||||
|
||||
body = data.getVar(func, d)
|
||||
if not body:
|
||||
return
|
||||
|
||||
flags = data.getVarFlags(func, d)
|
||||
for item in ['deps', 'check', 'interactive', 'python', 'cleandirs', 'dirs', 'lockfiles', 'fakeroot']:
|
||||
if not item in flags:
|
||||
flags[item] = None
|
||||
|
||||
ispython = flags['python']
|
||||
|
||||
cleandirs = (data.expand(flags['cleandirs'], d) or "").split()
|
||||
for cdir in cleandirs:
|
||||
os.system("rm -rf %s" % cdir)
|
||||
|
||||
if dirs:
|
||||
dirs = data.expand(dirs, d)
|
||||
else:
|
||||
dirs = (data.expand(flags['dirs'], d) or "").split()
|
||||
for adir in dirs:
|
||||
mkdirhier(adir)
|
||||
|
||||
if len(dirs) > 0:
|
||||
adir = dirs[-1]
|
||||
else:
|
||||
adir = data.getVar('B', d, 1)
|
||||
|
||||
# Save current directory
|
||||
try:
|
||||
prevdir = os.getcwd()
|
||||
except OSError:
|
||||
prevdir = data.getVar('TOPDIR', d, True)
|
||||
|
||||
# Setup logfiles
|
||||
t = data.getVar('T', d, 1)
|
||||
if not t:
|
||||
bb.msg.fatal(bb.msg.domain.Build, "T not set")
|
||||
mkdirhier(t)
|
||||
# Gross hack, FIXME
|
||||
import random
|
||||
logfile = "%s/log.%s.%s.%s" % (t, func, str(os.getpid()),random.random())
|
||||
runfile = "%s/run.%s.%s" % (t, func, str(os.getpid()))
|
||||
|
||||
# Change to correct directory (if specified)
|
||||
if adir and os.access(adir, os.F_OK):
|
||||
os.chdir(adir)
|
||||
|
||||
# Handle logfiles
|
||||
si = file('/dev/null', 'r')
|
||||
try:
|
||||
if bb.msg.debug_level['default'] > 0 or ispython:
|
||||
so = os.popen("tee \"%s\"" % logfile, "w")
|
||||
else:
|
||||
so = file(logfile, 'w')
|
||||
except OSError, e:
|
||||
bb.msg.error(bb.msg.domain.Build, "opening log file: %s" % e)
|
||||
pass
|
||||
|
||||
se = so
|
||||
|
||||
# Dup the existing fds so we dont lose them
|
||||
osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
|
||||
oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
|
||||
ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]
|
||||
|
||||
# Replace those fds with our own
|
||||
os.dup2(si.fileno(), osi[1])
|
||||
os.dup2(so.fileno(), oso[1])
|
||||
os.dup2(se.fileno(), ose[1])
|
||||
|
||||
locks = []
|
||||
lockfiles = (data.expand(flags['lockfiles'], d) or "").split()
|
||||
for lock in lockfiles:
|
||||
locks.append(bb.utils.lockfile(lock))
|
||||
|
||||
try:
|
||||
# Run the function
|
||||
if ispython:
|
||||
exec_func_python(func, d, runfile, logfile)
|
||||
else:
|
||||
exec_func_shell(func, d, runfile, logfile, flags)
|
||||
|
||||
# Restore original directory
|
||||
try:
|
||||
os.chdir(prevdir)
|
||||
except:
|
||||
pass
|
||||
|
||||
finally:
|
||||
|
||||
# Unlock any lockfiles
|
||||
for lock in locks:
|
||||
bb.utils.unlockfile(lock)
|
||||
|
||||
# Restore the backup fds
|
||||
os.dup2(osi[0], osi[1])
|
||||
os.dup2(oso[0], oso[1])
|
||||
os.dup2(ose[0], ose[1])
|
||||
|
||||
# Close our logs
|
||||
si.close()
|
||||
so.close()
|
||||
se.close()
|
||||
|
||||
if os.path.exists(logfile) and os.path.getsize(logfile) == 0:
|
||||
bb.msg.debug(2, bb.msg.domain.Build, "Zero size logfile %s, removing" % logfile)
|
||||
os.remove(logfile)
|
||||
|
||||
# Close the backup fds
|
||||
os.close(osi[0])
|
||||
os.close(oso[0])
|
||||
os.close(ose[0])
|
||||
|
||||
def exec_func_python(func, d, runfile, logfile):
|
||||
"""Execute a python BB 'function'"""
|
||||
import re, os
|
||||
|
||||
bbfile = bb.data.getVar('FILE', d, 1)
|
||||
tmp = "def " + func + "():\n%s" % data.getVar(func, d)
|
||||
tmp += '\n' + func + '()'
|
||||
|
||||
f = open(runfile, "w")
|
||||
f.write(tmp)
|
||||
comp = utils.better_compile(tmp, func, bbfile)
|
||||
g = {} # globals
|
||||
g['d'] = d
|
||||
try:
|
||||
utils.better_exec(comp, g, tmp, bbfile)
|
||||
except:
|
||||
(t,value,tb) = sys.exc_info()
|
||||
|
||||
if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
|
||||
raise
|
||||
bb.msg.error(bb.msg.domain.Build, "Function %s failed" % func)
|
||||
raise FuncFailed("function %s failed" % func, logfile)
|
||||
|
||||
def exec_func_shell(func, d, runfile, logfile, flags):
|
||||
"""Execute a shell BB 'function' Returns true if execution was successful.
|
||||
|
||||
For this, it creates a bash shell script in the tmp directory, writes the local
|
||||
data into it and finally executes. The output of the shell will end in a log file and stdout.
|
||||
|
||||
Note on directory behavior. The 'dirs' varflag should contain a list
|
||||
of the directories you need created prior to execution. The last
|
||||
item in the list is where we will chdir/cd to.
|
||||
"""
|
||||
|
||||
deps = flags['deps']
|
||||
check = flags['check']
|
||||
if check in globals():
|
||||
if globals()[check](func, deps):
|
||||
return
|
||||
|
||||
f = open(runfile, "w")
|
||||
f.write("#!/bin/sh -e\n")
|
||||
if bb.msg.debug_level['default'] > 0: f.write("set -x\n")
|
||||
data.emit_env(f, d)
|
||||
|
||||
f.write("cd %s\n" % os.getcwd())
|
||||
if func: f.write("%s\n" % func)
|
||||
f.close()
|
||||
os.chmod(runfile, 0775)
|
||||
if not func:
|
||||
bb.msg.error(bb.msg.domain.Build, "Function not specified")
|
||||
raise FuncFailed("Function not specified for exec_func_shell")
|
||||
|
||||
# execute function
|
||||
if flags['fakeroot']:
|
||||
maybe_fakeroot = "PATH=\"%s\" fakeroot " % bb.data.getVar("PATH", d, 1)
|
||||
else:
|
||||
maybe_fakeroot = ''
|
||||
lang_environment = "LC_ALL=C "
|
||||
ret = os.system('%s%ssh -e %s' % (lang_environment, maybe_fakeroot, runfile))
|
||||
|
||||
if ret == 0:
|
||||
return
|
||||
|
||||
bb.msg.error(bb.msg.domain.Build, "Function %s failed" % func)
|
||||
raise FuncFailed("function %s failed" % func, logfile)
|
||||
|
||||
|
||||
def exec_task(task, d):
|
||||
"""Execute an BB 'task'
|
||||
|
||||
The primary difference between executing a task versus executing
|
||||
a function is that a task exists in the task digraph, and therefore
|
||||
has dependencies amongst other tasks."""
|
||||
|
||||
# Check whether this is a valid task
|
||||
if not data.getVarFlag(task, 'task', d):
|
||||
raise EventException("No such task", InvalidTask(task, d))
|
||||
|
||||
try:
|
||||
bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % task)
|
||||
old_overrides = data.getVar('OVERRIDES', d, 0)
|
||||
localdata = data.createCopy(d)
|
||||
data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata)
|
||||
data.update_data(localdata)
|
||||
data.expandKeys(localdata)
|
||||
event.fire(TaskStarted(task, localdata))
|
||||
exec_func(task, localdata)
|
||||
event.fire(TaskSucceeded(task, localdata))
|
||||
except FuncFailed, message:
|
||||
# Try to extract the optional logfile
|
||||
try:
|
||||
(msg, logfile) = message
|
||||
except:
|
||||
logfile = None
|
||||
msg = message
|
||||
bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % message )
|
||||
failedevent = TaskFailed(msg, logfile, task, d)
|
||||
event.fire(failedevent)
|
||||
raise EventException("Function failed in task: %s" % message, failedevent)
|
||||
|
||||
# make stamp, or cause event and raise exception
|
||||
if not data.getVarFlag(task, 'nostamp', d) and not data.getVarFlag(task, 'selfstamp', d):
|
||||
make_stamp(task, d)
|
||||
|
||||
def extract_stamp(d, fn):
|
||||
"""
|
||||
Extracts stamp format which is either a data dictionary (fn unset)
|
||||
or a dataCache entry (fn set).
|
||||
"""
|
||||
if fn:
|
||||
return d.stamp[fn]
|
||||
return data.getVar('STAMP', d, 1)
|
||||
|
||||
def stamp_internal(task, d, file_name):
|
||||
"""
|
||||
Internal stamp helper function
|
||||
Removes any stamp for the given task
|
||||
Makes sure the stamp directory exists
|
||||
Returns the stamp path+filename
|
||||
"""
|
||||
stamp = extract_stamp(d, file_name)
|
||||
if not stamp:
|
||||
return
|
||||
stamp = "%s.%s" % (stamp, task)
|
||||
mkdirhier(os.path.dirname(stamp))
|
||||
# Remove the file and recreate to force timestamp
|
||||
# change on broken NFS filesystems
|
||||
if os.access(stamp, os.F_OK):
|
||||
os.remove(stamp)
|
||||
return stamp
|
||||
|
||||
def make_stamp(task, d, file_name = None):
|
||||
"""
|
||||
Creates/updates a stamp for a given task
|
||||
(d can be a data dict or dataCache)
|
||||
"""
|
||||
stamp = stamp_internal(task, d, file_name)
|
||||
if stamp:
|
||||
f = open(stamp, "w")
|
||||
f.close()
|
||||
|
||||
def del_stamp(task, d, file_name = None):
|
||||
"""
|
||||
Removes a stamp for a given task
|
||||
(d can be a data dict or dataCache)
|
||||
"""
|
||||
stamp_internal(task, d, file_name)
|
||||
|
||||
def add_tasks(tasklist, d):
|
||||
task_deps = data.getVar('_task_deps', d)
|
||||
if not task_deps:
|
||||
task_deps = {}
|
||||
if not 'tasks' in task_deps:
|
||||
task_deps['tasks'] = []
|
||||
if not 'parents' in task_deps:
|
||||
task_deps['parents'] = {}
|
||||
|
||||
for task in tasklist:
|
||||
task = data.expand(task, d)
|
||||
data.setVarFlag(task, 'task', 1, d)
|
||||
|
||||
if not task in task_deps['tasks']:
|
||||
task_deps['tasks'].append(task)
|
||||
|
||||
flags = data.getVarFlags(task, d)
|
||||
def getTask(name):
|
||||
if not name in task_deps:
|
||||
task_deps[name] = {}
|
||||
if name in flags:
|
||||
deptask = data.expand(flags[name], d)
|
||||
task_deps[name][task] = deptask
|
||||
getTask('depends')
|
||||
getTask('deptask')
|
||||
getTask('rdeptask')
|
||||
getTask('recrdeptask')
|
||||
getTask('nostamp')
|
||||
task_deps['parents'][task] = []
|
||||
for dep in flags['deps']:
|
||||
dep = data.expand(dep, d)
|
||||
task_deps['parents'][task].append(dep)
|
||||
|
||||
# don't assume holding a reference
|
||||
data.setVar('_task_deps', task_deps, d)
|
||||
|
||||
def remove_task(task, kill, d):
|
||||
"""Remove an BB 'task'.
|
||||
|
||||
If kill is 1, also remove tasks that depend on this task."""
|
||||
|
||||
data.delVarFlag(task, 'task', d)
|
||||
|
||||
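add_tasks() above flattens the task varflags into a _task_deps dictionary that the runqueue later consumes. The literal below sketches the shape of that structure for a small, invented set of tasks; the task names and varflag values are examples, not taken from a real recipe.

    _task_deps = {
        'tasks':   ['do_fetch', 'do_configure', 'do_compile'],
        'parents': {                             # from each task's 'deps' varflag
            'do_fetch':     [],
            'do_configure': ['do_fetch'],
            'do_compile':   ['do_configure'],
        },
        'deptask': {                             # run this task in each build dependency first
            'do_configure': 'do_populate_staging',
        },
        'nostamp': {},
    }
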
534
bitbake-dev/lib/bb/cache.py
Normal file
@@ -0,0 +1,534 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake 'Event' implementation
|
||||
#
|
||||
# Caching of bitbake variables before task execution
|
||||
|
||||
# Copyright (C) 2006 Richard Purdie
|
||||
|
||||
# but small sections based on code from bin/bitbake:
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
# Copyright (C) 2003, 2004 Phil Blundell
|
||||
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
|
||||
# Copyright (C) 2005 Holger Hans Peter Freyther
|
||||
# Copyright (C) 2005 ROAD GmbH
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
|
||||
import os, re
|
||||
import bb.data
|
||||
import bb.utils
|
||||
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
|
||||
|
||||
__cache_version__ = "130"
|
||||
|
||||
class Cache:
|
||||
"""
|
||||
BitBake Cache implementation
|
||||
"""
|
||||
def __init__(self, cooker):
|
||||
|
||||
|
||||
self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True)
|
||||
self.clean = {}
|
||||
self.checked = {}
|
||||
self.depends_cache = {}
|
||||
self.data = None
|
||||
self.data_fn = None
|
||||
self.cacheclean = True
|
||||
|
||||
if self.cachedir in [None, '']:
|
||||
self.has_cache = False
|
||||
bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.")
|
||||
return
|
||||
|
||||
self.has_cache = True
|
||||
self.cachefile = os.path.join(self.cachedir,"bb_cache.dat")
|
||||
|
||||
bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir)
|
||||
try:
|
||||
os.stat( self.cachedir )
|
||||
except OSError:
|
||||
bb.mkdirhier( self.cachedir )
|
||||
|
||||
# If any of configuration.data's dependencies are newer than the
|
||||
# cache there isn't even any point in loading it...
|
||||
newest_mtime = 0
|
||||
deps = bb.data.getVar("__depends", cooker.configuration.data, True)
|
||||
for f,old_mtime in deps:
|
||||
if old_mtime > newest_mtime:
|
||||
newest_mtime = old_mtime
|
||||
|
||||
if bb.parse.cached_mtime_noerror(self.cachefile) >= newest_mtime:
|
||||
try:
|
||||
p = pickle.Unpickler(file(self.cachefile, "rb"))
|
||||
self.depends_cache, version_data = p.load()
|
||||
if version_data['CACHE_VER'] != __cache_version__:
|
||||
raise ValueError, 'Cache Version Mismatch'
|
||||
if version_data['BITBAKE_VER'] != bb.__version__:
|
||||
raise ValueError, 'Bitbake Version Mismatch'
|
||||
except EOFError:
|
||||
bb.msg.note(1, bb.msg.domain.Cache, "Truncated cache found, rebuilding...")
|
||||
self.depends_cache = {}
|
||||
except:
|
||||
bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
|
||||
self.depends_cache = {}
|
||||
else:
|
||||
try:
|
||||
os.stat( self.cachefile )
|
||||
bb.msg.note(1, bb.msg.domain.Cache, "Out of date cache found, rebuilding...")
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def getVar(self, var, fn, exp = 0):
|
||||
"""
|
||||
Gets the value of a variable
|
||||
(similar to getVar in the data class)
|
||||
|
||||
There are two scenarios:
|
||||
1. We have cached data - serve from depends_cache[fn]
|
||||
2. We're learning what data to cache - serve from data
|
||||
backend but add a copy of the data to the cache.
|
||||
"""
|
||||
if fn in self.clean:
|
||||
return self.depends_cache[fn][var]
|
||||
|
||||
if not fn in self.depends_cache:
|
||||
self.depends_cache[fn] = {}
|
||||
|
||||
if fn != self.data_fn:
|
||||
# We're trying to access data in the cache which doesn't exist
|
||||
# yet; setData hasn't been called to set up the right access. Very bad.
|
||||
bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))
|
||||
|
||||
self.cacheclean = False
|
||||
result = bb.data.getVar(var, self.data, exp)
|
||||
self.depends_cache[fn][var] = result
|
||||
return result
|
||||
|
||||
def setData(self, virtualfn, fn, data):
|
||||
"""
|
||||
Called to prime bb_cache ready to learn which variables to cache.
|
||||
Will be followed by calls to self.getVar which aren't cached
|
||||
but can be fulfilled from self.data.
|
||||
"""
|
||||
self.data_fn = virtualfn
|
||||
self.data = data
|
||||
|
||||
# Make sure __depends makes the depends_cache
|
||||
self.getVar("__depends", virtualfn, True)
|
||||
self.depends_cache[virtualfn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)
|
||||
|
||||
def virtualfn2realfn(self, virtualfn):
|
||||
"""
|
||||
Convert a virtual file name to a real one + the associated subclass keyword
|
||||
"""
|
||||
|
||||
fn = virtualfn
|
||||
cls = ""
|
||||
if virtualfn.startswith('virtual:'):
|
||||
cls = virtualfn.split(':', 2)[1]
|
||||
fn = virtualfn.replace('virtual:' + cls + ':', '')
|
||||
#bb.msg.debug(2, bb.msg.domain.Cache, "virtualfn2realfn %s to %s %s" % (virtualfn, fn, cls))
|
||||
return (fn, cls)
|
||||
|
||||
def realfn2virtual(self, realfn, cls):
|
||||
"""
|
||||
Convert a real filename + the associated subclass keyword to a virtual filename
|
||||
"""
|
||||
if cls == "":
|
||||
#bb.msg.debug(2, bb.msg.domain.Cache, "realfn2virtual %s and '%s' to %s" % (realfn, cls, realfn))
|
||||
return realfn
|
||||
#bb.msg.debug(2, bb.msg.domain.Cache, "realfn2virtual %s and %s to %s" % (realfn, cls, "virtual:" + cls + ":" + realfn))
|
||||
return "virtual:" + cls + ":" + realfn
|
||||
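# --- Editor's illustrative sketch; not part of the change above ---
# The two helpers above define the naming convention used for BBCLASSEXTEND
# variants: "virtual:<class>:<path>".  A quick round trip, assuming a Cache
# instance 'bb_cache' obtained from bb.cache.init(cooker):
virtualfn = bb_cache.realfn2virtual('/meta/packages/foo/foo_1.0.bb', 'native')
# -> 'virtual:native:/meta/packages/foo/foo_1.0.bb'
fn, cls = bb_cache.virtualfn2realfn(virtualfn)
# -> ('/meta/packages/foo/foo_1.0.bb', 'native')
# --- end sketch ---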
|
||||
def loadDataFull(self, virtualfn, cfgData):
|
||||
"""
|
||||
Return a complete set of data for fn.
|
||||
To do this, we need to parse the file.
|
||||
"""
|
||||
|
||||
(fn, cls) = self.virtualfn2realfn(virtualfn)
|
||||
|
||||
bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s (full)" % fn)
|
||||
|
||||
bb_data, skipped = self.load_bbfile(fn, cfgData)
|
||||
if isinstance(bb_data, dict):
|
||||
return bb_data[cls]
|
||||
|
||||
return bb_data
|
||||
|
||||
def loadData(self, fn, cfgData, cacheData):
|
||||
"""
|
||||
Load a subset of data for fn.
|
||||
If the cached data is valid we do nothing.
|
||||
Otherwise, we need to parse the file and set the system
|
||||
to record the variables accessed.
|
||||
Return the cache status and whether the file was skipped when parsed
|
||||
"""
|
||||
if fn not in self.checked:
|
||||
self.cacheValidUpdate(fn)
|
||||
if self.cacheValid(fn):
|
||||
if "SKIPPED" in self.depends_cache[fn]:
|
||||
return True, True
|
||||
self.handle_data(fn, cacheData)
|
||||
multi = self.getVar('BBCLASSEXTEND', fn, True)
|
||||
if multi:
|
||||
for cls in multi.split():
|
||||
virtualfn = self.realfn2virtual(fn, cls)
|
||||
# Pretend we're clean so getVar works
|
||||
self.clean[virtualfn] = ""
|
||||
self.handle_data(virtualfn, cacheData)
|
||||
return True, False
|
||||
|
||||
bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s" % fn)
|
||||
|
||||
bb_data, skipped = self.load_bbfile(fn, cfgData)
|
||||
|
||||
if skipped:
|
||||
if isinstance(bb_data, dict):
|
||||
self.setData(fn, fn, bb_data[""])
|
||||
else:
|
||||
self.setData(fn, fn, bb_data)
|
||||
return False, skipped
|
||||
|
||||
if isinstance(bb_data, dict):
|
||||
for data in bb_data:
|
||||
virtualfn = self.realfn2virtual(fn, data)
|
||||
self.setData(virtualfn, fn, bb_data[data])
|
||||
self.handle_data(virtualfn, cacheData)
|
||||
return False, skipped
|
||||
|
||||
self.setData(fn, fn, bb_data)
|
||||
self.handle_data(fn, cacheData)
|
||||
return False, skipped
|
||||
|
||||
def cacheValid(self, fn):
|
||||
"""
|
||||
Is the cache valid for fn?
|
||||
Fast version, no timestamps checked.
|
||||
"""
|
||||
# Is cache enabled?
|
||||
if not self.has_cache:
|
||||
return False
|
||||
if fn in self.clean:
|
||||
return True
|
||||
return False
|
||||
|
||||
def cacheValidUpdate(self, fn):
|
||||
"""
|
||||
Is the cache valid for fn?
|
||||
Make thorough (slower) checks including timestamps.
|
||||
"""
|
||||
# Is cache enabled?
|
||||
if not self.has_cache:
|
||||
return False
|
||||
|
||||
self.checked[fn] = ""
|
||||
|
||||
# Pretend we're clean so getVar works
|
||||
self.clean[fn] = ""
|
||||
|
||||
# File isn't in depends_cache
|
||||
if not fn in self.depends_cache:
|
||||
bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % fn)
|
||||
self.remove(fn)
|
||||
return False
|
||||
|
||||
mtime = bb.parse.cached_mtime_noerror(fn)
|
||||
|
||||
# Check file still exists
|
||||
if mtime == 0:
|
||||
bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s no longer exists" % fn)
|
||||
self.remove(fn)
|
||||
return False
|
||||
|
||||
# Check the file's timestamp
|
||||
if mtime != self.getVar("CACHETIMESTAMP", fn, True):
|
||||
bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn)
|
||||
self.remove(fn)
|
||||
return False
|
||||
|
||||
# Check dependencies are still valid
|
||||
depends = self.getVar("__depends", fn, True)
|
||||
if depends:
|
||||
for f,old_mtime in depends:
|
||||
fmtime = bb.parse.cached_mtime_noerror(f)
|
||||
# Check if file still exists
|
||||
if old_mtime != 0 and fmtime == 0:
|
||||
self.remove(fn)
|
||||
return False
|
||||
|
||||
if (fmtime != old_mtime):
|
||||
bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f))
|
||||
self.remove(fn)
|
||||
return False
|
||||
|
||||
#bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
|
||||
if not fn in self.clean:
|
||||
self.clean[fn] = ""
|
||||
|
||||
return True
|
||||
|
||||
def skip(self, fn):
|
||||
"""
|
||||
Mark a fn as skipped
|
||||
Called from the parser
|
||||
"""
|
||||
if not fn in self.depends_cache:
|
||||
self.depends_cache[fn] = {}
|
||||
self.depends_cache[fn]["SKIPPED"] = "1"
|
||||
|
||||
def remove(self, fn):
|
||||
"""
|
||||
Remove a fn from the cache
|
||||
Called from the parser in error cases
|
||||
"""
|
||||
bb.msg.debug(1, bb.msg.domain.Cache, "Removing %s from cache" % fn)
|
||||
if fn in self.depends_cache:
|
||||
del self.depends_cache[fn]
|
||||
if fn in self.clean:
|
||||
del self.clean[fn]
|
||||
|
||||
def sync(self):
|
||||
"""
|
||||
Save the cache
|
||||
Called from the parser when complete (or exiting)
|
||||
"""
|
||||
import copy
|
||||
|
||||
if not self.has_cache:
|
||||
return
|
||||
|
||||
if self.cacheclean:
|
||||
bb.msg.note(1, bb.msg.domain.Cache, "Cache is clean, not saving.")
|
||||
return
|
||||
|
||||
version_data = {}
|
||||
version_data['CACHE_VER'] = __cache_version__
|
||||
version_data['BITBAKE_VER'] = bb.__version__
|
||||
|
||||
cache_data = copy.deepcopy(self.depends_cache)
|
||||
for fn in self.depends_cache:
|
||||
if '__BB_DONT_CACHE' in self.depends_cache[fn] and self.depends_cache[fn]['__BB_DONT_CACHE']:
|
||||
bb.msg.debug(2, bb.msg.domain.Cache, "Not caching %s, marked as not cacheable" % fn)
|
||||
del cache_data[fn]
|
||||
elif 'PV' in self.depends_cache[fn] and 'SRCREVINACTION' in self.depends_cache[fn]['PV']:
|
||||
bb.msg.error(bb.msg.domain.Cache, "Not caching %s as it had SRCREVINACTION in PV. Please report this bug" % fn)
|
||||
del cache_data[fn]
|
||||
|
||||
p = pickle.Pickler(file(self.cachefile, "wb" ), -1 )
|
||||
p.dump([cache_data, version_data])
|
||||
|
||||
def mtime(self, cachefile):
|
||||
return bb.parse.cached_mtime_noerror(cachefile)
|
||||
|
||||
def handle_data(self, file_name, cacheData):
|
||||
"""
|
||||
Save data we need into the cache
|
||||
"""
|
||||
|
||||
pn = self.getVar('PN', file_name, True)
|
||||
pe = self.getVar('PE', file_name, True) or "0"
|
||||
pv = self.getVar('PV', file_name, True)
|
||||
if 'SRCREVINACTION' in pv:
|
||||
bb.note("Found SRCREVINACTION in PV (%s) or %s. Please report this bug." % (pv, file_name))
|
||||
pr = self.getVar('PR', file_name, True)
|
||||
dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
|
||||
depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
|
||||
packages = (self.getVar('PACKAGES', file_name, True) or "").split()
|
||||
packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
|
||||
rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()
|
||||
|
||||
cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True)
|
||||
|
||||
# build PackageName to FileName lookup table
|
||||
if pn not in cacheData.pkg_pn:
|
||||
cacheData.pkg_pn[pn] = []
|
||||
cacheData.pkg_pn[pn].append(file_name)
|
||||
|
||||
cacheData.stamp[file_name] = self.getVar('STAMP', file_name, True)
|
||||
|
||||
# build FileName to PackageName lookup table
|
||||
cacheData.pkg_fn[file_name] = pn
|
||||
cacheData.pkg_pepvpr[file_name] = (pe,pv,pr)
|
||||
cacheData.pkg_dp[file_name] = dp
|
||||
|
||||
provides = [pn]
|
||||
for provide in (self.getVar("PROVIDES", file_name, True) or "").split():
|
||||
if provide not in provides:
|
||||
provides.append(provide)
|
||||
|
||||
# Build forward and reverse provider hashes
|
||||
# Forward: virtual -> [filenames]
|
||||
# Reverse: PN -> [virtuals]
|
||||
if pn not in cacheData.pn_provides:
|
||||
cacheData.pn_provides[pn] = []
|
||||
|
||||
cacheData.fn_provides[file_name] = provides
|
||||
for provide in provides:
|
||||
if provide not in cacheData.providers:
|
||||
cacheData.providers[provide] = []
|
||||
cacheData.providers[provide].append(file_name)
|
||||
if not provide in cacheData.pn_provides[pn]:
|
||||
cacheData.pn_provides[pn].append(provide)
|
||||
|
||||
cacheData.deps[file_name] = []
|
||||
for dep in depends:
|
||||
if not dep in cacheData.deps[file_name]:
|
||||
cacheData.deps[file_name].append(dep)
|
||||
if not dep in cacheData.all_depends:
|
||||
cacheData.all_depends.append(dep)
|
||||
|
||||
# Build reverse hash for PACKAGES, so runtime dependencies
|
||||
# can be resolved (RDEPENDS, RRECOMMENDS etc.)
|
||||
for package in packages:
|
||||
if not package in cacheData.packages:
|
||||
cacheData.packages[package] = []
|
||||
cacheData.packages[package].append(file_name)
|
||||
rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()
|
||||
|
||||
for package in packages_dynamic:
|
||||
if not package in cacheData.packages_dynamic:
|
||||
cacheData.packages_dynamic[package] = []
|
||||
cacheData.packages_dynamic[package].append(file_name)
|
||||
|
||||
for rprovide in rprovides:
|
||||
if not rprovide in cacheData.rproviders:
|
||||
cacheData.rproviders[rprovide] = []
|
||||
cacheData.rproviders[rprovide].append(file_name)
|
||||
|
||||
# Build hash of runtime depends and recommends
|
||||
|
||||
if not file_name in cacheData.rundeps:
|
||||
cacheData.rundeps[file_name] = {}
|
||||
if not file_name in cacheData.runrecs:
|
||||
cacheData.runrecs[file_name] = {}
|
||||
|
||||
rdepends = self.getVar('RDEPENDS', file_name, True) or ""
|
||||
rrecommends = self.getVar('RRECOMMENDS', file_name, True) or ""
|
||||
for package in packages + [pn]:
|
||||
if not package in cacheData.rundeps[file_name]:
|
||||
cacheData.rundeps[file_name][package] = []
|
||||
if not package in cacheData.runrecs[file_name]:
|
||||
cacheData.runrecs[file_name][package] = []
|
||||
|
||||
cacheData.rundeps[file_name][package] = rdepends + " " + (self.getVar("RDEPENDS_%s" % package, file_name, True) or "")
|
||||
cacheData.runrecs[file_name][package] = rrecommends + " " + (self.getVar("RRECOMMENDS_%s" % package, file_name, True) or "")
|
||||
|
||||
# Collect files we may need for possible world-dep
|
||||
# calculations
|
||||
if not self.getVar('BROKEN', file_name, True) and not self.getVar('EXCLUDE_FROM_WORLD', file_name, True):
|
||||
cacheData.possible_world.append(file_name)
|
||||
|
||||
# Touch these to make sure they are in the cache
|
||||
self.getVar('__BB_DONT_CACHE', file_name, True)
|
||||
self.getVar('BBCLASSEXTEND', file_name, True)
|
||||
|
||||
def load_bbfile( self, bbfile , config):
|
||||
"""
|
||||
Load and parse one .bb build file
|
||||
Return the data and whether parsing resulted in the file being skipped
|
||||
"""
|
||||
|
||||
import bb
|
||||
from bb import utils, data, parse, debug, event, fatal
|
||||
|
||||
# expand tmpdir to include this topdir
|
||||
data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
|
||||
bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
|
||||
oldpath = os.path.abspath(os.getcwd())
|
||||
if bb.parse.cached_mtime_noerror(bbfile_loc):
|
||||
os.chdir(bbfile_loc)
|
||||
bb_data = data.init_db(config)
|
||||
try:
|
||||
bb_data = parse.handle(bbfile, bb_data) # read .bb data
|
||||
os.chdir(oldpath)
|
||||
return bb_data, False
|
||||
except bb.parse.SkipPackage:
|
||||
os.chdir(oldpath)
|
||||
return bb_data, True
|
||||
except:
|
||||
os.chdir(oldpath)
|
||||
raise
|
||||
|
||||
def init(cooker):
|
||||
"""
|
||||
The Objective: Cache the minimum amount of data possible yet get to the
|
||||
stage of building packages (i.e. tryBuild) without reparsing any .bb files.
|
||||
|
||||
To do this, we intercept getVar calls and only cache the variables we see
|
||||
being accessed. We rely on the cache getVar calls being made for all
|
||||
variables bitbake might need to use to reach this stage. For each cached
|
||||
file we need to track:
|
||||
|
||||
* Its mtime
|
||||
* The mtimes of all its dependencies
|
||||
* Whether it caused a parse.SkipPackage exception
|
||||
|
||||
Files causing parsing errors are evicted from the cache.
|
||||
|
||||
"""
|
||||
return Cache(cooker)
|
||||
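# --- Editor's illustrative sketch; not part of the change above ---
# How a cooker is expected to drive this module: init() hands back a Cache,
# loadData() is called once per recipe (served from the cache when it is
# clean, parsed otherwise), and sync() writes bb_cache.dat out when parsing
# finishes.  'cooker', 'cfgData', and 'recipe_files' are assumed to come
# from the caller (see cooker.py later in this changeset).
bb_cache = init(cooker)
cacheData = CacheData()
for fn in recipe_files:                  # e.g. the list from collect_bbfiles()
    cached, skipped = bb_cache.loadData(fn, cfgData, cacheData)
bb_cache.sync()                          # persist anything newly parsed
# --- end sketch ---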
#============================================================================#
|
||||
# CacheData
|
||||
#============================================================================#
|
||||
class CacheData:
|
||||
"""
|
||||
The data structures we compile from the cached data
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
"""
|
||||
Direct cache variables
|
||||
(from Cache.handle_data)
|
||||
"""
|
||||
self.providers = {}
|
||||
self.rproviders = {}
|
||||
self.packages = {}
|
||||
self.packages_dynamic = {}
|
||||
self.possible_world = []
|
||||
self.pkg_pn = {}
|
||||
self.pkg_fn = {}
|
||||
self.pkg_pepvpr = {}
|
||||
self.pkg_dp = {}
|
||||
self.pn_provides = {}
|
||||
self.fn_provides = {}
|
||||
self.all_depends = []
|
||||
self.deps = {}
|
||||
self.rundeps = {}
|
||||
self.runrecs = {}
|
||||
self.task_queues = {}
|
||||
self.task_deps = {}
|
||||
self.stamp = {}
|
||||
self.preferred = {}
|
||||
|
||||
"""
|
||||
Indirect Cache variables
|
||||
(set elsewhere)
|
||||
"""
|
||||
self.ignored_dependencies = []
|
||||
self.world_target = set()
|
||||
self.bbfile_priority = {}
|
||||
self.bbfile_config_priorities = []
|
||||
272
bitbake-dev/lib/bb/command.py
Normal file
@@ -0,0 +1,272 @@
|
||||
"""
|
||||
BitBake 'Command' module
|
||||
|
||||
Provide an interface to interact with the bitbake server through 'commands'
|
||||
"""
|
||||
|
||||
# Copyright (C) 2006-2007 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""
|
||||
The bitbake server takes 'commands' from its UI/commandline.
|
||||
Commands are either synchronous or asynchronous.
|
||||
Async commands return data to the client in the form of events.
|
||||
Sync commands must only return data through the function return value
|
||||
and must not trigger events, directly or indirectly.
|
||||
Commands are queued in a CommandQueue
|
||||
"""
|
||||
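# --- Editor's illustrative sketch; not part of the change above ---
# A UI talks to the server purely through Command.runCommand().  The first
# list element is the method name from CommandsSync/CommandsAsync, followed
# by its parameters.  Synchronous commands return their result directly;
# asynchronous ones return True and report back later through the
# CookerCommandCompleted/CookerCommandFailed events.  'cooker' is assumed to
# be an already constructed BBCooker.
cmd = Command(cooker)
machine = cmd.runCommand(["getVariable", "MACHINE"])            # synchronous
started = cmd.runCommand(["buildTargets", ["foo"], "build"])    # asynchronous
# --- end sketch ---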
|
||||
import bb
|
||||
|
||||
async_cmds = {}
|
||||
sync_cmds = {}
|
||||
|
||||
class Command:
|
||||
"""
|
||||
A queue of asynchronous commands for bitbake
|
||||
"""
|
||||
def __init__(self, cooker):
|
||||
|
||||
self.cooker = cooker
|
||||
self.cmds_sync = CommandsSync()
|
||||
self.cmds_async = CommandsAsync()
|
||||
|
||||
# FIXME Add lock for this
|
||||
self.currentAsyncCommand = None
|
||||
|
||||
for attr in CommandsSync.__dict__:
|
||||
command = attr[:].lower()
|
||||
method = getattr(CommandsSync, attr)
|
||||
sync_cmds[command] = (method)
|
||||
|
||||
for attr in CommandsAsync.__dict__:
|
||||
command = attr[:].lower()
|
||||
method = getattr(CommandsAsync, attr)
|
||||
async_cmds[command] = (method)
|
||||
|
||||
def runCommand(self, commandline):
|
||||
bb.debug("Running command %s" % commandline)
|
||||
try:
|
||||
command = commandline.pop(0)
|
||||
if command in CommandsSync.__dict__:
|
||||
# Can run synchronous commands straight away
|
||||
return getattr(CommandsSync, command)(self.cmds_sync, self, commandline)
|
||||
if self.currentAsyncCommand is not None:
|
||||
return "Busy (%s in progress)" % self.currentAsyncCommand[0]
|
||||
if command not in CommandsAsync.__dict__:
|
||||
return "No such command"
|
||||
self.currentAsyncCommand = (command, commandline)
|
||||
self.cooker.server.register_idle_function(self.cooker.runCommands, self.cooker)
|
||||
return True
|
||||
except:
|
||||
import traceback
|
||||
return traceback.format_exc()
|
||||
|
||||
def runAsyncCommand(self):
|
||||
try:
|
||||
if self.currentAsyncCommand is not None:
|
||||
(command, options) = self.currentAsyncCommand
|
||||
commandmethod = getattr(CommandsAsync, command)
|
||||
needcache = getattr( commandmethod, "needcache" )
|
||||
if needcache and self.cooker.cookerState != bb.cooker.cookerParsed:
|
||||
self.cooker.updateCache()
|
||||
return True
|
||||
else:
|
||||
commandmethod(self.cmds_async, self, options)
|
||||
return False
|
||||
else:
|
||||
return False
|
||||
except:
|
||||
import traceback
|
||||
self.finishAsyncCommand(traceback.format_exc())
|
||||
return False
|
||||
|
||||
def finishAsyncCommand(self, error = None):
|
||||
if error:
|
||||
bb.event.fire(bb.command.CookerCommandFailed(self.cooker.configuration.event_data, error))
|
||||
else:
|
||||
bb.event.fire(bb.command.CookerCommandCompleted(self.cooker.configuration.event_data))
|
||||
self.currentAsyncCommand = None
|
||||
|
||||
|
||||
class CommandsSync:
|
||||
"""
|
||||
A class of synchronous commands
|
||||
These should run quickly so as not to hurt interactive performance.
|
||||
These must not influence any running asynchronous command.
|
||||
"""
|
||||
|
||||
def stateShutdown(self, command, params):
|
||||
"""
|
||||
Trigger cooker 'shutdown' mode
|
||||
"""
|
||||
command.cooker.cookerAction = bb.cooker.cookerShutdown
|
||||
|
||||
def stateStop(self, command, params):
|
||||
"""
|
||||
Stop the cooker
|
||||
"""
|
||||
command.cooker.cookerAction = bb.cooker.cookerStop
|
||||
|
||||
def getCmdLineAction(self, command, params):
|
||||
"""
|
||||
Get any command parsed from the commandline
|
||||
"""
|
||||
return command.cooker.commandlineAction
|
||||
|
||||
def getVariable(self, command, params):
|
||||
"""
|
||||
Read the value of a variable from configuration.data
|
||||
"""
|
||||
varname = params[0]
|
||||
expand = True
|
||||
if len(params) > 1:
|
||||
expand = params[1]
|
||||
|
||||
return bb.data.getVar(varname, command.cooker.configuration.data, expand)
|
||||
|
||||
def setVariable(self, command, params):
|
||||
"""
|
||||
Set the value of variable in configuration.data
|
||||
"""
|
||||
varname = params[0]
|
||||
value = params[1]
|
||||
bb.data.setVar(varname, value, command.cooker.configuration.data)
|
||||
|
||||
|
||||
class CommandsAsync:
|
||||
"""
|
||||
A class of asynchronous commands
|
||||
These functions communicate via generated events.
|
||||
Any function that requires metadata parsing should be here.
|
||||
"""
|
||||
|
||||
def buildFile(self, command, params):
|
||||
"""
|
||||
Build a single specified .bb file
|
||||
"""
|
||||
bfile = params[0]
|
||||
task = params[1]
|
||||
|
||||
command.cooker.buildFile(bfile, task)
|
||||
buildFile.needcache = False
|
||||
|
||||
def buildTargets(self, command, params):
|
||||
"""
|
||||
Build a set of targets
|
||||
"""
|
||||
pkgs_to_build = params[0]
|
||||
task = params[1]
|
||||
|
||||
command.cooker.buildTargets(pkgs_to_build, task)
|
||||
buildTargets.needcache = True
|
||||
|
||||
def generateDepTreeEvent(self, command, params):
|
||||
"""
|
||||
Generate an event containing the dependency information
|
||||
"""
|
||||
pkgs_to_build = params[0]
|
||||
task = params[1]
|
||||
|
||||
command.cooker.generateDepTreeEvent(pkgs_to_build, task)
|
||||
command.finishAsyncCommand()
|
||||
generateDepTreeEvent.needcache = True
|
||||
|
||||
def generateDotGraph(self, command, params):
|
||||
"""
|
||||
Dump dependency information to disk as .dot files
|
||||
"""
|
||||
pkgs_to_build = params[0]
|
||||
task = params[1]
|
||||
|
||||
command.cooker.generateDotGraphFiles(pkgs_to_build, task)
|
||||
command.finishAsyncCommand()
|
||||
generateDotGraph.needcache = True
|
||||
|
||||
def showVersions(self, command, params):
|
||||
"""
|
||||
Show the currently selected versions
|
||||
"""
|
||||
command.cooker.showVersions()
|
||||
command.finishAsyncCommand()
|
||||
showVersions.needcache = True
|
||||
|
||||
def showEnvironmentTarget(self, command, params):
|
||||
"""
|
||||
Print the environment of a target recipe
|
||||
(needs the cache to work out which recipe to use)
|
||||
"""
|
||||
pkg = params[0]
|
||||
|
||||
command.cooker.showEnvironment(None, pkg)
|
||||
command.finishAsyncCommand()
|
||||
showEnvironmentTarget.needcache = True
|
||||
|
||||
def showEnvironment(self, command, params):
|
||||
"""
|
||||
Print the standard environment
|
||||
or if specified the environment for a specified recipe
|
||||
"""
|
||||
bfile = params[0]
|
||||
|
||||
command.cooker.showEnvironment(bfile)
|
||||
command.finishAsyncCommand()
|
||||
showEnvironment.needcache = False
|
||||
|
||||
def parseFiles(self, command, params):
|
||||
"""
|
||||
Parse the .bb files
|
||||
"""
|
||||
command.cooker.updateCache()
|
||||
command.finishAsyncCommand()
|
||||
parseFiles.needcache = True
|
||||
|
||||
def compareRevisions(self, command, params):
|
||||
"""
|
||||
Compare the stored and current source revisions
|
||||
"""
|
||||
command.cooker.compareRevisions()
|
||||
command.finishAsyncCommand()
|
||||
compareRevisions.needcache = True
|
||||
|
||||
#
|
||||
# Events
|
||||
#
|
||||
class CookerCommandCompleted(bb.event.Event):
|
||||
"""
|
||||
Cooker command completed
|
||||
"""
|
||||
def __init__(self, data):
|
||||
bb.event.Event.__init__(self, data)
|
||||
|
||||
|
||||
class CookerCommandFailed(bb.event.Event):
|
||||
"""
|
||||
Cooker command failed
|
||||
"""
|
||||
def __init__(self, data, error):
|
||||
bb.event.Event.__init__(self, data)
|
||||
self.error = error
|
||||
|
||||
class CookerCommandSetExitCode(bb.event.Event):
|
||||
"""
|
||||
Set the exit code for a cooker command
|
||||
"""
|
||||
def __init__(self, data, exitcode):
|
||||
bb.event.Event.__init__(self, data)
|
||||
self.exitcode = int(exitcode)
|
||||
977
bitbake-dev/lib/bb/cooker.py
Normal file
@@ -0,0 +1,977 @@
|
||||
#!/usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
# Copyright (C) 2003, 2004 Phil Blundell
|
||||
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
|
||||
# Copyright (C) 2005 Holger Hans Peter Freyther
|
||||
# Copyright (C) 2005 ROAD GmbH
|
||||
# Copyright (C) 2006 - 2007 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import sys, os, getopt, glob, copy, os.path, re, time
|
||||
import bb
|
||||
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
|
||||
from bb import xmlrpcserver, command
|
||||
import itertools, sre_constants
|
||||
|
||||
class MultipleMatches(Exception):
|
||||
"""
|
||||
Exception raised when multiple file matches are found
|
||||
"""
|
||||
|
||||
class ParsingErrorsFound(Exception):
|
||||
"""
|
||||
Exception raised when parsing errors are found
|
||||
"""
|
||||
|
||||
class NothingToBuild(Exception):
|
||||
"""
|
||||
Exception raised when there is nothing to build
|
||||
"""
|
||||
|
||||
|
||||
# Different states cooker can be in
|
||||
cookerClean = 1
|
||||
cookerParsing = 2
|
||||
cookerParsed = 3
|
||||
|
||||
# Different action states the cooker can be in
|
||||
cookerRun = 1 # Cooker is running normally
|
||||
cookerShutdown = 2 # Active tasks should be brought to a controlled stop
|
||||
cookerStop = 3 # Stop, now!
|
||||
|
||||
#============================================================================#
|
||||
# BBCooker
|
||||
#============================================================================#
|
||||
class BBCooker:
|
||||
"""
|
||||
Manages one bitbake build run
|
||||
"""
|
||||
|
||||
def __init__(self, configuration):
|
||||
self.status = None
|
||||
|
||||
self.cache = None
|
||||
self.bb_cache = None
|
||||
|
||||
self.server = bb.xmlrpcserver.BitBakeXMLRPCServer(self)
|
||||
#self.server.register_function(self.showEnvironment)
|
||||
|
||||
self.configuration = configuration
|
||||
|
||||
if self.configuration.verbose:
|
||||
bb.msg.set_verbose(True)
|
||||
|
||||
if self.configuration.debug:
|
||||
bb.msg.set_debug_level(self.configuration.debug)
|
||||
else:
|
||||
bb.msg.set_debug_level(0)
|
||||
|
||||
if self.configuration.debug_domains:
|
||||
bb.msg.set_debug_domains(self.configuration.debug_domains)
|
||||
|
||||
self.configuration.data = bb.data.init()
|
||||
|
||||
bb.data.inheritFromOS(self.configuration.data)
|
||||
|
||||
for f in self.configuration.file:
|
||||
self.parseConfigurationFile( f )
|
||||
|
||||
self.parseConfigurationFile( os.path.join( "conf", "bitbake.conf" ) )
|
||||
|
||||
if not self.configuration.cmd:
|
||||
self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build"
|
||||
|
||||
bbpkgs = bb.data.getVar('BBPKGS', self.configuration.data, True)
|
||||
if bbpkgs and len(self.configuration.pkgs_to_build) == 0:
|
||||
self.configuration.pkgs_to_build.extend(bbpkgs.split())
|
||||
|
||||
#
|
||||
# Special updated configuration we use for firing events
|
||||
#
|
||||
self.configuration.event_data = bb.data.createCopy(self.configuration.data)
|
||||
bb.data.update_data(self.configuration.event_data)
|
||||
|
||||
# TOSTOP must not be set or our children will hang when they output
|
||||
fd = sys.stdout.fileno()
|
||||
if os.isatty(fd):
|
||||
import termios
|
||||
tcattr = termios.tcgetattr(fd)
|
||||
if tcattr[3] & termios.TOSTOP:
|
||||
bb.msg.note(1, bb.msg.domain.Build, "The terminal had the TOSTOP bit set, clearing...")
|
||||
tcattr[3] = tcattr[3] & ~termios.TOSTOP
|
||||
termios.tcsetattr(fd, termios.TCSANOW, tcattr)
|
||||
|
||||
self.command = bb.command.Command(self)
|
||||
self.cookerState = cookerClean
|
||||
self.cookerAction = cookerRun
|
||||
|
||||
def parseConfiguration(self):
|
||||
|
||||
|
||||
# Change nice level if we're asked to
|
||||
nice = bb.data.getVar("BB_NICE_LEVEL", self.configuration.data, True)
|
||||
if nice:
|
||||
curnice = os.nice(0)
|
||||
nice = int(nice) - curnice
|
||||
bb.msg.note(2, bb.msg.domain.Build, "Renice to %s " % os.nice(nice))
|
||||
|
||||
def parseCommandLine(self):
|
||||
# Parse any commandline into actions
|
||||
if self.configuration.show_environment:
|
||||
self.commandlineAction = None
|
||||
|
||||
if 'world' in self.configuration.pkgs_to_build:
|
||||
bb.error("'world' is not a valid target for --environment.")
|
||||
elif len(self.configuration.pkgs_to_build) > 1:
|
||||
bb.error("Only one target can be used with the --environment option.")
|
||||
elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0:
|
||||
bb.error("No target should be used with the --environment and --buildfile options.")
|
||||
elif len(self.configuration.pkgs_to_build) > 0:
|
||||
self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build]
|
||||
else:
|
||||
self.commandlineAction = ["showEnvironment", self.configuration.buildfile]
|
||||
elif self.configuration.buildfile is not None:
|
||||
self.commandlineAction = ["buildFile", self.configuration.buildfile, self.configuration.cmd]
|
||||
elif self.configuration.revisions_changed:
|
||||
self.commandlineAction = ["compareRevisions"]
|
||||
elif self.configuration.show_versions:
|
||||
self.commandlineAction = ["showVersions"]
|
||||
elif self.configuration.parse_only:
|
||||
self.commandlineAction = ["parseFiles"]
|
||||
# FIXME - implement
|
||||
#elif self.configuration.interactive:
|
||||
# self.interactiveMode()
|
||||
elif self.configuration.dot_graph:
|
||||
if self.configuration.pkgs_to_build:
|
||||
self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd]
|
||||
else:
|
||||
self.commandlineAction = None
|
||||
bb.error("Please specify a package name for dependency graph generation.")
|
||||
else:
|
||||
if self.configuration.pkgs_to_build:
|
||||
self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd]
|
||||
else:
|
||||
self.commandlineAction = None
|
||||
bb.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
|
||||
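# --- Editor's illustrative sketch; not part of the change above ---
# parseCommandLine() only records what was asked for in commandlineAction; a
# UI later fetches it through the synchronous 'getCmdLineAction' command and
# replays it.  'cmd' is assumed to be the server's bb.command.Command object.
action = cmd.runCommand(["getCmdLineAction"])
if action:                       # e.g. ["buildTargets", ["foo"], "build"]
    cmd.runCommand(action)
# --- end sketch ---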
|
||||
def runCommands(self, server, data, abort):
|
||||
"""
|
||||
Run any queued asynchronous command
|
||||
This is done by the idle handler so it runs in true context rather than
|
||||
tied to any UI.
|
||||
"""
|
||||
|
||||
return self.command.runAsyncCommand()
|
||||
|
||||
def tryBuildPackage(self, fn, item, task, the_data):
|
||||
"""
|
||||
Build one task of a package, optionally build following task depends
|
||||
"""
|
||||
try:
|
||||
if not self.configuration.dry_run:
|
||||
bb.build.exec_task('do_%s' % task, the_data)
|
||||
return True
|
||||
except bb.build.FuncFailed:
|
||||
bb.msg.error(bb.msg.domain.Build, "task stack execution failed")
|
||||
raise
|
||||
except bb.build.EventException, e:
|
||||
event = e.args[1]
|
||||
bb.msg.error(bb.msg.domain.Build, "%s event exception, aborting" % bb.event.getName(event))
|
||||
raise
|
||||
|
||||
def tryBuild(self, fn, task):
|
||||
"""
|
||||
Build a provider and its dependencies.
|
||||
build_depends is a list of previous build dependencies (not runtime)
|
||||
If build_depends is empty, we're dealing with a runtime depends
|
||||
"""
|
||||
|
||||
the_data = self.bb_cache.loadDataFull(fn, self.configuration.data)
|
||||
|
||||
item = self.status.pkg_fn[fn]
|
||||
|
||||
#if bb.build.stamp_is_current('do_%s' % self.configuration.cmd, the_data):
|
||||
# return True
|
||||
|
||||
return self.tryBuildPackage(fn, item, task, the_data)
|
||||
|
||||
def showVersions(self):
|
||||
|
||||
# Need files parsed
|
||||
self.updateCache()
|
||||
|
||||
pkg_pn = self.status.pkg_pn
|
||||
preferred_versions = {}
|
||||
latest_versions = {}
|
||||
|
||||
# Sort by priority
|
||||
for pn in pkg_pn.keys():
|
||||
(last_ver,last_file,pref_ver,pref_file) = bb.providers.findBestProvider(pn, self.configuration.data, self.status)
|
||||
preferred_versions[pn] = (pref_ver, pref_file)
|
||||
latest_versions[pn] = (last_ver, last_file)
|
||||
|
||||
pkg_list = pkg_pn.keys()
|
||||
pkg_list.sort()
|
||||
|
||||
bb.msg.plain("%-35s %25s %25s" % ("Package Name", "Latest Version", "Preferred Version"))
|
||||
bb.msg.plain("%-35s %25s %25s\n" % ("============", "==============", "================="))
|
||||
|
||||
for p in pkg_list:
|
||||
pref = preferred_versions[p]
|
||||
latest = latest_versions[p]
|
||||
|
||||
prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
|
||||
lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
|
||||
|
||||
if pref == latest:
|
||||
prefstr = ""
|
||||
|
||||
bb.msg.plain("%-35s %25s %25s" % (p, lateststr, prefstr))
|
||||
|
||||
def compareRevisions(self):
|
||||
ret = bb.fetch.fetcher_compare_revisons(self.configuration.data)
|
||||
bb.event.fire(bb.command.CookerCommandSetExitCode(self.configuration.event_data, ret))
|
||||
|
||||
def showEnvironment(self, buildfile = None, pkgs_to_build = []):
|
||||
"""
|
||||
Show the outer or per-package environment
|
||||
"""
|
||||
fn = None
|
||||
envdata = None
|
||||
|
||||
if buildfile:
|
||||
self.cb = None
|
||||
self.bb_cache = bb.cache.init(self)
|
||||
fn = self.matchFile(buildfile)
|
||||
elif len(pkgs_to_build) == 1:
|
||||
self.updateCache()
|
||||
|
||||
localdata = data.createCopy(self.configuration.data)
|
||||
bb.data.update_data(localdata)
|
||||
bb.data.expandKeys(localdata)
|
||||
|
||||
taskdata = bb.taskdata.TaskData(self.configuration.abort)
|
||||
taskdata.add_provider(localdata, self.status, pkgs_to_build[0])
|
||||
taskdata.add_unresolved(localdata, self.status)
|
||||
|
||||
targetid = taskdata.getbuild_id(pkgs_to_build[0])
|
||||
fnid = taskdata.build_targets[targetid][0]
|
||||
fn = taskdata.fn_index[fnid]
|
||||
else:
|
||||
envdata = self.configuration.data
|
||||
|
||||
if fn:
|
||||
try:
|
||||
envdata = self.bb_cache.loadDataFull(fn, self.configuration.data)
|
||||
except IOError, e:
|
||||
bb.msg.error(bb.msg.domain.Parsing, "Unable to read %s: %s" % (fn, e))
|
||||
raise
|
||||
except Exception, e:
|
||||
bb.msg.error(bb.msg.domain.Parsing, "%s" % e)
|
||||
raise
|
||||
|
||||
class dummywrite:
|
||||
def __init__(self):
|
||||
self.writebuf = ""
|
||||
def write(self, output):
|
||||
self.writebuf = self.writebuf + output
|
||||
|
||||
# emit variables and shell functions
|
||||
try:
|
||||
data.update_data(envdata)
|
||||
wb = dummywrite()
|
||||
data.emit_env(wb, envdata, True)
|
||||
bb.msg.plain(wb.writebuf)
|
||||
except Exception, e:
|
||||
bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)
|
||||
# emit the metadata which isn't valid shell
|
||||
data.expandKeys(envdata)
|
||||
for e in envdata.keys():
|
||||
if data.getVarFlag( e, 'python', envdata ):
|
||||
bb.msg.plain("\npython %s () {\n%s}\n" % (e, data.getVar(e, envdata, 1)))
|
||||
|
||||
def generateDepTreeData(self, pkgs_to_build, task):
|
||||
"""
|
||||
Create a dependency tree of pkgs_to_build, returning the data.
|
||||
"""
|
||||
|
||||
# Need files parsed
|
||||
self.updateCache()
|
||||
|
||||
# If we are told to do the None task then query the default task
|
||||
if (task == None):
|
||||
task = self.configuration.cmd
|
||||
|
||||
pkgs_to_build = self.checkPackages(pkgs_to_build)
|
||||
|
||||
localdata = data.createCopy(self.configuration.data)
|
||||
bb.data.update_data(localdata)
|
||||
bb.data.expandKeys(localdata)
|
||||
taskdata = bb.taskdata.TaskData(self.configuration.abort)
|
||||
|
||||
runlist = []
|
||||
for k in pkgs_to_build:
|
||||
taskdata.add_provider(localdata, self.status, k)
|
||||
runlist.append([k, "do_%s" % task])
|
||||
taskdata.add_unresolved(localdata, self.status)
|
||||
|
||||
rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
|
||||
rq.prepare_runqueue()
|
||||
|
||||
seen_fnids = []
|
||||
depend_tree = {}
|
||||
depend_tree["depends"] = {}
|
||||
depend_tree["tdepends"] = {}
|
||||
depend_tree["pn"] = {}
|
||||
depend_tree["rdepends-pn"] = {}
|
||||
depend_tree["packages"] = {}
|
||||
depend_tree["rdepends-pkg"] = {}
|
||||
depend_tree["rrecs-pkg"] = {}
|
||||
|
||||
for task in range(len(rq.runq_fnid)):
|
||||
taskname = rq.runq_task[task]
|
||||
fnid = rq.runq_fnid[task]
|
||||
fn = taskdata.fn_index[fnid]
|
||||
pn = self.status.pkg_fn[fn]
|
||||
version = "%s:%s-%s" % self.status.pkg_pepvpr[fn]
|
||||
if pn not in depend_tree["pn"]:
|
||||
depend_tree["pn"][pn] = {}
|
||||
depend_tree["pn"][pn]["filename"] = fn
|
||||
depend_tree["pn"][pn]["version"] = version
|
||||
for dep in rq.runq_depends[task]:
|
||||
depfn = taskdata.fn_index[rq.runq_fnid[dep]]
|
||||
deppn = self.status.pkg_fn[depfn]
|
||||
dotname = "%s.%s" % (pn, rq.runq_task[task])
|
||||
if not dotname in depend_tree["tdepends"]:
|
||||
depend_tree["tdepends"][dotname] = []
|
||||
depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.runq_task[dep]))
|
||||
if fnid not in seen_fnids:
|
||||
seen_fnids.append(fnid)
|
||||
packages = []
|
||||
|
||||
depend_tree["depends"][pn] = []
|
||||
for dep in taskdata.depids[fnid]:
|
||||
depend_tree["depends"][pn].append(taskdata.build_names_index[dep])
|
||||
|
||||
depend_tree["rdepends-pn"][pn] = []
|
||||
for rdep in taskdata.rdepids[fnid]:
|
||||
depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])
|
||||
|
||||
rdepends = self.status.rundeps[fn]
|
||||
for package in rdepends:
|
||||
depend_tree["rdepends-pkg"][package] = []
|
||||
for rdepend in rdepends[package]:
|
||||
depend_tree["rdepends-pkg"][package].append(rdepend)
|
||||
packages.append(package)
|
||||
|
||||
rrecs = self.status.runrecs[fn]
|
||||
for package in rrecs:
|
||||
depend_tree["rrecs-pkg"][package] = []
|
||||
for rdepend in rrecs[package]:
|
||||
depend_tree["rrecs-pkg"][package].append(rdepend)
|
||||
if not package in packages:
|
||||
packages.append(package)
|
||||
|
||||
for package in packages:
|
||||
if package not in depend_tree["packages"]:
|
||||
depend_tree["packages"][package] = {}
|
||||
depend_tree["packages"][package]["pn"] = pn
|
||||
depend_tree["packages"][package]["filename"] = fn
|
||||
depend_tree["packages"][package]["version"] = version
|
||||
|
||||
return depend_tree
|
||||
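# --- Editor's illustrative sketch; not part of the change above ---
# Shape of the dictionary returned by generateDepTreeData(), as built above.
# The recipe and package names are made up for illustration:
#
#   depend_tree = {
#       "pn":           {"foo": {"filename": ".../foo_1.0.bb", "version": "0:1.0-r0"}},
#       "depends":      {"foo": ["bar"]},                 # build-time, per PN
#       "rdepends-pn":  {"foo": ["bar"]},                 # runtime, per PN
#       "tdepends":     {"foo.do_compile": ["bar.do_populate_staging"]},
#       "packages":     {"foo-dev": {"pn": "foo", "filename": "...", "version": "0:1.0-r0"}},
#       "rdepends-pkg": {"foo-dev": ["foo"]},
#       "rrecs-pkg":    {"foo-dev": []},
#   }
# --- end sketch ---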
|
||||
|
||||
def generateDepTreeEvent(self, pkgs_to_build, task):
|
||||
"""
|
||||
Create a task dependency graph of pkgs_to_build.
|
||||
Generate an event with the result
|
||||
"""
|
||||
depgraph = self.generateDepTreeData(pkgs_to_build, task)
|
||||
bb.event.fire(bb.event.DepTreeGenerated(self.configuration.data, depgraph))
|
||||
|
||||
def generateDotGraphFiles(self, pkgs_to_build, task):
|
||||
"""
|
||||
Create a task dependency graph of pkgs_to_build.
|
||||
Save the result to a set of .dot files.
|
||||
"""
|
||||
|
||||
depgraph = self.generateDepTreeData(pkgs_to_build, task)
|
||||
|
||||
# Print a flattened form of the package dependencies below, where subpackages are merged into their main PN
|
||||
depends_file = file('pn-depends.dot', 'w' )
|
||||
print >> depends_file, "digraph depends {"
|
||||
for pn in depgraph["pn"]:
|
||||
fn = depgraph["pn"][pn]["filename"]
|
||||
version = depgraph["pn"][pn]["version"]
|
||||
print >> depends_file, '"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn)
|
||||
for pn in depgraph["depends"]:
|
||||
for depend in depgraph["depends"][pn]:
|
||||
print >> depends_file, '"%s" -> "%s"' % (pn, depend)
|
||||
for pn in depgraph["rdepends-pn"]:
|
||||
for rdepend in depgraph["rdepends-pn"][pn]:
|
||||
print >> depends_file, '"%s" -> "%s" [style=dashed]' % (pn, rdepend)
|
||||
print >> depends_file, "}"
|
||||
bb.msg.plain("PN dependencies saved to 'pn-depends.dot'")
|
||||
|
||||
depends_file = file('package-depends.dot', 'w' )
|
||||
print >> depends_file, "digraph depends {"
|
||||
for package in depgraph["packages"]:
|
||||
pn = depgraph["packages"][package]["pn"]
|
||||
fn = depgraph["packages"][package]["filename"]
|
||||
version = depgraph["packages"][package]["version"]
|
||||
if package == pn:
|
||||
print >> depends_file, '"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn)
|
||||
else:
|
||||
print >> depends_file, '"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn)
|
||||
for depend in depgraph["depends"][pn]:
|
||||
print >> depends_file, '"%s" -> "%s"' % (package, depend)
|
||||
for package in depgraph["rdepends-pkg"]:
|
||||
for rdepend in depgraph["rdepends-pkg"][package]:
|
||||
print >> depends_file, '"%s" -> "%s" [style=dashed]' % (package, rdepend)
|
||||
for package in depgraph["rrecs-pkg"]:
|
||||
for rdepend in depgraph["rrecs-pkg"][package]:
|
||||
print >> depends_file, '"%s" -> "%s" [style=dashed]' % (package, rdepend)
|
||||
print >> depends_file, "}"
|
||||
bb.msg.plain("Package dependencies saved to 'package-depends.dot'")
|
||||
|
||||
tdepends_file = file('task-depends.dot', 'w' )
|
||||
print >> tdepends_file, "digraph depends {"
|
||||
for task in depgraph["tdepends"]:
|
||||
(pn, taskname) = task.rsplit(".", 1)
|
||||
fn = depgraph["pn"][pn]["filename"]
|
||||
version = depgraph["pn"][pn]["version"]
|
||||
print >> tdepends_file, '"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn)
|
||||
for dep in depgraph["tdepends"][task]:
|
||||
print >> tdepends_file, '"%s" -> "%s"' % (task, dep)
|
||||
print >> tdepends_file, "}"
|
||||
bb.msg.plain("Task dependencies saved to 'task-depends.dot'")
|
||||
|
||||
def buildDepgraph( self ):
|
||||
all_depends = self.status.all_depends
|
||||
pn_provides = self.status.pn_provides
|
||||
|
||||
localdata = data.createCopy(self.configuration.data)
|
||||
bb.data.update_data(localdata)
|
||||
bb.data.expandKeys(localdata)
|
||||
|
||||
def calc_bbfile_priority(filename):
|
||||
for (regex, pri) in self.status.bbfile_config_priorities:
|
||||
if regex.match(filename):
|
||||
return pri
|
||||
return 0
|
||||
|
||||
# Handle PREFERRED_PROVIDERS
|
||||
for p in (bb.data.getVar('PREFERRED_PROVIDERS', localdata, 1) or "").split():
|
||||
try:
|
||||
(providee, provider) = p.split(':')
|
||||
except:
|
||||
bb.msg.fatal(bb.msg.domain.Provider, "Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
|
||||
continue
|
||||
if providee in self.status.preferred and self.status.preferred[providee] != provider:
|
||||
bb.msg.error(bb.msg.domain.Provider, "conflicting preferences for %s: both %s and %s specified" % (providee, provider, self.status.preferred[providee]))
|
||||
self.status.preferred[providee] = provider
|
||||
|
||||
# Calculate priorities for each file
|
||||
for p in self.status.pkg_fn.keys():
|
||||
self.status.bbfile_priority[p] = calc_bbfile_priority(p)
|
||||
|
||||
def buildWorldTargetList(self):
|
||||
"""
|
||||
Build package list for "bitbake world"
|
||||
"""
|
||||
all_depends = self.status.all_depends
|
||||
pn_provides = self.status.pn_provides
|
||||
bb.msg.debug(1, bb.msg.domain.Parsing, "collating packages for \"world\"")
|
||||
for f in self.status.possible_world:
|
||||
terminal = True
|
||||
pn = self.status.pkg_fn[f]
|
||||
|
||||
for p in pn_provides[pn]:
|
||||
if p.startswith('virtual/'):
|
||||
bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to %s provider starting with virtual/" % (f, p))
|
||||
terminal = False
|
||||
break
|
||||
for pf in self.status.providers[p]:
|
||||
if self.status.pkg_fn[pf] != pn:
|
||||
bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to both us and %s providing %s" % (f, pf, p))
|
||||
terminal = False
|
||||
break
|
||||
if terminal:
|
||||
self.status.world_target.add(pn)
|
||||
|
||||
# drop reference count now
|
||||
self.status.possible_world = None
|
||||
self.status.all_depends = None
|
||||
|
||||
def interactiveMode( self ):
|
||||
"""Drop off into a shell"""
|
||||
try:
|
||||
from bb import shell
|
||||
except ImportError, details:
|
||||
bb.msg.fatal(bb.msg.domain.Parsing, "Sorry, shell not available (%s)" % details )
|
||||
else:
|
||||
shell.start( self )
|
||||
|
||||
def parseConfigurationFile( self, afile ):
|
||||
try:
|
||||
self.configuration.data = bb.parse.handle( afile, self.configuration.data )
|
||||
|
||||
# Handle any INHERITs and inherit the base class
|
||||
inherits = ["base"] + (bb.data.getVar('INHERIT', self.configuration.data, True ) or "").split()
|
||||
for inherit in inherits:
|
||||
self.configuration.data = bb.parse.handle(os.path.join('classes', '%s.bbclass' % inherit), self.configuration.data, True )
|
||||
|
||||
# Normally we only register event handlers at the end of parsing .bb files
|
||||
# We register any handlers we've found so far here...
|
||||
for var in data.getVar('__BBHANDLERS', self.configuration.data) or []:
|
||||
bb.event.register(var,bb.data.getVar(var, self.configuration.data))
|
||||
|
||||
bb.fetch.fetcher_init(self.configuration.data)
|
||||
|
||||
bb.event.fire(bb.event.ConfigParsed(self.configuration.data))
|
||||
|
||||
except IOError, e:
|
||||
bb.msg.fatal(bb.msg.domain.Parsing, "Error when parsing %s: %s" % (afile, str(e)))
|
||||
except bb.parse.ParseError, details:
|
||||
bb.msg.fatal(bb.msg.domain.Parsing, "Unable to parse %s (%s)" % (afile, details) )
|
||||
|
||||
def handleCollections( self, collections ):
|
||||
"""Handle collections"""
|
||||
if collections:
|
||||
collection_list = collections.split()
|
||||
for c in collection_list:
|
||||
regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1)
|
||||
if regex == None:
|
||||
bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s not defined" % c)
|
||||
continue
|
||||
priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1)
|
||||
if priority == None:
|
||||
bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PRIORITY_%s not defined" % c)
|
||||
continue
|
||||
try:
|
||||
cre = re.compile(regex)
|
||||
except re.error:
|
||||
bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s \"%s\" is not a valid regular expression" % (c, regex))
|
||||
continue
|
||||
try:
|
||||
pri = int(priority)
|
||||
self.status.bbfile_config_priorities.append((cre, pri))
|
||||
except ValueError:
|
||||
bb.msg.error(bb.msg.domain.Parsing, "invalid value for BBFILE_PRIORITY_%s: \"%s\"" % (c, priority))
|
||||
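# --- Editor's illustrative sketch; not part of the change above ---
# handleCollections() expects three variables per collection name, normally
# set in a configuration file.  A hypothetical collection "mylayer" would be
# declared as:
#
#   BBFILE_COLLECTIONS += "mylayer"
#   BBFILE_PATTERN_mylayer  = "^/path/to/mylayer/"
#   BBFILE_PRIORITY_mylayer = "6"
#
# The pattern is compiled as a regular expression; every .bb file whose path
# matches it is assigned bbfile_priority 6 when providers are compared.
# --- end sketch ---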
|
||||
def buildSetVars(self):
|
||||
"""
|
||||
Setup any variables needed before starting a build
|
||||
"""
|
||||
if not bb.data.getVar("BUILDNAME", self.configuration.data):
|
||||
bb.data.setVar("BUILDNAME", os.popen('date +%Y%m%d%H%M').readline().strip(), self.configuration.data)
|
||||
bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S',time.gmtime()), self.configuration.data)
|
||||
|
||||
def matchFiles(self, buildfile):
|
||||
"""
|
||||
Find the .bb files which match the expression in 'buildfile'.
|
||||
"""
|
||||
|
||||
bf = os.path.abspath(buildfile)
|
||||
try:
|
||||
os.stat(bf)
|
||||
return [bf]
|
||||
except OSError:
|
||||
(filelist, masked) = self.collect_bbfiles()
|
||||
regexp = re.compile(buildfile)
|
||||
matches = []
|
||||
for f in filelist:
|
||||
if regexp.search(f) and os.path.isfile(f):
|
||||
bf = f
|
||||
matches.append(f)
|
||||
return matches
|
||||
|
||||
def matchFile(self, buildfile):
|
||||
"""
|
||||
Find the .bb file which matches the expression in 'buildfile'.
|
||||
Raise an error unless exactly one file matches.
|
||||
"""
|
||||
matches = self.matchFiles(buildfile)
|
||||
if len(matches) != 1:
|
||||
bb.msg.error(bb.msg.domain.Parsing, "Unable to match %s (%s matches found):" % (buildfile, len(matches)))
|
||||
for f in matches:
|
||||
bb.msg.error(bb.msg.domain.Parsing, " %s" % f)
|
||||
raise MultipleMatches
|
||||
return matches[0]
|
||||
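# --- Editor's illustrative sketch; not part of the change above ---
# matchFile() accepts either a real path or a regular-expression fragment that
# is searched against every collected .bb file; exactly one hit is required,
# otherwise MultipleMatches is raised after the candidates are listed.
# 'cooker' and the recipe name below are assumptions for illustration.
fn = cooker.matchFile("busybox_1.2")     # -> the single matching .bb file path
# --- end sketch ---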
|
||||
def buildFile(self, buildfile, task):
|
||||
"""
|
||||
Build the file matching regexp buildfile
|
||||
"""
|
||||
|
||||
# Parse the configuration here. We need to do it explicitly here since
|
||||
# buildFile() doesn't use the cache
|
||||
self.parseConfiguration()
|
||||
|
||||
# If we are told to do the None task then query the default task
|
||||
if (task == None):
|
||||
task = self.configuration.cmd
|
||||
|
||||
fn = self.matchFile(buildfile)
|
||||
self.buildSetVars()
|
||||
|
||||
# Load data into the cache for fn and parse the loaded cache data
|
||||
self.bb_cache = bb.cache.init(self)
|
||||
self.status = bb.cache.CacheData()
|
||||
self.bb_cache.loadData(fn, self.configuration.data, self.status)
|
||||
|
||||
# Tweak some variables
|
||||
item = self.bb_cache.getVar('PN', fn, True)
|
||||
self.status.ignored_dependencies = set()
|
||||
self.status.bbfile_priority[fn] = 1
|
||||
|
||||
# Remove external dependencies
|
||||
self.status.task_deps[fn]['depends'] = {}
|
||||
self.status.deps[fn] = []
|
||||
self.status.rundeps[fn] = []
|
||||
self.status.runrecs[fn] = []
|
||||
|
||||
# Remove stamp for target if force mode active
|
||||
if self.configuration.force:
|
||||
bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (task, fn))
|
||||
bb.build.del_stamp('do_%s' % task, self.status, fn)
|
||||
|
||||
# Setup taskdata structure
|
||||
taskdata = bb.taskdata.TaskData(self.configuration.abort)
|
||||
taskdata.add_provider(self.configuration.data, self.status, item)
|
||||
|
||||
buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
|
||||
bb.event.fire(bb.event.BuildStarted(buildname, [item], self.configuration.event_data))
|
||||
|
||||
# Execute the runqueue
|
||||
runlist = [[item, "do_%s" % task]]
|
||||
|
||||
rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
|
||||
|
||||
def buildFileIdle(server, rq, abort):
|
||||
|
||||
if abort or self.cookerAction == cookerStop:
|
||||
rq.finish_runqueue(True)
|
||||
elif self.cookerAction == cookerShutdown:
|
||||
rq.finish_runqueue(False)
|
||||
failures = 0
|
||||
try:
|
||||
retval = rq.execute_runqueue()
|
||||
except runqueue.TaskFailure, fnids:
|
||||
for fnid in fnids:
|
||||
bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid])
|
||||
failures = failures + 1
|
||||
retval = False
|
||||
if not retval:
|
||||
self.command.finishAsyncCommand()
|
||||
bb.event.fire(bb.event.BuildCompleted(buildname, [item], self.configuration.event_data, failures))
|
||||
return False
|
||||
return 0.5
|
||||
|
||||
self.server.register_idle_function(buildFileIdle, rq)
|
||||
|
||||
def buildTargets(self, targets, task):
|
||||
"""
|
||||
Attempt to build the targets specified
|
||||
"""
|
||||
|
||||
# Need files parsed
|
||||
self.updateCache()
|
||||
|
||||
# If we are told to do the NULL task then query the default task
|
||||
if (task == None):
|
||||
task = self.configuration.cmd
|
||||
|
||||
targets = self.checkPackages(targets)
|
||||
|
||||
def buildTargetsIdle(server, rq, abort):
|
||||
|
||||
if abort or self.cookerAction == cookerStop:
|
||||
rq.finish_runqueue(True)
|
||||
elif self.cookerAction == cookerShutdown:
|
||||
rq.finish_runqueue(False)
|
||||
failures = 0
|
||||
try:
|
||||
retval = rq.execute_runqueue()
|
||||
except runqueue.TaskFailure, fnids:
|
||||
for fnid in fnids:
|
||||
bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid])
|
||||
failures = failures + 1
|
||||
retval = False
|
||||
if not retval:
|
||||
self.command.finishAsyncCommand()
|
||||
bb.event.fire(bb.event.BuildCompleted(buildname, targets, self.configuration.event_data, failures))
|
||||
return None
|
||||
return 0.5
|
||||
|
||||
self.buildSetVars()
|
||||
|
||||
buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
|
||||
bb.event.fire(bb.event.BuildStarted(buildname, targets, self.configuration.event_data))
|
||||
|
||||
localdata = data.createCopy(self.configuration.data)
|
||||
bb.data.update_data(localdata)
|
||||
bb.data.expandKeys(localdata)
|
||||
|
||||
taskdata = bb.taskdata.TaskData(self.configuration.abort)
|
||||
|
||||
runlist = []
|
||||
for k in targets:
|
||||
taskdata.add_provider(localdata, self.status, k)
|
||||
runlist.append([k, "do_%s" % task])
|
||||
taskdata.add_unresolved(localdata, self.status)
|
||||
|
||||
rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
|
||||
|
||||
self.server.register_idle_function(buildTargetsIdle, rq)
|
||||
|
||||
def updateCache(self):
|
||||
|
||||
if self.cookerState == cookerParsed:
|
||||
return
|
||||
|
||||
if self.cookerState != cookerParsing:
|
||||
|
||||
self.parseConfiguration ()
|
||||
|
||||
# Import Psyco if available and not disabled
|
||||
import platform
|
||||
if platform.machine() in ['i386', 'i486', 'i586', 'i686']:
|
||||
if not self.configuration.disable_psyco:
|
||||
try:
|
||||
import psyco
|
||||
except ImportError:
|
||||
bb.msg.note(1, bb.msg.domain.Collection, "Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.")
|
||||
else:
|
||||
psyco.bind( CookerParser.parse_next )
|
||||
else:
|
||||
bb.msg.note(1, bb.msg.domain.Collection, "You have disabled Psyco. This decreases performance.")
|
||||
|
||||
self.status = bb.cache.CacheData()
|
||||
|
||||
ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or ""
|
||||
self.status.ignored_dependencies = set(ignore.split())
|
||||
|
||||
for dep in self.configuration.extra_assume_provided:
|
||||
self.status.ignored_dependencies.add(dep)
|
||||
|
||||
self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )
|
||||
|
||||
bb.msg.debug(1, bb.msg.domain.Collection, "collecting .bb files")
|
||||
(filelist, masked) = self.collect_bbfiles()
|
||||
bb.data.renameVar("__depends", "__base_depends", self.configuration.data)
|
||||
|
||||
self.parser = CookerParser(self, filelist, masked)
|
||||
self.cookerState = cookerParsing
|
||||
|
||||
if not self.parser.parse_next():
|
||||
bb.msg.debug(1, bb.msg.domain.Collection, "parsing complete")
|
||||
self.buildDepgraph()
|
||||
self.cookerState = cookerParsed
|
||||
return None
|
||||
|
||||
return 0.00001
|
||||
|
||||
def checkPackages(self, pkgs_to_build):
|
||||
|
||||
if len(pkgs_to_build) == 0:
|
||||
raise NothingToBuild
|
||||
|
||||
if 'world' in pkgs_to_build:
|
||||
self.buildWorldTargetList()
|
||||
pkgs_to_build.remove('world')
|
||||
for t in self.status.world_target:
|
||||
pkgs_to_build.append(t)
|
||||
|
||||
return pkgs_to_build
|
||||
|
||||
def get_bbfiles( self, path = os.getcwd() ):
|
||||
"""Get list of default .bb files by reading out the current directory"""
|
||||
contents = os.listdir(path)
|
||||
bbfiles = []
|
||||
for f in contents:
|
||||
(root, ext) = os.path.splitext(f)
|
||||
if ext == ".bb":
|
||||
bbfiles.append(os.path.abspath(os.path.join(os.getcwd(),f)))
|
||||
return bbfiles
|
||||
|
||||
def find_bbfiles( self, path ):
|
||||
"""Find all the .bb files in a directory"""
|
||||
from os.path import join
|
||||
|
||||
found = []
|
||||
for dir, dirs, files in os.walk(path):
|
||||
for ignored in ('SCCS', 'CVS', '.svn'):
|
||||
if ignored in dirs:
|
||||
dirs.remove(ignored)
|
||||
found += [join(dir,f) for f in files if f.endswith('.bb')]
|
||||
|
||||
return found
|
||||
|
||||
def collect_bbfiles( self ):
|
||||
"""Collect all available .bb build files"""
|
||||
parsed, cached, skipped, masked = 0, 0, 0, 0
|
||||
self.bb_cache = bb.cache.init(self)
|
||||
|
||||
files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split()
|
||||
data.setVar("BBFILES", " ".join(files), self.configuration.data)
|
||||
|
||||
if not len(files):
|
||||
files = self.get_bbfiles()
|
||||
|
||||
if not len(files):
|
||||
bb.msg.error(bb.msg.domain.Collection, "no files to build.")
|
||||
|
||||
newfiles = []
|
||||
for f in files:
|
||||
if os.path.isdir(f):
|
||||
dirfiles = self.find_bbfiles(f)
|
||||
if dirfiles:
|
||||
newfiles += dirfiles
|
||||
continue
|
||||
else:
|
||||
globbed = glob.glob(f)
|
||||
if not globbed and os.path.exists(f):
|
||||
globbed = [f]
|
||||
newfiles += globbed
|
||||
|
||||
bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1)
|
||||
|
||||
if not bbmask:
|
||||
return (newfiles, 0)
|
||||
|
||||
try:
|
||||
bbmask_compiled = re.compile(bbmask)
|
||||
except sre_constants.error:
|
||||
bb.msg.fatal(bb.msg.domain.Collection, "BBMASK is not a valid regular expression.")
|
||||
|
||||
finalfiles = []
|
||||
for f in newfiles:
|
||||
if bbmask_compiled.search(f):
|
||||
bb.msg.debug(1, bb.msg.domain.Collection, "skipping masked file %s" % f)
|
||||
masked += 1
|
||||
continue
|
||||
finalfiles.append(f)
|
||||
|
||||
return (finalfiles, masked)
|
||||
|
||||
def serve(self):
|
||||
|
||||
# Empty the environment. The environment will be populated as
|
||||
# necessary from the data store.
|
||||
bb.utils.empty_environment()
|
||||
|
||||
if self.configuration.profile:
|
||||
try:
|
||||
import cProfile as profile
|
||||
except:
|
||||
import profile
|
||||
|
||||
profile.runctx("self.server.serve_forever()", globals(), locals(), "profile.log")
|
||||
|
||||
# Redirect stdout to capture profile information
|
||||
pout = open('profile.log.processed', 'w')
|
||||
so = sys.stdout.fileno()
|
||||
os.dup2(pout.fileno(), so)
|
||||
|
||||
import pstats
|
||||
p = pstats.Stats('profile.log')
|
||||
p.sort_stats('time')
|
||||
p.print_stats()
|
||||
p.print_callers()
|
||||
p.sort_stats('cumulative')
|
||||
p.print_stats()
|
||||
|
||||
os.dup2(so, pout.fileno())
|
||||
pout.flush()
|
||||
pout.close()
|
||||
else:
|
||||
self.server.serve_forever()
|
||||
|
||||
bb.event.fire(CookerExit(self.configuration.event_data))
|
||||
|
||||
class CookerExit(bb.event.Event):
|
||||
"""
|
||||
Notify clients of the Cooker shutdown
|
||||
"""
|
||||
|
||||
def __init__(self, d):
|
||||
bb.event.Event.__init__(self, d)
|
||||
|
||||
class CookerParser:
|
||||
def __init__(self, cooker, filelist, masked):
|
||||
# Internal data
|
||||
self.filelist = filelist
|
||||
self.cooker = cooker
|
||||
|
||||
# Accounting statistics
|
||||
self.parsed = 0
|
||||
self.cached = 0
|
||||
self.skipped = 0
|
||||
self.error = 0
|
||||
self.masked = masked
|
||||
self.total = len(filelist)
|
||||
|
||||
# Pointer to the next file to parse
|
||||
self.pointer = 0
|
||||
|
||||
def parse_next(self):
|
||||
print "Pointer %d" % self.pointer
|
||||
|
||||
if self.pointer < len(self.filelist):
|
||||
f = self.filelist[self.pointer]
|
||||
cooker = self.cooker
|
||||
|
||||
try:
|
||||
fromCache, skip = cooker.bb_cache.loadData(f, cooker.configuration.data, cooker.status)
|
||||
if skip:
|
||||
self.skipped += 1
|
||||
bb.msg.debug(2, bb.msg.domain.Collection, "skipping %s" % f)
|
||||
cooker.bb_cache.skip(f)
|
||||
elif fromCache: self.cached += 1
|
||||
else: self.parsed += 1
|
||||
|
||||
except IOError, e:
|
||||
self.error += 1
|
||||
cooker.bb_cache.remove(f)
|
||||
bb.msg.error(bb.msg.domain.Collection, "opening %s: %s" % (f, e))
|
||||
pass
|
||||
except KeyboardInterrupt:
|
||||
cooker.bb_cache.remove(f)
|
||||
cooker.bb_cache.sync()
|
||||
raise
|
||||
except Exception, e:
|
||||
self.error += 1
|
||||
cooker.bb_cache.remove(f)
|
||||
bb.msg.error(bb.msg.domain.Collection, "%s while parsing %s" % (e, f))
|
||||
except:
|
||||
cooker.bb_cache.remove(f)
|
||||
raise
|
||||
finally:
|
||||
bb.event.fire(bb.event.ParseProgress(cooker.configuration.event_data, self.cached, self.parsed, self.skipped, self.masked, self.error, self.total))
|
||||
|
||||
self.pointer += 1
|
||||
|
||||
if self.pointer >= self.total:
|
||||
cooker.bb_cache.sync()
|
||||
if self.error > 0:
|
||||
raise ParsingErrorsFound
|
||||
return False
|
||||
return True
|
||||
|
||||
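The buildTargetsIdle() and updateCache() methods above follow the same pattern: they are registered with the server as idle callbacks, do a small slice of work per call, return a short delay (0.5 or 0.00001) while work remains, and return None once finished (parse_next() signals this by returning False). A minimal standalone sketch of that pattern, using invented names (IdleParser, idle_callback, run_idle_loop) rather than anything from the tree:

import time

class IdleParser:
    """Parses one file per call, mirroring CookerParser.parse_next()."""
    def __init__(self, filelist):
        self.filelist = filelist
        self.pointer = 0

    def parse_next(self):
        # Handle a single file, then report whether anything is left to do.
        if self.pointer < len(self.filelist):
            print("pretending to parse %s" % self.filelist[self.pointer])
            self.pointer += 1
        return self.pointer < len(self.filelist)

def idle_callback(parser):
    # Shaped like updateCache(): a tiny delay while parsing continues, None when done.
    if parser.parse_next():
        return 0.00001
    return None

def run_idle_loop(parser):
    # Stand-in for the server loop that keeps invoking registered idle functions.
    while True:
        delay = idle_callback(parser)
        if delay is None:
            break
        time.sleep(delay)

run_idle_loop(IdleParser(["a.bb", "b.bb", "c.bb"]))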
191 bitbake-dev/lib/bb/daemonize.py Normal file
@@ -0,0 +1,191 @@
|
||||
"""
|
||||
Python Daemonizing helper
|
||||
|
||||
Configurable daemon behaviors:
|
||||
|
||||
1.) The current working directory set to the "/" directory.
|
||||
2.) The current file creation mode mask set to 0.
|
||||
3.) Close all open files (1024).
|
||||
4.) Redirect standard I/O streams to "/dev/null".
|
||||
|
||||
A failed call to fork() now raises an exception.
|
||||
|
||||
References:
|
||||
1) Advanced Programming in the Unix Environment: W. Richard Stevens
|
||||
2) Unix Programming Frequently Asked Questions:
|
||||
http://www.erlenstar.demon.co.uk/unix/faq_toc.html
|
||||
|
||||
Modified to allow a function to be daemonized and return for
|
||||
bitbake use by Richard Purdie
|
||||
"""
|
||||
|
||||
__author__ = "Chad J. Schroeder"
|
||||
__copyright__ = "Copyright (C) 2005 Chad J. Schroeder"
|
||||
__version__ = "0.2"
|
||||
|
||||
# Standard Python modules.
|
||||
import os # Miscellaneous OS interfaces.
|
||||
import sys # System-specific parameters and functions.
|
||||
|
||||
# Default daemon parameters.
|
||||
# File mode creation mask of the daemon.
|
||||
# For BitBake's children, we do want to inherit the parent umask.
|
||||
UMASK = None
|
||||
|
||||
# Default maximum for the number of available file descriptors.
|
||||
MAXFD = 1024
|
||||
|
||||
# The standard I/O file descriptors are redirected to /dev/null by default.
|
||||
if (hasattr(os, "devnull")):
|
||||
REDIRECT_TO = os.devnull
|
||||
else:
|
||||
REDIRECT_TO = "/dev/null"
|
||||
|
||||
def createDaemon(function, logfile):
|
||||
"""
|
||||
Detach a process from the controlling terminal and run it in the
|
||||
background as a daemon, returning control to the caller.
|
||||
"""
|
||||
|
||||
try:
|
||||
# Fork a child process so the parent can exit. This returns control to
|
||||
# the command-line or shell. It also guarantees that the child will not
|
||||
# be a process group leader, since the child receives a new process ID
|
||||
# and inherits the parent's process group ID. This step is required
|
||||
# to ensure that the next call to os.setsid is successful.
|
||||
pid = os.fork()
|
||||
except OSError, e:
|
||||
raise Exception, "%s [%d]" % (e.strerror, e.errno)
|
||||
|
||||
if (pid == 0): # The first child.
|
||||
# To become the session leader of this new session and the process group
|
||||
# leader of the new process group, we call os.setsid(). The process is
|
||||
# also guaranteed not to have a controlling terminal.
|
||||
os.setsid()
|
||||
|
||||
# Is ignoring SIGHUP necessary?
|
||||
#
|
||||
# It's often suggested that the SIGHUP signal should be ignored before
|
||||
# the second fork to avoid premature termination of the process. The
|
||||
# reason is that when the first child terminates, all processes, e.g.
|
||||
# the second child, in the orphaned group will be sent a SIGHUP.
|
||||
#
|
||||
# "However, as part of the session management system, there are exactly
|
||||
# two cases where SIGHUP is sent on the death of a process:
|
||||
#
|
||||
# 1) When the process that dies is the session leader of a session that
|
||||
# is attached to a terminal device, SIGHUP is sent to all processes
|
||||
# in the foreground process group of that terminal device.
|
||||
# 2) When the death of a process causes a process group to become
|
||||
# orphaned, and one or more processes in the orphaned group are
|
||||
# stopped, then SIGHUP and SIGCONT are sent to all members of the
|
||||
# orphaned group." [2]
|
||||
#
|
||||
# The first case can be ignored since the child is guaranteed not to have
|
||||
# a controlling terminal. The second case isn't so easy to dismiss.
|
||||
# The process group is orphaned when the first child terminates and
|
||||
# POSIX.1 requires that every STOPPED process in an orphaned process
|
||||
# group be sent a SIGHUP signal followed by a SIGCONT signal. Since the
|
||||
# second child is not STOPPED though, we can safely forego ignoring the
|
||||
# SIGHUP signal. In any case, there are no ill-effects if it is ignored.
|
||||
#
|
||||
# import signal # Set handlers for asynchronous events.
|
||||
# signal.signal(signal.SIGHUP, signal.SIG_IGN)
|
||||
|
||||
try:
|
||||
# Fork a second child and exit immediately to prevent zombies. This
|
||||
# causes the second child process to be orphaned, making the init
|
||||
# process responsible for its cleanup. And, since the first child is
|
||||
# a session leader without a controlling terminal, it's possible for
|
||||
# it to acquire one by opening a terminal in the future (System V-
|
||||
# based systems). This second fork guarantees that the child is no
|
||||
# longer a session leader, preventing the daemon from ever acquiring
|
||||
# a controlling terminal.
|
||||
pid = os.fork() # Fork a second child.
|
||||
except OSError, e:
|
||||
raise Exception, "%s [%d]" % (e.strerror, e.errno)
|
||||
|
||||
if (pid == 0): # The second child.
|
||||
# We probably don't want the file mode creation mask inherited from
|
||||
# the parent, so we give the child complete control over permissions.
|
||||
if UMASK is not None:
|
||||
os.umask(UMASK)
|
||||
else:
|
||||
# Parent (the first child) of the second child.
|
||||
os._exit(0)
|
||||
else:
|
||||
# exit() or _exit()?
|
||||
# _exit is like exit(), but it doesn't call any functions registered
|
||||
# with atexit (and on_exit) or any registered signal handlers. It also
|
||||
# closes any open file descriptors. Using exit() may cause all stdio
|
||||
# streams to be flushed twice and any temporary files may be unexpectedly
|
||||
# removed. It's therefore recommended that child branches of a fork()
|
||||
# and the parent branch(es) of a daemon use _exit().
|
||||
return
|
||||
|
||||
# Close all open file descriptors. This prevents the child from keeping
|
||||
# open any file descriptors inherited from the parent. There is a variety
|
||||
# of methods to accomplish this task. Three are listed below.
|
||||
#
|
||||
# Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum
|
||||
# number of open file descriptors to close. If it doesn't exist, use
|
||||
# the default value (configurable).
|
||||
#
|
||||
# try:
|
||||
# maxfd = os.sysconf("SC_OPEN_MAX")
|
||||
# except (AttributeError, ValueError):
|
||||
# maxfd = MAXFD
|
||||
#
|
||||
# OR
|
||||
#
|
||||
# if (os.sysconf_names.has_key("SC_OPEN_MAX")):
|
||||
# maxfd = os.sysconf("SC_OPEN_MAX")
|
||||
# else:
|
||||
# maxfd = MAXFD
|
||||
#
|
||||
# OR
|
||||
#
|
||||
# Use the getrlimit method to retrieve the maximum file descriptor number
|
||||
# that can be opened by this process. If there is no limit on the
|
||||
# resource, use the default value.
|
||||
#
|
||||
import resource # Resource usage information.
|
||||
maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
|
||||
if (maxfd == resource.RLIM_INFINITY):
|
||||
maxfd = MAXFD
|
||||
|
||||
# Iterate through and close all file descriptors.
|
||||
# for fd in range(0, maxfd):
|
||||
# try:
|
||||
# os.close(fd)
|
||||
# except OSError: # ERROR, fd wasn't open to begin with (ignored)
|
||||
# pass
|
||||
|
||||
# Redirect the standard I/O file descriptors to the specified file. Since
|
||||
# the daemon has no controlling terminal, most daemons redirect stdin,
|
||||
# stdout, and stderr to /dev/null. This is done to prevent side-effects
|
||||
# from reads and writes to the standard I/O file descriptors.
|
||||
|
||||
# This call to open is guaranteed to return the lowest file descriptor,
|
||||
# which will be 0 (stdin), since it was closed above.
|
||||
# os.open(REDIRECT_TO, os.O_RDWR) # standard input (0)
|
||||
|
||||
# Duplicate standard input to standard output and standard error.
|
||||
# os.dup2(0, 1) # standard output (1)
|
||||
# os.dup2(0, 2) # standard error (2)
|
||||
|
||||
|
||||
si = file('/dev/null', 'r')
|
||||
so = file(logfile, 'w')
|
||||
se = so
|
||||
|
||||
|
||||
# Replace those fds with our own
|
||||
os.dup2(si.fileno(), sys.stdin.fileno())
|
||||
os.dup2(so.fileno(), sys.stdout.fileno())
|
||||
os.dup2(se.fileno(), sys.stderr.fileno())
|
||||
|
||||
function()
|
||||
|
||||
os._exit(0)
|
||||
|
||||
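createDaemon() above takes the function to run and a log file path: the original parent returns straight away, while the doubly-forked child redirects stdin to /dev/null, sends stdout/stderr to the log file, calls the function and exits with os._exit(0). A small usage sketch, assuming bitbake's lib directory is on sys.path (the worker() function and the log path are only illustrative):

import time
from bb import daemonize   # assumes bitbake-dev/lib is on sys.path

def worker():
    # Runs detached from the terminal; prints end up in the log file given below.
    for i in range(3):
        print "background tick %d" % i
        time.sleep(1)

daemonize.createDaemon(worker, "/tmp/bb-daemon-example.log")
print "parent carries on immediately"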
562 bitbake-dev/lib/bb/data.py Normal file
@@ -0,0 +1,562 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake 'Data' implementations
|
||||
|
||||
Functions for interacting with the data structure used by the
|
||||
BitBake build tools.
|
||||
|
||||
The expandData and update_data are the most expensive
|
||||
operations. At night the cookie monster came by and
|
||||
suggested 'give me cookies on setting the variables and
|
||||
things will work out'. Taking this suggestion into account
|
||||
applying the skills from the not yet passed 'Entwurf und
|
||||
Analyse von Algorithmen' lecture and the cookie
|
||||
monster seems to be right. We will track setVar more carefully
|
||||
to have faster update_data and expandKeys operations.
|
||||
|
||||
This is a trade-off between speed and memory again but
|
||||
the speed is more critical here.
|
||||
"""
|
||||
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
# Copyright (C) 2005 Holger Hans Peter Freyther
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
#Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||
|
||||
import sys, os, re, types
|
||||
if sys.argv[0][-5:] == "pydoc":
|
||||
path = os.path.dirname(os.path.dirname(sys.argv[1]))
|
||||
else:
|
||||
path = os.path.dirname(os.path.dirname(sys.argv[0]))
|
||||
sys.path.insert(0,path)
|
||||
|
||||
from bb import data_smart
|
||||
import bb
|
||||
|
||||
_dict_type = data_smart.DataSmart
|
||||
|
||||
def init():
|
||||
return _dict_type()
|
||||
|
||||
def init_db(parent = None):
|
||||
if parent:
|
||||
return parent.createCopy()
|
||||
else:
|
||||
return _dict_type()
|
||||
|
||||
def createCopy(source):
|
||||
"""Link the source set to the destination
|
||||
If one does not find the value in the destination set,
|
||||
search will go on to the source set to get the value.
|
||||
Values from the source are copy-on-write, i.e. any attempt to
|
||||
modify one of them will end up putting the modified value
|
||||
in the destination set.
|
||||
"""
|
||||
return source.createCopy()
|
||||
|
||||
def initVar(var, d):
|
||||
"""Non-destructive var init for data structure"""
|
||||
d.initVar(var)
|
||||
|
||||
|
||||
def setVar(var, value, d):
|
||||
"""Set a variable to a given value
|
||||
|
||||
Example:
|
||||
>>> d = init()
|
||||
>>> setVar('TEST', 'testcontents', d)
|
||||
>>> print getVar('TEST', d)
|
||||
testcontents
|
||||
"""
|
||||
d.setVar(var,value)
|
||||
|
||||
|
||||
def getVar(var, d, exp = 0):
|
||||
"""Gets the value of a variable
|
||||
|
||||
Example:
|
||||
>>> d = init()
|
||||
>>> setVar('TEST', 'testcontents', d)
|
||||
>>> print getVar('TEST', d)
|
||||
testcontents
|
||||
"""
|
||||
return d.getVar(var,exp)
|
||||
|
||||
|
||||
def renameVar(key, newkey, d):
|
||||
"""Renames a variable from key to newkey
|
||||
|
||||
Example:
|
||||
>>> d = init()
|
||||
>>> setVar('TEST', 'testcontents', d)
|
||||
>>> renameVar('TEST', 'TEST2', d)
|
||||
>>> print getVar('TEST2', d)
|
||||
testcontents
|
||||
"""
|
||||
d.renameVar(key, newkey)
|
||||
|
||||
def delVar(var, d):
|
||||
"""Removes a variable from the data set
|
||||
|
||||
Example:
|
||||
>>> d = init()
|
||||
>>> setVar('TEST', 'testcontents', d)
|
||||
>>> print getVar('TEST', d)
|
||||
testcontents
|
||||
>>> delVar('TEST', d)
|
||||
>>> print getVar('TEST', d)
|
||||
None
|
||||
"""
|
||||
d.delVar(var)
|
||||
|
||||
def setVarFlag(var, flag, flagvalue, d):
|
||||
"""Set a flag for a given variable to a given value
|
||||
|
||||
Example:
|
||||
>>> d = init()
|
||||
>>> setVarFlag('TEST', 'python', 1, d)
|
||||
>>> print getVarFlag('TEST', 'python', d)
|
||||
1
|
||||
"""
|
||||
d.setVarFlag(var,flag,flagvalue)
|
||||
|
||||
def getVarFlag(var, flag, d):
|
||||
"""Gets given flag from given var
|
||||
|
||||
Example:
|
||||
>>> d = init()
|
||||
>>> setVarFlag('TEST', 'python', 1, d)
|
||||
>>> print getVarFlag('TEST', 'python', d)
|
||||
1
|
||||
"""
|
||||
return d.getVarFlag(var,flag)
|
||||
|
||||
def delVarFlag(var, flag, d):
|
||||
"""Removes a given flag from the variable's flags
|
||||
|
||||
Example:
|
||||
>>> d = init()
|
||||
>>> setVarFlag('TEST', 'testflag', 1, d)
|
||||
>>> print getVarFlag('TEST', 'testflag', d)
|
||||
1
|
||||
>>> delVarFlag('TEST', 'testflag', d)
|
||||
>>> print getVarFlag('TEST', 'testflag', d)
|
||||
None
|
||||
|
||||
"""
|
||||
d.delVarFlag(var,flag)
|
||||
|
||||
def setVarFlags(var, flags, d):
|
||||
"""Set the flags for a given variable
|
||||
|
||||
Note:
|
||||
setVarFlags will not clear previous
|
||||
flags. Think of this method as
|
||||
addVarFlags
|
||||
|
||||
Example:
|
||||
>>> d = init()
|
||||
>>> myflags = {}
|
||||
>>> myflags['test'] = 'blah'
|
||||
>>> setVarFlags('TEST', myflags, d)
|
||||
>>> print getVarFlag('TEST', 'test', d)
|
||||
blah
|
||||
"""
|
||||
d.setVarFlags(var,flags)
|
||||
|
||||
def getVarFlags(var, d):
|
||||
"""Gets a variable's flags
|
||||
|
||||
Example:
|
||||
>>> d = init()
|
||||
>>> setVarFlag('TEST', 'test', 'blah', d)
|
||||
>>> print getVarFlags('TEST', d)['test']
|
||||
blah
|
||||
"""
|
||||
return d.getVarFlags(var)
|
||||
|
||||
def delVarFlags(var, d):
|
||||
"""Removes a variable's flags
|
||||
|
||||
Example:
|
||||
>>> data = init()
|
||||
>>> setVarFlag('TEST', 'testflag', 1, data)
|
||||
>>> print getVarFlag('TEST', 'testflag', data)
|
||||
1
|
||||
>>> delVarFlags('TEST', data)
|
||||
>>> print getVarFlags('TEST', data)
|
||||
None
|
||||
|
||||
"""
|
||||
d.delVarFlags(var)
|
||||
|
||||
def keys(d):
|
||||
"""Return a list of keys in d
|
||||
|
||||
Example:
|
||||
>>> d = init()
|
||||
>>> setVar('TEST', 1, d)
|
||||
>>> setVar('MOO' , 2, d)
|
||||
>>> setVarFlag('TEST', 'test', 1, d)
|
||||
>>> keys(d)
|
||||
['TEST', 'MOO']
|
||||
"""
|
||||
return d.keys()
|
||||
|
||||
def getData(d):
|
||||
"""Returns the data object used"""
|
||||
return d
|
||||
|
||||
def setData(newData, d):
|
||||
"""Sets the data object to the supplied value"""
|
||||
d = newData
|
||||
|
||||
|
||||
##
|
||||
## Cookie Monsters' query functions
|
||||
##
|
||||
def _get_override_vars(d, override):
|
||||
"""
|
||||
Internal!!!
|
||||
|
||||
Get the Names of Variables that have a specific
|
||||
override. This function returns an iterable
|
||||
Set or an empty list
|
||||
"""
|
||||
return []
|
||||
|
||||
def _get_var_flags_triple(d):
|
||||
"""
|
||||
Internal!!!
|
||||
|
||||
"""
|
||||
return []
|
||||
|
||||
__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
|
||||
__expand_python_regexp__ = re.compile(r"\${@.+?}")
|
||||
|
||||
def expand(s, d, varname = None):
|
||||
"""Variable expansion using the data store.
|
||||
|
||||
Example:
|
||||
Standard expansion:
|
||||
>>> d = init()
|
||||
>>> setVar('A', 'sshd', d)
|
||||
>>> print expand('/usr/bin/${A}', d)
|
||||
/usr/bin/sshd
|
||||
|
||||
Python expansion:
|
||||
>>> d = init()
|
||||
>>> print expand('result: ${@37 * 72}', d)
|
||||
result: 2664
|
||||
|
||||
Shell expansion:
|
||||
>>> d = init()
|
||||
>>> print expand('${TARGET_MOO}', d)
|
||||
${TARGET_MOO}
|
||||
>>> setVar('TARGET_MOO', 'yupp', d)
|
||||
>>> print expand('${TARGET_MOO}',d)
|
||||
yupp
|
||||
>>> setVar('SRC_URI', 'http://somebug.${TARGET_MOO}', d)
|
||||
>>> delVar('TARGET_MOO', d)
|
||||
>>> print expand('${SRC_URI}', d)
|
||||
http://somebug.${TARGET_MOO}
|
||||
"""
|
||||
return d.expand(s, varname)
|
||||
|
||||
def expandKeys(alterdata, readdata = None):
|
||||
if readdata == None:
|
||||
readdata = alterdata
|
||||
|
||||
todolist = {}
|
||||
for key in keys(alterdata):
|
||||
if not '${' in key:
|
||||
continue
|
||||
|
||||
ekey = expand(key, readdata)
|
||||
if key == ekey:
|
||||
continue
|
||||
todolist[key] = ekey
|
||||
|
||||
# These two for loops are split for performance to maximise the
|
||||
# usefulness of the expand cache
|
||||
|
||||
for key in todolist:
|
||||
ekey = todolist[key]
|
||||
renameVar(key, ekey, alterdata)
|
||||
|
||||
def expandData(alterdata, readdata = None):
|
||||
"""For each variable in alterdata, expand it, and update the var contents.
|
||||
Replacements use data from readdata.
|
||||
|
||||
Example:
|
||||
>>> a=init()
|
||||
>>> b=init()
|
||||
>>> setVar("dlmsg", "dl_dir is ${DL_DIR}", a)
|
||||
>>> setVar("DL_DIR", "/path/to/whatever", b)
|
||||
>>> expandData(a, b)
|
||||
>>> print getVar("dlmsg", a)
|
||||
dl_dir is /path/to/whatever
|
||||
"""
|
||||
if readdata == None:
|
||||
readdata = alterdata
|
||||
|
||||
for key in keys(alterdata):
|
||||
val = getVar(key, alterdata)
|
||||
if type(val) is not types.StringType:
|
||||
continue
|
||||
expanded = expand(val, readdata)
|
||||
# print "key is %s, val is %s, expanded is %s" % (key, val, expanded)
|
||||
if val != expanded:
|
||||
setVar(key, expanded, alterdata)
|
||||
|
||||
def inheritFromOS(d):
|
||||
"""Inherit variables from the environment."""
|
||||
for s in os.environ.keys():
|
||||
try:
|
||||
setVar(s, os.environ[s], d)
|
||||
setVarFlag(s, "export", True, d)
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
def emit_var(var, o=sys.__stdout__, d = init(), all=False):
|
||||
"""Emit a variable to be sourced by a shell."""
|
||||
if getVarFlag(var, "python", d):
|
||||
return 0
|
||||
|
||||
export = getVarFlag(var, "export", d)
|
||||
unexport = getVarFlag(var, "unexport", d)
|
||||
func = getVarFlag(var, "func", d)
|
||||
if not all and not export and not unexport and not func:
|
||||
return 0
|
||||
|
||||
try:
|
||||
if all:
|
||||
oval = getVar(var, d, 0)
|
||||
val = getVar(var, d, 1)
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except:
|
||||
excname = str(sys.exc_info()[0])
|
||||
if excname == "bb.build.FuncFailed":
|
||||
raise
|
||||
o.write('# expansion of %s threw %s\n' % (var, excname))
|
||||
return 0
|
||||
|
||||
if all:
|
||||
o.write('# %s=%s\n' % (var, oval))
|
||||
|
||||
if type(val) is not types.StringType:
|
||||
return 0
|
||||
|
||||
if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
|
||||
return 0
|
||||
|
||||
varExpanded = expand(var, d)
|
||||
|
||||
if unexport:
|
||||
o.write('unset %s\n' % varExpanded)
|
||||
return 1
|
||||
|
||||
val.rstrip()
|
||||
if not val:
|
||||
return 0
|
||||
|
||||
if func:
|
||||
# NOTE: should probably check for unbalanced {} within the var
|
||||
o.write("%s() {\n%s\n}\n" % (varExpanded, val))
|
||||
return 1
|
||||
|
||||
if export:
|
||||
o.write('export ')
|
||||
|
||||
# if we're going to output this within doublequotes,
|
||||
# to a shell, we need to escape the quotes in the var
|
||||
alter = re.sub('"', '\\"', val.strip())
|
||||
o.write('%s="%s"\n' % (varExpanded, alter))
|
||||
return 1
|
||||
|
||||
|
||||
def emit_env(o=sys.__stdout__, d = init(), all=False):
|
||||
"""Emits all items in the data store in a format such that it can be sourced by a shell."""
|
||||
|
||||
env = keys(d)
|
||||
|
||||
for e in env:
|
||||
if getVarFlag(e, "func", d):
|
||||
continue
|
||||
emit_var(e, o, d, all) and o.write('\n')
|
||||
|
||||
for e in env:
|
||||
if not getVarFlag(e, "func", d):
|
||||
continue
|
||||
emit_var(e, o, d) and o.write('\n')
|
||||
|
||||
def update_data(d):
|
||||
"""Modifies the environment vars according to local overrides and commands.
|
||||
Examples:
|
||||
Appending to a variable:
|
||||
>>> d = init()
|
||||
>>> setVar('TEST', 'this is a', d)
|
||||
>>> setVar('TEST_append', ' test', d)
|
||||
>>> setVar('TEST_append', ' of the emergency broadcast system.', d)
|
||||
>>> update_data(d)
|
||||
>>> print getVar('TEST', d)
|
||||
this is a test of the emergency broadcast system.
|
||||
|
||||
Prepending to a variable:
|
||||
>>> setVar('TEST', 'virtual/libc', d)
|
||||
>>> setVar('TEST_prepend', 'virtual/tmake ', d)
|
||||
>>> setVar('TEST_prepend', 'virtual/patcher ', d)
|
||||
>>> update_data(d)
|
||||
>>> print getVar('TEST', d)
|
||||
virtual/patcher virtual/tmake virtual/libc
|
||||
|
||||
Overrides:
|
||||
>>> setVar('TEST_arm', 'target', d)
|
||||
>>> setVar('TEST_ramses', 'machine', d)
|
||||
>>> setVar('TEST_local', 'local', d)
|
||||
>>> setVar('OVERRIDES', 'arm', d)
|
||||
|
||||
>>> setVar('TEST', 'original', d)
|
||||
>>> update_data(d)
|
||||
>>> print getVar('TEST', d)
|
||||
target
|
||||
|
||||
>>> setVar('OVERRIDES', 'arm:ramses:local', d)
|
||||
>>> setVar('TEST', 'original', d)
|
||||
>>> update_data(d)
|
||||
>>> print getVar('TEST', d)
|
||||
local
|
||||
|
||||
CopyMonster:
|
||||
>>> e = d.createCopy()
|
||||
>>> setVar('TEST_foo', 'foo', e)
|
||||
>>> update_data(e)
|
||||
>>> print getVar('TEST', e)
|
||||
local
|
||||
|
||||
>>> setVar('OVERRIDES', 'arm:ramses:local:foo', e)
|
||||
>>> update_data(e)
|
||||
>>> print getVar('TEST', e)
|
||||
foo
|
||||
|
||||
>>> f = d.createCopy()
|
||||
>>> setVar('TEST_moo', 'something', f)
|
||||
>>> setVar('OVERRIDES', 'moo:arm:ramses:local:foo', e)
|
||||
>>> update_data(e)
|
||||
>>> print getVar('TEST', e)
|
||||
foo
|
||||
|
||||
|
||||
>>> h = init()
|
||||
>>> setVar('SRC_URI', 'file://append.foo;patch=1 ', h)
|
||||
>>> g = h.createCopy()
|
||||
>>> setVar('SRC_URI_append_arm', 'file://other.foo;patch=1', g)
|
||||
>>> setVar('OVERRIDES', 'arm:moo', g)
|
||||
>>> update_data(g)
|
||||
>>> print getVar('SRC_URI', g)
|
||||
file://append.foo;patch=1 file://other.foo;patch=1
|
||||
|
||||
"""
|
||||
bb.msg.debug(2, bb.msg.domain.Data, "update_data()")
|
||||
|
||||
# now ask the cookie monster for help
|
||||
#print "Cookie Monster"
|
||||
#print "Append/Prepend %s" % d._special_values
|
||||
#print "Overrides %s" % d._seen_overrides
|
||||
|
||||
overrides = (getVar('OVERRIDES', d, 1) or "").split(':') or []
|
||||
|
||||
#
|
||||
# Well let us see what breaks here. We used to iterate
|
||||
# over each variable and apply the override and then
|
||||
# do the line expanding.
|
||||
# If we have bad luck - which we will have - the keys
|
||||
# were in some order that is so important for this
|
||||
# method which we don't have anymore.
|
||||
# Anyway we will fix that and write test cases this
|
||||
# time.
|
||||
|
||||
#
|
||||
# First we apply all overrides
|
||||
# Then we will handle _append and _prepend
|
||||
#
|
||||
|
||||
for o in overrides:
|
||||
# calculate '_'+override
|
||||
l = len(o)+1
|
||||
|
||||
# see if one should even try
|
||||
if not d._seen_overrides.has_key(o):
|
||||
continue
|
||||
|
||||
vars = d._seen_overrides[o]
|
||||
for var in vars:
|
||||
name = var[:-l]
|
||||
try:
|
||||
d[name] = d[var]
|
||||
except:
|
||||
bb.msg.note(1, bb.msg.domain.Data, "Untracked delVar")
|
||||
|
||||
# now on to the appends and prepends
|
||||
if d._special_values.has_key('_append'):
|
||||
appends = d._special_values['_append'] or []
|
||||
for append in appends:
|
||||
for (a, o) in getVarFlag(append, '_append', d) or []:
|
||||
# maybe the OVERRIDE was not yet added so keep the append
|
||||
if (o and o in overrides) or not o:
|
||||
delVarFlag(append, '_append', d)
|
||||
if o and not o in overrides:
|
||||
continue
|
||||
|
||||
sval = getVar(append,d) or ""
|
||||
sval+=a
|
||||
setVar(append, sval, d)
|
||||
|
||||
|
||||
if d._special_values.has_key('_prepend'):
|
||||
prepends = d._special_values['_prepend'] or []
|
||||
|
||||
for prepend in prepends:
|
||||
for (a, o) in getVarFlag(prepend, '_prepend', d) or []:
|
||||
# maybe the OVERRIDE was not yet added so keep the prepend
|
||||
if (o and o in overrides) or not o:
|
||||
delVarFlag(prepend, '_prepend', d)
|
||||
if o and not o in overrides:
|
||||
continue
|
||||
|
||||
sval = a + (getVar(prepend,d) or "")
|
||||
setVar(prepend, sval, d)
|
||||
|
||||
|
||||
def inherits_class(klass, d):
|
||||
val = getVar('__inherit_cache', d) or []
|
||||
if os.path.join('classes', '%s.bbclass' % klass) in val:
|
||||
return True
|
||||
return False
|
||||
|
||||
def _test():
|
||||
"""Start a doctest run on this module"""
|
||||
import doctest
|
||||
import bb
|
||||
from bb import data
|
||||
bb.msg.set_debug_level(0)
|
||||
doctest.testmod(data)
|
||||
|
||||
if __name__ == "__main__":
|
||||
_test()
|
||||
288 bitbake-dev/lib/bb/data_smart.py Normal file
@@ -0,0 +1,288 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake Smart Dictionary Implementation
|
||||
|
||||
Functions for interacting with the data structure used by the
|
||||
BitBake build tools.
|
||||
|
||||
"""
|
||||
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
# Copyright (C) 2004, 2005 Seb Frankengul
|
||||
# Copyright (C) 2005, 2006 Holger Hans Peter Freyther
|
||||
# Copyright (C) 2005 Uli Luckas
|
||||
# Copyright (C) 2005 ROAD GmbH
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||
|
||||
import copy, os, re, sys, time, types
|
||||
import bb
|
||||
from bb import utils, methodpool
|
||||
from COW import COWDictBase
|
||||
from new import classobj
|
||||
|
||||
|
||||
__setvar_keyword__ = ["_append","_prepend"]
|
||||
__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?')
|
||||
__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
|
||||
__expand_python_regexp__ = re.compile(r"\${@.+?}")
|
||||
|
||||
|
||||
class DataSmart:
|
||||
def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ):
|
||||
self.dict = {}
|
||||
|
||||
# cookie monster tribute
|
||||
self._special_values = special
|
||||
self._seen_overrides = seen
|
||||
|
||||
self.expand_cache = {}
|
||||
|
||||
def expand(self,s, varname):
|
||||
def var_sub(match):
|
||||
key = match.group()[2:-1]
|
||||
if varname and key:
|
||||
if varname == key:
|
||||
raise Exception("variable %s references itself!" % varname)
|
||||
var = self.getVar(key, 1)
|
||||
if var is not None:
|
||||
return var
|
||||
else:
|
||||
return match.group()
|
||||
|
||||
def python_sub(match):
|
||||
import bb
|
||||
code = match.group()[3:-1]
|
||||
locals()['d'] = self
|
||||
s = eval(code)
|
||||
if type(s) == types.IntType: s = str(s)
|
||||
return s
|
||||
|
||||
if type(s) is not types.StringType: # sanity check
|
||||
return s
|
||||
|
||||
if varname and varname in self.expand_cache:
|
||||
return self.expand_cache[varname]
|
||||
|
||||
while s.find('${') != -1:
|
||||
olds = s
|
||||
try:
|
||||
s = __expand_var_regexp__.sub(var_sub, s)
|
||||
s = __expand_python_regexp__.sub(python_sub, s)
|
||||
if s == olds: break
|
||||
if type(s) is not types.StringType: # sanity check
|
||||
bb.msg.error(bb.msg.domain.Data, 'expansion of %s returned non-string %s' % (olds, s))
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except:
|
||||
bb.msg.note(1, bb.msg.domain.Data, "%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s))
|
||||
raise
|
||||
|
||||
if varname:
|
||||
self.expand_cache[varname] = s
|
||||
|
||||
return s
|
||||
|
||||
def initVar(self, var):
|
||||
self.expand_cache = {}
|
||||
if not var in self.dict:
|
||||
self.dict[var] = {}
|
||||
|
||||
def _findVar(self,var):
|
||||
_dest = self.dict
|
||||
|
||||
while (_dest and var not in _dest):
|
||||
if not "_data" in _dest:
|
||||
_dest = None
|
||||
break
|
||||
_dest = _dest["_data"]
|
||||
|
||||
if _dest and var in _dest:
|
||||
return _dest[var]
|
||||
return None
|
||||
|
||||
def _makeShadowCopy(self, var):
|
||||
if var in self.dict:
|
||||
return
|
||||
|
||||
local_var = self._findVar(var)
|
||||
|
||||
if local_var:
|
||||
self.dict[var] = copy.copy(local_var)
|
||||
else:
|
||||
self.initVar(var)
|
||||
|
||||
def setVar(self,var,value):
|
||||
self.expand_cache = {}
|
||||
match = __setvar_regexp__.match(var)
|
||||
if match and match.group("keyword") in __setvar_keyword__:
|
||||
base = match.group('base')
|
||||
keyword = match.group("keyword")
|
||||
override = match.group('add')
|
||||
l = self.getVarFlag(base, keyword) or []
|
||||
l.append([value, override])
|
||||
self.setVarFlag(base, keyword, l)
|
||||
|
||||
# todo make sure keyword is not __doc__ or __module__
|
||||
# pay the cookie monster
|
||||
try:
|
||||
self._special_values[keyword].add( base )
|
||||
except:
|
||||
self._special_values[keyword] = set()
|
||||
self._special_values[keyword].add( base )
|
||||
|
||||
return
|
||||
|
||||
if not var in self.dict:
|
||||
self._makeShadowCopy(var)
|
||||
|
||||
# more cookies for the cookie monster
|
||||
if '_' in var:
|
||||
override = var[var.rfind('_')+1:]
|
||||
if not self._seen_overrides.has_key(override):
|
||||
self._seen_overrides[override] = set()
|
||||
self._seen_overrides[override].add( var )
|
||||
|
||||
# setting var
|
||||
self.dict[var]["content"] = value
|
||||
|
||||
def getVar(self,var,exp):
|
||||
value = self.getVarFlag(var,"content")
|
||||
|
||||
if exp and value:
|
||||
return self.expand(value,var)
|
||||
return value
|
||||
|
||||
def renameVar(self, key, newkey):
|
||||
"""
|
||||
Rename the variable key to newkey
|
||||
"""
|
||||
val = self.getVar(key, 0)
|
||||
if val is None:
|
||||
return
|
||||
|
||||
self.setVar(newkey, val)
|
||||
|
||||
for i in ('_append', '_prepend'):
|
||||
dest = self.getVarFlag(newkey, i) or []
|
||||
src = self.getVarFlag(key, i) or []
|
||||
dest.extend(src)
|
||||
self.setVarFlag(newkey, i, dest)
|
||||
|
||||
if self._special_values.has_key(i) and key in self._special_values[i]:
|
||||
self._special_values[i].remove(key)
|
||||
self._special_values[i].add(newkey)
|
||||
|
||||
self.delVar(key)
|
||||
|
||||
def delVar(self,var):
|
||||
self.expand_cache = {}
|
||||
self.dict[var] = {}
|
||||
|
||||
def setVarFlag(self,var,flag,flagvalue):
|
||||
if not var in self.dict:
|
||||
self._makeShadowCopy(var)
|
||||
self.dict[var][flag] = flagvalue
|
||||
|
||||
def getVarFlag(self,var,flag):
|
||||
local_var = self._findVar(var)
|
||||
if local_var:
|
||||
if flag in local_var:
|
||||
return copy.copy(local_var[flag])
|
||||
return None
|
||||
|
||||
def delVarFlag(self,var,flag):
|
||||
local_var = self._findVar(var)
|
||||
if not local_var:
|
||||
return
|
||||
if not var in self.dict:
|
||||
self._makeShadowCopy(var)
|
||||
|
||||
if var in self.dict and flag in self.dict[var]:
|
||||
del self.dict[var][flag]
|
||||
|
||||
def setVarFlags(self,var,flags):
|
||||
if not var in self.dict:
|
||||
self._makeShadowCopy(var)
|
||||
|
||||
for i in flags.keys():
|
||||
if i == "content":
|
||||
continue
|
||||
self.dict[var][i] = flags[i]
|
||||
|
||||
def getVarFlags(self,var):
|
||||
local_var = self._findVar(var)
|
||||
flags = {}
|
||||
|
||||
if local_var:
|
||||
for i in local_var.keys():
|
||||
if i == "content":
|
||||
continue
|
||||
flags[i] = local_var[i]
|
||||
|
||||
if len(flags) == 0:
|
||||
return None
|
||||
return flags
|
||||
|
||||
|
||||
def delVarFlags(self,var):
|
||||
if not var in self.dict:
|
||||
self._makeShadowCopy(var)
|
||||
|
||||
if var in self.dict:
|
||||
content = None
|
||||
|
||||
# try to save the content
|
||||
if "content" in self.dict[var]:
|
||||
content = self.dict[var]["content"]
|
||||
self.dict[var] = {}
|
||||
self.dict[var]["content"] = content
|
||||
else:
|
||||
del self.dict[var]
|
||||
|
||||
|
||||
def createCopy(self):
|
||||
"""
|
||||
Create a copy of self by setting _data to self
|
||||
"""
|
||||
# we really want this to be a DataSmart...
|
||||
data = DataSmart(seen=self._seen_overrides.copy(), special=self._special_values.copy())
|
||||
data.dict["_data"] = self.dict
|
||||
|
||||
return data
|
||||
|
||||
# Dictionary Methods
|
||||
def keys(self):
|
||||
def _keys(d, mykey):
|
||||
if "_data" in d:
|
||||
_keys(d["_data"],mykey)
|
||||
|
||||
for key in d.keys():
|
||||
if key != "_data":
|
||||
mykey[key] = None
|
||||
keytab = {}
|
||||
_keys(self.dict,keytab)
|
||||
return keytab.keys()
|
||||
|
||||
def __getitem__(self,item):
|
||||
#print "Warning deprecated"
|
||||
return self.getVar(item, False)
|
||||
|
||||
def __setitem__(self,var,data):
|
||||
#print "Warning deprecated"
|
||||
self.setVar(var,data)
|
||||
|
||||
|
||||
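createCopy() above is what gives DataSmart its copy-on-write behaviour: the copy's dict carries a "_data" entry pointing at the parent's dict, _findVar() follows that chain on reads, and setVar() writes only into the child. A short sketch of the effect, assuming bitbake's lib directory (and its COW module) is importable:

from bb import data_smart

parent = data_smart.DataSmart()
parent.setVar("TEST", "parent value")

child = parent.createCopy()
# Reads fall through the "_data" chain to the parent...
print child.getVar("TEST", 0)          # parent value

# ...until the child writes its own copy; the parent is left untouched.
child.setVar("TEST", "child value")
print child.getVar("TEST", 0)          # child value
print parent.getVar("TEST", 0)         # parent value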
302 bitbake-dev/lib/bb/event.py Normal file
@@ -0,0 +1,302 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake 'Event' implementation
|
||||
|
||||
Classes and functions for manipulating 'events' in the
|
||||
BitBake build tools.
|
||||
"""
|
||||
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import os, re
|
||||
import bb.utils
|
||||
|
||||
# This is the pid for which we should generate the event. This is set when
|
||||
# the runqueue forks off.
|
||||
worker_pid = 0
|
||||
|
||||
class Event:
|
||||
"""Base class for events"""
|
||||
type = "Event"
|
||||
|
||||
def __init__(self, d):
|
||||
self._data = d
|
||||
self.pid = worker_pid
|
||||
|
||||
def getData(self):
|
||||
return self._data
|
||||
|
||||
def setData(self, data):
|
||||
self._data = data
|
||||
|
||||
data = property(getData, setData, None, "data property")
|
||||
|
||||
NotHandled = 0
|
||||
Handled = 1
|
||||
|
||||
Registered = 10
|
||||
AlreadyRegistered = 14
|
||||
|
||||
# Internal
|
||||
_handlers = {}
|
||||
_ui_handlers = {}
|
||||
_ui_handler_seq = 0
|
||||
|
||||
def fire(event):
|
||||
"""Fire off an Event"""
|
||||
|
||||
for handler in _handlers:
|
||||
h = _handlers[handler]
|
||||
if type(h).__name__ == "code":
|
||||
exec(h)
|
||||
tmpHandler(event)
|
||||
else:
|
||||
h(event)
|
||||
|
||||
# Remove the event data elements for UI handlers - too much data otherwise
|
||||
# They can request data if they need it
|
||||
event.data = None
|
||||
event._data = None
|
||||
|
||||
errors = []
|
||||
for h in _ui_handlers:
|
||||
#print "Sending event %s" % event
|
||||
classid = "%s.%s" % (event.__class__.__module__, event.__class__.__name__)
|
||||
try:
|
||||
_ui_handlers[h].event.send((classid, event))
|
||||
except:
|
||||
errors.append(h)
|
||||
for h in errors:
|
||||
del _ui_handlers[h]
|
||||
|
||||
def register(name, handler):
|
||||
"""Register an Event handler"""
|
||||
|
||||
# already registered
|
||||
if name in _handlers:
|
||||
return AlreadyRegistered
|
||||
|
||||
if handler is not None:
|
||||
# handle string containing python code
|
||||
if type(handler).__name__ == "str":
|
||||
tmp = "def tmpHandler(e):\n%s" % handler
|
||||
comp = bb.utils.better_compile(tmp, "tmpHandler(e)", "bb.event._registerCode")
|
||||
_handlers[name] = comp
|
||||
else:
|
||||
_handlers[name] = handler
|
||||
|
||||
return Registered
|
||||
|
||||
def remove(name, handler):
|
||||
"""Remove an Event handler"""
|
||||
_handlers.pop(name)
|
||||
|
||||
def register_UIHhandler(handler):
|
||||
bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
|
||||
_ui_handlers[_ui_handler_seq] = handler
|
||||
return _ui_handler_seq
|
||||
|
||||
def unregister_UIHhandler(handlerNum):
|
||||
if handlerNum in _ui_handlers:
|
||||
del _ui_handlers[handlerNum]
|
||||
return
|
||||
|
||||
def getName(e):
|
||||
"""Returns the name of a class or class instance"""
|
||||
if getattr(e, "__name__", None) == None:
|
||||
return e.__class__.__name__
|
||||
else:
|
||||
return e.__name__
|
||||
|
||||
class ConfigParsed(Event):
|
||||
"""Configuration Parsing Complete"""
|
||||
|
||||
class RecipeParsed(Event):
|
||||
""" Recipe Parsing Complete """
|
||||
|
||||
def __init__(self, fn, d):
|
||||
self.fn = fn
|
||||
Event.__init__(self, d)
|
||||
|
||||
class StampUpdate(Event):
|
||||
"""Trigger for any adjustment of the stamp files to happen"""
|
||||
|
||||
def __init__(self, targets, stampfns, d):
|
||||
self._targets = targets
|
||||
self._stampfns = stampfns
|
||||
Event.__init__(self, d)
|
||||
|
||||
def getStampPrefix(self):
|
||||
return self._stampfns
|
||||
|
||||
def getTargets(self):
|
||||
return self._targets
|
||||
|
||||
stampPrefix = property(getStampPrefix)
|
||||
targets = property(getTargets)
|
||||
|
||||
class PkgBase(Event):
|
||||
"""Base class for package events"""
|
||||
|
||||
def __init__(self, t, d):
|
||||
self._pkg = t
|
||||
Event.__init__(self, d)
|
||||
self._message = "package %s: %s" % (bb.data.getVar("P", d, 1), getName(self)[3:])
|
||||
|
||||
def getPkg(self):
|
||||
return self._pkg
|
||||
|
||||
def setPkg(self, pkg):
|
||||
self._pkg = pkg
|
||||
|
||||
pkg = property(getPkg, setPkg, None, "pkg property")
|
||||
|
||||
|
||||
class BuildBase(Event):
|
||||
"""Base class for bbmake run events"""
|
||||
|
||||
def __init__(self, n, p, c, failures = 0):
|
||||
self._name = n
|
||||
self._pkgs = p
|
||||
Event.__init__(self, c)
|
||||
self._failures = failures
|
||||
|
||||
def getPkgs(self):
|
||||
return self._pkgs
|
||||
|
||||
def setPkgs(self, pkgs):
|
||||
self._pkgs = pkgs
|
||||
|
||||
def getName(self):
|
||||
return self._name
|
||||
|
||||
def setName(self, name):
|
||||
self._name = name
|
||||
|
||||
def getCfg(self):
|
||||
return self.data
|
||||
|
||||
def setCfg(self, cfg):
|
||||
self.data = cfg
|
||||
|
||||
def getFailures(self):
|
||||
"""
|
||||
Return the number of failed packages
|
||||
"""
|
||||
return self._failures
|
||||
|
||||
pkgs = property(getPkgs, setPkgs, None, "pkgs property")
|
||||
name = property(getName, setName, None, "name property")
|
||||
cfg = property(getCfg, setCfg, None, "cfg property")
|
||||
|
||||
|
||||
class DepBase(PkgBase):
|
||||
"""Base class for dependency events"""
|
||||
|
||||
def __init__(self, t, data, d):
|
||||
self._dep = d
|
||||
PkgBase.__init__(self, t, data)
|
||||
|
||||
def getDep(self):
|
||||
return self._dep
|
||||
|
||||
def setDep(self, dep):
|
||||
self._dep = dep
|
||||
|
||||
dep = property(getDep, setDep, None, "dep property")
|
||||
|
||||
|
||||
class BuildStarted(BuildBase):
|
||||
"""bbmake build run started"""
|
||||
|
||||
|
||||
class BuildCompleted(BuildBase):
|
||||
"""bbmake build run completed"""
|
||||
|
||||
|
||||
class UnsatisfiedDep(DepBase):
|
||||
"""Unsatisfied Dependency"""
|
||||
|
||||
|
||||
class RecursiveDep(DepBase):
|
||||
"""Recursive Dependency"""
|
||||
|
||||
class NoProvider(Event):
|
||||
"""No Provider for an Event"""
|
||||
|
||||
def __init__(self, item, data, runtime=False):
|
||||
Event.__init__(self, data)
|
||||
self._item = item
|
||||
self._runtime = runtime
|
||||
|
||||
def getItem(self):
|
||||
return self._item
|
||||
|
||||
def isRuntime(self):
|
||||
return self._runtime
|
||||
|
||||
class MultipleProviders(Event):
|
||||
"""Multiple Providers"""
|
||||
|
||||
def __init__(self, item, candidates, data, runtime = False):
|
||||
Event.__init__(self, data)
|
||||
self._item = item
|
||||
self._candidates = candidates
|
||||
self._is_runtime = runtime
|
||||
|
||||
def isRuntime(self):
|
||||
"""
|
||||
Is this a runtime issue?
|
||||
"""
|
||||
return self._is_runtime
|
||||
|
||||
def getItem(self):
|
||||
"""
|
||||
The name of the item to be built
|
||||
"""
|
||||
return self._item
|
||||
|
||||
def getCandidates(self):
|
||||
"""
|
||||
Get the possible Candidates for a PROVIDER.
|
||||
"""
|
||||
return self._candidates
|
||||
|
||||
class ParseProgress(Event):
|
||||
"""
|
||||
Parsing Progress Event
|
||||
"""
|
||||
|
||||
def __init__(self, d, cached, parsed, skipped, masked, errors, total):
|
||||
Event.__init__(self, d)
|
||||
self.cached = cached
|
||||
self.parsed = parsed
|
||||
self.skipped = skipped
|
||||
self.masked = masked
|
||||
self.errors = errors
|
||||
self.sofar = cached + parsed + skipped
|
||||
self.total = total
|
||||
|
||||
class DepTreeGenerated(Event):
|
||||
"""
|
||||
Event when a dependency tree has been generated
|
||||
"""
|
||||
|
||||
def __init__(self, d, depgraph):
|
||||
Event.__init__(self, d)
|
||||
self._depgraph = depgraph
|
||||
|
||||
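register() and fire() above implement a small synchronous dispatcher: fire() calls every registered handler with the event (compiling string handlers into a temporary function first) and then forwards a stripped copy to any connected UI handlers. A minimal sketch of wiring a handler up by hand, assuming bitbake's lib directory is on sys.path (real metadata normally registers handlers through the parser, e.g. addhandler, rather than like this; ExampleEvent and example_handler are invented for the example):

from bb import event

class ExampleEvent(event.Event):
    """Illustrative event type carrying a message (not part of bb.event)"""
    def __init__(self, msg, d):
        event.Event.__init__(self, d)
        self.msg = msg

def example_handler(e):
    # fire() calls every handler for every event, so filter on the event type here.
    if isinstance(e, ExampleEvent):
        print "saw event: %s" % e.msg

event.register("example_handler", example_handler)
event.fire(ExampleEvent("hello from the dispatcher", None))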
606 bitbake-dev/lib/bb/fetch/__init__.py Normal file
@@ -0,0 +1,606 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake 'Fetch' implementations
|
||||
|
||||
Classes for obtaining upstream sources for the
|
||||
BitBake build tools.
|
||||
"""
|
||||
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||
|
||||
import os, re
|
||||
import bb
|
||||
from bb import data
|
||||
from bb import persist_data
|
||||
|
||||
class FetchError(Exception):
|
||||
"""Exception raised when a download fails"""
|
||||
|
||||
class NoMethodError(Exception):
|
||||
"""Exception raised when there is no method to obtain a supplied url or set of urls"""
|
||||
|
||||
class MissingParameterError(Exception):
|
||||
"""Exception raised when a fetch method is missing a critical parameter in the url"""
|
||||
|
||||
class ParameterError(Exception):
|
||||
"""Exception raised when a url cannot be proccessed due to invalid parameters."""
|
||||
|
||||
class MD5SumError(Exception):
|
||||
"""Exception raised when a MD5SUM of a file does not match the expected one"""
|
||||
|
||||
class InvalidSRCREV(Exception):
|
||||
"""Exception raised when an invalid SRCREV is encountered"""
|
||||
|
||||
def uri_replace(uri, uri_find, uri_replace, d):
|
||||
# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri)
|
||||
if not uri or not uri_find or not uri_replace:
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing")
|
||||
uri_decoded = list(bb.decodeurl(uri))
|
||||
uri_find_decoded = list(bb.decodeurl(uri_find))
|
||||
uri_replace_decoded = list(bb.decodeurl(uri_replace))
|
||||
result_decoded = ['','','','','',{}]
|
||||
for i in uri_find_decoded:
|
||||
loc = uri_find_decoded.index(i)
|
||||
result_decoded[loc] = uri_decoded[loc]
|
||||
import types
|
||||
if type(i) == types.StringType:
|
||||
if (re.match(i, uri_decoded[loc])):
|
||||
result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
|
||||
if uri_find_decoded.index(i) == 2:
|
||||
if d:
|
||||
localfn = bb.fetch.localpath(uri, d)
|
||||
if localfn:
|
||||
result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d))
|
||||
# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
|
||||
else:
|
||||
# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: no match")
|
||||
return uri
|
||||
# else:
|
||||
# for j in i.keys():
|
||||
# FIXME: apply replacements against options
|
||||
return bb.encodeurl(result_decoded)
|
||||
|
||||
methods = []
|
||||
urldata_cache = {}
|
||||
saved_headrevs = {}
|
||||
|
||||
def fetcher_init(d):
|
||||
"""
|
||||
Called to initialize the fetchers once the configuration data is known
|
||||
Calls before this must not hit the cache.
|
||||
"""
|
||||
pd = persist_data.PersistData(d)
|
||||
# When to drop SCM head revisions controlled by user policy
|
||||
srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
|
||||
if srcrev_policy == "cache":
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy)
|
||||
elif srcrev_policy == "clear":
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Clearing SRCREV cache due to cache policy of: %s" % srcrev_policy)
|
||||
try:
|
||||
bb.fetch.saved_headrevs = pd.getKeyValues("BB_URI_HEADREVS")
|
||||
except:
|
||||
pass
|
||||
pd.delDomain("BB_URI_HEADREVS")
|
||||
else:
|
||||
bb.msg.fatal(bb.msg.domain.Fetcher, "Invalid SRCREV cache policy of: %s" % srcrev_policy)
|
||||
# Make sure our domains exist
|
||||
pd.addDomain("BB_URI_HEADREVS")
|
||||
pd.addDomain("BB_URI_LOCALCOUNT")
|
||||
|
||||
def fetcher_compare_revisons(d):
|
||||
"""
|
||||
Compare the revisions in the persistent cache with current values and
|
||||
return true/false on whether they've changed.
|
||||
"""
|
||||
|
||||
pd = persist_data.PersistData(d)
|
||||
data = pd.getKeyValues("BB_URI_HEADREVS")
|
||||
data2 = bb.fetch.saved_headrevs
|
||||
|
||||
changed = False
|
||||
for key in data:
|
||||
if key not in data2 or data2[key] != data[key]:
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "%s changed" % key)
|
||||
changed = True
|
||||
return True
|
||||
else:
|
||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "%s did not change" % key)
|
||||
return False
|
||||
|
||||
# Function call order is usually:
|
||||
# 1. init
|
||||
# 2. go
|
||||
# 3. localpaths
|
||||
# localpath can be called at any time
|
||||
|
||||
def init(urls, d, setup = True):
|
||||
urldata = {}
|
||||
fn = bb.data.getVar('FILE', d, 1)
|
||||
if fn in urldata_cache:
|
||||
urldata = urldata_cache[fn]
|
||||
|
||||
for url in urls:
|
||||
        if url not in urldata:
            urldata[url] = FetchData(url, d)

    if setup:
        for url in urldata:
            if not urldata[url].setup:
                urldata[url].setup_localpath(d)

    urldata_cache[fn] = urldata
    return urldata

def go(d):
    """
    Fetch all urls
    init must have previously been called
    """
    urldata = init([], d, True)

    for u in urldata:
        ud = urldata[u]
        m = ud.method
        if ud.localfile:
            if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
                # File already present along with md5 stamp file
                # Touch md5 file to show activity
                try:
                    os.utime(ud.md5, None)
                except:
                    # Errors aren't fatal here
                    pass
                continue
            lf = bb.utils.lockfile(ud.lockfile)
            if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
                # If someone else fetched this before we got the lock,
                # notice and don't try again
                try:
                    os.utime(ud.md5, None)
                except:
                    # Errors aren't fatal here
                    pass
                bb.utils.unlockfile(lf)
                continue
        m.go(u, ud, d)
        if ud.localfile:
            if not m.forcefetch(u, ud, d):
                Fetch.write_md5sum(u, ud, d)
            bb.utils.unlockfile(lf)


def checkstatus(d):
    """
    Check all urls exist upstream
    init must have previously been called
    """
    urldata = init([], d, True)

    for u in urldata:
        ud = urldata[u]
        m = ud.method
        bb.msg.note(1, bb.msg.domain.Fetcher, "Testing URL %s" % u)
        ret = m.checkstatus(u, ud, d)
        if not ret:
            bb.msg.fatal(bb.msg.domain.Fetcher, "URL %s doesn't work" % u)

def localpaths(d):
    """
    Return a list of the local filenames, assuming successful fetch
    """
    local = []
    urldata = init([], d, True)

    for u in urldata:
        ud = urldata[u]
        local.append(ud.localpath)

    return local

srcrev_internal_call = False

def get_srcrev(d):
    """
    Return the version string for the current package
    (usually to be used as PV)
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.
    """

    #
    # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which
    # could translate into a call to here. If it does, we need to catch this
    # and provide some way so it knows get_srcrev is active instead of being
    # some number etc. hence the srcrev_internal_call tracking and the magic
    # "SRCREVINACTION" return value.
    #
    # Neater solutions welcome!
    #
    if bb.fetch.srcrev_internal_call:
        return "SRCREVINACTION"

    scms = []

    # Only call setup_localpath on URIs which suppports_srcrev()
    urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
    for u in urldata:
        ud = urldata[u]
        if ud.method.suppports_srcrev():
            if not ud.setup:
                ud.setup_localpath(d)
            scms.append(u)

    if len(scms) == 0:
        bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
        raise ParameterError

    bb.data.setVar('__BB_DONT_CACHE','1', d)

    if len(scms) == 1:
        return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)

    #
    # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = bb.data.getVar('SRCREV_FORMAT', d, 1)
    if not format:
        bb.msg.error(bb.msg.domain.Fetcher, "The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
        raise ParameterError

    for scm in scms:
        if 'name' in urldata[scm].parm:
            name = urldata[scm].parm["name"]
            rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d)
            format = format.replace(name, rev)

    return format

def localpath(url, d, cache = True):
    """
    Called from the parser with cache=False since the cache isn't ready
    at this point. Also called from classed in OE e.g. patch.bbclass
    """
    ud = init([url], d)
    if ud[url].method:
        return ud[url].localpath
    return url

def runfetchcmd(cmd, d, quiet = False):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    """

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export varaiables?
    exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST', 'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy', 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']

    for var in exportvars:
        val = data.getVar(var, d, True)
        if val:
            cmd = 'export ' + var + '=%s; %s' % (val, cmd)

    bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)

    # redirect stderr to stdout
    stdout_handle = os.popen(cmd + " 2>&1", "r")
    output = ""

    while 1:
        line = stdout_handle.readline()
        if not line:
            break
        if not quiet:
            print line,
        output += line

    status = stdout_handle.close() or 0
    signal = status >> 8
    exitstatus = status & 0xff

    if signal:
        raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
    elif status != 0:
        raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, status, output))

    return output

class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d):
        self.localfile = ""
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d))
        self.date = Fetch.getSRCDate(self, d)
        self.url = url
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False
        for m in methods:
            if m.supports(url, self, d):
                self.method = m
                return
        raise NoMethodError("Missing implementation for url %s" % url)

    def setup_localpath(self, d):
        self.setup = True
        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
        else:
            try:
                bb.fetch.srcrev_internal_call = True
                self.localpath = self.method.localpath(self.url, self, d)
            finally:
                bb.fetch.srcrev_internal_call = False
            # We have to clear data's internal caches since the cached value of SRCREV is now wrong.
            # Horrible...
            bb.data.delVar("ISHOULDNEVEREXIST", d)
        self.md5 = self.localpath + '.md5'
        self.lockfile = self.localpath + '.lock'


class Fetch(object):
    """Base class for 'fetch'ing data"""

    def __init__(self, urls = []):
        self.urls = []

    def supports(self, url, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return url

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def forcefetch(self, url, urldata, d):
        """
        Force a fetch, even if localpath exists?
        """
        return False

    def suppports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def go(self, url, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        raise NoMethodError("Missing implementation for url")

    def checkstatus(self, url, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s could not be checked for status since no method exists." % url)
        return True

    def getSRCDate(urldata, d):
        """
        Return the SRC Date for the component

        d the bb.data module
        """
        if "srcdate" in urldata.parm:
            return urldata.parm['srcdate']

        pn = data.getVar("PN", d, 1)

        if pn:
            return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)

        return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
    getSRCDate = staticmethod(getSRCDate)

    def srcrev_internal_helper(ud, d):
        """
        Return:
            a) a source revision if specified
            b) True if auto srcrev is in action
            c) False otherwise
        """

        if 'rev' in ud.parm:
            return ud.parm['rev']

        if 'tag' in ud.parm:
            return ud.parm['tag']

        rev = None
        if 'name' in ud.parm:
            pn = data.getVar("PN", d, 1)
            rev = data.getVar("SRCREV_pn-" + pn + "_" + ud.parm['name'], d, 1)
        if not rev:
            rev = data.getVar("SRCREV", d, 1)
        if rev == "INVALID":
            raise InvalidSRCREV("Please set SRCREV to a valid value")
        if not rev:
            return False
        if rev is "SRCREVINACTION":
            return True
        return rev

    srcrev_internal_helper = staticmethod(srcrev_internal_helper)

    def try_mirror(d, tarfn):
        """
        Try to use a mirrored version of the sources. We do this
        to avoid massive loads on foreign cvs and svn servers.
        This method will be used by the different fetcher
        implementations.

        d Is a bb.data instance
        tarfn is the name of the tarball
        """
        tarpath = os.path.join(data.getVar("DL_DIR", d, 1), tarfn)
        if os.access(tarpath, os.R_OK):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % tarfn)
            return True

        pn = data.getVar('PN', d, True)
        src_tarball_stash = None
        if pn:
            src_tarball_stash = (data.getVar('SRC_TARBALL_STASH_%s' % pn, d, True) or data.getVar('CVS_TARBALL_STASH_%s' % pn, d, True) or data.getVar('SRC_TARBALL_STASH', d, True) or data.getVar('CVS_TARBALL_STASH', d, True) or "").split()

        ld = d.createCopy()
        for stash in src_tarball_stash:
            url = stash + tarfn
            try:
                ud = FetchData(url, ld)
            except bb.fetch.NoMethodError:
                bb.msg.debug(1, bb.msg.domain.Fetcher, "No method for %s" % url)
                continue

            ud.setup_localpath(ld)

            try:
                ud.method.go(url, ud, ld)
                return True
            except (bb.fetch.MissingParameterError,
                    bb.fetch.FetchError,
                    bb.fetch.MD5SumError):
                import sys
                (type, value, traceback) = sys.exc_info()
                bb.msg.debug(2, bb.msg.domain.Fetcher, "Tarball stash fetch failure: %s" % value)
        return False
    try_mirror = staticmethod(try_mirror)

    def verify_md5sum(ud, got_sum):
        """
        Verify the md5sum we wanted with the one we got
        """
        wanted_sum = None
        if 'md5sum' in ud.parm:
            wanted_sum = ud.parm['md5sum']
        if not wanted_sum:
            return True

        return wanted_sum == got_sum
    verify_md5sum = staticmethod(verify_md5sum)

    def write_md5sum(url, ud, d):
        md5data = bb.utils.md5_file(ud.localpath)
        # verify the md5sum
        if not Fetch.verify_md5sum(ud, md5data):
            raise MD5SumError(url)

        md5out = file(ud.md5, 'w')
        md5out.write(md5data)
        md5out.close()
    write_md5sum = staticmethod(write_md5sum)

    def latest_revision(self, url, ud, d):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError

        pd = persist_data.PersistData(d)
        key = self.generate_revision_key(url, ud, d)
        rev = pd.getValue("BB_URI_HEADREVS", key)
        if rev != None:
            return str(rev)

        rev = self._latest_revision(url, ud, d)
        pd.setValue("BB_URI_HEADREVS", key, rev)
        return rev

    def sortable_revision(self, url, ud, d):
        """

        """
        has_sortable = hasattr(self, "_sortable_revision")
        if has_sortable:
            return self._sortable_revision(url, ud, d)

        pd = persist_data.PersistData(d)
        key = self.generate_revision_key(url, ud, d)

        latest_rev = self._build_revision(url, ud, d)
        last_rev = pd.getValue("BB_URI_LOCALCOUNT", key + "_rev")
        count = pd.getValue("BB_URI_LOCALCOUNT", key + "_count")

        if last_rev == latest_rev:
            return str(count + "+" + latest_rev)

        if count is None:
            count = "0"
        else:
            count = str(int(count) + 1)

        pd.setValue("BB_URI_LOCALCOUNT", key + "_rev", latest_rev)
        pd.setValue("BB_URI_LOCALCOUNT", key + "_count", count)

        return str(count + "+" + latest_rev)

    def generate_revision_key(self, url, ud, d):
        key = self._revision_key(url, ud, d)
        return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")

import cvs
import git
import local
import svn
import wget
import svk
import ssh
import perforce
import bzr
import hg
import osc

methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(cvs.Cvs())
methods.append(svk.Svk())
methods.append(ssh.SSH())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
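The methods list above is the whole plugin mechanism: FetchData.__init__ walks it and keeps the first class whose supports() returns true for the parsed URL type. As a rough illustration only (this is not part of the commit; the MyProto class and the myproto:// scheme are made up for the example), a new fetcher follows the same shape:

class MyProto(Fetch):
    """Hypothetical fetcher sketch for a 'myproto://' URL type."""
    def supports(self, url, ud, d):
        # FetchData has already parsed the URL scheme into ud.type
        return ud.type in ['myproto']

    def localpath(self, url, ud, d):
        # Decide the download file name, as the real fetchers below do
        ud.localfile = data.expand('myproto_%s%s.tar.gz' % (ud.host, ud.path.replace('/', '.')), d)
        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def go(self, loc, ud, d):
        # Fetch into ud.localpath, typically via runfetchcmd(...)
        raise FetchError("sketch only, no real transport implemented")

methods.append(MyProto())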
153
bitbake-dev/lib/bb/fetch/bzr.py
Normal file
@@ -0,0 +1,153 @@
"""
BitBake 'Fetch' implementation for bzr.

"""

# Copyright (C) 2007 Ross Burton
# Copyright (C) 2007 Richard Purdie
#
# Classes for obtaining upstream sources for the
# BitBake build tools.
# Copyright (C) 2003, 2004  Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys
import bb
from bb import data
from bb.fetch import Fetch
from bb.fetch import FetchError
from bb.fetch import runfetchcmd

class Bzr(Fetch):
    def supports(self, url, ud, d):
        return ud.type in ['bzr']

    def localpath (self, url, ud, d):

        # Create paths to bzr checkouts
        relpath = ud.path
        if relpath.startswith('/'):
            # Remove leading slash as os.path.join can't cope
            relpath = relpath[1:]
        ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)

        revision = Fetch.srcrev_internal_helper(ud, d)
        if revision is True:
            ud.revision = self.latest_revision(url, ud, d)
        elif revision:
            ud.revision = revision

        if not ud.revision:
            ud.revision = self.latest_revision(url, ud, d)

        ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def _buildbzrcommand(self, ud, d, command):
        """
        Build up an bzr commandline based on ud
        command is "fetch", "update", "revno"
        """

        basecmd = data.expand('${FETCHCMD_bzr}', d)

        proto = "http"
        if "proto" in ud.parm:
            proto = ud.parm["proto"]

        bzrroot = ud.host + ud.path

        options = []

        if command is "revno":
            bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
        else:
            if ud.revision:
                options.append("-r %s" % ud.revision)

            if command is "fetch":
                bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
            elif command is "update":
                bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
            else:
                raise FetchError("Invalid bzr command %s" % command)

        return bzrcmd

    def go(self, loc, ud, d):
        """Fetch url"""

        # try to use the tarball stash
        if Fetch.try_mirror(d, ud.localfile):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping bzr checkout." % ud.localpath)
            return

        if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
            bzrcmd = self._buildbzrcommand(ud, d, "update")
            bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Update %s" % loc)
            os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
            runfetchcmd(bzrcmd, d)
        else:
            os.system("rm -rf %s" % os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)))
            bzrcmd = self._buildbzrcommand(ud, d, "fetch")
            bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Checkout %s" % loc)
            bb.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % bzrcmd)
            runfetchcmd(bzrcmd, d)

        os.chdir(ud.pkgdir)
        # tar them up to a defined filename
        try:
            runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.pkgdir)), d)
        except:
            t, v, tb = sys.exc_info()
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise t, v, tb

    def suppports_srcrev(self):
        return True

    def _revision_key(self, url, ud, d):
        """
        Return a unique key for the url
        """
        return "bzr:" + ud.pkgdir

    def _latest_revision(self, url, ud, d):
        """
        Return the latest upstream revision number
        """
        bb.msg.debug(2, bb.msg.domain.Fetcher, "BZR fetcher hitting network for %s" % url)

        output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)

        return output.strip()

    def _sortable_revision(self, url, ud, d):
        """
        Return a sortable revision number which in our case is the revision number
        """

        return self._build_revision(url, ud, d)

    def _build_revision(self, url, ud, d):
        return ud.revision

182
bitbake-dev/lib/bb/fetch/cvs.py
Normal file
@@ -0,0 +1,182 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake 'Fetch' implementations
|
||||
|
||||
Classes for obtaining upstream sources for the
|
||||
BitBake build tools.
|
||||
|
||||
"""
|
||||
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
#Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||
#
|
||||
|
||||
import os
|
||||
import bb
|
||||
from bb import data
|
||||
from bb.fetch import Fetch
|
||||
from bb.fetch import FetchError
|
||||
from bb.fetch import MissingParameterError
|
||||
|
||||
class Cvs(Fetch):
|
||||
"""
|
||||
Class to fetch a module or modules from cvs repositories
|
||||
"""
|
||||
def supports(self, url, ud, d):
|
||||
"""
|
||||
Check to see if a given url can be fetched with cvs.
|
||||
"""
|
||||
return ud.type in ['cvs', 'pserver']
|
||||
|
||||
def localpath(self, url, ud, d):
|
||||
if not "module" in ud.parm:
|
||||
raise MissingParameterError("cvs method needs a 'module' parameter")
|
||||
ud.module = ud.parm["module"]
|
||||
|
||||
ud.tag = ""
|
||||
if 'tag' in ud.parm:
|
||||
ud.tag = ud.parm['tag']
|
||||
|
||||
# Override the default date in certain cases
|
||||
if 'date' in ud.parm:
|
||||
ud.date = ud.parm['date']
|
||||
elif ud.tag:
|
||||
ud.date = ""
|
||||
|
||||
norecurse = ''
|
||||
if 'norecurse' in ud.parm:
|
||||
norecurse = '_norecurse'
|
||||
|
||||
fullpath = ''
|
||||
if 'fullpath' in ud.parm:
|
||||
fullpath = '_fullpath'
|
||||
|
||||
ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
|
||||
|
||||
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
|
||||
|
||||
def forcefetch(self, url, ud, d):
|
||||
if (ud.date == "now"):
|
||||
return True
|
||||
return False
|
||||
|
||||
def go(self, loc, ud, d):
|
||||
|
||||
# try to use the tarball stash
|
||||
if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping cvs checkout." % ud.localpath)
|
||||
return
|
||||
|
||||
method = "pserver"
|
||||
if "method" in ud.parm:
|
||||
method = ud.parm["method"]
|
||||
|
||||
localdir = ud.module
|
||||
if "localdir" in ud.parm:
|
||||
localdir = ud.parm["localdir"]
|
||||
|
||||
cvs_port = ""
|
||||
if "port" in ud.parm:
|
||||
cvs_port = ud.parm["port"]
|
||||
|
||||
cvs_rsh = None
|
||||
if method == "ext":
|
||||
if "rsh" in ud.parm:
|
||||
cvs_rsh = ud.parm["rsh"]
|
||||
|
||||
if method == "dir":
|
||||
cvsroot = ud.path
|
||||
else:
|
||||
cvsroot = ":" + method
|
||||
cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
|
||||
if cvsproxyhost:
|
||||
cvsroot += ";proxy=" + cvsproxyhost
|
||||
cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
|
||||
if cvsproxyport:
|
||||
cvsroot += ";proxyport=" + cvsproxyport
|
||||
cvsroot += ":" + ud.user
|
||||
if ud.pswd:
|
||||
cvsroot += ":" + ud.pswd
|
||||
cvsroot += "@" + ud.host + ":" + cvs_port + ud.path
|
||||
|
||||
options = []
|
||||
if 'norecurse' in ud.parm:
|
||||
options.append("-l")
|
||||
if ud.date:
|
||||
# treat YYYYMMDDHHMM specially for CVS
|
||||
if len(ud.date) == 12:
|
||||
options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
|
||||
else:
|
||||
options.append("-D \"%s UTC\"" % ud.date)
|
||||
if ud.tag:
|
||||
options.append("-r %s" % ud.tag)
|
||||
|
||||
localdata = data.createCopy(d)
|
||||
data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
|
||||
data.update_data(localdata)
|
||||
|
||||
data.setVar('CVSROOT', cvsroot, localdata)
|
||||
data.setVar('CVSCOOPTS', " ".join(options), localdata)
|
||||
data.setVar('CVSMODULE', ud.module, localdata)
|
||||
cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
|
||||
cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)
|
||||
|
||||
if cvs_rsh:
|
||||
cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
|
||||
cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
|
||||
|
||||
# create module directory
|
||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory")
|
||||
pkg = data.expand('${PN}', d)
|
||||
pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
|
||||
moddir = os.path.join(pkgdir,localdir)
|
||||
if os.access(os.path.join(moddir,'CVS'), os.R_OK):
|
||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
|
||||
# update sources there
|
||||
os.chdir(moddir)
|
||||
myret = os.system(cvsupdatecmd)
|
||||
else:
|
||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
|
||||
# check out sources there
|
||||
bb.mkdirhier(pkgdir)
|
||||
os.chdir(pkgdir)
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd)
|
||||
myret = os.system(cvscmd)
|
||||
|
||||
if myret != 0 or not os.access(moddir, os.R_OK):
|
||||
try:
|
||||
os.rmdir(moddir)
|
||||
except OSError:
|
||||
pass
|
||||
raise FetchError(ud.module)
|
||||
|
||||
# tar them up to a defined filename
|
||||
if 'fullpath' in ud.parm:
|
||||
os.chdir(pkgdir)
|
||||
myret = os.system("tar -czf %s %s" % (ud.localpath, localdir))
|
||||
else:
|
||||
os.chdir(moddir)
|
||||
os.chdir('..')
|
||||
myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(moddir)))
|
||||
|
||||
if myret != 0:
|
||||
try:
|
||||
os.unlink(ud.localpath)
|
||||
except OSError:
|
||||
pass
|
||||
raise FetchError(ud.module)
|
||||
188
bitbake-dev/lib/bb/fetch/git.py
Normal file
@@ -0,0 +1,188 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' git implementation

"""

#Copyright (C) 2005 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import bb
from bb import data
from bb.fetch import Fetch
from bb.fetch import runfetchcmd

class Git(Fetch):
    """Class to fetch a module or modules from git repositories"""
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['git']

    def localpath(self, url, ud, d):

        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            ud.proto = 'file'
        else:
            ud.proto = "rsync"

        ud.branch = ud.parm.get("branch", "master")

        tag = Fetch.srcrev_internal_helper(ud, d)
        if tag is True:
            ud.tag = self.latest_revision(url, ud, d)
        elif tag:
            ud.tag = tag

        if not ud.tag or ud.tag == "master":
            ud.tag = self.latest_revision(url, ud, d)

        ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def go(self, loc, ud, d):
        """Fetch url"""

        if Fetch.try_mirror(d, ud.localfile):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
            return

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))

        repofilename = 'git_%s.tar.gz' % (gitsrcname)
        repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
        repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

        coname = '%s' % (ud.tag)
        codir = os.path.join(repodir, coname)

        if not os.path.exists(repodir):
            if Fetch.try_mirror(d, repofilename):
                bb.mkdirhier(repodir)
                os.chdir(repodir)
                runfetchcmd("tar -xzf %s" % (repofile), d)
            else:
                runfetchcmd("git clone -n %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, repodir), d)

        os.chdir(repodir)
        # Remove all but the .git directory
        if not self._contains_ref(ud.tag, d):
            runfetchcmd("rm * -Rf", d)
            runfetchcmd("git fetch %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, ud.branch), d)
            runfetchcmd("git fetch --tags %s://%s%s%s" % (ud.proto, username, ud.host, ud.path), d)
            runfetchcmd("git prune-packed", d)
            runfetchcmd("git pack-redundant --all | xargs -r rm", d)

        os.chdir(repodir)
        mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
        if mirror_tarballs != "0":
            bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
            runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)

        if os.path.exists(codir):
            bb.utils.prunedir(codir)

        bb.mkdirhier(codir)
        os.chdir(repodir)
        runfetchcmd("git read-tree %s" % (ud.tag), d)
        runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d)

        os.chdir(codir)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
        runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)

        os.chdir(repodir)
        bb.utils.prunedir(codir)

    def suppports_srcrev(self):
        return True

    def _contains_ref(self, tag, d):
        output = runfetchcmd("git log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % tag, d, quiet=True)
        return output.split()[0] != "0"

    def _revision_key(self, url, ud, d):
        """
        Return a unique key for the url
        """
        return "git:" + ud.host + ud.path.replace('/', '.')

    def _latest_revision(self, url, ud, d):
        """
        Compute the HEAD revision for the url
        """
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        output = runfetchcmd("git ls-remote %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, ud.branch), d, True)
        return output.split()[0]

    def _build_revision(self, url, ud, d):
        return ud.tag

    def _want_sortable_revision(self, url, ud, d):
        return bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True) or False

    def _sortable_revision_disabled(self, url, ud, d):
        """
        This is only called when _want_sortable_revision called true

        We will have to get the updated revision.
        """
        gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
        repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

        key = "GIT_CACHED_REVISION-%s-%s" % (gitsrcname, ud.tag)
        if bb.data.getVar(key, d):
            return bb.data.getVar(key, d)

        # Runtime warning on wrongly configured sources
        if ud.tag == "1":
            bb.msg.error(1, bb.msg.domain.Fetcher, "SRCREV is '1'. This indicates a configuration error of %s" % url)
            return "0+1"

        cwd = os.getcwd()

        # Check if we have the rev already
        if not os.path.exists(repodir):
            print "no repo"
            self.go(None, ud, d)

        os.chdir(repodir)
        if not self._contains_ref(ud.tag, d):
            self.go(None, ud, d)

        output = runfetchcmd("git rev-list %s -- 2> /dev/null | wc -l" % ud.tag, d, quiet=True)
        os.chdir(cwd)

        sortable_revision = "%s+%s" % (output.split()[0], ud.tag)
        bb.data.setVar(key, sortable_revision, d)
        return sortable_revision

178
bitbake-dev/lib/bb/fetch/hg.py
Normal file
@@ -0,0 +1,178 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake 'Fetch' implementation for mercurial DRCS (hg).
|
||||
|
||||
"""
|
||||
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
# Copyright (C) 2004 Marcin Juszkiewicz
|
||||
# Copyright (C) 2007 Robert Schuster
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||
|
||||
import os
|
||||
import sys
|
||||
import bb
|
||||
from bb import data
|
||||
from bb.fetch import Fetch
|
||||
from bb.fetch import FetchError
|
||||
from bb.fetch import MissingParameterError
|
||||
from bb.fetch import runfetchcmd
|
||||
|
||||
class Hg(Fetch):
|
||||
"""Class to fetch a from mercurial repositories"""
|
||||
def supports(self, url, ud, d):
|
||||
"""
|
||||
Check to see if a given url can be fetched with mercurial.
|
||||
"""
|
||||
return ud.type in ['hg']
|
||||
|
||||
def localpath(self, url, ud, d):
|
||||
if not "module" in ud.parm:
|
||||
raise MissingParameterError("hg method needs a 'module' parameter")
|
||||
|
||||
ud.module = ud.parm["module"]
|
||||
|
||||
# Create paths to mercurial checkouts
|
||||
relpath = ud.path
|
||||
if relpath.startswith('/'):
|
||||
# Remove leading slash as os.path.join can't cope
|
||||
relpath = relpath[1:]
|
||||
ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
|
||||
ud.moddir = os.path.join(ud.pkgdir, ud.module)
|
||||
|
||||
if 'rev' in ud.parm:
|
||||
ud.revision = ud.parm['rev']
|
||||
else:
|
||||
tag = Fetch.srcrev_internal_helper(ud, d)
|
||||
if tag is True:
|
||||
ud.revision = self.latest_revision(url, ud, d)
|
||||
elif tag:
|
||||
ud.revision = tag
|
||||
else:
|
||||
ud.revision = self.latest_revision(url, ud, d)
|
||||
|
||||
ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
|
||||
|
||||
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
|
||||
|
||||
def _buildhgcommand(self, ud, d, command):
|
||||
"""
|
||||
Build up an hg commandline based on ud
|
||||
command is "fetch", "update", "info"
|
||||
"""
|
||||
|
||||
basecmd = data.expand('${FETCHCMD_hg}', d)
|
||||
|
||||
proto = "http"
|
||||
if "proto" in ud.parm:
|
||||
proto = ud.parm["proto"]
|
||||
|
||||
host = ud.host
|
||||
if proto == "file":
|
||||
host = "/"
|
||||
ud.host = "localhost"
|
||||
|
||||
if not ud.user:
|
||||
hgroot = host + ud.path
|
||||
else:
|
||||
hgroot = ud.user + "@" + host + ud.path
|
||||
|
||||
if command is "info":
|
||||
return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)
|
||||
|
||||
options = [];
|
||||
if ud.revision:
|
||||
options.append("-r %s" % ud.revision)
|
||||
|
||||
if command is "fetch":
|
||||
cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
|
||||
elif command is "pull":
|
||||
# do not pass options list; limiting pull to rev causes the local
|
||||
# repo not to contain it and immediately following "update" command
|
||||
# will crash
|
||||
cmd = "%s pull" % (basecmd)
|
||||
elif command is "update":
|
||||
cmd = "%s update -C %s" % (basecmd, " ".join(options))
|
||||
else:
|
||||
raise FetchError("Invalid hg command %s" % command)
|
||||
|
||||
return cmd
|
||||
|
||||
def go(self, loc, ud, d):
|
||||
"""Fetch url"""
|
||||
|
||||
# try to use the tarball stash
|
||||
if Fetch.try_mirror(d, ud.localfile):
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping hg checkout." % ud.localpath)
|
||||
return
|
||||
|
||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'")
|
||||
|
||||
if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
|
||||
updatecmd = self._buildhgcommand(ud, d, "pull")
|
||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
|
||||
# update sources there
|
||||
os.chdir(ud.moddir)
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd)
|
||||
runfetchcmd(updatecmd, d)
|
||||
|
||||
else:
|
||||
fetchcmd = self._buildhgcommand(ud, d, "fetch")
|
||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
|
||||
# check out sources there
|
||||
bb.mkdirhier(ud.pkgdir)
|
||||
os.chdir(ud.pkgdir)
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % fetchcmd)
|
||||
runfetchcmd(fetchcmd, d)
|
||||
|
||||
# Even when we clone (fetch), we still need to update as hg's clone
|
||||
# won't checkout the specified revision if its on a branch
|
||||
updatecmd = self._buildhgcommand(ud, d, "update")
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd)
|
||||
runfetchcmd(updatecmd, d)
|
||||
|
||||
os.chdir(ud.pkgdir)
|
||||
try:
|
||||
runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
|
||||
except:
|
||||
t, v, tb = sys.exc_info()
|
||||
try:
|
||||
os.unlink(ud.localpath)
|
||||
except OSError:
|
||||
pass
|
||||
raise t, v, tb
|
||||
|
||||
def suppports_srcrev(self):
|
||||
return True
|
||||
|
||||
def _latest_revision(self, url, ud, d):
|
||||
"""
|
||||
Compute tip revision for the url
|
||||
"""
|
||||
output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
|
||||
return output.strip()
|
||||
|
||||
def _build_revision(self, url, ud, d):
|
||||
return ud.revision
|
||||
|
||||
def _revision_key(self, url, ud, d):
|
||||
"""
|
||||
Return a unique key for the url
|
||||
"""
|
||||
return "hg:" + ud.moddir
|
||||
|
||||
72
bitbake-dev/lib/bb/fetch/local.py
Normal file
@@ -0,0 +1,72 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.

"""

# Copyright (C) 2003, 2004  Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003  Holger Schurig

import os
import bb
from bb import data
from bb.fetch import Fetch

class Local(Fetch):
    def supports(self, url, urldata, d):
        """
        Check to see if a given url can be fetched with cvs.
        """
        return urldata.type in ['file','patch']

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        path = url.split("://")[1]
        path = path.split(";")[0]
        newpath = path
        if path[0] != "/":
            filespath = data.getVar('FILESPATH', d, 1)
            if filespath:
                newpath = bb.which(filespath, path)
            if not newpath:
                filesdir = data.getVar('FILESDIR', d, 1)
                if filesdir:
                    newpath = os.path.join(filesdir, path)
        # We don't set localfile as for this fetcher the file is already local!
        return newpath

    def go(self, url, urldata, d):
        """Fetch urls (no-op for Local method)"""
        # no need to fetch local files, we'll deal with them in place.
        return 1

    def checkstatus(self, url, urldata, d):
        """
        Check the status of the url
        """
        if urldata.localpath.find("*") != -1:
            bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s looks like a glob and was therefore not checked." % url)
            return True
        if os.path.exists(urldata.localpath):
            return True
        return False
155
bitbake-dev/lib/bb/fetch/osc.py
Normal file
@@ -0,0 +1,155 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
Bitbake "Fetch" implementation for osc (Opensuse build service client).
|
||||
Based on the svn "Fetch" implementation.
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import bb
|
||||
from bb import data
|
||||
from bb.fetch import Fetch
|
||||
from bb.fetch import FetchError
|
||||
from bb.fetch import MissingParameterError
|
||||
from bb.fetch import runfetchcmd
|
||||
|
||||
class Osc(Fetch):
|
||||
"""Class to fetch a module or modules from Opensuse build server
|
||||
repositories."""
|
||||
|
||||
def supports(self, url, ud, d):
|
||||
"""
|
||||
Check to see if a given url can be fetched with osc.
|
||||
"""
|
||||
return ud.type in ['osc']
|
||||
|
||||
def localpath(self, url, ud, d):
|
||||
if not "module" in ud.parm:
|
||||
raise MissingParameterError("osc method needs a 'module' parameter.")
|
||||
|
||||
ud.module = ud.parm["module"]
|
||||
|
||||
# Create paths to osc checkouts
|
||||
relpath = ud.path
|
||||
if relpath.startswith('/'):
|
||||
# Remove leading slash as os.path.join can't cope
|
||||
relpath = relpath[1:]
|
||||
ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
|
||||
ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
|
||||
|
||||
if 'rev' in ud.parm:
|
||||
ud.revision = ud.parm['rev']
|
||||
else:
|
||||
pv = data.getVar("PV", d, 0)
|
||||
rev = Fetch.srcrev_internal_helper(ud, d)
|
||||
if rev and rev != True:
|
||||
ud.revision = rev
|
||||
else:
|
||||
ud.revision = ""
|
||||
|
||||
ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
|
||||
|
||||
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
|
||||
|
||||
def _buildosccommand(self, ud, d, command):
|
||||
"""
|
||||
Build up an ocs commandline based on ud
|
||||
command is "fetch", "update", "info"
|
||||
"""
|
||||
|
||||
basecmd = data.expand('${FETCHCMD_osc}', d)
|
||||
|
||||
proto = "ocs"
|
||||
if "proto" in ud.parm:
|
||||
proto = ud.parm["proto"]
|
||||
|
||||
options = []
|
||||
|
||||
config = "-c %s" % self.generate_config(ud, d)
|
||||
|
||||
if ud.revision:
|
||||
options.append("-r %s" % ud.revision)
|
||||
|
||||
coroot = ud.path
|
||||
if coroot.startswith('/'):
|
||||
# Remove leading slash as os.path.join can't cope
|
||||
coroot= coroot[1:]
|
||||
|
||||
if command is "fetch":
|
||||
osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
|
||||
elif command is "update":
|
||||
osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
|
||||
else:
|
||||
raise FetchError("Invalid osc command %s" % command)
|
||||
|
||||
return osccmd
|
||||
|
||||
def go(self, loc, ud, d):
|
||||
"""
|
||||
Fetch url
|
||||
"""
|
||||
|
||||
# Try to use the tarball stash
|
||||
if Fetch.try_mirror(d, ud.localfile):
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping osc checkout." % ud.localpath)
|
||||
return
|
||||
|
||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'")
|
||||
|
||||
if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
|
||||
oscupdatecmd = self._buildosccommand(ud, d, "update")
|
||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Update "+ loc)
|
||||
# update sources there
|
||||
os.chdir(ud.moddir)
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscupdatecmd)
|
||||
runfetchcmd(oscupdatecmd, d)
|
||||
else:
|
||||
oscfetchcmd = self._buildosccommand(ud, d, "fetch")
|
||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
|
||||
# check out sources there
|
||||
bb.mkdirhier(ud.pkgdir)
|
||||
os.chdir(ud.pkgdir)
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscfetchcmd)
|
||||
runfetchcmd(oscfetchcmd, d)
|
||||
|
||||
os.chdir(os.path.join(ud.pkgdir + ud.path))
|
||||
# tar them up to a defined filename
|
||||
try:
|
||||
runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
|
||||
except:
|
||||
t, v, tb = sys.exc_info()
|
||||
try:
|
||||
os.unlink(ud.localpath)
|
||||
except OSError:
|
||||
pass
|
||||
raise t, v, tb
|
||||
|
||||
def supports_srcrev(self):
|
||||
return False
|
||||
|
||||
def generate_config(self, ud, d):
|
||||
"""
|
||||
Generate a .oscrc to be used for this run.
|
||||
"""
|
||||
|
||||
config_path = "%s/oscrc" % data.expand('${OSCDIR}', d)
|
||||
if (os.path.exists(config_path)):
|
||||
os.remove(config_path)
|
||||
|
||||
f = open(config_path, 'w')
|
||||
f.write("[general]\n")
|
||||
f.write("apisrv = %s\n" % ud.host)
|
||||
f.write("scheme = http\n")
|
||||
f.write("su-wrapper = su -c\n")
|
||||
f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
|
||||
f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
|
||||
f.write("extra-pkgs = gzip\n")
|
||||
f.write("\n")
|
||||
f.write("[%s]\n" % ud.host)
|
||||
f.write("user = %s\n" % ud.parm["user"])
|
||||
f.write("pass = %s\n" % ud.parm["pswd"])
|
||||
f.close()
|
||||
|
||||
return config_path
|
||||
214
bitbake-dev/lib/bb/fetch/perforce.py
Normal file
@@ -0,0 +1,214 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake 'Fetch' implementations
|
||||
|
||||
Classes for obtaining upstream sources for the
|
||||
BitBake build tools.
|
||||
|
||||
"""
|
||||
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||
|
||||
import os
|
||||
import bb
|
||||
from bb import data
|
||||
from bb.fetch import Fetch
|
||||
from bb.fetch import FetchError
|
||||
|
||||
class Perforce(Fetch):
|
||||
def supports(self, url, ud, d):
|
||||
return ud.type in ['p4']
|
||||
|
||||
def doparse(url,d):
|
||||
parm = {}
|
||||
path = url.split("://")[1]
|
||||
delim = path.find("@");
|
||||
if delim != -1:
|
||||
(user,pswd,host,port) = path.split('@')[0].split(":")
|
||||
path = path.split('@')[1]
|
||||
else:
|
||||
(host,port) = data.getVar('P4PORT', d).split(':')
|
||||
user = ""
|
||||
pswd = ""
|
||||
|
||||
if path.find(";") != -1:
|
||||
keys=[]
|
||||
values=[]
|
||||
plist = path.split(';')
|
||||
for item in plist:
|
||||
if item.count('='):
|
||||
(key,value) = item.split('=')
|
||||
keys.append(key)
|
||||
values.append(value)
|
||||
|
||||
parm = dict(zip(keys,values))
|
||||
path = "//" + path.split(';')[0]
|
||||
host += ":%s" % (port)
|
||||
parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
|
||||
|
||||
return host,path,user,pswd,parm
|
||||
doparse = staticmethod(doparse)
|
||||
|
||||
def getcset(d, depot,host,user,pswd,parm):
|
||||
p4opt = ""
|
||||
if "cset" in parm:
|
||||
return parm["cset"];
|
||||
if user:
|
||||
p4opt += " -u %s" % (user)
|
||||
if pswd:
|
||||
p4opt += " -P %s" % (pswd)
|
||||
if host:
|
||||
p4opt += " -p %s" % (host)
|
||||
|
||||
p4date = data.getVar("P4DATE", d, 1)
|
||||
if "revision" in parm:
|
||||
depot += "#%s" % (parm["revision"])
|
||||
elif "label" in parm:
|
||||
depot += "@%s" % (parm["label"])
|
||||
elif p4date:
|
||||
depot += "@%s" % (p4date)
|
||||
|
||||
p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
|
||||
p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
|
||||
cset = p4file.readline().strip()
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "READ %s" % (cset))
|
||||
if not cset:
|
||||
return -1
|
||||
|
||||
return cset.split(' ')[1]
|
||||
getcset = staticmethod(getcset)
|
||||
|
||||
def localpath(self, url, ud, d):
|
||||
|
||||
(host,path,user,pswd,parm) = Perforce.doparse(url,d)
|
||||
|
||||
# If a label is specified, we use that as our filename
|
||||
|
||||
if "label" in parm:
|
||||
ud.localfile = "%s.tar.gz" % (parm["label"])
|
||||
return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
|
||||
|
||||
base = path
|
||||
which = path.find('/...')
|
||||
if which != -1:
|
||||
base = path[:which]
|
||||
|
||||
if base[0] == "/":
|
||||
base = base[1:]
|
||||
|
||||
cset = Perforce.getcset(d, path, host, user, pswd, parm)
|
||||
|
||||
ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host,base.replace('/', '.'), cset), d)
|
||||
|
||||
return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
|
||||
|
||||
def go(self, loc, ud, d):
|
||||
"""
|
||||
Fetch urls
|
||||
"""
|
||||
|
||||
# try to use the tarball stash
|
||||
if Fetch.try_mirror(d, ud.localfile):
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath)
|
||||
return
|
||||
|
||||
(host,depot,user,pswd,parm) = Perforce.doparse(loc, d)
|
||||
|
||||
if depot.find('/...') != -1:
|
||||
path = depot[:depot.find('/...')]
|
||||
else:
|
||||
path = depot
|
||||
|
||||
if "module" in parm:
|
||||
module = parm["module"]
|
||||
else:
|
||||
module = os.path.basename(path)
|
||||
|
||||
localdata = data.createCopy(d)
|
||||
data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
|
||||
data.update_data(localdata)
|
||||
|
||||
# Get the p4 command
|
||||
p4opt = ""
|
||||
if user:
|
||||
p4opt += " -u %s" % (user)
|
||||
|
||||
if pswd:
|
||||
p4opt += " -P %s" % (pswd)
|
||||
|
||||
if host:
|
||||
p4opt += " -p %s" % (host)
|
||||
|
||||
p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)
|
||||
|
||||
# create temp directory
|
||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
|
||||
bb.mkdirhier(data.expand('${WORKDIR}', localdata))
|
||||
data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
|
||||
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
|
||||
tmpfile = tmppipe.readline().strip()
|
||||
if not tmpfile:
|
||||
bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
|
||||
raise FetchError(module)
|
||||
|
||||
if "label" in parm:
|
||||
depot = "%s@%s" % (depot,parm["label"])
|
||||
else:
|
||||
cset = Perforce.getcset(d, depot, host, user, pswd, parm)
|
||||
depot = "%s@%s" % (depot,cset)
|
||||
|
||||
os.chdir(tmpfile)
|
||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
|
||||
bb.msg.note(1, bb.msg.domain.Fetcher, "%s%s files %s" % (p4cmd, p4opt, depot))
|
||||
p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))
|
||||
|
||||
if not p4file:
|
||||
bb.error("Fetch: unable to get the P4 files from %s" % (depot))
|
||||
raise FetchError(module)
|
||||
|
||||
count = 0
|
||||
|
||||
for file in p4file:
|
||||
list = file.split()
|
||||
|
||||
if list[2] == "delete":
|
||||
continue
|
||||
|
||||
dest = list[0][len(path)+1:]
|
||||
where = dest.find("#")
|
||||
|
||||
os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module,dest[:where],list[0]))
|
||||
count = count + 1
|
||||
|
||||
if count == 0:
|
||||
bb.error("Fetch: No files gathered from the P4 fetch")
|
||||
raise FetchError(module)
|
||||
|
||||
myret = os.system("tar -czf %s %s" % (ud.localpath, module))
|
||||
if myret != 0:
|
||||
try:
|
||||
os.unlink(ud.localpath)
|
||||
except OSError:
|
||||
pass
|
||||
raise FetchError(module)
|
||||
# cleanup
|
||||
os.system('rm -rf %s' % tmpfile)
|
||||
|
||||
|
||||
118
bitbake-dev/lib/bb/fetch/ssh.py
Normal file
@@ -0,0 +1,118 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
'''
|
||||
BitBake 'Fetch' implementations
|
||||
|
||||
This implementation is for Secure Shell (SSH), and attempts to comply with the
|
||||
IETF secsh internet draft:
|
||||
http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
|
||||
|
||||
Currently does not support the sftp parameters, as this uses scp
|
||||
Also does not support the 'fingerprint' connection parameter.
|
||||
|
||||
'''
|
||||
|
||||
# Copyright (C) 2006 OpenedHand Ltd.
|
||||
#
|
||||
#
|
||||
# Based in part on svk.py:
|
||||
# Copyright (C) 2006 Holger Hans Peter Freyther
|
||||
# Based on svn.py:
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
# Based on functions from the base bb module:
|
||||
# Copyright 2003 Holger Schurig
|
||||
#
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import re, os
|
||||
from bb import data
|
||||
from bb.fetch import Fetch
|
||||
from bb.fetch import FetchError
|
||||
|
||||
|
||||
__pattern__ = re.compile(r'''
|
||||
\s* # Skip leading whitespace
|
||||
ssh:// # scheme
|
||||
( # Optional username/password block
|
||||
(?P<user>\S+) # username
|
||||
(:(?P<pass>\S+))? # colon followed by the password (optional)
|
||||
)?
|
||||
(?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
|
||||
@
|
||||
(?P<host>\S+?) # non-greedy match of the host
|
||||
(:(?P<port>[0-9]+))? # colon followed by the port (optional)
|
||||
/
|
||||
(?P<path>[^;]+) # path on the remote system, may be absolute or relative,
|
||||
# and may include the use of '~' to reference the remote home
|
||||
# directory
|
||||
(?P<sparam>(;[^;]+)*)? # parameters block (optional)
|
||||
$
|
||||
''', re.VERBOSE)
|
||||
|
||||
class SSH(Fetch):
|
||||
'''Class to fetch a module or modules via Secure Shell'''
|
||||
|
||||
def supports(self, url, urldata, d):
|
||||
return __pattern__.match(url) != None
|
||||
|
||||
def localpath(self, url, urldata, d):
|
||||
m = __pattern__.match(url)
|
||||
path = m.group('path')
|
||||
host = m.group('host')
|
||||
lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path))
|
||||
return lpath
|
||||
|
||||
def go(self, url, urldata, d):
|
||||
dldir = data.getVar('DL_DIR', d, 1)
|
||||
|
||||
m = __pattern__.match(url)
|
||||
path = m.group('path')
|
||||
host = m.group('host')
|
||||
port = m.group('port')
|
||||
user = m.group('user')
|
||||
password = m.group('pass')
|
||||
|
||||
ldir = os.path.join(dldir, host)
|
||||
lpath = os.path.join(ldir, os.path.basename(path))
|
||||
|
||||
if not os.path.exists(ldir):
|
||||
os.makedirs(ldir)
|
||||
|
||||
if port:
|
||||
port = '-P %s' % port
|
||||
else:
|
||||
port = ''
|
||||
|
||||
if user:
|
||||
fr = user
|
||||
if password:
|
||||
fr += ':%s' % password
|
||||
fr += '@%s' % host
|
||||
else:
|
||||
fr = host
|
||||
fr += ':%s' % path
|
||||
|
||||
|
||||
import commands
|
||||
cmd = 'scp -B -r %s %s %s/' % (
|
||||
port,
|
||||
commands.mkarg(fr),
|
||||
commands.mkarg(ldir)
|
||||
)
|
||||
|
||||
(exitstatus, output) = commands.getstatusoutput(cmd)
|
||||
if exitstatus != 0:
|
||||
print output
|
||||
raise FetchError('Unable to fetch %s' % url)
|
||||
109
bitbake-dev/lib/bb/fetch/svk.py
Normal file
@@ -0,0 +1,109 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake 'Fetch' implementations
|
||||
|
||||
This implementation is for svk. It is based on the svn implementation
|
||||
|
||||
"""
|
||||
|
||||
# Copyright (C) 2006 Holger Hans Peter Freyther
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||
|
||||
import os
|
||||
import bb
|
||||
from bb import data
|
||||
from bb.fetch import Fetch
|
||||
from bb.fetch import FetchError
|
||||
from bb.fetch import MissingParameterError
|
||||
|
||||
class Svk(Fetch):
|
||||
"""Class to fetch a module or modules from svk repositories"""
|
||||
def supports(self, url, ud, d):
|
||||
"""
|
||||
Check to see if a given url can be fetched with cvs.
|
||||
"""
|
||||
return ud.type in ['svk']
|
||||
|
||||
def localpath(self, url, ud, d):
|
||||
if not "module" in ud.parm:
|
||||
raise MissingParameterError("svk method needs a 'module' parameter")
|
||||
else:
|
||||
ud.module = ud.parm["module"]
|
||||
|
||||
ud.revision = ""
|
||||
if 'rev' in ud.parm:
|
||||
ud.revision = ud.parm['rev']
|
||||
|
||||
ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
|
||||
|
||||
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
|
||||
|
||||
def forcefetch(self, url, ud, d):
|
||||
if (ud.date == "now"):
|
||||
return True
|
||||
return False
|
||||
|
||||
def go(self, loc, ud, d):
|
||||
"""Fetch urls"""
|
||||
|
||||
if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
|
||||
return
|
||||
|
||||
svkroot = ud.host + ud.path
|
||||
|
||||
svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)
|
||||
|
||||
if ud.revision:
|
||||
svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)
|
||||
|
||||
# create temp directory
|
||||
localdata = data.createCopy(d)
|
||||
data.update_data(localdata)
|
||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
|
||||
bb.mkdirhier(data.expand('${WORKDIR}', localdata))
|
||||
data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
|
||||
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
|
||||
tmpfile = tmppipe.readline().strip()
|
||||
if not tmpfile:
|
||||
bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
|
||||
raise FetchError(ud.module)
|
||||
|
||||
# check out sources there
|
||||
os.chdir(tmpfile)
|
||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd)
|
||||
myret = os.system(svkcmd)
|
||||
if myret != 0:
|
||||
try:
|
||||
os.rmdir(tmpfile)
|
||||
except OSError:
|
||||
pass
|
||||
raise FetchError(ud.module)
|
||||
|
||||
os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
|
||||
# tar them up to a defined filename
|
||||
myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
|
||||
if myret != 0:
|
||||
try:
|
||||
os.unlink(ud.localpath)
|
||||
except OSError:
|
||||
pass
|
||||
raise FetchError(ud.module)
|
||||
# cleanup
|
||||
os.system('rm -rf %s' % tmpfile)
|
||||
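A minimal standalone sketch of the checkout command the Svk fetcher above builds; the host, depot path, module and revision are assumed example values:

host, path, module, revision, date = "svk.example.com", "/depot", "trunk", "42", ""
svkroot = host + path
svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, module)
if revision:
    svkcmd = "svk co -r %s %s/%s" % (revision, svkroot, module)
# -> svk co -r 42 svk.example.com/depot/trunk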
bitbake-dev/lib/bb/fetch/svn.py (new file, 206 lines)
@@ -0,0 +1,206 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake 'Fetch' implementation for svn.
|
||||
|
||||
"""
|
||||
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
# Copyright (C) 2004 Marcin Juszkiewicz
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||
|
||||
import os
|
||||
import sys
|
||||
import bb
|
||||
from bb import data
|
||||
from bb.fetch import Fetch
|
||||
from bb.fetch import FetchError
|
||||
from bb.fetch import MissingParameterError
|
||||
from bb.fetch import runfetchcmd
|
||||
|
||||
class Svn(Fetch):
|
||||
"""Class to fetch a module or modules from svn repositories"""
|
||||
def supports(self, url, ud, d):
|
||||
"""
|
||||
Check to see if a given url can be fetched with svn.
|
||||
"""
|
||||
return ud.type in ['svn']
|
||||
|
||||
def localpath(self, url, ud, d):
|
||||
if not "module" in ud.parm:
|
||||
raise MissingParameterError("svn method needs a 'module' parameter")
|
||||
|
||||
ud.module = ud.parm["module"]
|
||||
|
||||
# Create paths to svn checkouts
|
||||
relpath = ud.path
|
||||
if relpath.startswith('/'):
|
||||
# Remove leading slash as os.path.join can't cope
|
||||
relpath = relpath[1:]
|
||||
ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
|
||||
ud.moddir = os.path.join(ud.pkgdir, ud.module)
|
||||
|
||||
if 'rev' in ud.parm:
|
||||
ud.date = ""
|
||||
ud.revision = ud.parm['rev']
|
||||
elif 'date' in ud.parm:
|
||||
ud.date = ud.parm['date']
|
||||
ud.revision = ""
|
||||
else:
|
||||
#
|
||||
# ***Nasty hack***
|
||||
# If DATE in unexpanded PV, use ud.date (which is set from SRCDATE)
|
||||
# Should warn people to switch to SRCREV here
|
||||
#
|
||||
pv = data.getVar("PV", d, 0)
|
||||
if "DATE" in pv:
|
||||
ud.revision = ""
|
||||
else:
|
||||
rev = Fetch.srcrev_internal_helper(ud, d)
|
||||
if rev is True:
|
||||
ud.revision = self.latest_revision(url, ud, d)
|
||||
ud.date = ""
|
||||
elif rev:
|
||||
ud.revision = rev
|
||||
ud.date = ""
|
||||
else:
|
||||
ud.revision = ""
|
||||
|
||||
ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
|
||||
|
||||
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
|
||||
|
||||
def _buildsvncommand(self, ud, d, command):
|
||||
"""
|
||||
Build up an svn commandline based on ud
|
||||
command is "fetch", "update", "info"
|
||||
"""
|
||||
|
||||
basecmd = data.expand('${FETCHCMD_svn}', d)
|
||||
|
||||
proto = "svn"
|
||||
if "proto" in ud.parm:
|
||||
proto = ud.parm["proto"]
|
||||
|
||||
svn_rsh = None
|
||||
if proto == "svn+ssh" and "rsh" in ud.parm:
|
||||
svn_rsh = ud.parm["rsh"]
|
||||
|
||||
svnroot = ud.host + ud.path
|
||||
|
||||
# either use the revision, or SRCDATE in braces,
|
||||
options = []
|
||||
|
||||
if ud.user:
|
||||
options.append("--username %s" % ud.user)
|
||||
|
||||
if ud.pswd:
|
||||
options.append("--password %s" % ud.pswd)
|
||||
|
||||
if command is "info":
|
||||
svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
|
||||
else:
|
||||
suffix = ""
|
||||
if ud.revision:
|
||||
options.append("-r %s" % ud.revision)
|
||||
suffix = "@%s" % (ud.revision)
|
||||
elif ud.date:
|
||||
options.append("-r {%s}" % ud.date)
|
||||
|
||||
if command is "fetch":
|
||||
svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
|
||||
elif command is "update":
|
||||
svncmd = "%s update %s" % (basecmd, " ".join(options))
|
||||
else:
|
||||
raise FetchError("Invalid svn command %s" % command)
|
||||
|
||||
if svn_rsh:
|
||||
svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
|
||||
|
||||
return svncmd
|
||||
|
||||
def go(self, loc, ud, d):
|
||||
"""Fetch url"""
|
||||
|
||||
# try to use the tarball stash
|
||||
if Fetch.try_mirror(d, ud.localfile):
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % ud.localpath)
|
||||
return
|
||||
|
||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'")
|
||||
|
||||
if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
|
||||
svnupdatecmd = self._buildsvncommand(ud, d, "update")
|
||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
|
||||
# update sources there
|
||||
os.chdir(ud.moddir)
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupdatecmd)
|
||||
runfetchcmd(svnupdatecmd, d)
|
||||
else:
|
||||
svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
|
||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
|
||||
# check out sources there
|
||||
bb.mkdirhier(ud.pkgdir)
|
||||
os.chdir(ud.pkgdir)
|
||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnfetchcmd)
|
||||
runfetchcmd(svnfetchcmd, d)
|
||||
|
||||
os.chdir(ud.pkgdir)
|
||||
# tar them up to a defined filename
|
||||
try:
|
||||
runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
|
||||
except:
|
||||
t, v, tb = sys.exc_info()
|
||||
try:
|
||||
os.unlink(ud.localpath)
|
||||
except OSError:
|
||||
pass
|
||||
raise t, v, tb
|
||||
|
||||
def suppports_srcrev(self):
|
||||
return True
|
||||
|
||||
def _revision_key(self, url, ud, d):
|
||||
"""
|
||||
Return a unique key for the url
|
||||
"""
|
||||
return "svn:" + ud.moddir
|
||||
|
||||
def _latest_revision(self, url, ud, d):
|
||||
"""
|
||||
Return the latest upstream revision number
|
||||
"""
|
||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "SVN fetcher hitting network for %s" % url)
|
||||
|
||||
output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
|
||||
|
||||
revision = None
|
||||
for line in output.splitlines():
|
||||
if "Last Changed Rev" in line:
|
||||
revision = line.split(":")[1].strip()
|
||||
|
||||
return revision
|
||||
|
||||
def _sortable_revision(self, url, ud, d):
|
||||
"""
|
||||
Return a sortable revision number which in our case is the revision number
|
||||
"""
|
||||
|
||||
return self._build_revision(url, ud, d)
|
||||
|
||||
def _build_revision(self, url, ud, d):
|
||||
return ud.revision
|
||||
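A minimal standalone sketch of the checkout command _buildsvncommand() above produces for command == "fetch" with a pinned revision; the FETCHCMD_svn value and the host/path/module/revision are assumed:

basecmd, proto = "/usr/bin/env svn", "svn"      # stand-in for ${FETCHCMD_svn}
host, path, module, revision = "svn.example.com", "/repos", "trunk", "1234"
svnroot = host + path
options, suffix = ["-r %s" % revision], "@%s" % revision
svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto,
                                       svnroot, module, suffix, module)
# -> /usr/bin/env svn co -r 1234 svn://svn.example.com/repos/trunk@1234 trunk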
bitbake-dev/lib/bb/fetch/wget.py (new file, 130 lines)
@@ -0,0 +1,130 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake 'Fetch' implementations
|
||||
|
||||
Classes for obtaining upstream sources for the
|
||||
BitBake build tools.
|
||||
|
||||
"""
|
||||
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||
|
||||
import os
|
||||
import bb
|
||||
from bb import data
|
||||
from bb.fetch import Fetch
|
||||
from bb.fetch import FetchError
|
||||
from bb.fetch import uri_replace
|
||||
|
||||
class Wget(Fetch):
|
||||
"""Class to fetch urls via 'wget'"""
|
||||
def supports(self, url, ud, d):
|
||||
"""
|
||||
Check to see if a given url can be fetched with wget.
|
||||
"""
|
||||
return ud.type in ['http','https','ftp']
|
||||
|
||||
def localpath(self, url, ud, d):
|
||||
|
||||
url = bb.encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}])
|
||||
ud.basename = os.path.basename(ud.path)
|
||||
ud.localfile = data.expand(os.path.basename(url), d)
|
||||
|
||||
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
|
||||
|
||||
def go(self, uri, ud, d, checkonly = False):
|
||||
"""Fetch urls"""
|
||||
|
||||
def fetch_uri(uri, ud, d):
|
||||
if checkonly:
|
||||
fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
|
||||
elif os.path.exists(ud.localpath):
|
||||
# file exists but wasn't completed, so try to resume the download
|
||||
fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
|
||||
else:
|
||||
fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
|
||||
|
||||
uri = uri.split(";")[0]
|
||||
uri_decoded = list(bb.decodeurl(uri))
|
||||
uri_type = uri_decoded[0]
|
||||
uri_host = uri_decoded[1]
|
||||
|
||||
bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
|
||||
fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
|
||||
fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
|
||||
httpproxy = None
|
||||
ftpproxy = None
|
||||
if uri_type == 'http':
|
||||
httpproxy = data.getVar("HTTP_PROXY", d, True)
|
||||
httpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
|
||||
for p in httpproxy_ignore:
|
||||
if uri_host.endswith(p):
|
||||
httpproxy = None
|
||||
break
|
||||
if uri_type == 'ftp':
|
||||
ftpproxy = data.getVar("FTP_PROXY", d, True)
|
||||
ftpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
|
||||
for p in ftpproxy_ignore:
|
||||
if uri_host.endswith(p):
|
||||
ftpproxy = None
|
||||
break
|
||||
if httpproxy:
|
||||
fetchcmd = "http_proxy=" + httpproxy + " " + fetchcmd
|
||||
if ftpproxy:
|
||||
fetchcmd = "ftp_proxy=" + ftpproxy + " " + fetchcmd
|
||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
|
||||
ret = os.system(fetchcmd)
|
||||
if ret != 0:
|
||||
return False
|
||||
|
||||
# Sanity check since wget can pretend it succeeded when it didn't
|
||||
# Also, this used to happen if sourceforge sent us to the mirror page
|
||||
if not os.path.exists(ud.localpath) and not checkonly:
|
||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
localdata = data.createCopy(d)
|
||||
data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
|
||||
data.update_data(localdata)
|
||||
|
||||
premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
|
||||
for (find, replace) in premirrors:
|
||||
newuri = uri_replace(uri, find, replace, d)
|
||||
if newuri != uri:
|
||||
if fetch_uri(newuri, ud, localdata):
|
||||
return True
|
||||
|
||||
if fetch_uri(uri, ud, localdata):
|
||||
return True
|
||||
|
||||
# try mirrors
|
||||
mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
|
||||
for (find, replace) in mirrors:
|
||||
newuri = uri_replace(uri, find, replace, d)
|
||||
if newuri != uri:
|
||||
if fetch_uri(newuri, ud, localdata):
|
||||
return True
|
||||
|
||||
raise FetchError(uri)
|
||||
|
||||
|
||||
def checkstatus(self, uri, ud, d):
|
||||
return self.go(uri, ud, d, True)
|
||||
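A standalone sketch (with made-up mirror entries) of how go() above turns the PREMIRRORS/MIRRORS variables into (find, replace) pairs; matching premirror rewrites are tried first, then the original URI, then the mirror rewrites:

raw = ("http://downloads.example.com/.* http://mirror.example.org/pub/\n"
       "ftp://.*/.* http://mirror.example.org/ftp/")
mirrors = [i.split() for i in raw.split('\n') if i]
# -> [['http://downloads.example.com/.*', 'http://mirror.example.org/pub/'],
#     ['ftp://.*/.*', 'http://mirror.example.org/ftp/']]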
bitbake-dev/lib/bb/manifest.py (new file, 144 lines)
@@ -0,0 +1,144 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import os, sys
|
||||
import bb, bb.data
|
||||
|
||||
def getfields(line):
|
||||
fields = {}
|
||||
fieldmap = ( "pkg", "src", "dest", "type", "mode", "uid", "gid", "major", "minor", "start", "inc", "count" )
|
||||
for f in xrange(len(fieldmap)):
|
||||
fields[fieldmap[f]] = None
|
||||
|
||||
if not line:
|
||||
return None
|
||||
|
||||
splitline = line.split()
|
||||
if not len(splitline):
|
||||
return None
|
||||
|
||||
try:
|
||||
for f in xrange(len(fieldmap)):
|
||||
if splitline[f] == '-':
|
||||
continue
|
||||
fields[fieldmap[f]] = splitline[f]
|
||||
except IndexError:
|
||||
pass
|
||||
return fields
|
||||
|
||||
def parse (mfile, d):
|
||||
manifest = []
|
||||
while 1:
|
||||
line = mfile.readline()
|
||||
if not line:
|
||||
break
|
||||
if line.startswith("#"):
|
||||
continue
|
||||
fields = getfields(line)
|
||||
if not fields:
|
||||
continue
|
||||
manifest.append(fields)
|
||||
return manifest
|
||||
|
||||
def emit (func, manifest, d):
|
||||
#str = "%s () {\n" % func
|
||||
str = ""
|
||||
for line in manifest:
|
||||
emittedline = emit_line(func, line, d)
|
||||
if not emittedline:
|
||||
continue
|
||||
str += emittedline + "\n"
|
||||
# str += "}\n"
|
||||
return str
|
||||
|
||||
def mangle (func, line, d):
|
||||
import copy
|
||||
newline = copy.copy(line)
|
||||
src = bb.data.expand(newline["src"], d)
|
||||
|
||||
if src:
|
||||
if not os.path.isabs(src):
|
||||
src = "${WORKDIR}/" + src
|
||||
|
||||
dest = newline["dest"]
|
||||
if not dest:
|
||||
return
|
||||
|
||||
if dest.startswith("/"):
|
||||
dest = dest[1:]
|
||||
|
||||
if func is "do_install":
|
||||
dest = "${D}/" + dest
|
||||
|
||||
elif func is "do_populate":
|
||||
dest = "${WORKDIR}/install/" + newline["pkg"] + "/" + dest
|
||||
|
||||
elif func is "do_stage":
|
||||
varmap = {}
|
||||
varmap["${bindir}"] = "${STAGING_DIR}/${HOST_SYS}/bin"
|
||||
varmap["${libdir}"] = "${STAGING_DIR}/${HOST_SYS}/lib"
|
||||
varmap["${includedir}"] = "${STAGING_DIR}/${HOST_SYS}/include"
|
||||
varmap["${datadir}"] = "${STAGING_DATADIR}"
|
||||
|
||||
matched = 0
|
||||
for key in varmap.keys():
|
||||
if dest.startswith(key):
|
||||
dest = varmap[key] + "/" + dest[len(key):]
|
||||
matched = 1
|
||||
if not matched:
|
||||
newline = None
|
||||
return
|
||||
else:
|
||||
newline = None
|
||||
return
|
||||
|
||||
newline["src"] = src
|
||||
newline["dest"] = dest
|
||||
return newline
|
||||
|
||||
def emit_line (func, line, d):
|
||||
import copy
|
||||
newline = copy.deepcopy(line)
|
||||
newline = mangle(func, newline, d)
|
||||
if not newline:
|
||||
return None
|
||||
|
||||
str = ""
|
||||
type = newline["type"]
|
||||
mode = newline["mode"]
|
||||
src = newline["src"]
|
||||
dest = newline["dest"]
|
||||
if type is "d":
|
||||
str = "install -d "
|
||||
if mode:
|
||||
str += "-m %s " % mode
|
||||
str += dest
|
||||
elif type is "f":
|
||||
if not src:
|
||||
return None
|
||||
if dest.endswith("/"):
|
||||
str = "install -d "
|
||||
str += dest + "\n"
|
||||
str += "install "
|
||||
else:
|
||||
str = "install -D "
|
||||
if mode:
|
||||
str += "-m %s " % mode
|
||||
str += src + " " + dest
|
||||
del newline
|
||||
return str
|
||||
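A sketch of the whitespace-separated manifest line format getfields() above expects; the line itself is hypothetical, '-' columns stay None and missing trailing columns are tolerated:

line = "mypkg src/tool /usr/bin/tool f 0755 - -"
fieldmap = ("pkg", "src", "dest", "type", "mode", "uid", "gid",
            "major", "minor", "start", "inc", "count")
# getfields(line) -> {'pkg': 'mypkg', 'src': 'src/tool', 'dest': '/usr/bin/tool',
#                     'type': 'f', 'mode': '0755', 'uid': None, 'gid': None, ...}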
bitbake-dev/lib/bb/methodpool.py (new file, 84 lines)
@@ -0,0 +1,84 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
#
|
||||
# Copyright (C) 2006 Holger Hans Peter Freyther
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
|
||||
"""
|
||||
What is a method pool?
|
||||
|
||||
BitBake has a global method scope where .bb, .inc and .bbclass
|
||||
files can install methods. These methods are parsed from strings.
|
||||
To avoid recompiling and executing these strings we introduce
|
||||
a method pool to do this task.
|
||||
|
||||
This pool will be used to compile and execute the functions. It
|
||||
will be smart enough to avoid compiling and executing the same code twice.
|
||||
"""
|
||||
|
||||
from bb.utils import better_compile, better_exec
|
||||
from bb import error
|
||||
|
||||
# A dict of modules we have handled
|
||||
# it is the number of .bbclasses + x in size
|
||||
_parsed_methods = { }
|
||||
_parsed_fns = { }
|
||||
|
||||
def insert_method(modulename, code, fn):
|
||||
"""
|
||||
Add the code of a module. The methods
|
||||
are simply added; no checking is done
|
||||
"""
|
||||
comp = better_compile(code, "<bb>", fn )
|
||||
better_exec(comp, __builtins__, code, fn)
|
||||
|
||||
# now some instrumentation
|
||||
code = comp.co_names
|
||||
for name in code:
|
||||
if name in ['None', 'False']:
|
||||
continue
|
||||
elif name in _parsed_fns and not _parsed_fns[name] == modulename:
|
||||
error( "Error Method already seen: %s in' %s' now in '%s'" % (name, _parsed_fns[name], modulename))
|
||||
else:
|
||||
_parsed_fns[name] = modulename
|
||||
|
||||
def check_insert_method(modulename, code, fn):
|
||||
"""
|
||||
Add the code if it wasn't added before. The module
|
||||
name will be used for that
|
||||
|
||||
Variables:
|
||||
@modulename a short name e.g. base.bbclass
|
||||
@code The actual python code
|
||||
@fn The filename from the outer file
|
||||
"""
|
||||
if not modulename in _parsed_methods:
_parsed_methods[modulename] = 1
return insert_method(modulename, code, fn)
|
||||
|
||||
def parsed_module(modulename):
|
||||
"""
|
||||
Return whether the given module has already been parsed
|
||||
"""
|
||||
return modulename in _parsed_methods
|
||||
|
||||
|
||||
def get_parsed_dict():
|
||||
"""
|
||||
shortcut
|
||||
"""
|
||||
return _parsed_methods
|
||||
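A minimal usage sketch, assuming the bb package is importable; the module name and function body are made-up examples: register a method once, then confirm it was recorded.

from bb import methodpool

code = "def example_fn(d):\n    return d\n"
methodpool.insert_method("example", code, "example.bbclass")
"example_fn" in methodpool._parsed_fns      # True: the function is now recorded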
bitbake-dev/lib/bb/msg.py (new file, 125 lines)
@@ -0,0 +1,125 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake 'msg' implementation
|
||||
|
||||
Message handling infrastructure for bitbake
|
||||
|
||||
"""
|
||||
|
||||
# Copyright (C) 2006 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import sys, bb
|
||||
from bb import event
|
||||
|
||||
debug_level = {}
|
||||
|
||||
verbose = False
|
||||
|
||||
domain = bb.utils.Enum(
|
||||
'Build',
|
||||
'Cache',
|
||||
'Collection',
|
||||
'Data',
|
||||
'Depends',
|
||||
'Fetcher',
|
||||
'Parsing',
|
||||
'PersistData',
|
||||
'Provider',
|
||||
'RunQueue',
|
||||
'TaskData',
|
||||
'Util')
|
||||
|
||||
|
||||
class MsgBase(bb.event.Event):
|
||||
"""Base class for messages"""
|
||||
|
||||
def __init__(self, msg, d ):
|
||||
self._message = msg
|
||||
event.Event.__init__(self, d)
|
||||
|
||||
class MsgDebug(MsgBase):
|
||||
"""Debug Message"""
|
||||
|
||||
class MsgNote(MsgBase):
|
||||
"""Note Message"""
|
||||
|
||||
class MsgWarn(MsgBase):
|
||||
"""Warning Message"""
|
||||
|
||||
class MsgError(MsgBase):
|
||||
"""Error Message"""
|
||||
|
||||
class MsgFatal(MsgBase):
|
||||
"""Fatal Message"""
|
||||
|
||||
class MsgPlain(MsgBase):
|
||||
"""General output"""
|
||||
|
||||
#
|
||||
# Message control functions
|
||||
#
|
||||
|
||||
def set_debug_level(level):
|
||||
bb.msg.debug_level = {}
|
||||
for domain in bb.msg.domain:
|
||||
bb.msg.debug_level[domain] = level
|
||||
bb.msg.debug_level['default'] = level
|
||||
|
||||
def set_verbose(level):
|
||||
bb.msg.verbose = level
|
||||
|
||||
def set_debug_domains(domains):
|
||||
for domain in domains:
|
||||
found = False
|
||||
for ddomain in bb.msg.domain:
|
||||
if domain == str(ddomain):
|
||||
bb.msg.debug_level[ddomain] = bb.msg.debug_level[ddomain] + 1
|
||||
found = True
|
||||
if not found:
|
||||
bb.msg.warn(None, "Logging domain %s is not valid, ignoring" % domain)
|
||||
|
||||
#
|
||||
# Message handling functions
|
||||
#
|
||||
|
||||
def debug(level, domain, msg, fn = None):
|
||||
if not domain:
|
||||
domain = 'default'
|
||||
if debug_level[domain] >= level:
|
||||
bb.event.fire(MsgDebug(msg, None))
|
||||
|
||||
def note(level, domain, msg, fn = None):
|
||||
if not domain:
|
||||
domain = 'default'
|
||||
if level == 1 or verbose or debug_level[domain] >= 1:
|
||||
bb.event.fire(MsgNote(msg, None))
|
||||
|
||||
def warn(domain, msg, fn = None):
|
||||
bb.event.fire(MsgWarn(msg, None))
|
||||
|
||||
def error(domain, msg, fn = None):
|
||||
bb.event.fire(MsgError(msg, None))
|
||||
print 'ERROR: ' + msg
|
||||
|
||||
def fatal(domain, msg, fn = None):
|
||||
bb.event.fire(MsgFatal(msg, None))
|
||||
print 'FATAL: ' + msg
|
||||
sys.exit(1)
|
||||
|
||||
def plain(msg, fn = None):
|
||||
bb.event.fire(MsgPlain(msg, None))
|
||||
|
||||
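A minimal usage sketch, assuming bb is importable: initialise the per-domain levels, raise the Fetcher domain, and emit a debug message through the event system.

import bb, bb.msg

bb.msg.set_debug_level(0)              # every domain (and 'default') starts at 0
bb.msg.set_debug_domains(["Fetcher"])  # bump only the Fetcher domain to 1
bb.msg.debug(1, bb.msg.domain.Fetcher, "fires a MsgDebug event")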
bitbake-dev/lib/bb/parse/__init__.py (new file, 84 lines)
@@ -0,0 +1,84 @@
|
||||
"""
|
||||
BitBake Parsers
|
||||
|
||||
File parsers for the BitBake build tools.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
# Copyright (C) 2003, 2004 Phil Blundell
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||
|
||||
__all__ = [ 'ParseError', 'SkipPackage', 'cached_mtime', 'mark_dependency',
|
||||
'supports', 'handle', 'init' ]
|
||||
handlers = []
|
||||
|
||||
import bb, os
|
||||
|
||||
class ParseError(Exception):
|
||||
"""Exception raised when parsing fails"""
|
||||
|
||||
class SkipPackage(Exception):
|
||||
"""Exception raised to skip this package"""
|
||||
|
||||
__mtime_cache = {}
|
||||
def cached_mtime(f):
|
||||
if not __mtime_cache.has_key(f):
|
||||
__mtime_cache[f] = os.stat(f)[8]
|
||||
return __mtime_cache[f]
|
||||
|
||||
def cached_mtime_noerror(f):
|
||||
if not __mtime_cache.has_key(f):
|
||||
try:
|
||||
__mtime_cache[f] = os.stat(f)[8]
|
||||
except OSError:
|
||||
return 0
|
||||
return __mtime_cache[f]
|
||||
|
||||
def update_mtime(f):
|
||||
__mtime_cache[f] = os.stat(f)[8]
|
||||
return __mtime_cache[f]
|
||||
|
||||
def mark_dependency(d, f):
|
||||
if f.startswith('./'):
|
||||
f = "%s/%s" % (os.getcwd(), f[2:])
|
||||
deps = bb.data.getVar('__depends', d) or []
|
||||
deps.append( (f, cached_mtime(f)) )
|
||||
bb.data.setVar('__depends', deps, d)
|
||||
|
||||
def supports(fn, data):
|
||||
"""Returns true if we have a handler for this file, false otherwise"""
|
||||
for h in handlers:
|
||||
if h['supports'](fn, data):
|
||||
return 1
|
||||
return 0
|
||||
|
||||
def handle(fn, data, include = 0):
|
||||
"""Call the handler that is appropriate for this file"""
|
||||
for h in handlers:
|
||||
if h['supports'](fn, data):
|
||||
return h['handle'](fn, data, include)
|
||||
raise ParseError("%s is not a BitBake file" % fn)
|
||||
|
||||
def init(fn, data):
|
||||
for h in handlers:
|
||||
if h['supports'](fn):
|
||||
return h['init'](data)
|
||||
|
||||
|
||||
from parse_py import __version__, ConfHandler, BBHandler
|
||||
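A minimal usage sketch of the dispatch functions above, assuming bb.data.init() is available to create a datastore:

import bb, bb.data, bb.parse

d = bb.data.init()                      # assumed datastore constructor
if bb.parse.supports("conf/bitbake.conf", d):
    d = bb.parse.handle("conf/bitbake.conf", d)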
bitbake-dev/lib/bb/parse/parse_py/BBHandler.py (new file, 401 lines)
@@ -0,0 +1,401 @@
|
||||
#!/usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
class for handling .bb files
|
||||
|
||||
Reads a .bb file and obtains its metadata
|
||||
|
||||
"""
|
||||
|
||||
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
# Copyright (C) 2003, 2004 Phil Blundell
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import re, bb, os, sys, time, string
|
||||
import bb.fetch, bb.build, bb.utils
|
||||
from bb import data, fetch, methodpool
|
||||
|
||||
from ConfHandler import include, localpath, obtain, init
|
||||
from bb.parse import ParseError
|
||||
|
||||
__func_start_regexp__ = re.compile( r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
|
||||
__inherit_regexp__ = re.compile( r"inherit\s+(.+)" )
|
||||
__export_func_regexp__ = re.compile( r"EXPORT_FUNCTIONS\s+(.+)" )
|
||||
__addtask_regexp__ = re.compile("addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
|
||||
__addhandler_regexp__ = re.compile( r"addhandler\s+(.+)" )
|
||||
__def_regexp__ = re.compile( r"def\s+(\w+).*:" )
|
||||
__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" )
|
||||
__word__ = re.compile(r"\S+")
|
||||
|
||||
__infunc__ = ""
|
||||
__inpython__ = False
|
||||
__body__ = []
|
||||
__classname__ = ""
|
||||
classes = [ None, ]
|
||||
|
||||
# We need to indicate EOF to the feeder. This code is so messy that
|
||||
# factoring it out to a close_parse_file method is out of the question.
|
||||
# We will use the IN_PYTHON_EOF as an indicator to just close the method
|
||||
#
|
||||
# The two parts using it are tightly integrated anyway
|
||||
IN_PYTHON_EOF = -9999999999999
|
||||
|
||||
__parsed_methods__ = methodpool.get_parsed_dict()
|
||||
|
||||
def supports(fn, d):
|
||||
localfn = localpath(fn, d)
|
||||
return localfn[-3:] == ".bb" or localfn[-8:] == ".bbclass" or localfn[-4:] == ".inc"
|
||||
|
||||
def inherit(files, d):
|
||||
__inherit_cache = data.getVar('__inherit_cache', d) or []
|
||||
fn = ""
|
||||
lineno = 0
|
||||
files = data.expand(files, d)
|
||||
for file in files:
|
||||
if file[0] != "/" and file[-8:] != ".bbclass":
|
||||
file = os.path.join('classes', '%s.bbclass' % file)
|
||||
|
||||
if not file in __inherit_cache:
|
||||
bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s:%d: inheriting %s" % (fn, lineno, file))
|
||||
__inherit_cache.append( file )
|
||||
data.setVar('__inherit_cache', __inherit_cache, d)
|
||||
include(fn, file, d, "inherit")
|
||||
__inherit_cache = data.getVar('__inherit_cache', d) or []
|
||||
|
||||
|
||||
def finalise(fn, d):
|
||||
data.expandKeys(d)
|
||||
data.update_data(d)
|
||||
anonqueue = data.getVar("__anonqueue", d, 1) or []
|
||||
body = [x['content'] for x in anonqueue]
|
||||
flag = { 'python' : 1, 'func' : 1 }
|
||||
data.setVar("__anonfunc", "\n".join(body), d)
|
||||
data.setVarFlags("__anonfunc", flag, d)
|
||||
from bb import build
|
||||
try:
|
||||
t = data.getVar('T', d)
|
||||
data.setVar('T', '${TMPDIR}/anonfunc/', d)
|
||||
anonfuncs = data.getVar('__BBANONFUNCS', d) or []
|
||||
code = ""
|
||||
for f in anonfuncs:
|
||||
code = code + " %s(d)\n" % f
|
||||
data.setVar("__anonfunc", code, d)
|
||||
build.exec_func("__anonfunc", d)
|
||||
data.delVar('T', d)
|
||||
if t:
|
||||
data.setVar('T', t, d)
|
||||
except Exception, e:
|
||||
bb.msg.debug(1, bb.msg.domain.Parsing, "Exception when executing anonymous function: %s" % e)
|
||||
raise
|
||||
data.delVar("__anonqueue", d)
|
||||
data.delVar("__anonfunc", d)
|
||||
data.update_data(d)
|
||||
|
||||
all_handlers = {}
|
||||
for var in data.getVar('__BBHANDLERS', d) or []:
|
||||
# try to add the handler
|
||||
handler = data.getVar(var,d)
|
||||
bb.event.register(var, handler)
|
||||
|
||||
tasklist = data.getVar('__BBTASKS', d) or []
|
||||
bb.build.add_tasks(tasklist, d)
|
||||
|
||||
bb.event.fire(bb.event.RecipeParsed(fn, d))
|
||||
|
||||
|
||||
def handle(fn, d, include = 0):
|
||||
global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__
|
||||
__body__ = []
|
||||
__infunc__ = ""
|
||||
__classname__ = ""
|
||||
__residue__ = []
|
||||
|
||||
if include == 0:
|
||||
bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data)")
|
||||
else:
|
||||
bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data, include)")
|
||||
|
||||
(root, ext) = os.path.splitext(os.path.basename(fn))
|
||||
base_name = "%s%s" % (root,ext)
|
||||
init(d)
|
||||
|
||||
if ext == ".bbclass":
|
||||
__classname__ = root
|
||||
classes.append(__classname__)
|
||||
__inherit_cache = data.getVar('__inherit_cache', d) or []
|
||||
if not fn in __inherit_cache:
|
||||
__inherit_cache.append(fn)
|
||||
data.setVar('__inherit_cache', __inherit_cache, d)
|
||||
|
||||
if include != 0:
|
||||
oldfile = data.getVar('FILE', d)
|
||||
else:
|
||||
oldfile = None
|
||||
|
||||
fn = obtain(fn, d)
|
||||
bbpath = (data.getVar('BBPATH', d, 1) or '').split(':')
|
||||
if not os.path.isabs(fn):
|
||||
f = None
|
||||
for p in bbpath:
|
||||
j = os.path.join(p, fn)
|
||||
if os.access(j, os.R_OK):
|
||||
abs_fn = j
|
||||
f = open(j, 'r')
|
||||
break
|
||||
if f is None:
|
||||
raise IOError("file %s not found" % fn)
|
||||
else:
|
||||
f = open(fn,'r')
|
||||
abs_fn = fn
|
||||
|
||||
if include:
|
||||
bb.parse.mark_dependency(d, abs_fn)
|
||||
|
||||
if ext != ".bbclass":
|
||||
data.setVar('FILE', fn, d)
|
||||
|
||||
lineno = 0
|
||||
while 1:
|
||||
lineno = lineno + 1
|
||||
s = f.readline()
|
||||
if not s: break
|
||||
s = s.rstrip()
|
||||
feeder(lineno, s, fn, base_name, d)
|
||||
if __inpython__:
|
||||
# add a blank line to close out any python definition
|
||||
feeder(IN_PYTHON_EOF, "", fn, base_name, d)
|
||||
if ext == ".bbclass":
|
||||
classes.remove(__classname__)
|
||||
else:
|
||||
if include == 0:
|
||||
multi = data.getVar('BBCLASSEXTEND', d, 1)
|
||||
if multi:
|
||||
based = bb.data.createCopy(d)
|
||||
finalise(fn, based)
|
||||
darray = {"": based}
|
||||
for cls in multi.split():
|
||||
pn = data.getVar('PN', d, True)
|
||||
based = bb.data.createCopy(d)
|
||||
data.setVar('PN', pn + '-' + cls, based)
|
||||
inherit([cls], based)
|
||||
finalise(fn, based)
|
||||
darray[cls] = based
|
||||
return darray
|
||||
else:
|
||||
finalise(fn, d)
|
||||
bbpath.pop(0)
|
||||
if oldfile:
|
||||
bb.data.setVar("FILE", oldfile, d)
|
||||
|
||||
# we have parsed the bb class now
|
||||
if ext == ".bbclass" or ext == ".inc":
|
||||
__parsed_methods__[base_name] = 1
|
||||
|
||||
return d
|
||||
|
||||
def feeder(lineno, s, fn, root, d):
|
||||
global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__,__infunc__, __body__, classes, bb, __residue__
|
||||
if __infunc__:
|
||||
if s == '}':
|
||||
__body__.append('')
|
||||
if __infunc__ == "__anonymous":
|
||||
funcname = ("__anon_%s_%s" % (lineno, fn.translate(string.maketrans('/.+-', '____'))))
|
||||
if not funcname in methodpool._parsed_fns:
|
||||
text = "def %s(d):\n" % (funcname) + '\n'.join(__body__)
|
||||
methodpool.insert_method(funcname, text, fn)
|
||||
anonfuncs = data.getVar('__BBANONFUNCS', d) or []
|
||||
anonfuncs.append(funcname)
|
||||
data.setVar('__BBANONFUNCS', anonfuncs, d)
|
||||
else:
|
||||
data.setVarFlag(__infunc__, "func", 1, d)
|
||||
data.setVar(__infunc__, '\n'.join(__body__), d)
|
||||
__infunc__ = ""
|
||||
__body__ = []
|
||||
else:
|
||||
__body__.append(s)
|
||||
return
|
||||
|
||||
if __inpython__:
|
||||
m = __python_func_regexp__.match(s)
|
||||
if m and lineno != IN_PYTHON_EOF:
|
||||
__body__.append(s)
|
||||
return
|
||||
else:
|
||||
# Note we will add root to parsedmethods after having parsed
|
||||
# 'this' file. This means we will not parse methods from
|
||||
# bb classes twice
|
||||
if not root in __parsed_methods__:
|
||||
text = '\n'.join(__body__)
|
||||
methodpool.insert_method( root, text, fn )
|
||||
__body__ = []
|
||||
__inpython__ = False
|
||||
|
||||
if lineno == IN_PYTHON_EOF:
|
||||
return
|
||||
|
||||
# fall through
|
||||
|
||||
if s == '' or s[0] == '#': return # skip comments and empty lines
|
||||
|
||||
if s[-1] == '\\':
|
||||
__residue__.append(s[:-1])
|
||||
return
|
||||
|
||||
s = "".join(__residue__) + s
|
||||
__residue__ = []
|
||||
|
||||
m = __func_start_regexp__.match(s)
|
||||
if m:
|
||||
__infunc__ = m.group("func") or "__anonymous"
|
||||
key = __infunc__
|
||||
if data.getVar(key, d):
|
||||
# clean up old version of this piece of metadata, as its
|
||||
# flags could cause problems
|
||||
data.setVarFlag(key, 'python', None, d)
|
||||
data.setVarFlag(key, 'fakeroot', None, d)
|
||||
if m.group("py") is not None:
|
||||
data.setVarFlag(key, "python", "1", d)
|
||||
else:
|
||||
data.delVarFlag(key, "python", d)
|
||||
if m.group("fr") is not None:
|
||||
data.setVarFlag(key, "fakeroot", "1", d)
|
||||
else:
|
||||
data.delVarFlag(key, "fakeroot", d)
|
||||
return
|
||||
|
||||
m = __def_regexp__.match(s)
|
||||
if m:
|
||||
__body__.append(s)
|
||||
__inpython__ = True
|
||||
return
|
||||
|
||||
m = __export_func_regexp__.match(s)
|
||||
if m:
|
||||
fns = m.group(1)
|
||||
n = __word__.findall(fns)
|
||||
for f in n:
|
||||
allvars = []
|
||||
allvars.append(f)
|
||||
allvars.append(classes[-1] + "_" + f)
|
||||
|
||||
vars = [[ allvars[0], allvars[1] ]]
|
||||
if len(classes) > 1 and classes[-2] is not None:
|
||||
allvars.append(classes[-2] + "_" + f)
|
||||
vars = []
|
||||
vars.append([allvars[2], allvars[1]])
|
||||
vars.append([allvars[0], allvars[2]])
|
||||
|
||||
for (var, calledvar) in vars:
|
||||
if data.getVar(var, d) and not data.getVarFlag(var, 'export_func', d):
|
||||
continue
|
||||
|
||||
if data.getVar(var, d):
|
||||
data.setVarFlag(var, 'python', None, d)
|
||||
data.setVarFlag(var, 'func', None, d)
|
||||
|
||||
for flag in [ "func", "python" ]:
|
||||
if data.getVarFlag(calledvar, flag, d):
|
||||
data.setVarFlag(var, flag, data.getVarFlag(calledvar, flag, d), d)
|
||||
for flag in [ "dirs" ]:
|
||||
if data.getVarFlag(var, flag, d):
|
||||
data.setVarFlag(calledvar, flag, data.getVarFlag(var, flag, d), d)
|
||||
|
||||
if data.getVarFlag(calledvar, "python", d):
|
||||
data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n", d)
|
||||
else:
|
||||
data.setVar(var, "\t" + calledvar + "\n", d)
|
||||
data.setVarFlag(var, 'export_func', '1', d)
|
||||
|
||||
return
|
||||
|
||||
m = __addtask_regexp__.match(s)
|
||||
if m:
|
||||
func = m.group("func")
|
||||
before = m.group("before")
|
||||
after = m.group("after")
|
||||
if func is None:
|
||||
return
|
||||
var = "do_" + func
|
||||
|
||||
data.setVarFlag(var, "task", 1, d)
|
||||
|
||||
bbtasks = data.getVar('__BBTASKS', d) or []
|
||||
if not var in bbtasks:
|
||||
bbtasks.append(var)
|
||||
data.setVar('__BBTASKS', bbtasks, d)
|
||||
|
||||
existing = data.getVarFlag(var, "deps", d) or []
|
||||
if after is not None:
|
||||
# set up deps for function
|
||||
for entry in after.split():
|
||||
if entry not in existing:
|
||||
existing.append(entry)
|
||||
data.setVarFlag(var, "deps", existing, d)
|
||||
if before is not None:
|
||||
# set up things that depend on this func
|
||||
for entry in before.split():
|
||||
existing = data.getVarFlag(entry, "deps", d) or []
|
||||
if var not in existing:
|
||||
data.setVarFlag(entry, "deps", [var] + existing, d)
|
||||
return
|
||||
|
||||
m = __addhandler_regexp__.match(s)
|
||||
if m:
|
||||
fns = m.group(1)
|
||||
hs = __word__.findall(fns)
|
||||
bbhands = data.getVar('__BBHANDLERS', d) or []
|
||||
for h in hs:
|
||||
bbhands.append(h)
|
||||
data.setVarFlag(h, "handler", 1, d)
|
||||
data.setVar('__BBHANDLERS', bbhands, d)
|
||||
return
|
||||
|
||||
m = __inherit_regexp__.match(s)
|
||||
if m:
|
||||
|
||||
files = m.group(1)
|
||||
n = __word__.findall(files)
|
||||
inherit(n, d)
|
||||
return
|
||||
|
||||
from bb.parse import ConfHandler
|
||||
return ConfHandler.feeder(lineno, s, fn, d)
|
||||
|
||||
__pkgsplit_cache__={}
|
||||
def vars_from_file(mypkg, d):
|
||||
if not mypkg:
|
||||
return (None, None, None)
|
||||
if mypkg in __pkgsplit_cache__:
|
||||
return __pkgsplit_cache__[mypkg]
|
||||
|
||||
myfile = os.path.splitext(os.path.basename(mypkg))
|
||||
parts = myfile[0].split('_')
|
||||
__pkgsplit_cache__[mypkg] = parts
|
||||
if len(parts) > 3:
|
||||
raise ParseError("Unable to generate default variables from the filename: %s (too many underscores)" % mypkg)
|
||||
exp = 3 - len(parts)
|
||||
tmplist = []
|
||||
while exp != 0:
|
||||
exp -= 1
|
||||
tmplist.append(None)
|
||||
parts.extend(tmplist)
|
||||
return parts
|
||||
|
||||
# Add us to the handlers list
|
||||
from bb.parse import handlers
|
||||
handlers.append({'supports': supports, 'handle': handle, 'init': init})
|
||||
del handlers
|
||||
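For orientation, illustrative (made-up) recipe lines matching the regular expressions defined at the top of BBHandler.py:

bb_lines = [
    "inherit autotools",                                      # __inherit_regexp__
    "addtask compile after do_configure before do_install",   # __addtask_regexp__
    "addhandler my_event_handler",                            # __addhandler_regexp__
    "EXPORT_FUNCTIONS do_configure do_compile",               # __export_func_regexp__
    "do_compile () {",                                        # __func_start_regexp__ (shell)
    "python do_patch () {",                                   # __func_start_regexp__ (python)
]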
bitbake-dev/lib/bb/parse/parse_py/ConfHandler.py (new file, 234 lines)
@@ -0,0 +1,234 @@
|
||||
#!/usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
class for handling configuration data files
|
||||
|
||||
Reads a .conf file and obtains its metadata
|
||||
|
||||
"""
|
||||
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
# Copyright (C) 2003, 2004 Phil Blundell
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import re, bb.data, os, sys
|
||||
from bb.parse import ParseError
|
||||
|
||||
#__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
|
||||
__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
|
||||
__include_regexp__ = re.compile( r"include\s+(.+)" )
|
||||
__require_regexp__ = re.compile( r"require\s+(.+)" )
|
||||
__export_regexp__ = re.compile( r"export\s+(.+)" )
|
||||
|
||||
def init(data):
|
||||
if not bb.data.getVar('TOPDIR', data):
|
||||
bb.data.setVar('TOPDIR', os.getcwd(), data)
|
||||
if not bb.data.getVar('BBPATH', data):
|
||||
bb.data.setVar('BBPATH', os.path.join(sys.prefix, 'share', 'bitbake'), data)
|
||||
|
||||
def supports(fn, d):
|
||||
return localpath(fn, d)[-5:] == ".conf"
|
||||
|
||||
def localpath(fn, d):
|
||||
if os.path.exists(fn):
|
||||
return fn
|
||||
|
||||
if "://" not in fn:
|
||||
return fn
|
||||
|
||||
localfn = None
|
||||
try:
|
||||
localfn = bb.fetch.localpath(fn, d, False)
|
||||
except bb.MalformedUrl:
|
||||
pass
|
||||
|
||||
if not localfn:
|
||||
return fn
|
||||
return localfn
|
||||
|
||||
def obtain(fn, data):
|
||||
import sys, bb
|
||||
fn = bb.data.expand(fn, data)
|
||||
localfn = bb.data.expand(localpath(fn, data), data)
|
||||
|
||||
if localfn != fn:
|
||||
dldir = bb.data.getVar('DL_DIR', data, 1)
|
||||
if not dldir:
|
||||
bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: DL_DIR not defined")
|
||||
return localfn
|
||||
bb.mkdirhier(dldir)
|
||||
try:
|
||||
bb.fetch.init([fn], data)
|
||||
except bb.fetch.NoMethodError:
|
||||
(type, value, traceback) = sys.exc_info()
|
||||
bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: no method: %s" % value)
|
||||
return localfn
|
||||
|
||||
try:
|
||||
bb.fetch.go(data)
|
||||
except bb.fetch.MissingParameterError:
|
||||
(type, value, traceback) = sys.exc_info()
|
||||
bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: missing parameters: %s" % value)
|
||||
return localfn
|
||||
except bb.fetch.FetchError:
|
||||
(type, value, traceback) = sys.exc_info()
|
||||
bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: failed: %s" % value)
|
||||
return localfn
|
||||
return localfn
|
||||
|
||||
|
||||
def include(oldfn, fn, data, error_out):
|
||||
"""
|
||||
|
||||
error_out: if set, a ParseError is raised when the file to be included cannot be handled; the value is used to describe the failed action in the error message
|
||||
"""
|
||||
if oldfn == fn: # prevent infinite recursion
|
||||
return None
|
||||
|
||||
import bb
|
||||
fn = bb.data.expand(fn, data)
|
||||
oldfn = bb.data.expand(oldfn, data)
|
||||
|
||||
if not os.path.isabs(fn):
|
||||
dname = os.path.dirname(oldfn)
|
||||
bbpath = "%s:%s" % (dname, bb.data.getVar("BBPATH", data, 1))
|
||||
abs_fn = bb.which(bbpath, fn)
|
||||
if abs_fn:
|
||||
fn = abs_fn
|
||||
|
||||
from bb.parse import handle
|
||||
try:
|
||||
ret = handle(fn, data, True)
|
||||
except IOError:
|
||||
if error_out:
|
||||
raise ParseError("Could not %(error_out)s file %(fn)s" % vars() )
|
||||
bb.msg.debug(2, bb.msg.domain.Parsing, "CONF file '%s' not found" % fn)
|
||||
|
||||
def handle(fn, data, include = 0):
|
||||
if include:
|
||||
inc_string = "including"
|
||||
else:
|
||||
inc_string = "reading"
|
||||
init(data)
|
||||
|
||||
if include == 0:
|
||||
oldfile = None
|
||||
else:
|
||||
oldfile = bb.data.getVar('FILE', data)
|
||||
|
||||
fn = obtain(fn, data)
|
||||
if not os.path.isabs(fn):
|
||||
f = None
|
||||
bbpath = bb.data.getVar("BBPATH", data, 1) or []
|
||||
for p in bbpath.split(":"):
|
||||
currname = os.path.join(p, fn)
|
||||
if os.access(currname, os.R_OK):
|
||||
f = open(currname, 'r')
|
||||
abs_fn = currname
|
||||
bb.msg.debug(2, bb.msg.domain.Parsing, "CONF %s %s" % (inc_string, currname))
|
||||
break
|
||||
if f is None:
|
||||
raise IOError("file '%s' not found" % fn)
|
||||
else:
|
||||
f = open(fn,'r')
|
||||
bb.msg.debug(1, bb.msg.domain.Parsing, "CONF %s %s" % (inc_string,fn))
|
||||
abs_fn = fn
|
||||
|
||||
if include:
|
||||
bb.parse.mark_dependency(data, abs_fn)
|
||||
|
||||
lineno = 0
|
||||
bb.data.setVar('FILE', fn, data)
|
||||
while 1:
|
||||
lineno = lineno + 1
|
||||
s = f.readline()
|
||||
if not s: break
|
||||
w = s.strip()
|
||||
if not w: continue # skip empty lines
|
||||
s = s.rstrip()
|
||||
if s[0] == '#': continue # skip comments
|
||||
while s[-1] == '\\':
|
||||
s2 = f.readline()[:-1].strip()
|
||||
lineno = lineno + 1
|
||||
s = s[:-1] + s2
|
||||
feeder(lineno, s, fn, data)
|
||||
|
||||
if oldfile:
|
||||
bb.data.setVar('FILE', oldfile, data)
|
||||
return data
|
||||
|
||||
def feeder(lineno, s, fn, data):
|
||||
def getFunc(groupd, key, data):
|
||||
if 'flag' in groupd and groupd['flag'] != None:
|
||||
return bb.data.getVarFlag(key, groupd['flag'], data)
|
||||
else:
|
||||
return bb.data.getVar(key, data)
|
||||
|
||||
m = __config_regexp__.match(s)
|
||||
if m:
|
||||
groupd = m.groupdict()
|
||||
key = groupd["var"]
|
||||
if "exp" in groupd and groupd["exp"] != None:
|
||||
bb.data.setVarFlag(key, "export", 1, data)
|
||||
if "ques" in groupd and groupd["ques"] != None:
|
||||
val = getFunc(groupd, key, data)
|
||||
if val == None:
|
||||
val = groupd["value"]
|
||||
elif "colon" in groupd and groupd["colon"] != None:
|
||||
e = data.createCopy()
|
||||
bb.data.update_data(e)
|
||||
val = bb.data.expand(groupd["value"], e)
|
||||
elif "append" in groupd and groupd["append"] != None:
|
||||
val = "%s %s" % ((getFunc(groupd, key, data) or ""), groupd["value"])
|
||||
elif "prepend" in groupd and groupd["prepend"] != None:
|
||||
val = "%s %s" % (groupd["value"], (getFunc(groupd, key, data) or ""))
|
||||
elif "postdot" in groupd and groupd["postdot"] != None:
|
||||
val = "%s%s" % ((getFunc(groupd, key, data) or ""), groupd["value"])
|
||||
elif "predot" in groupd and groupd["predot"] != None:
|
||||
val = "%s%s" % (groupd["value"], (getFunc(groupd, key, data) or ""))
|
||||
else:
|
||||
val = groupd["value"]
|
||||
if 'flag' in groupd and groupd['flag'] != None:
|
||||
bb.msg.debug(3, bb.msg.domain.Parsing, "setVarFlag(%s, %s, %s, data)" % (key, groupd['flag'], val))
|
||||
bb.data.setVarFlag(key, groupd['flag'], val, data)
|
||||
else:
|
||||
bb.data.setVar(key, val, data)
|
||||
return
|
||||
|
||||
m = __include_regexp__.match(s)
|
||||
if m:
|
||||
s = bb.data.expand(m.group(1), data)
|
||||
bb.msg.debug(3, bb.msg.domain.Parsing, "CONF %s:%d: including %s" % (fn, lineno, s))
|
||||
include(fn, s, data, False)
|
||||
return
|
||||
|
||||
m = __require_regexp__.match(s)
|
||||
if m:
|
||||
s = bb.data.expand(m.group(1), data)
|
||||
include(fn, s, data, "include required")
|
||||
return
|
||||
|
||||
m = __export_regexp__.match(s)
|
||||
if m:
|
||||
bb.data.setVarFlag(m.group(1), "export", 1, data)
|
||||
return
|
||||
|
||||
raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s));
|
||||
|
||||
# Add us to the handlers list
|
||||
from bb.parse import handlers
|
||||
handlers.append({'supports': supports, 'handle': handle, 'init': init})
|
||||
del handlers
|
||||
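For orientation, the assignment forms __config_regexp__ above recognises, with made-up variable names and values; the comments reflect how feeder() combines old and new values:

conf_lines = [
    'FOO = "bar"',           # plain assignment
    'FOO ?= "default"',      # only used if FOO is still unset
    'FOO := "${BAR}"',       # value expanded immediately
    'FOO += "more"',         # append, separated by a space
    'FOO =+ "first"',        # prepend, separated by a space
    'FOO .= "suffix"',       # append, no separator
    'FOO =. "prefix"',       # prepend, no separator
    'FOO[someflag] = "1"',   # set a variable flag
    'export FOO = "bar"',    # mark the variable for export
]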
bitbake-dev/lib/bb/parse/parse_py/__init__.py (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
#!/usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake Parsers
|
||||
|
||||
File parsers for the BitBake build tools.
|
||||
|
||||
"""
|
||||
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
# Copyright (C) 2003, 2004 Phil Blundell
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||
__version__ = '1.0'
|
||||
|
||||
__all__ = [ 'ConfHandler', 'BBHandler']
|
||||
|
||||
import ConfHandler
|
||||
import BBHandler
|
||||
bitbake-dev/lib/bb/persist_data.py (new file, 121 lines)
@@ -0,0 +1,121 @@
|
||||
# BitBake Persistent Data Store
|
||||
#
|
||||
# Copyright (C) 2007 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import bb, os
|
||||
|
||||
try:
|
||||
    import sqlite3
except ImportError:
    try:
        from pysqlite2 import dbapi2 as sqlite3
    except ImportError:
        bb.msg.fatal(bb.msg.domain.PersistData, "Importing sqlite3 and pysqlite2 failed, please install one of them. Python 2.5 or a 'python-pysqlite2' like package is likely to be what you need.")

sqlversion = sqlite3.sqlite_version_info
if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
    bb.msg.fatal(bb.msg.domain.PersistData, "sqlite3 version 3.3.0 or later is required.")

class PersistData:
    """
    BitBake Persistent Data Store

    Used to store data in a central location such that other threads/tasks can
    access them at some future date.

    The "domain" is used as a key to isolate each data pool and in this
    implementation corresponds to an SQL table. The SQL table consists of a
    simple key and value pair.

    Why sqlite? It handles all the locking issues for us.
    """
    def __init__(self, d):
        self.cachedir = bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True)
        if self.cachedir in [None, '']:
            bb.msg.fatal(bb.msg.domain.PersistData, "Please set the 'PERSISTENT_DIR' or 'CACHE' variable.")
        try:
            os.stat(self.cachedir)
        except OSError:
            bb.mkdirhier(self.cachedir)

        self.cachefile = os.path.join(self.cachedir,"bb_persist_data.sqlite3")
        bb.msg.debug(1, bb.msg.domain.PersistData, "Using '%s' as the persistent data cache" % self.cachefile)

        self.connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)

    def addDomain(self, domain):
        """
        Should be called before any domain is used
        Creates it if it doesn't exist.
        """
        self.connection.execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);" % domain)

    def delDomain(self, domain):
        """
        Removes a domain and all the data it contains
        """
        self.connection.execute("DROP TABLE IF EXISTS %s;" % domain)

    def getKeyValues(self, domain):
        """
        Return a list of key + value pairs for a domain
        """
        ret = {}
        data = self.connection.execute("SELECT key, value from %s;" % domain)
        for row in data:
            ret[str(row[0])] = str(row[1])

        return ret

    def getValue(self, domain, key):
        """
        Return the value of a key for a domain
        """
        data = self.connection.execute("SELECT * from %s where key=?;" % domain, [key])
        for row in data:
            return row[1]

    def setValue(self, domain, key, value):
        """
        Sets the value of a key for a domain
        """
        data = self.connection.execute("SELECT * from %s where key=?;" % domain, [key])
        rows = 0
        for row in data:
            rows = rows + 1
        if rows:
            self._execute("UPDATE %s SET value=? WHERE key=?;" % domain, [value, key])
        else:
            self._execute("INSERT into %s(key, value) values (?, ?);" % domain, [key, value])

    def delValue(self, domain, key):
        """
        Deletes a key/value pair
        """
        self._execute("DELETE from %s where key=?;" % domain, [key])

    def _execute(self, *query):
        while True:
            try:
                self.connection.execute(*query)
                return
            except sqlite3.OperationalError, e:
                if 'database is locked' in str(e):
                    continue
                raise

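The PersistData class above is a small key/value store used by other parts of BitBake; each "domain" becomes its own SQLite table. A minimal usage sketch, not part of the commit, assuming d is a parsed BitBake datastore in which PERSISTENT_DIR or CACHE is already set; the domain and key names below are purely illustrative:

    # Hypothetical usage sketch (not part of this commit). Assumes 'd' is a
    # parsed BitBake datastore with PERSISTENT_DIR or CACHE set.
    import bb.persist_data

    pd = bb.persist_data.PersistData(d)
    pd.addDomain("EXAMPLE_DOMAIN")                    # creates the table if needed
    pd.setValue("EXAMPLE_DOMAIN", "somekey", "somevalue")
    print pd.getValue("EXAMPLE_DOMAIN", "somekey")    # -> somevalue
    pd.delValue("EXAMPLE_DOMAIN", "somekey")
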
bitbake-dev/lib/bb/providers.py (new file, 315 lines)
@@ -0,0 +1,315 @@
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
# Copyright (C) 2003, 2004 Phil Blundell
|
||||
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
|
||||
# Copyright (C) 2005 Holger Hans Peter Freyther
|
||||
# Copyright (C) 2005 ROAD GmbH
|
||||
# Copyright (C) 2006 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import re
|
||||
from bb import data, utils
|
||||
import bb
|
||||
|
||||
class NoProvider(Exception):
|
||||
"""Exception raised when no provider of a build dependency can be found"""
|
||||
|
||||
class NoRProvider(Exception):
|
||||
"""Exception raised when no provider of a runtime dependency can be found"""
|
||||
|
||||
|
||||
def sortPriorities(pn, dataCache, pkg_pn = None):
|
||||
"""
|
||||
Reorder pkg_pn by file priority and default preference
|
||||
"""
|
||||
|
||||
if not pkg_pn:
|
||||
pkg_pn = dataCache.pkg_pn
|
||||
|
||||
files = pkg_pn[pn]
|
||||
priorities = {}
|
||||
for f in files:
|
||||
priority = dataCache.bbfile_priority[f]
|
||||
preference = dataCache.pkg_dp[f]
|
||||
if priority not in priorities:
|
||||
priorities[priority] = {}
|
||||
if preference not in priorities[priority]:
|
||||
priorities[priority][preference] = []
|
||||
priorities[priority][preference].append(f)
|
||||
pri_list = priorities.keys()
|
||||
pri_list.sort(lambda a, b: a - b)
|
||||
tmp_pn = []
|
||||
for pri in pri_list:
|
||||
pref_list = priorities[pri].keys()
|
||||
pref_list.sort(lambda a, b: b - a)
|
||||
tmp_pref = []
|
||||
for pref in pref_list:
|
||||
tmp_pref.extend(priorities[pri][pref])
|
||||
tmp_pn = [tmp_pref] + tmp_pn
|
||||
|
||||
return tmp_pn
|
||||
|
||||
|
||||
def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
|
||||
"""
|
||||
Find the first provider in pkg_pn with a PREFERRED_VERSION set.
|
||||
"""
|
||||
|
||||
preferred_file = None
|
||||
preferred_ver = None
|
||||
|
||||
localdata = data.createCopy(cfgData)
|
||||
bb.data.setVar('OVERRIDES', "pn-%s:%s:%s" % (pn, pn, data.getVar('OVERRIDES', localdata)), localdata)
|
||||
bb.data.update_data(localdata)
|
||||
|
||||
preferred_v = bb.data.getVar('PREFERRED_VERSION_%s' % pn, localdata, True)
|
||||
if preferred_v:
|
||||
m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
|
||||
if m:
|
||||
if m.group(1):
|
||||
preferred_e = int(m.group(1)[:-1])
|
||||
else:
|
||||
preferred_e = None
|
||||
preferred_v = m.group(2)
|
||||
if m.group(3):
|
||||
preferred_r = m.group(3)[1:]
|
||||
else:
|
||||
preferred_r = None
|
||||
else:
|
||||
preferred_e = None
|
||||
preferred_r = None
|
||||
|
||||
for file_set in pkg_pn:
|
||||
for f in file_set:
|
||||
pe,pv,pr = dataCache.pkg_pepvpr[f]
|
||||
if preferred_v == pv and (preferred_r == pr or preferred_r == None) and (preferred_e == pe or preferred_e == None):
|
||||
preferred_file = f
|
||||
preferred_ver = (pe, pv, pr)
|
||||
break
|
||||
if preferred_file:
|
||||
break;
|
||||
if preferred_r:
|
||||
pv_str = '%s-%s' % (preferred_v, preferred_r)
|
||||
else:
|
||||
pv_str = preferred_v
|
||||
if not (preferred_e is None):
|
||||
pv_str = '%s:%s' % (preferred_e, pv_str)
|
||||
itemstr = ""
|
||||
if item:
|
||||
itemstr = " (for item %s)" % item
|
||||
if preferred_file is None:
|
||||
bb.msg.note(1, bb.msg.domain.Provider, "preferred version %s of %s not available%s" % (pv_str, pn, itemstr))
|
||||
else:
|
||||
bb.msg.debug(1, bb.msg.domain.Provider, "selecting %s as PREFERRED_VERSION %s of package %s%s" % (preferred_file, pv_str, pn, itemstr))
|
||||
|
||||
return (preferred_ver, preferred_file)
|
||||
|
||||
|
||||
def findLatestProvider(pn, cfgData, dataCache, file_set):
|
||||
"""
|
||||
Return the highest version of the providers in file_set.
|
||||
Take default preferences into account.
|
||||
"""
|
||||
latest = None
|
||||
latest_p = 0
|
||||
latest_f = None
|
||||
for file_name in file_set:
|
||||
pe,pv,pr = dataCache.pkg_pepvpr[file_name]
|
||||
dp = dataCache.pkg_dp[file_name]
|
||||
|
||||
if (latest is None) or ((latest_p == dp) and (utils.vercmp(latest, (pe, pv, pr)) < 0)) or (dp > latest_p):
|
||||
latest = (pe, pv, pr)
|
||||
latest_f = file_name
|
||||
latest_p = dp
|
||||
|
||||
return (latest, latest_f)
|
||||
|
||||
|
||||
def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
|
||||
"""
|
||||
If there is a PREFERRED_VERSION, find the highest-priority bbfile
|
||||
providing that version. If not, find the latest version provided by
|
||||
a bbfile in the highest-priority set.
|
||||
"""
|
||||
|
||||
sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn)
|
||||
# Find the highest priority provider with a PREFERRED_VERSION set
|
||||
(preferred_ver, preferred_file) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item)
|
||||
# Find the latest version of the highest priority provider
|
||||
(latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0])
|
||||
|
||||
if preferred_file is None:
|
||||
preferred_file = latest_f
|
||||
preferred_ver = latest
|
||||
|
||||
return (latest, latest_f, preferred_ver, preferred_file)
|
||||
|
||||
|
||||
def _filterProviders(providers, item, cfgData, dataCache):
|
||||
"""
|
||||
Take a list of providers and filter/reorder according to the
|
||||
environment variables and previous build results
|
||||
"""
|
||||
eligible = []
|
||||
preferred_versions = {}
|
||||
sortpkg_pn = {}
|
||||
|
||||
# The order of providers depends on the order of the files on the disk
|
||||
# up to here. Sort pkg_pn to make dependency issues reproducible rather
|
||||
# than effectively random.
|
||||
providers.sort()
|
||||
|
||||
# Collate providers by PN
|
||||
pkg_pn = {}
|
||||
for p in providers:
|
||||
pn = dataCache.pkg_fn[p]
|
||||
if pn not in pkg_pn:
|
||||
pkg_pn[pn] = []
|
||||
pkg_pn[pn].append(p)
|
||||
|
||||
bb.msg.debug(1, bb.msg.domain.Provider, "providers for %s are: %s" % (item, pkg_pn.keys()))
|
||||
|
||||
# First add PREFERRED_VERSIONS
|
||||
for pn in pkg_pn.keys():
|
||||
sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn)
|
||||
preferred_versions[pn] = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item)
|
||||
if preferred_versions[pn][1]:
|
||||
eligible.append(preferred_versions[pn][1])
|
||||
|
||||
# Now add latest versions
|
||||
for pn in pkg_pn.keys():
|
||||
if pn in preferred_versions and preferred_versions[pn][1]:
|
||||
continue
|
||||
preferred_versions[pn] = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[pn][0])
|
||||
eligible.append(preferred_versions[pn][1])
|
||||
|
||||
if len(eligible) == 0:
|
||||
bb.msg.error(bb.msg.domain.Provider, "no eligible providers for %s" % item)
|
||||
return 0
|
||||
|
||||
# If pn == item, give it a slight default preference
|
||||
# This means PREFERRED_PROVIDER_foobar defaults to foobar if available
|
||||
for p in providers:
|
||||
pn = dataCache.pkg_fn[p]
|
||||
if pn != item:
|
||||
continue
|
||||
(newvers, fn) = preferred_versions[pn]
|
||||
if not fn in eligible:
|
||||
continue
|
||||
eligible.remove(fn)
|
||||
eligible = [fn] + eligible
|
||||
|
||||
return eligible
|
||||
|
||||
|
||||
def filterProviders(providers, item, cfgData, dataCache):
|
||||
"""
|
||||
Take a list of providers and filter/reorder according to the
|
||||
environment variables and previous build results
|
||||
Takes a "normal" target item
|
||||
"""
|
||||
|
||||
eligible = _filterProviders(providers, item, cfgData, dataCache)
|
||||
|
||||
prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % item, cfgData, 1)
|
||||
if prefervar:
|
||||
dataCache.preferred[item] = prefervar
|
||||
|
||||
foundUnique = False
|
||||
if item in dataCache.preferred:
|
||||
for p in eligible:
|
||||
pn = dataCache.pkg_fn[p]
|
||||
if dataCache.preferred[item] == pn:
|
||||
bb.msg.note(2, bb.msg.domain.Provider, "selecting %s to satisfy %s due to PREFERRED_PROVIDERS" % (pn, item))
|
||||
eligible.remove(p)
|
||||
eligible = [p] + eligible
|
||||
foundUnique = True
|
||||
break
|
||||
|
||||
bb.msg.debug(1, bb.msg.domain.Provider, "sorted providers for %s are: %s" % (item, eligible))
|
||||
|
||||
return eligible, foundUnique
|
||||
|
||||
def filterProvidersRunTime(providers, item, cfgData, dataCache):
|
||||
"""
|
||||
Take a list of providers and filter/reorder according to the
|
||||
environment variables and previous build results
|
||||
Takes a "runtime" target item
|
||||
"""
|
||||
|
||||
eligible = _filterProviders(providers, item, cfgData, dataCache)
|
||||
|
||||
# Should use dataCache.preferred here?
|
||||
preferred = []
|
||||
preferred_vars = []
|
||||
for p in eligible:
|
||||
pn = dataCache.pkg_fn[p]
|
||||
provides = dataCache.pn_provides[pn]
|
||||
for provide in provides:
|
||||
bb.msg.note(2, bb.msg.domain.Provider, "checking PREFERRED_PROVIDER_%s" % (provide))
|
||||
prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % provide, cfgData, 1)
|
||||
if prefervar == pn:
|
||||
var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
|
||||
bb.msg.note(2, bb.msg.domain.Provider, "selecting %s to satisfy runtime %s due to %s" % (pn, item, var))
|
||||
preferred_vars.append(var)
|
||||
eligible.remove(p)
|
||||
eligible = [p] + eligible
|
||||
preferred.append(p)
|
||||
break
|
||||
|
||||
numberPreferred = len(preferred)
|
||||
|
||||
if numberPreferred > 1:
|
||||
bb.msg.error(bb.msg.domain.Provider, "Conflicting PREFERRED_PROVIDER entries were found which resulted in an attempt to select multiple providers (%s) for runtime dependecy %s\nThe entries resulting in this conflict were: %s" % (preferred, item, preferred_vars))
|
||||
|
||||
bb.msg.debug(1, bb.msg.domain.Provider, "sorted providers for %s are: %s" % (item, eligible))
|
||||
|
||||
return eligible, numberPreferred
|
||||
|
||||
regexp_cache = {}
|
||||
|
||||
def getRuntimeProviders(dataCache, rdepend):
|
||||
"""
|
||||
Return any providers of runtime dependency
|
||||
"""
|
||||
rproviders = []
|
||||
|
||||
if rdepend in dataCache.rproviders:
|
||||
rproviders += dataCache.rproviders[rdepend]
|
||||
|
||||
if rdepend in dataCache.packages:
|
||||
rproviders += dataCache.packages[rdepend]
|
||||
|
||||
if rproviders:
|
||||
return rproviders
|
||||
|
||||
# Only search dynamic packages if we can't find anything in other variables
|
||||
for pattern in dataCache.packages_dynamic:
|
||||
pattern = pattern.replace('+', "\+")
|
||||
if pattern in regexp_cache:
|
||||
regexp = regexp_cache[pattern]
|
||||
else:
|
||||
try:
|
||||
regexp = re.compile(pattern)
|
||||
except:
|
||||
bb.msg.error(bb.msg.domain.Provider, "Error parsing re expression: %s" % pattern)
|
||||
raise
|
||||
regexp_cache[pattern] = regexp
|
||||
if regexp.match(rdepend):
|
||||
rproviders += dataCache.packages_dynamic[pattern]
|
||||
|
||||
return rproviders
|
||||
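sortPriorities above groups the candidate .bb files so that the highest BBFILE_PRIORITY group comes first and, within a group, files with a higher DEFAULT_PREFERENCE come first. A simplified standalone sketch of the same grouping logic, with made-up priority and preference values rather than a real dataCache:

    # Simplified sketch of the sortPriorities grouping (illustrative values only).
    def sort_priorities(files, priority, preference):
        groups = {}
        for f in files:
            groups.setdefault(priority[f], {}).setdefault(preference[f], []).append(f)
        result = []
        for pri in sorted(groups):                          # lowest priority first...
            block = []
            for pref in sorted(groups[pri], reverse=True):  # ...highest preference first within a group
                block.extend(groups[pri][pref])
            result = [block] + result                       # ...prepended, so highest priority ends up first
        return result

    files = ["a_1.0.bb", "a_2.0.bb", "a_1.5.bb"]
    priority = {"a_1.0.bb": 5, "a_2.0.bb": 5, "a_1.5.bb": 10}
    preference = {"a_1.0.bb": 0, "a_2.0.bb": -1, "a_1.5.bb": 0}
    print sort_priorities(files, priority, preference)
    # -> [['a_1.5.bb'], ['a_1.0.bb', 'a_2.0.bb']]
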
bitbake-dev/lib/bb/runqueue.py (new file, 1126 lines; diff suppressed because it is too large)

bitbake-dev/lib/bb/shell.py (new file, 824 lines)
@@ -0,0 +1,824 @@
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
##########################################################################
|
||||
#
|
||||
# Copyright (C) 2005-2006 Michael 'Mickey' Lauer <mickey@Vanille.de>
|
||||
# Copyright (C) 2005-2006 Vanille Media
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
##########################################################################
|
||||
#
|
||||
# Thanks to:
|
||||
# * Holger Freyther <zecke@handhelds.org>
|
||||
# * Justin Patrin <papercrane@reversefold.com>
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
"""
|
||||
BitBake Shell
|
||||
|
||||
IDEAS:
|
||||
* list defined tasks per package
|
||||
* list classes
|
||||
* toggle force
|
||||
* command to reparse just one (or more) bbfile(s)
|
||||
* automatic check if reparsing is necessary (inotify?)
|
||||
* frontend for bb file manipulation
|
||||
* more shell-like features:
|
||||
- output control, i.e. pipe output into grep, sort, etc.
|
||||
- job control, i.e. bring running commands into background and foreground
|
||||
* start parsing in background right after startup
|
||||
* ncurses interface
|
||||
|
||||
PROBLEMS:
|
||||
* force doesn't always work
|
||||
* readline completion for commands with more than one parameters
|
||||
|
||||
"""
|
||||
|
||||
##########################################################################
|
||||
# Import and setup global variables
|
||||
##########################################################################
|
||||
|
||||
try:
|
||||
set
|
||||
except NameError:
|
||||
from sets import Set as set
|
||||
import sys, os, readline, socket, httplib, urllib, commands, popen2, copy, shlex, Queue, fnmatch
|
||||
from bb import data, parse, build, fatal, cache, taskdata, runqueue, providers as Providers
|
||||
|
||||
__version__ = "0.5.3.1"
|
||||
__credits__ = """BitBake Shell Version %s (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de>
|
||||
Type 'help' for more information, press CTRL-D to exit.""" % __version__
|
||||
|
||||
cmds = {}
|
||||
leave_mainloop = False
|
||||
last_exception = None
|
||||
cooker = None
|
||||
parsed = False
|
||||
debug = os.environ.get( "BBSHELL_DEBUG", "" )
|
||||
|
||||
##########################################################################
|
||||
# Class BitBakeShellCommands
|
||||
##########################################################################
|
||||
|
||||
class BitBakeShellCommands:
|
||||
"""This class contains the valid commands for the shell"""
|
||||
|
||||
def __init__( self, shell ):
|
||||
"""Register all the commands"""
|
||||
self._shell = shell
|
||||
for attr in BitBakeShellCommands.__dict__:
|
||||
if not attr.startswith( "_" ):
|
||||
if attr.endswith( "_" ):
|
||||
command = attr[:-1].lower()
|
||||
else:
|
||||
command = attr[:].lower()
|
||||
method = getattr( BitBakeShellCommands, attr )
|
||||
debugOut( "registering command '%s'" % command )
|
||||
# scan number of arguments
|
||||
usage = getattr( method, "usage", "" )
|
||||
if usage != "<...>":
|
||||
numArgs = len( usage.split() )
|
||||
else:
|
||||
numArgs = -1
|
||||
shell.registerCommand( command, method, numArgs, "%s %s" % ( command, usage ), method.__doc__ )
|
||||
|
||||
def _checkParsed( self ):
|
||||
if not parsed:
|
||||
print "SHELL: This command needs to parse bbfiles..."
|
||||
self.parse( None )
|
||||
|
||||
def _findProvider( self, item ):
|
||||
self._checkParsed()
|
||||
# Need to use taskData for this information
|
||||
preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
|
||||
if not preferred: preferred = item
|
||||
try:
|
||||
lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
|
||||
except KeyError:
|
||||
if item in cooker.status.providers:
|
||||
pf = cooker.status.providers[item][0]
|
||||
else:
|
||||
pf = None
|
||||
return pf
|
||||
|
||||
def alias( self, params ):
|
||||
"""Register a new name for a command"""
|
||||
new, old = params
|
||||
if not old in cmds:
|
||||
print "ERROR: Command '%s' not known" % old
|
||||
else:
|
||||
cmds[new] = cmds[old]
|
||||
print "OK"
|
||||
alias.usage = "<alias> <command>"
|
||||
|
||||
def buffer( self, params ):
|
||||
"""Dump specified output buffer"""
|
||||
index = params[0]
|
||||
print self._shell.myout.buffer( int( index ) )
|
||||
buffer.usage = "<index>"
|
||||
|
||||
def buffers( self, params ):
|
||||
"""Show the available output buffers"""
|
||||
commands = self._shell.myout.bufferedCommands()
|
||||
if not commands:
|
||||
print "SHELL: No buffered commands available yet. Start doing something."
|
||||
else:
|
||||
print "="*35, "Available Output Buffers", "="*27
|
||||
for index, cmd in enumerate( commands ):
|
||||
print "| %s %s" % ( str( index ).ljust( 3 ), cmd )
|
||||
print "="*88
|
||||
|
||||
def build( self, params, cmd = "build" ):
|
||||
"""Build a providee"""
|
||||
global last_exception
|
||||
globexpr = params[0]
|
||||
self._checkParsed()
|
||||
names = globfilter( cooker.status.pkg_pn.keys(), globexpr )
|
||||
if len( names ) == 0: names = [ globexpr ]
|
||||
print "SHELL: Building %s" % ' '.join( names )
|
||||
|
||||
td = taskdata.TaskData(cooker.configuration.abort)
|
||||
localdata = data.createCopy(cooker.configuration.data)
|
||||
data.update_data(localdata)
|
||||
data.expandKeys(localdata)
|
||||
|
||||
try:
|
||||
tasks = []
|
||||
for name in names:
|
||||
td.add_provider(localdata, cooker.status, name)
|
||||
providers = td.get_provider(name)
|
||||
|
||||
if len(providers) == 0:
|
||||
raise Providers.NoProvider
|
||||
|
||||
tasks.append([name, "do_%s" % cmd])
|
||||
|
||||
td.add_unresolved(localdata, cooker.status)
|
||||
|
||||
rq = runqueue.RunQueue(cooker, localdata, cooker.status, td, tasks)
|
||||
rq.prepare_runqueue()
|
||||
rq.execute_runqueue()
|
||||
|
||||
except Providers.NoProvider:
|
||||
print "ERROR: No Provider"
|
||||
last_exception = Providers.NoProvider
|
||||
|
||||
except runqueue.TaskFailure, fnids:
|
||||
for fnid in fnids:
|
||||
print "ERROR: '%s' failed" % td.fn_index[fnid]
|
||||
last_exception = runqueue.TaskFailure
|
||||
|
||||
except build.EventException, e:
|
||||
print "ERROR: Couldn't build '%s'" % names
|
||||
last_exception = e
|
||||
|
||||
|
||||
build.usage = "<providee>"
|
||||
|
||||
def clean( self, params ):
|
||||
"""Clean a providee"""
|
||||
self.build( params, "clean" )
|
||||
clean.usage = "<providee>"
|
||||
|
||||
def compile( self, params ):
|
||||
"""Execute 'compile' on a providee"""
|
||||
self.build( params, "compile" )
|
||||
compile.usage = "<providee>"
|
||||
|
||||
def configure( self, params ):
|
||||
"""Execute 'configure' on a providee"""
|
||||
self.build( params, "configure" )
|
||||
configure.usage = "<providee>"
|
||||
|
||||
def install( self, params ):
|
||||
"""Execute 'install' on a providee"""
|
||||
self.build( params, "install" )
|
||||
install.usage = "<providee>"
|
||||
|
||||
def edit( self, params ):
|
||||
"""Call $EDITOR on a providee"""
|
||||
name = params[0]
|
||||
bbfile = self._findProvider( name )
|
||||
if bbfile is not None:
|
||||
os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), bbfile ) )
|
||||
else:
|
||||
print "ERROR: Nothing provides '%s'" % name
|
||||
edit.usage = "<providee>"
|
||||
|
||||
def environment( self, params ):
|
||||
"""Dump out the outer BitBake environment"""
|
||||
cooker.showEnvironment()
|
||||
|
||||
def exit_( self, params ):
|
||||
"""Leave the BitBake Shell"""
|
||||
debugOut( "setting leave_mainloop to true" )
|
||||
global leave_mainloop
|
||||
leave_mainloop = True
|
||||
|
||||
def fetch( self, params ):
|
||||
"""Fetch a providee"""
|
||||
self.build( params, "fetch" )
|
||||
fetch.usage = "<providee>"
|
||||
|
||||
def fileBuild( self, params, cmd = "build" ):
|
||||
"""Parse and build a .bb file"""
|
||||
global last_exception
|
||||
name = params[0]
|
||||
bf = completeFilePath( name )
|
||||
print "SHELL: Calling '%s' on '%s'" % ( cmd, bf )
|
||||
|
||||
try:
|
||||
cooker.buildFile(bf, cmd)
|
||||
except parse.ParseError:
|
||||
print "ERROR: Unable to open or parse '%s'" % bf
|
||||
except build.EventException, e:
|
||||
print "ERROR: Couldn't build '%s'" % name
|
||||
last_exception = e
|
||||
|
||||
fileBuild.usage = "<bbfile>"
|
||||
|
||||
def fileClean( self, params ):
|
||||
"""Clean a .bb file"""
|
||||
self.fileBuild( params, "clean" )
|
||||
fileClean.usage = "<bbfile>"
|
||||
|
||||
def fileEdit( self, params ):
|
||||
"""Call $EDITOR on a .bb file"""
|
||||
name = params[0]
|
||||
os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), completeFilePath( name ) ) )
|
||||
fileEdit.usage = "<bbfile>"
|
||||
|
||||
def fileRebuild( self, params ):
|
||||
"""Rebuild (clean & build) a .bb file"""
|
||||
self.fileBuild( params, "rebuild" )
|
||||
fileRebuild.usage = "<bbfile>"
|
||||
|
||||
def fileReparse( self, params ):
|
||||
"""(re)Parse a bb file"""
|
||||
bbfile = params[0]
|
||||
print "SHELL: Parsing '%s'" % bbfile
|
||||
parse.update_mtime( bbfile )
|
||||
cooker.bb_cache.cacheValidUpdate(bbfile)
|
||||
fromCache = cooker.bb_cache.loadData(bbfile, cooker.configuration.data, cooker.status)
|
||||
cooker.bb_cache.sync()
|
||||
if False: #fromCache:
|
||||
print "SHELL: File has not been updated, not reparsing"
|
||||
else:
|
||||
print "SHELL: Parsed"
|
||||
fileReparse.usage = "<bbfile>"
|
||||
|
||||
def abort( self, params ):
|
||||
"""Toggle abort task execution flag (see bitbake -k)"""
|
||||
cooker.configuration.abort = not cooker.configuration.abort
|
||||
print "SHELL: Abort Flag is now '%s'" % repr( cooker.configuration.abort )
|
||||
|
||||
def force( self, params ):
|
||||
"""Toggle force task execution flag (see bitbake -f)"""
|
||||
cooker.configuration.force = not cooker.configuration.force
|
||||
print "SHELL: Force Flag is now '%s'" % repr( cooker.configuration.force )
|
||||
|
||||
def help( self, params ):
|
||||
"""Show a comprehensive list of commands and their purpose"""
|
||||
print "="*30, "Available Commands", "="*30
|
||||
allcmds = cmds.keys()
|
||||
allcmds.sort()
|
||||
for cmd in allcmds:
|
||||
function,numparams,usage,helptext = cmds[cmd]
|
||||
print "| %s | %s" % (usage.ljust(30), helptext)
|
||||
print "="*78
|
||||
|
||||
def lastError( self, params ):
|
||||
"""Show the reason or log that was produced by the last BitBake event exception"""
|
||||
if last_exception is None:
|
||||
print "SHELL: No Errors yet (Phew)..."
|
||||
else:
|
||||
reason, event = last_exception.args
|
||||
print "SHELL: Reason for the last error: '%s'" % reason
|
||||
if ':' in reason:
|
||||
msg, filename = reason.split( ':' )
|
||||
filename = filename.strip()
|
||||
print "SHELL: Dumping log file for last error:"
|
||||
try:
|
||||
print open( filename ).read()
|
||||
except IOError:
|
||||
print "ERROR: Couldn't open '%s'" % filename
|
||||
|
||||
def match( self, params ):
|
||||
"""Dump all files or providers matching a glob expression"""
|
||||
what, globexpr = params
|
||||
if what == "files":
|
||||
self._checkParsed()
|
||||
for key in globfilter( cooker.status.pkg_fn.keys(), globexpr ): print key
|
||||
elif what == "providers":
|
||||
self._checkParsed()
|
||||
for key in globfilter( cooker.status.pkg_pn.keys(), globexpr ): print key
|
||||
else:
|
||||
print "Usage: match %s" % self.print_.usage
|
||||
match.usage = "<files|providers> <glob>"
|
||||
|
||||
def new( self, params ):
|
||||
"""Create a new .bb file and open the editor"""
|
||||
dirname, filename = params
|
||||
packages = '/'.join( data.getVar( "BBFILES", cooker.configuration.data, 1 ).split('/')[:-2] )
|
||||
fulldirname = "%s/%s" % ( packages, dirname )
|
||||
|
||||
if not os.path.exists( fulldirname ):
|
||||
print "SHELL: Creating '%s'" % fulldirname
|
||||
os.mkdir( fulldirname )
|
||||
if os.path.exists( fulldirname ) and os.path.isdir( fulldirname ):
|
||||
if os.path.exists( "%s/%s" % ( fulldirname, filename ) ):
|
||||
print "SHELL: ERROR: %s/%s already exists" % ( fulldirname, filename )
|
||||
return False
|
||||
print "SHELL: Creating '%s/%s'" % ( fulldirname, filename )
|
||||
newpackage = open( "%s/%s" % ( fulldirname, filename ), "w" )
|
||||
print >>newpackage,"""DESCRIPTION = ""
|
||||
SECTION = ""
|
||||
AUTHOR = ""
|
||||
HOMEPAGE = ""
|
||||
MAINTAINER = ""
|
||||
LICENSE = "GPL"
|
||||
PR = "r0"
|
||||
|
||||
SRC_URI = ""
|
||||
|
||||
#inherit base
|
||||
|
||||
#do_configure() {
|
||||
#
|
||||
#}
|
||||
|
||||
#do_compile() {
|
||||
#
|
||||
#}
|
||||
|
||||
#do_stage() {
|
||||
#
|
||||
#}
|
||||
|
||||
#do_install() {
|
||||
#
|
||||
#}
|
||||
"""
|
||||
newpackage.close()
|
||||
os.system( "%s %s/%s" % ( os.environ.get( "EDITOR" ), fulldirname, filename ) )
|
||||
new.usage = "<directory> <filename>"
|
||||
|
||||
def package( self, params ):
|
||||
"""Execute 'package' on a providee"""
|
||||
self.build( params, "package" )
|
||||
package.usage = "<providee>"
|
||||
|
||||
def pasteBin( self, params ):
|
||||
"""Send a command + output buffer to the pastebin at http://rafb.net/paste"""
|
||||
index = params[0]
|
||||
contents = self._shell.myout.buffer( int( index ) )
|
||||
sendToPastebin( "output of " + params[0], contents )
|
||||
pasteBin.usage = "<index>"
|
||||
|
||||
def pasteLog( self, params ):
|
||||
"""Send the last event exception error log (if there is one) to http://rafb.net/paste"""
|
||||
if last_exception is None:
|
||||
print "SHELL: No Errors yet (Phew)..."
|
||||
else:
|
||||
reason, event = last_exception.args
|
||||
print "SHELL: Reason for the last error: '%s'" % reason
|
||||
if ':' in reason:
|
||||
msg, filename = reason.split( ':' )
|
||||
filename = filename.strip()
|
||||
print "SHELL: Pasting log file to pastebin..."
|
||||
|
||||
file = open( filename ).read()
|
||||
sendToPastebin( "contents of " + filename, file )
|
||||
|
||||
def patch( self, params ):
|
||||
"""Execute 'patch' command on a providee"""
|
||||
self.build( params, "patch" )
|
||||
patch.usage = "<providee>"
|
||||
|
||||
def parse( self, params ):
|
||||
"""(Re-)parse .bb files and calculate the dependency graph"""
|
||||
cooker.status = cache.CacheData()
|
||||
ignore = data.getVar("ASSUME_PROVIDED", cooker.configuration.data, 1) or ""
|
||||
cooker.status.ignored_dependencies = set( ignore.split() )
|
||||
cooker.handleCollections( data.getVar("BBFILE_COLLECTIONS", cooker.configuration.data, 1) )
|
||||
|
||||
(filelist, masked) = cooker.collect_bbfiles()
|
||||
cooker.parse_bbfiles(filelist, masked, cooker.myProgressCallback)
|
||||
cooker.buildDepgraph()
|
||||
global parsed
|
||||
parsed = True
|
||||
print
|
||||
|
||||
def reparse( self, params ):
|
||||
"""(re)Parse a providee's bb file"""
|
||||
bbfile = self._findProvider( params[0] )
|
||||
if bbfile is not None:
|
||||
print "SHELL: Found bbfile '%s' for '%s'" % ( bbfile, params[0] )
|
||||
self.fileReparse( [ bbfile ] )
|
||||
else:
|
||||
print "ERROR: Nothing provides '%s'" % params[0]
|
||||
reparse.usage = "<providee>"
|
||||
|
||||
def getvar( self, params ):
|
||||
"""Dump the contents of an outer BitBake environment variable"""
|
||||
var = params[0]
|
||||
value = data.getVar( var, cooker.configuration.data, 1 )
|
||||
print value
|
||||
getvar.usage = "<variable>"
|
||||
|
||||
def peek( self, params ):
|
||||
"""Dump contents of variable defined in providee's metadata"""
|
||||
name, var = params
|
||||
bbfile = self._findProvider( name )
|
||||
if bbfile is not None:
|
||||
the_data = cooker.bb_cache.loadDataFull(bbfile, cooker.configuration.data)
|
||||
value = the_data.getVar( var, 1 )
|
||||
print value
|
||||
else:
|
||||
print "ERROR: Nothing provides '%s'" % name
|
||||
peek.usage = "<providee> <variable>"
|
||||
|
||||
def poke( self, params ):
|
||||
"""Set contents of variable defined in providee's metadata"""
|
||||
name, var, value = params
|
||||
bbfile = self._findProvider( name )
|
||||
if bbfile is not None:
|
||||
print "ERROR: Sorry, this functionality is currently broken"
|
||||
#d = cooker.pkgdata[bbfile]
|
||||
#data.setVar( var, value, d )
|
||||
|
||||
# mark the change semi-persistent
|
||||
#cooker.pkgdata.setDirty(bbfile, d)
|
||||
#print "OK"
|
||||
else:
|
||||
print "ERROR: Nothing provides '%s'" % name
|
||||
poke.usage = "<providee> <variable> <value>"
|
||||
|
||||
def print_( self, params ):
|
||||
"""Dump all files or providers"""
|
||||
what = params[0]
|
||||
if what == "files":
|
||||
self._checkParsed()
|
||||
for key in cooker.status.pkg_fn.keys(): print key
|
||||
elif what == "providers":
|
||||
self._checkParsed()
|
||||
for key in cooker.status.providers.keys(): print key
|
||||
else:
|
||||
print "Usage: print %s" % self.print_.usage
|
||||
print_.usage = "<files|providers>"
|
||||
|
||||
def python( self, params ):
|
||||
"""Enter the expert mode - an interactive BitBake Python Interpreter"""
|
||||
sys.ps1 = "EXPERT BB>>> "
|
||||
sys.ps2 = "EXPERT BB... "
|
||||
import code
|
||||
interpreter = code.InteractiveConsole( dict( globals() ) )
|
||||
interpreter.interact( "SHELL: Expert Mode - BitBake Python %s\nType 'help' for more information, press CTRL-D to switch back to BBSHELL." % sys.version )
|
||||
|
||||
def showdata( self, params ):
|
||||
"""Execute 'showdata' on a providee"""
|
||||
cooker.showEnvironment(None, params)
|
||||
showdata.usage = "<providee>"
|
||||
|
||||
def setVar( self, params ):
|
||||
"""Set an outer BitBake environment variable"""
|
||||
var, value = params
|
||||
data.setVar( var, value, cooker.configuration.data )
|
||||
print "OK"
|
||||
setVar.usage = "<variable> <value>"
|
||||
|
||||
def rebuild( self, params ):
|
||||
"""Clean and rebuild a .bb file or a providee"""
|
||||
self.build( params, "clean" )
|
||||
self.build( params, "build" )
|
||||
rebuild.usage = "<providee>"
|
||||
|
||||
def shell( self, params ):
|
||||
"""Execute a shell command and dump the output"""
|
||||
if params != "":
|
||||
print commands.getoutput( " ".join( params ) )
|
||||
shell.usage = "<...>"
|
||||
|
||||
def stage( self, params ):
|
||||
"""Execute 'stage' on a providee"""
|
||||
self.build( params, "populate_staging" )
|
||||
stage.usage = "<providee>"
|
||||
|
||||
def status( self, params ):
|
||||
"""<just for testing>"""
|
||||
print "-" * 78
|
||||
print "building list = '%s'" % cooker.building_list
|
||||
print "build path = '%s'" % cooker.build_path
|
||||
print "consider_msgs_cache = '%s'" % cooker.consider_msgs_cache
|
||||
print "build stats = '%s'" % cooker.stats
|
||||
if last_exception is not None: print "last_exception = '%s'" % repr( last_exception.args )
|
||||
print "memory output contents = '%s'" % self._shell.myout._buffer
|
||||
|
||||
def test( self, params ):
|
||||
"""<just for testing>"""
|
||||
print "testCommand called with '%s'" % params
|
||||
|
||||
def unpack( self, params ):
|
||||
"""Execute 'unpack' on a providee"""
|
||||
self.build( params, "unpack" )
|
||||
unpack.usage = "<providee>"
|
||||
|
||||
def which( self, params ):
|
||||
"""Computes the providers for a given providee"""
|
||||
# Need to use taskData for this information
|
||||
item = params[0]
|
||||
|
||||
self._checkParsed()
|
||||
|
||||
preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
|
||||
if not preferred: preferred = item
|
||||
|
||||
try:
|
||||
lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
|
||||
except KeyError:
|
||||
lv, lf, pv, pf = (None,)*4
|
||||
|
||||
try:
|
||||
providers = cooker.status.providers[item]
|
||||
except KeyError:
|
||||
print "SHELL: ERROR: Nothing provides", preferred
|
||||
else:
|
||||
for provider in providers:
|
||||
if provider == pf: provider = " (***) %s" % provider
|
||||
else: provider = " %s" % provider
|
||||
print provider
|
||||
which.usage = "<providee>"
|
||||
|
||||
##########################################################################
|
||||
# Common helper functions
|
||||
##########################################################################
|
||||
|
||||
def completeFilePath( bbfile ):
|
||||
"""Get the complete bbfile path"""
|
||||
if not cooker.status: return bbfile
|
||||
if not cooker.status.pkg_fn: return bbfile
|
||||
for key in cooker.status.pkg_fn.keys():
|
||||
if key.endswith( bbfile ):
|
||||
return key
|
||||
return bbfile
|
||||
|
||||
def sendToPastebin( desc, content ):
|
||||
"""Send content to http://oe.pastebin.com"""
|
||||
mydata = {}
|
||||
mydata["lang"] = "Plain Text"
|
||||
mydata["desc"] = desc
|
||||
mydata["cvt_tabs"] = "No"
|
||||
mydata["nick"] = "%s@%s" % ( os.environ.get( "USER", "unknown" ), socket.gethostname() or "unknown" )
|
||||
mydata["text"] = content
|
||||
params = urllib.urlencode( mydata )
|
||||
headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain"}
|
||||
|
||||
host = "rafb.net"
|
||||
conn = httplib.HTTPConnection( "%s:80" % host )
|
||||
conn.request("POST", "/paste/paste.php", params, headers )
|
||||
|
||||
response = conn.getresponse()
|
||||
conn.close()
|
||||
|
||||
if response.status == 302:
|
||||
location = response.getheader( "location" ) or "unknown"
|
||||
print "SHELL: Pasted to http://%s%s" % ( host, location )
|
||||
else:
|
||||
print "ERROR: %s %s" % ( response.status, response.reason )
|
||||
|
||||
def completer( text, state ):
|
||||
"""Return a possible readline completion"""
|
||||
debugOut( "completer called with text='%s', state='%d'" % ( text, state ) )
|
||||
|
||||
if state == 0:
|
||||
line = readline.get_line_buffer()
|
||||
if " " in line:
|
||||
line = line.split()
|
||||
# we are in second (or more) argument
|
||||
if line[0] in cmds and hasattr( cmds[line[0]][0], "usage" ): # known command and usage
|
||||
u = getattr( cmds[line[0]][0], "usage" ).split()[0]
|
||||
if u == "<variable>":
|
||||
allmatches = cooker.configuration.data.keys()
|
||||
elif u == "<bbfile>":
|
||||
if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
|
||||
else: allmatches = [ x.split("/")[-1] for x in cooker.status.pkg_fn.keys() ]
|
||||
elif u == "<providee>":
|
||||
if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
|
||||
else: allmatches = cooker.status.providers.iterkeys()
|
||||
else: allmatches = [ "(No tab completion available for this command)" ]
|
||||
else: allmatches = [ "(No tab completion available for this command)" ]
|
||||
else:
|
||||
# we are in first argument
|
||||
allmatches = cmds.iterkeys()
|
||||
|
||||
completer.matches = [ x for x in allmatches if x[:len(text)] == text ]
|
||||
#print "completer.matches = '%s'" % completer.matches
|
||||
if len( completer.matches ) > state:
|
||||
return completer.matches[state]
|
||||
else:
|
||||
return None
|
||||
|
||||
def debugOut( text ):
|
||||
if debug:
|
||||
sys.stderr.write( "( %s )\n" % text )
|
||||
|
||||
def columnize( alist, width = 80 ):
|
||||
"""
|
||||
A word-wrap function that preserves existing line breaks
|
||||
and most spaces in the text. Expects that existing line
|
||||
breaks are posix newlines (\n).
|
||||
"""
|
||||
return reduce(lambda line, word, width=width: '%s%s%s' %
|
||||
(line,
|
||||
' \n'[(len(line[line.rfind('\n')+1:])
|
||||
+ len(word.split('\n',1)[0]
|
||||
) >= width)],
|
||||
word),
|
||||
alist
|
||||
)
|
||||
|
||||
def globfilter( names, pattern ):
|
||||
return fnmatch.filter( names, pattern )
|
||||
|
||||
##########################################################################
|
||||
# Class MemoryOutput
|
||||
##########################################################################
|
||||
|
||||
class MemoryOutput:
|
||||
"""File-like output class buffering the output of the last 10 commands"""
|
||||
def __init__( self, delegate ):
|
||||
self.delegate = delegate
|
||||
self._buffer = []
|
||||
self.text = []
|
||||
self._command = None
|
||||
|
||||
def startCommand( self, command ):
|
||||
self._command = command
|
||||
self.text = []
|
||||
def endCommand( self ):
|
||||
if self._command is not None:
|
||||
if len( self._buffer ) == 10: del self._buffer[0]
|
||||
self._buffer.append( ( self._command, self.text ) )
|
||||
def removeLast( self ):
|
||||
if self._buffer:
|
||||
del self._buffer[ len( self._buffer ) - 1 ]
|
||||
self.text = []
|
||||
self._command = None
|
||||
def lastBuffer( self ):
|
||||
if self._buffer:
|
||||
return self._buffer[ len( self._buffer ) -1 ][1]
|
||||
def bufferedCommands( self ):
|
||||
return [ cmd for cmd, output in self._buffer ]
|
||||
def buffer( self, i ):
|
||||
if i < len( self._buffer ):
|
||||
return "BB>> %s\n%s" % ( self._buffer[i][0], "".join( self._buffer[i][1] ) )
|
||||
else: return "ERROR: Invalid buffer number. Buffer needs to be in (0, %d)" % ( len( self._buffer ) - 1 )
|
||||
def write( self, text ):
|
||||
if self._command is not None and text != "BB>> ": self.text.append( text )
|
||||
if self.delegate is not None: self.delegate.write( text )
|
||||
def flush( self ):
|
||||
return self.delegate.flush()
|
||||
def fileno( self ):
|
||||
return self.delegate.fileno()
|
||||
def isatty( self ):
|
||||
return self.delegate.isatty()
|
||||
|
||||
##########################################################################
|
||||
# Class BitBakeShell
|
||||
##########################################################################
|
||||
|
||||
class BitBakeShell:
|
||||
|
||||
def __init__( self ):
|
||||
"""Register commands and set up readline"""
|
||||
self.commandQ = Queue.Queue()
|
||||
self.commands = BitBakeShellCommands( self )
|
||||
self.myout = MemoryOutput( sys.stdout )
|
||||
self.historyfilename = os.path.expanduser( "~/.bbsh_history" )
|
||||
self.startupfilename = os.path.expanduser( "~/.bbsh_startup" )
|
||||
|
||||
readline.set_completer( completer )
|
||||
readline.set_completer_delims( " " )
|
||||
readline.parse_and_bind("tab: complete")
|
||||
|
||||
try:
|
||||
readline.read_history_file( self.historyfilename )
|
||||
except IOError:
|
||||
pass # It doesn't exist yet.
|
||||
|
||||
print __credits__
|
||||
|
||||
def cleanup( self ):
|
||||
"""Write readline history and clean up resources"""
|
||||
debugOut( "writing command history" )
|
||||
try:
|
||||
readline.write_history_file( self.historyfilename )
|
||||
except:
|
||||
print "SHELL: Unable to save command history"
|
||||
|
||||
def registerCommand( self, command, function, numparams = 0, usage = "", helptext = "" ):
|
||||
"""Register a command"""
|
||||
if usage == "": usage = command
|
||||
if helptext == "": helptext = function.__doc__ or "<not yet documented>"
|
||||
cmds[command] = ( function, numparams, usage, helptext )
|
||||
|
||||
def processCommand( self, command, params ):
|
||||
"""Process a command. Check number of params and print a usage string, if appropriate"""
|
||||
debugOut( "processing command '%s'..." % command )
|
||||
try:
|
||||
function, numparams, usage, helptext = cmds[command]
|
||||
except KeyError:
|
||||
print "SHELL: ERROR: '%s' command is not a valid command." % command
|
||||
self.myout.removeLast()
|
||||
else:
|
||||
if (numparams != -1) and (not len( params ) == numparams):
|
||||
print "Usage: '%s'" % usage
|
||||
return
|
||||
|
||||
result = function( self.commands, params )
|
||||
debugOut( "result was '%s'" % result )
|
||||
|
||||
def processStartupFile( self ):
|
||||
"""Read and execute all commands found in $HOME/.bbsh_startup"""
|
||||
if os.path.exists( self.startupfilename ):
|
||||
startupfile = open( self.startupfilename, "r" )
|
||||
for cmdline in startupfile:
|
||||
debugOut( "processing startup line '%s'" % cmdline )
|
||||
if not cmdline:
|
||||
continue
|
||||
if "|" in cmdline:
|
||||
print "ERROR: '|' in startup file is not allowed. Ignoring line"
|
||||
continue
|
||||
self.commandQ.put( cmdline.strip() )
|
||||
|
||||
def main( self ):
|
||||
"""The main command loop"""
|
||||
while not leave_mainloop:
|
||||
try:
|
||||
if self.commandQ.empty():
|
||||
sys.stdout = self.myout.delegate
|
||||
cmdline = raw_input( "BB>> " )
|
||||
sys.stdout = self.myout
|
||||
else:
|
||||
cmdline = self.commandQ.get()
|
||||
if cmdline:
|
||||
allCommands = cmdline.split( ';' )
|
||||
for command in allCommands:
|
||||
pipecmd = None
|
||||
#
|
||||
# special case for expert mode
|
||||
if command == 'python':
|
||||
sys.stdout = self.myout.delegate
|
||||
self.processCommand( command, "" )
|
||||
sys.stdout = self.myout
|
||||
else:
|
||||
self.myout.startCommand( command )
|
||||
if '|' in command: # disable output
|
||||
command, pipecmd = command.split( '|' )
|
||||
delegate = self.myout.delegate
|
||||
self.myout.delegate = None
|
||||
tokens = shlex.split( command, True )
|
||||
self.processCommand( tokens[0], tokens[1:] or "" )
|
||||
self.myout.endCommand()
|
||||
if pipecmd is not None: # restore output
|
||||
self.myout.delegate = delegate
|
||||
|
||||
pipe = popen2.Popen4( pipecmd )
|
||||
pipe.tochild.write( "\n".join( self.myout.lastBuffer() ) )
|
||||
pipe.tochild.close()
|
||||
sys.stdout.write( pipe.fromchild.read() )
|
||||
#
|
||||
except EOFError:
|
||||
print
|
||||
return
|
||||
except KeyboardInterrupt:
|
||||
print
|
||||
|
||||
##########################################################################
|
||||
# Start function - called from the BitBake command line utility
|
||||
##########################################################################
|
||||
|
||||
def start( aCooker ):
|
||||
global cooker
|
||||
cooker = aCooker
|
||||
bbshell = BitBakeShell()
|
||||
bbshell.processStartupFile()
|
||||
bbshell.main()
|
||||
bbshell.cleanup()
|
||||
|
||||
if __name__ == "__main__":
|
||||
print "SHELL: Sorry, this program should only be called by BitBake."
|
||||
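The shell above keeps every command in the global cmds dictionary as a (function, numparams, usage, helptext) tuple; registration derives numparams from the number of words in the command's usage string, and processCommand prints the usage string when the argument count is wrong. A toy sketch of the same registration and dispatch pattern, outside BitBake and with purely illustrative command names:

    # Toy sketch of the cmds registration/dispatch pattern (not part of this commit).
    cmds = {}

    def register(name, func, usage=""):
        numparams = len(usage.split()) if usage != "<...>" else -1
        cmds[name] = (func, numparams, "%s %s" % (name, usage), func.__doc__ or "")

    def dispatch(name, params):
        func, numparams, usage, helptext = cmds[name]
        if numparams != -1 and len(params) != numparams:
            print "Usage: '%s'" % usage
            return
        return func(params)

    def greet(params):
        """Print a greeting"""
        print "hello,", params[0]

    register("greet", greet, "<name>")
    dispatch("greet", ["world"])   # -> hello, world
    dispatch("greet", [])          # -> Usage: 'greet <name>'
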
bitbake-dev/lib/bb/taskdata.py (new file, 595 lines)
@@ -0,0 +1,595 @@
#!/usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake 'TaskData' implementation
|
||||
|
||||
Task data collection and handling
|
||||
|
||||
"""
|
||||
|
||||
# Copyright (C) 2006 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import bb
|
||||
|
||||
class TaskData:
|
||||
"""
|
||||
BitBake Task Data implementation
|
||||
"""
|
||||
def __init__(self, abort = True, tryaltconfigs = False):
|
||||
self.build_names_index = []
|
||||
self.run_names_index = []
|
||||
self.fn_index = []
|
||||
|
||||
self.build_targets = {}
|
||||
self.run_targets = {}
|
||||
|
||||
self.external_targets = []
|
||||
|
||||
self.tasks_fnid = []
|
||||
self.tasks_name = []
|
||||
self.tasks_tdepends = []
|
||||
self.tasks_idepends = []
|
||||
# Cache to speed up task ID lookups
|
||||
self.tasks_lookup = {}
|
||||
|
||||
self.depids = {}
|
||||
self.rdepids = {}
|
||||
|
||||
self.consider_msgs_cache = []
|
||||
|
||||
self.failed_deps = []
|
||||
self.failed_rdeps = []
|
||||
self.failed_fnids = []
|
||||
|
||||
self.abort = abort
|
||||
self.tryaltconfigs = tryaltconfigs
|
||||
|
||||
def getbuild_id(self, name):
|
||||
"""
|
||||
Return an ID number for the build target name.
|
||||
If it doesn't exist, create one.
|
||||
"""
|
||||
if not name in self.build_names_index:
|
||||
self.build_names_index.append(name)
|
||||
return len(self.build_names_index) - 1
|
||||
|
||||
return self.build_names_index.index(name)
|
||||
|
||||
def getrun_id(self, name):
|
||||
"""
|
||||
Return an ID number for the run target name.
|
||||
If it doesn't exist, create one.
|
||||
"""
|
||||
if not name in self.run_names_index:
|
||||
self.run_names_index.append(name)
|
||||
return len(self.run_names_index) - 1
|
||||
|
||||
return self.run_names_index.index(name)
|
||||
|
||||
def getfn_id(self, name):
|
||||
"""
|
||||
Return an ID number for the filename.
|
||||
If it doesn't exist, create one.
|
||||
"""
|
||||
if not name in self.fn_index:
|
||||
self.fn_index.append(name)
|
||||
return len(self.fn_index) - 1
|
||||
|
||||
return self.fn_index.index(name)
|
||||
|
||||
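getbuild_id, getrun_id and getfn_id above all intern a name into a growing index list and return its position, so the rest of TaskData can track dependencies as small integer IDs instead of strings. A minimal sketch of that append-or-look-up idea in isolation; the target names are illustrative:

    # Illustrative sketch of the interning pattern used by TaskData.
    class NameIndex:
        def __init__(self):
            self.index = []

        def get_id(self, name):
            # Append the name on first sight, otherwise return its existing position.
            if name not in self.index:
                self.index.append(name)
                return len(self.index) - 1
            return self.index.index(name)

    names = NameIndex()
    print names.get_id("virtual/kernel")   # -> 0
    print names.get_id("busybox")          # -> 1
    print names.get_id("virtual/kernel")   # -> 0 (already known)
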
def gettask_ids(self, fnid):
|
||||
"""
|
||||
Return an array of the ID numbers matching a given fnid.
|
||||
"""
|
||||
ids = []
|
||||
if fnid in self.tasks_lookup:
|
||||
for task in self.tasks_lookup[fnid]:
|
||||
ids.append(self.tasks_lookup[fnid][task])
|
||||
return ids
|
||||
|
||||
def gettask_id(self, fn, task, create = True):
|
||||
"""
|
||||
Return an ID number for the task matching fn and task.
|
||||
If it doesn't exist, create one by default.
|
||||
Optionally return None instead.
|
||||
"""
|
||||
fnid = self.getfn_id(fn)
|
||||
|
||||
if fnid in self.tasks_lookup:
|
||||
if task in self.tasks_lookup[fnid]:
|
||||
return self.tasks_lookup[fnid][task]
|
||||
|
||||
if not create:
|
||||
return None
|
||||
|
||||
self.tasks_name.append(task)
|
||||
self.tasks_fnid.append(fnid)
|
||||
self.tasks_tdepends.append([])
|
||||
self.tasks_idepends.append([])
|
||||
|
||||
listid = len(self.tasks_name) - 1
|
||||
|
||||
if fnid not in self.tasks_lookup:
|
||||
self.tasks_lookup[fnid] = {}
|
||||
self.tasks_lookup[fnid][task] = listid
|
||||
|
||||
return listid
|
||||
|
||||
def add_tasks(self, fn, dataCache):
|
||||
"""
|
||||
Add tasks for a given fn to the database
|
||||
"""
|
||||
|
||||
task_deps = dataCache.task_deps[fn]
|
||||
|
||||
fnid = self.getfn_id(fn)
|
||||
|
||||
if fnid in self.failed_fnids:
|
||||
bb.msg.fatal(bb.msg.domain.TaskData, "Trying to re-add a failed file? Something is broken...")
|
||||
|
||||
# Check if we've already seen this fn
|
||||
if fnid in self.tasks_fnid:
|
||||
return
|
||||
|
||||
for task in task_deps['tasks']:
|
||||
|
||||
# Work out task dependencies
|
||||
parentids = []
|
||||
for dep in task_deps['parents'][task]:
|
||||
parentid = self.gettask_id(fn, dep)
|
||||
parentids.append(parentid)
|
||||
taskid = self.gettask_id(fn, task)
|
||||
self.tasks_tdepends[taskid].extend(parentids)
|
||||
|
||||
# Touch all intertask dependencies
|
||||
if 'depends' in task_deps and task in task_deps['depends']:
|
||||
ids = []
|
||||
for dep in task_deps['depends'][task].split():
|
||||
if dep:
|
||||
ids.append(((self.getbuild_id(dep.split(":")[0])), dep.split(":")[1]))
|
||||
self.tasks_idepends[taskid].extend(ids)
|
||||
|
||||
# Work out build dependencies
|
||||
if not fnid in self.depids:
|
||||
dependids = {}
|
||||
for depend in dataCache.deps[fn]:
|
||||
bb.msg.debug(2, bb.msg.domain.TaskData, "Added dependency %s for %s" % (depend, fn))
|
||||
dependids[self.getbuild_id(depend)] = None
|
||||
self.depids[fnid] = dependids.keys()
|
||||
|
||||
# Work out runtime dependencies
|
||||
if not fnid in self.rdepids:
|
||||
rdependids = {}
|
||||
rdepends = dataCache.rundeps[fn]
|
||||
rrecs = dataCache.runrecs[fn]
|
||||
for package in rdepends:
|
||||
for rdepend in bb.utils.explode_deps(rdepends[package]):
|
||||
bb.msg.debug(2, bb.msg.domain.TaskData, "Added runtime dependency %s for %s" % (rdepend, fn))
|
||||
rdependids[self.getrun_id(rdepend)] = None
|
||||
for package in rrecs:
|
||||
for rdepend in bb.utils.explode_deps(rrecs[package]):
|
||||
bb.msg.debug(2, bb.msg.domain.TaskData, "Added runtime recommendation %s for %s" % (rdepend, fn))
|
||||
rdependids[self.getrun_id(rdepend)] = None
|
||||
self.rdepids[fnid] = rdependids.keys()
|
||||
|
||||
for dep in self.depids[fnid]:
|
||||
if dep in self.failed_deps:
|
||||
self.fail_fnid(fnid)
|
||||
return
|
||||
for dep in self.rdepids[fnid]:
|
||||
if dep in self.failed_rdeps:
|
||||
self.fail_fnid(fnid)
|
||||
return
|
||||
|
||||
def have_build_target(self, target):
|
||||
"""
|
||||
Have we a build target matching this name?
|
||||
"""
|
||||
targetid = self.getbuild_id(target)
|
||||
|
||||
if targetid in self.build_targets:
|
||||
return True
|
||||
return False
|
||||
|
||||
def have_runtime_target(self, target):
|
||||
"""
|
||||
Have we a runtime target matching this name?
|
||||
"""
|
||||
targetid = self.getrun_id(target)
|
||||
|
||||
if targetid in self.run_targets:
|
||||
return True
|
||||
return False
|
||||
|
||||
def add_build_target(self, fn, item):
|
||||
"""
|
||||
Add a build target.
|
||||
If already present, append the provider fn to the list
|
||||
"""
|
||||
targetid = self.getbuild_id(item)
|
||||
fnid = self.getfn_id(fn)
|
||||
|
||||
if targetid in self.build_targets:
|
||||
if fnid in self.build_targets[targetid]:
|
||||
return
|
||||
self.build_targets[targetid].append(fnid)
|
||||
return
|
||||
self.build_targets[targetid] = [fnid]
|
||||
|
||||
def add_runtime_target(self, fn, item):
|
||||
"""
|
||||
Add a runtime target.
|
||||
If already present, append the provider fn to the list
|
||||
"""
|
||||
targetid = self.getrun_id(item)
|
||||
fnid = self.getfn_id(fn)
|
||||
|
||||
if targetid in self.run_targets:
|
||||
if fnid in self.run_targets[targetid]:
|
||||
return
|
||||
self.run_targets[targetid].append(fnid)
|
||||
return
|
||||
self.run_targets[targetid] = [fnid]
|
||||
|
||||
def mark_external_target(self, item):
|
||||
"""
|
||||
Mark a build target as being externally requested
|
||||
"""
|
||||
targetid = self.getbuild_id(item)
|
||||
|
||||
if targetid not in self.external_targets:
|
||||
self.external_targets.append(targetid)
|
||||
|
||||
def get_unresolved_build_targets(self, dataCache):
|
||||
"""
|
||||
Return a list of build targets whose providers
|
||||
are unknown.
|
||||
"""
|
||||
unresolved = []
|
||||
for target in self.build_names_index:
|
||||
if target in dataCache.ignored_dependencies:
|
||||
continue
|
||||
if self.build_names_index.index(target) in self.failed_deps:
|
||||
continue
|
||||
if not self.have_build_target(target):
|
||||
unresolved.append(target)
|
||||
return unresolved
|
||||
|
||||
def get_unresolved_run_targets(self, dataCache):
|
||||
"""
|
||||
Return a list of runtime targets whose providers
|
||||
are unknown.
|
||||
"""
|
||||
unresolved = []
|
||||
for target in self.run_names_index:
|
||||
if target in dataCache.ignored_dependencies:
|
||||
continue
|
||||
if self.run_names_index.index(target) in self.failed_rdeps:
|
||||
continue
|
||||
if not self.have_runtime_target(target):
|
||||
unresolved.append(target)
|
||||
return unresolved
|
||||
|
||||
def get_provider(self, item):
|
||||
"""
|
||||
Return a list of providers of item
|
||||
"""
|
||||
targetid = self.getbuild_id(item)
|
||||
|
||||
return self.build_targets[targetid]
|
||||
|
||||
def get_dependees(self, itemid):
|
||||
"""
|
||||
Return a list of targets which depend on item
|
||||
"""
|
||||
dependees = []
|
||||
for fnid in self.depids:
|
||||
if itemid in self.depids[fnid]:
|
||||
dependees.append(fnid)
|
||||
return dependees
|
||||
|
||||
def get_dependees_str(self, item):
|
||||
"""
|
||||
Return a list of targets which depend on item as a user readable string
|
||||
"""
|
||||
itemid = self.getbuild_id(item)
|
||||
dependees = []
|
||||
for fnid in self.depids:
|
||||
if itemid in self.depids[fnid]:
|
||||
dependees.append(self.fn_index[fnid])
|
||||
return dependees
|
||||
|
||||
def get_rdependees(self, itemid):
|
||||
"""
|
||||
Return a list of targets which depend on runtime item
|
||||
"""
|
||||
dependees = []
|
||||
for fnid in self.rdepids:
|
||||
if itemid in self.rdepids[fnid]:
|
||||
dependees.append(fnid)
|
||||
return dependees
|
||||
|
||||
def get_rdependees_str(self, item):
|
||||
"""
|
||||
Return a list of targets which depend on runtime item as a user readable string
|
||||
"""
|
||||
itemid = self.getrun_id(item)
|
||||
dependees = []
|
||||
for fnid in self.rdepids:
|
||||
if itemid in self.rdepids[fnid]:
|
||||
dependees.append(self.fn_index[fnid])
|
||||
return dependees
|
||||
|
||||
def add_provider(self, cfgData, dataCache, item):
|
||||
try:
|
||||
self.add_provider_internal(cfgData, dataCache, item)
|
||||
except bb.providers.NoProvider:
|
||||
if self.abort:
|
||||
if self.get_rdependees_str(item):
|
||||
bb.msg.error(bb.msg.domain.Provider, "Nothing PROVIDES '%s' (but '%s' DEPENDS on or otherwise requires it)" % (item, self.get_dependees_str(item)))
|
||||
else:
|
||||
bb.msg.error(bb.msg.domain.Provider, "Nothing PROVIDES '%s'" % (item))
|
||||
raise
|
||||
targetid = self.getbuild_id(item)
|
||||
self.remove_buildtarget(targetid)
|
||||
|
||||
self.mark_external_target(item)
|
||||
|
||||
def add_provider_internal(self, cfgData, dataCache, item):
|
||||
"""
|
||||
Add the providers of item to the task data
|
||||
Mark entries that were specifically added externally, as opposed to dependencies
|
||||
added internally during dependency resolution
|
||||
"""
|
||||
|
||||
if item in dataCache.ignored_dependencies:
|
||||
return
|
||||
|
||||
if not item in dataCache.providers:
|
||||
if self.get_rdependees_str(item):
|
||||
bb.msg.note(2, bb.msg.domain.Provider, "Nothing PROVIDES '%s' (but '%s' DEPENDS on or otherwise requires it)" % (item, self.get_dependees_str(item)))
|
||||
else:
|
||||
bb.msg.note(2, bb.msg.domain.Provider, "Nothing PROVIDES '%s'" % (item))
|
||||
bb.event.fire(bb.event.NoProvider(item, cfgData))
|
||||
raise bb.providers.NoProvider(item)
|
||||
|
||||
if self.have_build_target(item):
|
||||
return
|
||||
|
||||
all_p = dataCache.providers[item]
|
||||
|
||||
eligible, foundUnique = bb.providers.filterProviders(all_p, item, cfgData, dataCache)
|
||||
eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
|
||||
|
||||
if not eligible:
|
||||
bb.msg.note(2, bb.msg.domain.Provider, "No buildable provider PROVIDES '%s' but '%s' DEPENDS on or otherwise requires it. Enable debugging and see earlier logs to find unbuildable providers." % (item, self.get_dependees_str(item)))
|
||||
bb.event.fire(bb.event.NoProvider(item, cfgData))
|
||||
raise bb.providers.NoProvider(item)
|
||||
|
||||
if len(eligible) > 1 and foundUnique == False:
|
||||
if item not in self.consider_msgs_cache:
|
||||
providers_list = []
|
||||
for fn in eligible:
|
||||
providers_list.append(dataCache.pkg_fn[fn])
|
||||
bb.msg.note(1, bb.msg.domain.Provider, "multiple providers are available for %s (%s);" % (item, ", ".join(providers_list)))
|
||||
bb.msg.note(1, bb.msg.domain.Provider, "consider defining PREFERRED_PROVIDER_%s" % item)
|
||||
bb.event.fire(bb.event.MultipleProviders(item, providers_list, cfgData))
|
||||
self.consider_msgs_cache.append(item)
|
||||
|
||||
for fn in eligible:
|
||||
fnid = self.getfn_id(fn)
|
||||
if fnid in self.failed_fnids:
|
||||
continue
|
||||
bb.msg.debug(2, bb.msg.domain.Provider, "adding %s to satisfy %s" % (fn, item))
|
||||
self.add_build_target(fn, item)
|
||||
self.add_tasks(fn, dataCache)
|
||||
|
||||
|
||||
#item = dataCache.pkg_fn[fn]
|
||||
|
||||
def add_rprovider(self, cfgData, dataCache, item):
|
||||
"""
|
||||
Add the runtime providers of item to the task data
|
||||
(takes item names from RDEPENDS/PACKAGES namespace)
|
||||
"""
|
||||
|
||||
if item in dataCache.ignored_dependencies:
|
||||
return
|
||||
|
||||
if self.have_runtime_target(item):
|
||||
return
|
||||
|
||||
all_p = bb.providers.getRuntimeProviders(dataCache, item)
|
||||
|
||||
if not all_p:
|
||||
bb.msg.error(bb.msg.domain.Provider, "'%s' RDEPENDS/RRECOMMENDS or otherwise requires the runtime entity '%s' but it wasn't found in any PACKAGE or RPROVIDES variables" % (self.get_rdependees_str(item), item))
|
||||
bb.event.fire(bb.event.NoProvider(item, cfgData, runtime=True))
|
||||
raise bb.providers.NoRProvider(item)
|
||||
|
||||
eligible, numberPreferred = bb.providers.filterProvidersRunTime(all_p, item, cfgData, dataCache)
|
||||
eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
|
||||
|
||||
if not eligible:
|
||||
bb.msg.error(bb.msg.domain.Provider, "'%s' RDEPENDS/RRECOMMENDS or otherwise requires the runtime entity '%s' but it wasn't found in any PACKAGE or RPROVIDES variables of any buildable targets.\nEnable debugging and see earlier logs to find unbuildable targets." % (self.get_rdependees_str(item), item))
|
||||
bb.event.fire(bb.event.NoProvider(item, cfgData, runtime=True))
|
||||
raise bb.providers.NoRProvider(item)
|
||||
|
||||
if len(eligible) > 1 and numberPreferred == 0:
|
||||
if item not in self.consider_msgs_cache:
|
||||
providers_list = []
|
||||
for fn in eligible:
|
||||
providers_list.append(dataCache.pkg_fn[fn])
|
||||
bb.msg.note(2, bb.msg.domain.Provider, "multiple providers are available for runtime %s (%s);" % (item, ", ".join(providers_list)))
|
||||
bb.msg.note(2, bb.msg.domain.Provider, "consider defining a PREFERRED_PROVIDER entry to match runtime %s" % item)
|
||||
bb.event.fire(bb.event.MultipleProviders(item,providers_list, cfgData, runtime=True))
|
||||
self.consider_msgs_cache.append(item)
|
||||
|
||||
if numberPreferred > 1:
|
||||
if item not in self.consider_msgs_cache:
|
||||
providers_list = []
|
||||
for fn in eligible:
|
||||
providers_list.append(dataCache.pkg_fn[fn])
|
||||
bb.msg.note(2, bb.msg.domain.Provider, "multiple providers are available for runtime %s (top %s entries preferred) (%s);" % (item, numberPreferred, ", ".join(providers_list)))
|
||||
bb.msg.note(2, bb.msg.domain.Provider, "consider defining only one PREFERRED_PROVIDER entry to match runtime %s" % item)
|
||||
bb.event.fire(bb.event.MultipleProviders(item,providers_list, cfgData, runtime=True))
|
||||
self.consider_msgs_cache.append(item)
|
||||
|
||||
# run through the list until we find one that we can build
|
||||
for fn in eligible:
|
||||
fnid = self.getfn_id(fn)
|
||||
if fnid in self.failed_fnids:
|
||||
continue
|
||||
bb.msg.debug(2, bb.msg.domain.Provider, "adding '%s' to satisfy runtime '%s'" % (fn, item))
|
||||
self.add_runtime_target(fn, item)
|
||||
self.add_tasks(fn, dataCache)
|
||||
|
||||
def fail_fnid(self, fnid, missing_list = []):
|
||||
"""
|
||||
Mark a file as failed (unbuildable)
|
||||
Remove any references from build and runtime provider lists
|
||||
|
||||
missing_list, A list of missing requirements for this target
|
||||
"""
|
||||
if fnid in self.failed_fnids:
|
||||
return
|
||||
bb.msg.debug(1, bb.msg.domain.Provider, "File '%s' is unbuildable, removing..." % self.fn_index[fnid])
|
||||
self.failed_fnids.append(fnid)
|
||||
for target in self.build_targets:
|
||||
if fnid in self.build_targets[target]:
|
||||
self.build_targets[target].remove(fnid)
|
||||
if len(self.build_targets[target]) == 0:
|
||||
self.remove_buildtarget(target, missing_list)
|
||||
for target in self.run_targets:
|
||||
if fnid in self.run_targets[target]:
|
||||
self.run_targets[target].remove(fnid)
|
||||
if len(self.run_targets[target]) == 0:
|
||||
self.remove_runtarget(target, missing_list)
|
||||
|
||||
def remove_buildtarget(self, targetid, missing_list = []):
|
||||
"""
|
||||
Mark a build target as failed (unbuildable)
|
||||
Trigger removal of any files that have this as a dependency
|
||||
"""
|
||||
if not missing_list:
|
||||
missing_list = [self.build_names_index[targetid]]
|
||||
else:
|
||||
missing_list = [self.build_names_index[targetid]] + missing_list
|
||||
bb.msg.note(2, bb.msg.domain.Provider, "Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s" % (self.build_names_index[targetid], missing_list))
|
||||
self.failed_deps.append(targetid)
|
||||
dependees = self.get_dependees(targetid)
|
||||
for fnid in dependees:
|
||||
self.fail_fnid(fnid, missing_list)
|
||||
for taskid in range(len(self.tasks_idepends)):
|
||||
idepends = self.tasks_idepends[taskid]
|
||||
for (idependid, idependtask) in idepends:
|
||||
if idependid == targetid:
|
||||
self.fail_fnid(self.tasks_fnid[taskid], missing_list)
|
||||
|
||||
if self.abort and targetid in self.external_targets:
|
||||
bb.msg.error(bb.msg.domain.Provider, "Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s" % (self.build_names_index[targetid], missing_list))
|
||||
raise bb.providers.NoProvider
|
||||
|
||||
def remove_runtarget(self, targetid, missing_list = []):
|
||||
"""
|
||||
Mark a run target as failed (unbuildable)
|
||||
Trigger removal of any files that have this as a dependency
|
||||
"""
|
||||
if not missing_list:
|
||||
missing_list = [self.run_names_index[targetid]]
|
||||
else:
|
||||
missing_list = [self.run_names_index[targetid]] + missing_list
|
||||
|
||||
bb.msg.note(1, bb.msg.domain.Provider, "Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s" % (self.run_names_index[targetid], missing_list))
|
||||
self.failed_rdeps.append(targetid)
|
||||
dependees = self.get_rdependees(targetid)
|
||||
for fnid in dependees:
|
||||
self.fail_fnid(fnid, missing_list)
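# A self-contained sketch of the failure cascade implemented by fail_fnid(),
# remove_buildtarget() and remove_runtarget() above: each step prepends its
# own name to the missing-dependency chain before failing its dependees.
# The package names are made up for illustration.
def remove_target(target, dependees, missing=None):
    missing = [target] + (missing or [])
    print("Target '%s' is unbuildable, chain: %s" % (target, missing))
    for dep in dependees.get(target, []):
        remove_target(dep, dependees, missing)

# 'libjpeg' failing takes 'gtk+' with it, which takes 'sato-image' with it.
remove_target("libjpeg", {"libjpeg": ["gtk+"], "gtk+": ["sato-image"]})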
|
||||
|
||||
def add_unresolved(self, cfgData, dataCache):
|
||||
"""
|
||||
Resolve all unresolved build and runtime targets
|
||||
"""
|
||||
bb.msg.note(1, bb.msg.domain.TaskData, "Resolving any missing task queue dependencies")
|
||||
while 1:
|
||||
added = 0
|
||||
for target in self.get_unresolved_build_targets(dataCache):
|
||||
try:
|
||||
self.add_provider_internal(cfgData, dataCache, target)
|
||||
added = added + 1
|
||||
except bb.providers.NoProvider:
|
||||
targetid = self.getbuild_id(target)
|
||||
if self.abort and targetid in self.external_targets:
|
||||
if self.get_rdependees_str(target):
|
||||
bb.msg.error(bb.msg.domain.Provider, "Nothing PROVIDES '%s' (but '%s' DEPENDS on or otherwise requires it)" % (target, self.get_dependees_str(target)))
|
||||
else:
|
||||
bb.msg.error(bb.msg.domain.Provider, "Nothing PROVIDES '%s'" % (target))
|
||||
raise
|
||||
self.remove_buildtarget(targetid)
|
||||
for target in self.get_unresolved_run_targets(dataCache):
|
||||
try:
|
||||
self.add_rprovider(cfgData, dataCache, target)
|
||||
added = added + 1
|
||||
except bb.providers.NoRProvider:
|
||||
self.remove_runtarget(self.getrun_id(target))
|
||||
bb.msg.debug(1, bb.msg.domain.TaskData, "Resolved " + str(added) + " extra dependencies")
|
||||
if added == 0:
|
||||
break
|
||||
# self.dump_data()
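# A toy version of the fixed-point loop in add_unresolved() above: keep
# making passes over whatever is still unresolved until a whole pass adds
# nothing new. The target names are placeholders.
unresolved = {"core-image-minimal", "gcc-cross", "glibc"}
resolved = set()

while True:
    added = 0
    for target in sorted(unresolved - resolved):
        resolved.add(target)          # stands in for add_provider_internal()
        added = added + 1
    print("Resolved %s extra dependencies" % added)
    if added == 0:
        break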
|
||||
|
||||
def dump_data(self):
|
||||
"""
|
||||
Dump some debug information on the internal data structures
|
||||
"""
|
||||
bb.msg.debug(3, bb.msg.domain.TaskData, "build_names:")
|
||||
bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.build_names_index))
|
||||
|
||||
bb.msg.debug(3, bb.msg.domain.TaskData, "run_names:")
|
||||
bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.run_names_index))
|
||||
|
||||
bb.msg.debug(3, bb.msg.domain.TaskData, "build_targets:")
|
||||
for buildid in range(len(self.build_names_index)):
|
||||
target = self.build_names_index[buildid]
|
||||
targets = "None"
|
||||
if buildid in self.build_targets:
|
||||
targets = self.build_targets[buildid]
|
||||
bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s: %s" % (buildid, target, targets))
|
||||
|
||||
bb.msg.debug(3, bb.msg.domain.TaskData, "run_targets:")
|
||||
for runid in range(len(self.run_names_index)):
|
||||
target = self.run_names_index[runid]
|
||||
targets = "None"
|
||||
if runid in self.run_targets:
|
||||
targets = self.run_targets[runid]
|
||||
bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s: %s" % (runid, target, targets))
|
||||
|
||||
bb.msg.debug(3, bb.msg.domain.TaskData, "tasks:")
|
||||
for task in range(len(self.tasks_name)):
|
||||
bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s - %s: %s" % (
|
||||
task,
|
||||
self.fn_index[self.tasks_fnid[task]],
|
||||
self.tasks_name[task],
|
||||
self.tasks_tdepends[task]))
|
||||
|
||||
bb.msg.debug(3, bb.msg.domain.TaskData, "dependency ids (per fn):")
|
||||
for fnid in self.depids:
|
||||
bb.msg.debug(3, bb.msg.domain.TaskData, " %s %s: %s" % (fnid, self.fn_index[fnid], self.depids[fnid]))
|
||||
|
||||
bb.msg.debug(3, bb.msg.domain.TaskData, "runtime dependency ids (per fn):")
|
||||
for fnid in self.rdepids:
|
||||
bb.msg.debug(3, bb.msg.domain.TaskData, " %s %s: %s" % (fnid, self.fn_index[fnid], self.rdepids[fnid]))
|
||||
|
||||
|
||||
bitbake-dev/lib/bb/ui/__init__.py (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
#
|
||||
# BitBake UI Implementation
|
||||
#
|
||||
# Copyright (C) 2006-2007 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
bitbake-dev/lib/bb/ui/crumbs/__init__.py (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
#
|
||||
# BitBake UI Implementation
|
||||
#
|
||||
# Copyright (C) 2006-2007 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
bitbake-dev/lib/bb/ui/crumbs/buildmanager.py (new file, 457 lines)
@@ -0,0 +1,457 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2008 Intel Corporation
|
||||
#
|
||||
# Authored by Rob Bradford <rob@linux.intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import gobject
|
||||
import threading
|
||||
import os
|
||||
import datetime
|
||||
import time
|
||||
|
||||
class BuildConfiguration:
|
||||
""" Represents a potential *or* historic *or* concrete build. It
|
||||
encompasses all the things that we need to tell bitbake to do to make it
|
||||
build what we want it to build.
|
||||
|
||||
It also stores the metadata URL and the set of possible machines (and the
|
||||
distros / images / URIs for these). Apart from the metadata URL these are
|
||||
not serialised to file (since they may be transient). In some ways this
|
||||
functionality might be shifted to the loader class."""
|
||||
|
||||
def __init__ (self):
|
||||
self.metadata_url = None
|
||||
|
||||
# Tuple of (distros, image, urls)
|
||||
self.machine_options = {}
|
||||
|
||||
self.machine = None
|
||||
self.distro = None
|
||||
self.image = None
|
||||
self.urls = []
|
||||
self.extra_urls = []
|
||||
self.extra_pkgs = []
|
||||
|
||||
def get_machines_model (self):
|
||||
model = gtk.ListStore (gobject.TYPE_STRING)
|
||||
for machine in self.machine_options.keys():
|
||||
model.append ([machine])
|
||||
|
||||
return model
|
||||
|
||||
def get_distro_and_images_models (self, machine):
|
||||
distro_model = gtk.ListStore (gobject.TYPE_STRING)
|
||||
|
||||
for distro in self.machine_options[machine][0]:
|
||||
distro_model.append ([distro])
|
||||
|
||||
image_model = gtk.ListStore (gobject.TYPE_STRING)
|
||||
|
||||
for image in self.machine_options[machine][1]:
|
||||
image_model.append ([image])
|
||||
|
||||
return (distro_model, image_model)
|
||||
|
||||
def get_repos (self):
|
||||
self.urls = self.machine_options[self.machine][2]
|
||||
return self.urls
|
||||
|
||||
# It might be better if we stored these in bitbake conf
|
||||
# file format.
|
||||
@staticmethod
|
||||
def load_from_file (filename):
|
||||
f = open (filename, "r")
|
||||
|
||||
conf = BuildConfiguration()
|
||||
for line in f.readlines():
|
||||
data = line.split (";")[1]
|
||||
if (line.startswith ("metadata-url;")):
|
||||
conf.metadata_url = data.strip()
|
||||
continue
|
||||
if (line.startswith ("url;")):
|
||||
conf.urls += [data.strip()]
|
||||
continue
|
||||
if (line.startswith ("extra-url;")):
|
||||
conf.extra_urls += [data.strip()]
|
||||
continue
|
||||
if (line.startswith ("machine;")):
|
||||
conf.machine = data.strip()
|
||||
continue
|
||||
if (line.startswith ("distribution;")):
|
||||
conf.distro = data.strip()
|
||||
continue
|
||||
if (line.startswith ("image;")):
|
||||
conf.image = data.strip()
|
||||
continue
|
||||
|
||||
f.close ()
|
||||
return conf
|
||||
|
||||
# Serialise to a file. This is part of the build process and we use this
|
||||
# to be able to repeat a given build (using the same set of parameters)
|
||||
# but also so that we can include the details of the image / machine /
|
||||
# distro in the build manager tree view.
|
||||
def write_to_file (self, filename):
|
||||
f = open (filename, "w")
|
||||
|
||||
lines = []
|
||||
|
||||
if (self.metadata_url):
|
||||
lines += ["metadata-url;%s\n" % (self.metadata_url)]
|
||||
|
||||
for url in self.urls:
|
||||
lines += ["url;%s\n" % (url)]
|
||||
|
||||
for url in self.extra_urls:
|
||||
lines += ["extra-url;%s\n" % (url)]
|
||||
|
||||
if (self.machine):
|
||||
lines += ["machine;%s\n" % (self.machine)]
|
||||
|
||||
if (self.distro):
|
||||
lines += ["distribution;%s\n" % (self.distro)]
|
||||
|
||||
if (self.image):
|
||||
lines += ["image;%s\n" % (self.image)]
|
||||
|
||||
f.writelines (lines)
|
||||
f.close ()
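# The ".conf" file written above is a plain "key;value" line format. A small
# round-trip example with made-up values (the field names come from the
# load_from_file()/write_to_file() pair above):
#
#   machine;qemux86
#   distribution;poky
#   image;poky-image-sato
#
# Reading it back only needs the text after the first ';' on each line:
path = "/tmp/example-build.conf"      # hypothetical location
with open(path, "w") as f:
    f.write("machine;qemux86\nimage;poky-image-sato\n")
with open(path) as f:
    settings = dict(line.rstrip("\n").split(";", 1) for line in f)
print(settings["machine"])            # -> qemux86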
|
||||
|
||||
class BuildResult(gobject.GObject):
|
||||
""" Represents an historic build. Perhaps not successful. But it includes
|
||||
things such as the files that are in the directory (the output from the
|
||||
build) as well as a deserialised BuildConfiguration file that is stored in
|
||||
".conf" in the directory for the build.
|
||||
|
||||
This is GObject so that it can be included in the TreeStore."""
|
||||
|
||||
(STATE_COMPLETE, STATE_FAILED, STATE_ONGOING) = \
|
||||
(0, 1, 2)
|
||||
|
||||
def __init__ (self, parent, identifier):
|
||||
gobject.GObject.__init__ (self)
|
||||
self.date = None
|
||||
|
||||
self.files = []
|
||||
self.status = None
|
||||
self.identifier = identifier
|
||||
self.path = os.path.join (parent, identifier)
|
||||
|
||||
# Extract the date, since the directory name is of the
|
||||
# format build-<year><month><day>-<ordinal> we can easily
|
||||
# pull it out.
|
||||
# TODO: Better to stat a file?
|
||||
(_ , date, revision) = identifier.split ("-")
|
||||
print date
|
||||
|
||||
year = int (date[0:4])
|
||||
month = int (date[4:6])
|
||||
day = int (date[6:8])
|
||||
|
||||
self.date = datetime.date (year, month, day)
|
||||
|
||||
self.conf = None
|
||||
|
||||
# By default builds are STATE_FAILED unless we find a "complete" file
|
||||
# in which case they are STATE_COMPLETE
|
||||
self.state = BuildResult.STATE_FAILED
|
||||
for file in os.listdir (self.path):
|
||||
if (file.startswith (".conf")):
|
||||
conffile = os.path.join (self.path, file)
|
||||
self.conf = BuildConfiguration.load_from_file (conffile)
|
||||
elif (file.startswith ("complete")):
|
||||
self.state = BuildResult.STATE_COMPLETE
|
||||
else:
|
||||
self.add_file (file)
|
||||
|
||||
def add_file (self, file):
|
||||
# Just add the file for now. Don't care about the type.
|
||||
self.files += [(file, None)]
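# A short sketch of the directory-name convention BuildResult relies on:
# results live in build-<yyyymmdd>-<ordinal>, so the date can be recovered
# by fixed-width slicing. The identifier below is hypothetical.
import datetime

ident = "build-20081110-3"
(_, date, ordinal) = ident.split("-")
build_date = datetime.date(int(date[0:4]), int(date[4:6]), int(date[6:8]))
print("%s, ordinal %s" % (build_date, ordinal))   # -> 2008-11-10, ordinal 3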
|
||||
|
||||
class BuildManagerModel (gtk.TreeStore):
|
||||
""" Model for the BuildManagerTreeView. This derives from gtk.TreeStore
|
||||
but it abstracts nicely what the columns mean and the setup of the columns
|
||||
in the model. """
|
||||
|
||||
(COL_IDENT, COL_DESC, COL_MACHINE, COL_DISTRO, COL_BUILD_RESULT, COL_DATE, COL_STATE) = \
|
||||
(0, 1, 2, 3, 4, 5, 6)
|
||||
|
||||
def __init__ (self):
|
||||
gtk.TreeStore.__init__ (self,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_OBJECT,
|
||||
gobject.TYPE_INT64,
|
||||
gobject.TYPE_INT)
|
||||
|
||||
class BuildManager (gobject.GObject):
|
||||
""" This class manages the historic builds that have been found in the
|
||||
"results" directory but is also used for starting a new build."""
|
||||
|
||||
__gsignals__ = {
|
||||
'population-finished' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
'populate-error' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
())
|
||||
}
|
||||
|
||||
def update_build_result (self, result, iter):
|
||||
# Convert the date into something we can sort by.
|
||||
date = long (time.mktime (result.date.timetuple()))
|
||||
|
||||
# Add a top level entry for the build
|
||||
|
||||
self.model.set (iter,
|
||||
BuildManagerModel.COL_IDENT, result.identifier,
|
||||
BuildManagerModel.COL_DESC, result.conf.image,
|
||||
BuildManagerModel.COL_MACHINE, result.conf.machine,
|
||||
BuildManagerModel.COL_DISTRO, result.conf.distro,
|
||||
BuildManagerModel.COL_BUILD_RESULT, result,
|
||||
BuildManagerModel.COL_DATE, date,
|
||||
BuildManagerModel.COL_STATE, result.state)
|
||||
|
||||
# And then we use the files in the directory as the children for the
|
||||
# top level iter.
|
||||
for file in result.files:
|
||||
self.model.append (iter, (None, file[0], None, None, None, date, -1))
|
||||
|
||||
# This function is called as an idle by the BuildManagerPopulaterThread
|
||||
def add_build_result (self, result):
|
||||
gtk.gdk.threads_enter()
|
||||
self.known_builds += [result]
|
||||
|
||||
self.update_build_result (result, self.model.append (None))
|
||||
|
||||
gtk.gdk.threads_leave()
|
||||
|
||||
def notify_build_finished (self):
|
||||
# This is a bit of a hack. If we have a build running then we
|
||||
# will have a row in the model in STATE_ONGOING. Find it and update it
|
||||
# so that it looks like a proper historic build (it is complete now).
|
||||
|
||||
# We need to use the iters here rather than the Python iterator
|
||||
# interface to the model since we need to pass it into
|
||||
# update_build_result
|
||||
|
||||
iter = self.model.get_iter_first()
|
||||
|
||||
while (iter):
|
||||
(ident, state) = self.model.get(iter,
|
||||
BuildManagerModel.COL_IDENT,
|
||||
BuildManagerModel.COL_STATE)
|
||||
|
||||
if state == BuildResult.STATE_ONGOING:
|
||||
result = BuildResult (self.results_directory, ident)
|
||||
self.update_build_result (result, iter)
|
||||
iter = self.model.iter_next(iter)
|
||||
|
||||
def notify_build_succeeded (self):
|
||||
# Write the "complete" file so that when we create the BuildResult
|
||||
# object we put into the model, it gets marked as STATE_COMPLETE.
|
||||
|
||||
complete_file_path = os.path.join (self.cur_build_directory, "complete")
|
||||
f = file (complete_file_path, "w")
|
||||
f.close()
|
||||
self.notify_build_finished()
|
||||
|
||||
def notify_build_failed (self):
|
||||
# Without a "complete" file then this will mark the build as failed:
|
||||
self.notify_build_finished()
|
||||
|
||||
# This function is called as an idle
|
||||
def emit_population_finished_signal (self):
|
||||
gtk.gdk.threads_enter()
|
||||
self.emit ("population-finished")
|
||||
gtk.gdk.threads_leave()
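# The pattern above, sketched without GTK (Python 3): the worker thread never
# touches the tree model directly; it hands results back to the main loop,
# which is what gtk.gdk.threads_enter()/leave() and gobject.idle_add() arrange
# in the code above. Names and data here are illustrative.
import queue, threading

ui_queue = queue.Queue()              # stand-in for the idle callbacks

def worker(entries):
    for entry in entries:
        ui_queue.put(("add_build_result", entry))
    ui_queue.put(("population-finished", None))

threading.Thread(target=worker, args=(["build-20081110-0"],)).start()
print(ui_queue.get())                 # the main loop would drain this queue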
|
||||
|
||||
class BuildManagerPopulaterThread (threading.Thread):
|
||||
def __init__ (self, manager, directory):
|
||||
threading.Thread.__init__ (self)
|
||||
self.manager = manager
|
||||
self.directory = directory
|
||||
|
||||
def run (self):
|
||||
# For each of the "build-<...>" directories ..
|
||||
|
||||
if os.path.exists (self.directory):
|
||||
for directory in os.listdir (self.directory):
|
||||
|
||||
if not directory.startswith ("build-"):
|
||||
continue
|
||||
|
||||
build_result = BuildResult (self.directory, directory)
|
||||
self.manager.add_build_result (build_result)
|
||||
|
||||
gobject.idle_add (BuildManager.emit_population_finished_signal,
|
||||
self.manager)
|
||||
|
||||
def __init__ (self, server, results_directory):
|
||||
gobject.GObject.__init__ (self)
|
||||
|
||||
# The builds that we've found from walking the result directory
|
||||
self.known_builds = []
|
||||
|
||||
# Save out the bitbake server, we need this for issuing commands to
|
||||
# the cooker:
|
||||
self.server = server
|
||||
|
||||
# The TreeStore that we use
|
||||
self.model = BuildManagerModel ()
|
||||
|
||||
# The results directory is where we create (and look for) the
|
||||
# build-<xyz>-<n> directories. We need to populate ourselves from
|
||||
# this directory.
|
||||
self.results_directory = results_directory
|
||||
self.populate_from_directory (self.results_directory)
|
||||
|
||||
def populate_from_directory (self, directory):
|
||||
thread = BuildManager.BuildManagerPopulaterThread (self, directory)
|
||||
thread.start()
|
||||
|
||||
# Come up with the name for the next build ident by combining "build-"
|
||||
# with the date formatted as yyyymmdd and then an ordinal. We do this by
|
||||
# an optimistic algorithm incrementing the ordinal if we find that it
|
||||
# already exists.
|
||||
def get_next_build_ident (self):
|
||||
today = datetime.date.today ()
|
||||
datestr = today.strftime ("%Y%m%d")
|
||||
|
||||
revision = 0
|
||||
test_name = "build-%s-%d" % (datestr, revision)
|
||||
test_path = os.path.join (self.results_directory, test_name)
|
||||
|
||||
while (os.path.exists (test_path)):
|
||||
revision += 1
|
||||
test_name = "build-%s-%d" % (datestr, revision)
|
||||
test_path = os.path.join (self.results_directory, test_name)
|
||||
|
||||
return test_name
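# A standalone version of the optimistic-ordinal scheme above. Using a
# zero-padded %Y%m%d date keeps the names fixed-width, which the slicing in
# BuildResult.__init__() depends on. The directory argument is an example.
import datetime, os

def next_build_ident(results_dir):
    datestr = datetime.date.today().strftime("%Y%m%d")
    revision = 0
    while os.path.exists(os.path.join(results_dir, "build-%s-%d" % (datestr, revision))):
        revision = revision + 1
    return "build-%s-%d" % (datestr, revision)

print(next_build_ident("/tmp"))       # e.g. build-20081110-0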
|
||||
|
||||
# Take a BuildConfiguration and then try and build it based on the
|
||||
# parameters of that configuration.
|
||||
def do_build (self, conf):
|
||||
server = self.server
|
||||
|
||||
# Work out the build directory. Note we actually create the
|
||||
# directories here since we need to write the ".conf" file. Otherwise
|
||||
# we could have relied on bitbake's builder thread to actually make
|
||||
# the directories as it proceeds with the build.
|
||||
ident = self.get_next_build_ident ()
|
||||
build_directory = os.path.join (self.results_directory,
|
||||
ident)
|
||||
self.cur_build_directory = build_directory
|
||||
os.makedirs (build_directory)
|
||||
|
||||
conffile = os.path.join (build_directory, ".conf")
|
||||
conf.write_to_file (conffile)
|
||||
|
||||
# Add a row to the model representing this ongoing build. It's kinda a
|
||||
# fake entry. If this build completes or fails then this gets updated
|
||||
# with the real stuff like the historic builds
|
||||
date = long (time.time())
|
||||
self.model.append (None, (ident, conf.image, conf.machine, conf.distro,
|
||||
None, date, BuildResult.STATE_ONGOING))
|
||||
try:
|
||||
server.runCommand(["setVariable", "BUILD_IMAGES_FROM_FEEDS", 1])
|
||||
server.runCommand(["setVariable", "MACHINE", conf.machine])
|
||||
server.runCommand(["setVariable", "DISTRO", conf.distro])
|
||||
server.runCommand(["setVariable", "PACKAGE_CLASSES", "package_ipk"])
|
||||
server.runCommand(["setVariable", "BBFILES", \
|
||||
"""${OEROOT}/meta/packages/*/*.bb ${OEROOT}/meta-moblin/packages/*/*.bb"""])
|
||||
server.runCommand(["setVariable", "TMPDIR", "${OEROOT}/build/tmp"])
|
||||
server.runCommand(["setVariable", "IPK_FEED_URIS", \
|
||||
" ".join(conf.get_repos())])
|
||||
server.runCommand(["setVariable", "DEPLOY_DIR_IMAGE",
|
||||
build_directory])
|
||||
server.runCommand(["buildTargets", [conf.image], "rootfs"])
|
||||
|
||||
except Exception, e:
|
||||
print e
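# A condensed sketch of the command sequence do_build() issues above. Each
# runCommand() call is a list of [command-name, arguments...]; only commands
# already used above ("setVariable", "buildTargets") appear here, and the
# helper name and the chosen variables are illustrative.
def start_build(server, conf, build_directory):
    for name, value in [("MACHINE", conf.machine),
                        ("DISTRO", conf.distro),
                        ("DEPLOY_DIR_IMAGE", build_directory)]:
        server.runCommand(["setVariable", name, value])
    # Kick off the rootfs task for the chosen image recipe.
    server.runCommand(["buildTargets", [conf.image], "rootfs"])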
|
||||
|
||||
class BuildManagerTreeView (gtk.TreeView):
|
||||
""" The tree view for the build manager. This shows the historic builds
|
||||
and so forth. """
|
||||
|
||||
# We use this function to control what goes in the cell since we store
|
||||
# the date in the model as seconds since the epoch (for sorting) and so we
|
||||
# need to make it human readable.
|
||||
def date_format_custom_cell_data_func (self, col, cell, model, iter):
|
||||
date = model.get (iter, BuildManagerModel.COL_DATE)[0]
|
||||
datestr = time.strftime("%A %d %B %Y", time.localtime(date))
|
||||
cell.set_property ("text", datestr)
|
||||
|
||||
# This format function controls what goes in the cell. We use this to map
|
||||
# the integer state to a string and also to colourise the text
|
||||
def state_format_custom_cell_data_fun (self, col, cell, model, iter):
|
||||
state = model.get (iter, BuildManagerModel.COL_STATE)[0]
|
||||
|
||||
if (state == BuildResult.STATE_ONGOING):
|
||||
cell.set_property ("text", "Active")
|
||||
cell.set_property ("foreground", "#000000")
|
||||
elif (state == BuildResult.STATE_FAILED):
|
||||
cell.set_property ("text", "Failed")
|
||||
cell.set_property ("foreground", "#ff0000")
|
||||
elif (state == BuildResult.STATE_COMPLETE):
|
||||
cell.set_property ("text", "Complete")
|
||||
cell.set_property ("foreground", "#00ff00")
|
||||
else:
|
||||
cell.set_property ("text", "")
|
||||
|
||||
def __init__ (self):
|
||||
gtk.TreeView.__init__(self)
|
||||
|
||||
# Misc description column
|
||||
renderer = gtk.CellRendererText ()
|
||||
col = gtk.TreeViewColumn (None, renderer,
|
||||
text=BuildManagerModel.COL_DESC)
|
||||
self.append_column (col)
|
||||
|
||||
# Machine
|
||||
renderer = gtk.CellRendererText ()
|
||||
col = gtk.TreeViewColumn ("Machine", renderer,
|
||||
text=BuildManagerModel.COL_MACHINE)
|
||||
self.append_column (col)
|
||||
|
||||
# distro
|
||||
renderer = gtk.CellRendererText ()
|
||||
col = gtk.TreeViewColumn ("Distribution", renderer,
|
||||
text=BuildManagerModel.COL_DISTRO)
|
||||
self.append_column (col)
|
||||
|
||||
# date (using a custom function for formatting the cell contents it
|
||||
# takes epoch -> human readable string)
|
||||
renderer = gtk.CellRendererText ()
|
||||
col = gtk.TreeViewColumn ("Date", renderer,
|
||||
text=BuildManagerModel.COL_DATE)
|
||||
self.append_column (col)
|
||||
col.set_cell_data_func (renderer,
|
||||
self.date_format_custom_cell_data_func)
|
||||
|
||||
# For status.
|
||||
renderer = gtk.CellRendererText ()
|
||||
col = gtk.TreeViewColumn ("Status", renderer,
|
||||
text = BuildManagerModel.COL_STATE)
|
||||
self.append_column (col)
|
||||
col.set_cell_data_func (renderer,
|
||||
self.state_format_custom_cell_data_fun)
|
||||
|
||||
bitbake-dev/lib/bb/ui/crumbs/puccho.glade (new file, 606 lines)
@@ -0,0 +1,606 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!DOCTYPE glade-interface SYSTEM "glade-2.0.dtd">
|
||||
<!--Generated with glade3 3.4.5 on Mon Nov 10 12:24:12 2008 -->
|
||||
<glade-interface>
|
||||
<widget class="GtkDialog" id="build_dialog">
|
||||
<property name="title" translatable="yes">Start a build</property>
|
||||
<property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
|
||||
<property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
|
||||
<property name="has_separator">False</property>
|
||||
<child internal-child="vbox">
|
||||
<widget class="GtkVBox" id="dialog-vbox1">
|
||||
<property name="visible">True</property>
|
||||
<property name="spacing">2</property>
|
||||
<child>
|
||||
<widget class="GtkTable" id="build_table">
|
||||
<property name="visible">True</property>
|
||||
<property name="border_width">6</property>
|
||||
<property name="n_rows">7</property>
|
||||
<property name="n_columns">3</property>
|
||||
<property name="column_spacing">5</property>
|
||||
<property name="row_spacing">6</property>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="status_alignment">
|
||||
<property name="visible">True</property>
|
||||
<property name="left_padding">12</property>
|
||||
<child>
|
||||
<widget class="GtkHBox" id="status_hbox">
|
||||
<property name="spacing">6</property>
|
||||
<child>
|
||||
<widget class="GtkImage" id="status_image">
|
||||
<property name="visible">True</property>
|
||||
<property name="no_show_all">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="stock">gtk-dialog-error</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="status_label">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="label" translatable="yes">If you see this text something is wrong...</property>
|
||||
<property name="use_markup">True</property>
|
||||
<property name="use_underline">True</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">2</property>
|
||||
<property name="bottom_attach">3</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="label2">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="label" translatable="yes"><b>Build configuration</b></property>
|
||||
<property name="use_markup">True</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">3</property>
|
||||
<property name="bottom_attach">4</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkComboBox" id="image_combo">
|
||||
<property name="visible">True</property>
|
||||
<property name="sensitive">False</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">1</property>
|
||||
<property name="right_attach">2</property>
|
||||
<property name="top_attach">6</property>
|
||||
<property name="bottom_attach">7</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="image_label">
|
||||
<property name="visible">True</property>
|
||||
<property name="sensitive">False</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="xpad">12</property>
|
||||
<property name="label" translatable="yes">Image:</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="top_attach">6</property>
|
||||
<property name="bottom_attach">7</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkComboBox" id="distribution_combo">
|
||||
<property name="visible">True</property>
|
||||
<property name="sensitive">False</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">1</property>
|
||||
<property name="right_attach">2</property>
|
||||
<property name="top_attach">5</property>
|
||||
<property name="bottom_attach">6</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="distribution_label">
|
||||
<property name="visible">True</property>
|
||||
<property name="sensitive">False</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="xpad">12</property>
|
||||
<property name="label" translatable="yes">Distribution:</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="top_attach">5</property>
|
||||
<property name="bottom_attach">6</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkComboBox" id="machine_combo">
|
||||
<property name="visible">True</property>
|
||||
<property name="sensitive">False</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">1</property>
|
||||
<property name="right_attach">2</property>
|
||||
<property name="top_attach">4</property>
|
||||
<property name="bottom_attach">5</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="machine_label">
|
||||
<property name="visible">True</property>
|
||||
<property name="sensitive">False</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="xpad">12</property>
|
||||
<property name="label" translatable="yes">Machine:</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="top_attach">4</property>
|
||||
<property name="bottom_attach">5</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkButton" id="refresh_button">
|
||||
<property name="visible">True</property>
|
||||
<property name="sensitive">False</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="label" translatable="yes">gtk-refresh</property>
|
||||
<property name="use_stock">True</property>
|
||||
<property name="response_id">0</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">2</property>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">1</property>
|
||||
<property name="bottom_attach">2</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkEntry" id="location_entry">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="width_chars">32</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">1</property>
|
||||
<property name="right_attach">2</property>
|
||||
<property name="top_attach">1</property>
|
||||
<property name="bottom_attach">2</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="label3">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="xpad">12</property>
|
||||
<property name="label" translatable="yes">Location:</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="top_attach">1</property>
|
||||
<property name="bottom_attach">2</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="label1">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="label" translatable="yes"><b>Repository</b></property>
|
||||
<property name="use_markup">True</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment1">
|
||||
<property name="visible">True</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">2</property>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">4</property>
|
||||
<property name="bottom_attach">5</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment2">
|
||||
<property name="visible">True</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">2</property>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">5</property>
|
||||
<property name="bottom_attach">6</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment3">
|
||||
<property name="visible">True</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">2</property>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">6</property>
|
||||
<property name="bottom_attach">7</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child internal-child="action_area">
|
||||
<widget class="GtkHButtonBox" id="dialog-action_area1">
|
||||
<property name="visible">True</property>
|
||||
<property name="layout_style">GTK_BUTTONBOX_END</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="pack_type">GTK_PACK_END</property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<widget class="GtkDialog" id="dialog2">
|
||||
<property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
|
||||
<property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
|
||||
<property name="has_separator">False</property>
|
||||
<child internal-child="vbox">
|
||||
<widget class="GtkVBox" id="dialog-vbox2">
|
||||
<property name="visible">True</property>
|
||||
<property name="spacing">2</property>
|
||||
<child>
|
||||
<widget class="GtkTable" id="table2">
|
||||
<property name="visible">True</property>
|
||||
<property name="border_width">6</property>
|
||||
<property name="n_rows">7</property>
|
||||
<property name="n_columns">3</property>
|
||||
<property name="column_spacing">6</property>
|
||||
<property name="row_spacing">6</property>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="label7">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="label" translatable="yes"><b>Repositories</b></property>
|
||||
<property name="use_markup">True</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment4">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="left_padding">12</property>
|
||||
<child>
|
||||
<widget class="GtkScrolledWindow" id="scrolledwindow1">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<child>
|
||||
<widget class="GtkTreeView" id="treeview1">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="headers_clickable">True</property>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">2</property>
|
||||
<property name="bottom_attach">3</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkEntry" id="entry1">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">1</property>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">1</property>
|
||||
<property name="bottom_attach">2</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="label9">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="label" translatable="yes"><b>Additional packages</b></property>
|
||||
<property name="use_markup">True</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">4</property>
|
||||
<property name="bottom_attach">5</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment6">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="xscale">0</property>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="label8">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="yalign">0</property>
|
||||
<property name="xpad">12</property>
|
||||
<property name="label" translatable="yes">Location: </property>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="top_attach">1</property>
|
||||
<property name="bottom_attach">2</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment7">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">1</property>
|
||||
<property name="xscale">0</property>
|
||||
<child>
|
||||
<widget class="GtkHButtonBox" id="hbuttonbox1">
|
||||
<property name="visible">True</property>
|
||||
<property name="spacing">5</property>
|
||||
<child>
|
||||
<widget class="GtkButton" id="button7">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="label" translatable="yes">gtk-remove</property>
|
||||
<property name="use_stock">True</property>
|
||||
<property name="response_id">0</property>
|
||||
</widget>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkButton" id="button6">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="label" translatable="yes">gtk-edit</property>
|
||||
<property name="use_stock">True</property>
|
||||
<property name="response_id">0</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkButton" id="button5">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="label" translatable="yes">gtk-add</property>
|
||||
<property name="use_stock">True</property>
|
||||
<property name="response_id">0</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="position">2</property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">1</property>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">3</property>
|
||||
<property name="bottom_attach">4</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment5">
|
||||
<property name="visible">True</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="top_attach">3</property>
|
||||
<property name="bottom_attach">4</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="label10">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="yalign">0</property>
|
||||
<property name="xpad">12</property>
|
||||
<property name="label" translatable="yes">Search:</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="top_attach">5</property>
|
||||
<property name="bottom_attach">6</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkEntry" id="entry2">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">1</property>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">5</property>
|
||||
<property name="bottom_attach">6</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment8">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="left_padding">12</property>
|
||||
<child>
|
||||
<widget class="GtkScrolledWindow" id="scrolledwindow2">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<child>
|
||||
<widget class="GtkTreeView" id="treeview2">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="headers_clickable">True</property>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">6</property>
|
||||
<property name="bottom_attach">7</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child internal-child="action_area">
|
||||
<widget class="GtkHButtonBox" id="dialog-action_area2">
|
||||
<property name="visible">True</property>
|
||||
<property name="layout_style">GTK_BUTTONBOX_END</property>
|
||||
<child>
|
||||
<widget class="GtkButton" id="button4">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="label" translatable="yes">gtk-close</property>
|
||||
<property name="use_stock">True</property>
|
||||
<property name="response_id">0</property>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="pack_type">GTK_PACK_END</property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<widget class="GtkWindow" id="main_window">
|
||||
<child>
|
||||
<widget class="GtkVBox" id="main_window_vbox">
|
||||
<property name="visible">True</property>
|
||||
<child>
|
||||
<widget class="GtkToolbar" id="main_toolbar">
|
||||
<property name="visible">True</property>
|
||||
<child>
|
||||
<widget class="GtkToolButton" id="main_toolbutton_build">
|
||||
<property name="visible">True</property>
|
||||
<property name="label" translatable="yes">Build</property>
|
||||
<property name="stock_id">gtk-execute</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkVPaned" id="vpaned1">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<child>
|
||||
<widget class="GtkScrolledWindow" id="results_scrolledwindow">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="resize">False</property>
|
||||
<property name="shrink">True</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkScrolledWindow" id="progress_scrolledwindow">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="resize">True</property>
|
||||
<property name="shrink">True</property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
</glade-interface>
|
||||
bitbake-dev/lib/bb/ui/crumbs/runningbuild.py (new file, 180 lines)
@@ -0,0 +1,180 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2008 Intel Corporation
|
||||
#
|
||||
# Authored by Rob Bradford <rob@linux.intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import gobject
|
||||
|
||||
class RunningBuildModel (gtk.TreeStore):
|
||||
(COL_TYPE, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_ACTIVE) = (0, 1, 2, 3, 4, 5)
|
||||
def __init__ (self):
|
||||
gtk.TreeStore.__init__ (self,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_BOOLEAN)
|
||||
|
||||
class RunningBuild (gobject.GObject):
|
||||
__gsignals__ = {
|
||||
'build-succeeded' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
'build-failed' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
())
|
||||
}
|
||||
pids_to_task = {}
|
||||
tasks_to_iter = {}
|
||||
|
||||
def __init__ (self):
|
||||
gobject.GObject.__init__ (self)
|
||||
self.model = RunningBuildModel()
|
||||
|
||||
def handle_event (self, event):
|
||||
# Handle an event from the event queue, this may result in updating
|
||||
# the model and thus the UI. Or it may be to tell us that the build
|
||||
# has finished successfully (or not, as the case may be.)
|
||||
|
||||
parent = None
|
||||
pid = 0
|
||||
package = None
|
||||
task = None
|
||||
|
||||
# If we have a pid attached to this message/event try and get the
|
||||
# (package, task) pair for it. If we get that then get the parent iter
|
||||
# for the message.
|
||||
if event[1].has_key ('pid'):
|
||||
pid = event[1]['pid']
|
||||
if self.pids_to_task.has_key(pid):
|
||||
(package, task) = self.pids_to_task[pid]
|
||||
parent = self.tasks_to_iter[(package, task)]
|
||||
|
||||
if event[0].startswith('bb.msg.Msg'):
|
||||
# Set a pretty icon for the message based on its type.
|
||||
if (event[0].startswith ('bb.msg.MsgWarn')):
|
||||
icon = "dialog-warning"
|
||||
elif (event[0].startswith ('bb.msg.MsgErr')):
|
||||
icon = "dialog-error"
|
||||
else:
|
||||
icon = None
|
||||
|
||||
# Ignore the "Running task i of n .." messages
|
||||
if (event[1]['_message'].startswith ("Running task")):
|
||||
return
|
||||
|
||||
# Add the message to the tree either at the top level if parent is
|
||||
# None otherwise as a descendent of a task.
|
||||
self.model.append (parent,
|
||||
(event[0].split(".")[-1], # e.g. MsgWarn, MsgError
|
||||
package,
|
||||
task,
|
||||
event[1]['_message'],
|
||||
icon,
|
||||
False))
|
||||
elif event[0].startswith('bb.build.TaskStarted'):
|
||||
(package, task) = (event[1]['_package'], event[1]['_task'])
|
||||
|
||||
# Save out this PID.
|
||||
self.pids_to_task[pid] = (package,task)
|
||||
|
||||
# Check if we already have this package in our model. If so then
|
||||
# that can be the parent for the task. Otherwise we create a new
|
||||
# top level for the package.
|
||||
if (self.tasks_to_iter.has_key ((package, None))):
|
||||
parent = self.tasks_to_iter[(package, None)]
|
||||
else:
|
||||
parent = self.model.append (None, (None,
|
||||
package,
|
||||
None,
|
||||
"Package: %s" % (package),
|
||||
None,
|
||||
False))
|
||||
self.tasks_to_iter[(package, None)] = parent
|
||||
|
||||
# Because this parent package now has an active child mark it as
|
||||
# such.
|
||||
self.model.set(parent, self.model.COL_ICON, "gtk-execute")
|
||||
|
||||
# Add an entry in the model for this task
|
||||
i = self.model.append (parent, (None,
|
||||
package,
|
||||
task,
|
||||
"Task: %s" % (task),
|
||||
None,
|
||||
False))
|
||||
|
||||
# Save out the iter so that we can find it when we have a message
|
||||
# that we need to attach to a task.
|
||||
self.tasks_to_iter[(package, task)] = i
|
||||
|
||||
# Mark this task as active.
|
||||
self.model.set(i, self.model.COL_ICON, "gtk-execute")
|
||||
|
||||
elif event[0].startswith('bb.build.Task'):
|
||||
|
||||
if event[0].startswith('bb.build.TaskFailed'):
|
||||
# Mark the task as failed
|
||||
i = self.tasks_to_iter[(package, task)]
|
||||
self.model.set(i, self.model.COL_ICON, "dialog-error")
|
||||
|
||||
# Mark the parent package as failed
|
||||
i = self.tasks_to_iter[(package, None)]
|
||||
self.model.set(i, self.model.COL_ICON, "dialog-error")
|
||||
else:
|
||||
# Mark the task as inactive
|
||||
i = self.tasks_to_iter[(package, task)]
|
||||
self.model.set(i, self.model.COL_ICON, None)
|
||||
|
||||
# Mark the parent package as inactive
|
||||
i = self.tasks_to_iter[(package, None)]
|
||||
self.model.set(i, self.model.COL_ICON, None)
|
||||
|
||||
|
||||
# Clear the iters and the pids since when the task goes away the
|
||||
# pid will no longer be used for messages
|
||||
del self.tasks_to_iter[(package, task)]
|
||||
del self.pids_to_task[pid]
|
||||
|
||||
elif event[0].startswith('bb.event.BuildCompleted'):
|
||||
failures = int (event[1]['_failures'])
|
||||
|
||||
# Emit the appropriate signal depending on the number of failures
|
||||
if (failures >= 1):
|
||||
self.emit ("build-failed")
|
||||
else:
|
||||
self.emit ("build-succeeded")
|
||||
|
||||
class RunningBuildTreeView (gtk.TreeView):
|
||||
def __init__ (self):
|
||||
gtk.TreeView.__init__ (self)
|
||||
|
||||
# The icon that indicates whether we're building or failed.
|
||||
renderer = gtk.CellRendererPixbuf ()
|
||||
col = gtk.TreeViewColumn ("Status", renderer)
|
||||
col.add_attribute (renderer, "icon-name", 4)
|
||||
self.append_column (col)
|
||||
|
||||
# The message of the build.
|
||||
renderer = gtk.CellRendererText ()
|
||||
col = gtk.TreeViewColumn ("Message", renderer, text=3)
|
||||
self.append_column (col)
|
||||
|
||||
|
||||
bitbake-dev/lib/bb/ui/depexp.py (new file, 272 lines)
@@ -0,0 +1,272 @@
|
||||
#
|
||||
# BitBake Graphical GTK based Dependency Explorer
|
||||
#
|
||||
# Copyright (C) 2007 Ross Burton
|
||||
# Copyright (C) 2007 - 2008 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gobject
|
||||
import gtk
|
||||
import threading
|
||||
import xmlrpclib
|
||||
|
||||
# Package Model
|
||||
(COL_PKG_NAME) = (0)
|
||||
|
||||
# Dependency Model
|
||||
(TYPE_DEP, TYPE_RDEP) = (0, 1)
|
||||
(COL_DEP_TYPE, COL_DEP_PARENT, COL_DEP_PACKAGE) = (0, 1, 2)
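# Editorial illustration (not part of the original file, inferred from the
# column constants above): each row in the dependency model is
# (type, parent, package), so a build-time dependency of "gtk+" on "glib-2.0"
# would be stored roughly as
#   (TYPE_DEP, "gtk+", "glib-2.0")
# and a runtime dependency as (TYPE_RDEP, "gtk+", "glib-2.0").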
|
||||
|
||||
class PackageDepView(gtk.TreeView):
|
||||
def __init__(self, model, dep_type, label):
|
||||
gtk.TreeView.__init__(self)
|
||||
self.current = None
|
||||
self.dep_type = dep_type
|
||||
self.filter_model = model.filter_new()
|
||||
self.filter_model.set_visible_func(self._filter)
|
||||
self.set_model(self.filter_model)
|
||||
#self.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
|
||||
self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PACKAGE))
|
||||
|
||||
def _filter(self, model, iter):
|
||||
(this_type, package) = model.get(iter, COL_DEP_TYPE, COL_DEP_PARENT)
|
||||
if this_type != self.dep_type: return False
|
||||
return package == self.current
|
||||
|
||||
def set_current_package(self, package):
|
||||
self.current = package
|
||||
self.filter_model.refilter()
|
||||
|
||||
class PackageReverseDepView(gtk.TreeView):
|
||||
def __init__(self, model, label):
|
||||
gtk.TreeView.__init__(self)
|
||||
self.current = None
|
||||
self.filter_model = model.filter_new()
|
||||
self.filter_model.set_visible_func(self._filter)
|
||||
self.set_model(self.filter_model)
|
||||
self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PARENT))
|
||||
|
||||
def _filter(self, model, iter):
|
||||
package = model.get_value(iter, COL_DEP_PACKAGE)
|
||||
return package == self.current
|
||||
|
||||
def set_current_package(self, package):
|
||||
self.current = package
|
||||
self.filter_model.refilter()
|
||||
|
||||
class DepExplorer(gtk.Window):
|
||||
def __init__(self):
|
||||
gtk.Window.__init__(self)
|
||||
self.set_title("Dependency Explorer")
|
||||
self.set_default_size(500, 500)
|
||||
self.connect("delete-event", gtk.main_quit)
|
||||
|
||||
# Create the data models
|
||||
self.pkg_model = gtk.ListStore(gobject.TYPE_STRING)
|
||||
self.depends_model = gtk.ListStore(gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_STRING)
|
||||
|
||||
pane = gtk.HPaned()
|
||||
pane.set_position(250)
|
||||
self.add(pane)
|
||||
|
||||
# The master list of packages
|
||||
scrolled = gtk.ScrolledWindow()
|
||||
scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
|
||||
scrolled.set_shadow_type(gtk.SHADOW_IN)
|
||||
self.pkg_treeview = gtk.TreeView(self.pkg_model)
|
||||
self.pkg_treeview.get_selection().connect("changed", self.on_cursor_changed)
|
||||
self.pkg_treeview.append_column(gtk.TreeViewColumn("Package", gtk.CellRendererText(), text=COL_PKG_NAME))
|
||||
pane.add1(scrolled)
|
||||
scrolled.add(self.pkg_treeview)
|
||||
|
||||
box = gtk.VBox(homogeneous=True, spacing=4)
|
||||
|
||||
# Runtime Depends
|
||||
scrolled = gtk.ScrolledWindow()
|
||||
scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
|
||||
scrolled.set_shadow_type(gtk.SHADOW_IN)
|
||||
self.rdep_treeview = PackageDepView(self.depends_model, TYPE_RDEP, "Runtime Depends")
|
||||
self.rdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
|
||||
scrolled.add(self.rdep_treeview)
|
||||
box.add(scrolled)
|
||||
|
||||
# Build Depends
|
||||
scrolled = gtk.ScrolledWindow()
|
||||
scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
|
||||
scrolled.set_shadow_type(gtk.SHADOW_IN)
|
||||
self.dep_treeview = PackageDepView(self.depends_model, TYPE_DEP, "Build Depends")
|
||||
self.dep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
|
||||
scrolled.add(self.dep_treeview)
|
||||
box.add(scrolled)
|
||||
pane.add2(box)
|
||||
|
||||
# Reverse Depends
|
||||
scrolled = gtk.ScrolledWindow()
|
||||
scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
|
||||
scrolled.set_shadow_type(gtk.SHADOW_IN)
|
||||
self.revdep_treeview = PackageReverseDepView(self.depends_model, "Reverse Depends")
|
||||
self.revdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PARENT)
|
||||
scrolled.add(self.revdep_treeview)
|
||||
box.add(scrolled)
|
||||
pane.add2(box)
|
||||
|
||||
self.show_all()
|
||||
|
||||
def on_package_activated(self, treeview, path, column, data_col):
|
||||
model = treeview.get_model()
|
||||
package = model.get_value(model.get_iter(path), data_col)
|
||||
|
||||
pkg_path = []
|
||||
def finder(model, path, iter, needle):
|
||||
package = model.get_value(iter, COL_PKG_NAME)
|
||||
if package == needle:
|
||||
pkg_path.append(path)
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
self.pkg_model.foreach(finder, package)
|
||||
if pkg_path:
|
||||
self.pkg_treeview.get_selection().select_path(pkg_path[0])
|
||||
self.pkg_treeview.scroll_to_cell(pkg_path[0])
|
||||
|
||||
def on_cursor_changed(self, selection):
|
||||
(model, it) = selection.get_selected()
|
||||
if it is None:
|
||||
current_package = None
|
||||
else:
|
||||
current_package = model.get_value(it, COL_PKG_NAME)
|
||||
self.rdep_treeview.set_current_package(current_package)
|
||||
self.dep_treeview.set_current_package(current_package)
|
||||
self.revdep_treeview.set_current_package(current_package)
|
||||
|
||||
|
||||
def parse(depgraph, pkg_model, depends_model):
|
||||
|
||||
for package in depgraph["pn"]:
|
||||
pkg_model.set(pkg_model.append(), COL_PKG_NAME, package)
|
||||
|
||||
for package in depgraph["depends"]:
|
||||
for depend in depgraph["depends"][package]:
|
||||
depends_model.set (depends_model.append(),
|
||||
COL_DEP_TYPE, TYPE_DEP,
|
||||
COL_DEP_PARENT, package,
|
||||
COL_DEP_PACKAGE, depend)
|
||||
|
||||
for package in depgraph["rdepends-pn"]:
|
||||
for rdepend in depgraph["rdepends-pn"][package]:
|
||||
depends_model.set (depends_model.append(),
|
||||
COL_DEP_TYPE, TYPE_RDEP,
|
||||
COL_DEP_PARENT, package,
|
||||
COL_DEP_PACKAGE, rdepend)
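# Illustrative sketch only (assumed shape, inferred from the loops above): the
# depgraph dict delivered with the bb.event.DepTreeGenerated event looks
# roughly like
#   depgraph = {
#       "pn":          {"busybox": {}, "glib-2.0": {}},
#       "depends":     {"busybox": ["virtual/libc"]},
#       "rdepends-pn": {"busybox": ["update-rc.d"]},
#   }
# parse() then adds one package row per entry in "pn" plus one dependency row
# per (package, depend) and (package, rdepend) pair.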
|
||||
|
||||
class ProgressBar(gtk.Window):
|
||||
def __init__(self):
|
||||
|
||||
gtk.Window.__init__(self)
|
||||
self.set_title("Parsing .bb files, please wait...")
|
||||
self.set_default_size(500, 0)
|
||||
self.connect("delete-event", gtk.main_quit)
|
||||
|
||||
self.progress = gtk.ProgressBar()
|
||||
self.add(self.progress)
|
||||
self.show_all()
|
||||
|
||||
class gtkthread(threading.Thread):
|
||||
quit = threading.Event()
|
||||
def __init__(self, shutdown):
|
||||
threading.Thread.__init__(self)
|
||||
self.setDaemon(True)
|
||||
self.shutdown = shutdown
|
||||
|
||||
def run(self):
|
||||
gobject.threads_init()
|
||||
gtk.gdk.threads_init()
|
||||
gtk.main()
|
||||
gtkthread.quit.set()
|
||||
|
||||
def init(server, eventHandler):
|
||||
|
||||
try:
|
||||
cmdline = server.runCommand(["getCmdLineAction"])
|
||||
if not cmdline or cmdline[0] != "generateDotGraph":
|
||||
print "This UI is only compatible with the -g option"
|
||||
return
|
||||
ret = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
|
||||
if ret != True:
|
||||
print "Couldn't run command! %s" % ret
|
||||
return
|
||||
except xmlrpclib.Fault, x:
|
||||
print "XMLRPC Fault getting commandline:\n %s" % x
|
||||
return
|
||||
|
||||
shutdown = 0
|
||||
|
||||
gtkgui = gtkthread(shutdown)
|
||||
gtkgui.start()
|
||||
|
||||
gtk.gdk.threads_enter()
|
||||
pbar = ProgressBar()
|
||||
dep = DepExplorer()
|
||||
gtk.gdk.threads_leave()
|
||||
|
||||
while True:
|
||||
try:
|
||||
event = eventHandler.waitEvent(0.25)
|
||||
if gtkthread.quit.isSet():
|
||||
break
|
||||
|
||||
if event is None:
|
||||
continue
|
||||
if event[0].startswith('bb.event.ParseProgress'):
|
||||
x = event[1]['sofar']
|
||||
y = event[1]['total']
|
||||
if x == y:
|
||||
print("\nParsing finished. %d cached, %d parsed, %d skipped, %d masked, %d errors."
|
||||
% ( event[1]['cached'], event[1]['parsed'], event[1]['skipped'], event[1]['masked'], event[1]['errors']))
|
||||
pbar.hide()
|
||||
gtk.gdk.threads_enter()
|
||||
pbar.progress.set_fraction(float(x)/float(y))
|
||||
pbar.progress.set_text("%d/%d (%2d %%)" % (x, y, x*100/y))
|
||||
gtk.gdk.threads_leave()
|
||||
continue
|
||||
|
||||
if event[0] == "bb.event.DepTreeGenerated":
|
||||
gtk.gdk.threads_enter()
|
||||
parse(event[1]['_depgraph'], dep.pkg_model, dep.depends_model)
|
||||
gtk.gdk.threads_leave()
|
||||
|
||||
if event[0] == 'bb.command.CookerCommandCompleted':
|
||||
continue
|
||||
if event[0] == 'bb.command.CookerCommandFailed':
|
||||
print "Command execution failed: %s" % event[1]['error']
|
||||
break
|
||||
if event[0] == 'bb.cooker.CookerExit':
|
||||
break
|
||||
|
||||
continue
|
||||
|
||||
except KeyboardInterrupt:
|
||||
if shutdown == 2:
|
||||
print "\nThird Keyboard Interrupt, exit.\n"
|
||||
break
|
||||
if shutdown == 1:
|
||||
print "\nSecond Keyboard Interrupt, stopping...\n"
|
||||
server.runCommand(["stateStop"])
|
||||
if shutdown == 0:
|
||||
print "\nKeyboard Interrupt, closing down...\n"
|
||||
server.runCommand(["stateShutdown"])
|
||||
shutdown = shutdown + 1
|
||||
pass
77
bitbake-dev/lib/bb/ui/goggle.py
Normal file
@@ -0,0 +1,77 @@
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2008 Intel Corporation
|
||||
#
|
||||
# Authored by Rob Bradford <rob@linux.intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gobject
|
||||
import gtk
|
||||
import xmlrpclib
|
||||
from bb.ui.crumbs.runningbuild import RunningBuildTreeView, RunningBuild
|
||||
|
||||
def event_handle_idle_func (eventHandler, build):
|
||||
|
||||
# Consume as many messages as we can in the time available to us
|
||||
event = eventHandler.getEvent()
|
||||
while event:
|
||||
build.handle_event (event)
|
||||
event = eventHandler.getEvent()
|
||||
|
||||
return True
|
||||
|
||||
class MainWindow (gtk.Window):
|
||||
def __init__ (self):
|
||||
gtk.Window.__init__ (self, gtk.WINDOW_TOPLEVEL)
|
||||
|
||||
# Setup tree view and the scrolled window
|
||||
scrolled_window = gtk.ScrolledWindow ()
|
||||
self.add (scrolled_window)
|
||||
self.cur_build_tv = RunningBuildTreeView()
|
||||
scrolled_window.add (self.cur_build_tv)
|
||||
|
||||
def init (server, eventHandler):
|
||||
gobject.threads_init()
|
||||
gtk.gdk.threads_init()
|
||||
|
||||
window = MainWindow ()
|
||||
window.show_all ()
|
||||
|
||||
# Create the object for the current build
|
||||
running_build = RunningBuild ()
|
||||
window.cur_build_tv.set_model (running_build.model)
|
||||
try:
|
||||
cmdline = server.runCommand(["getCmdLineAction"])
|
||||
print cmdline
|
||||
if not cmdline:
|
||||
return 1
|
||||
ret = server.runCommand(cmdline)
|
||||
if ret != True:
|
||||
print "Couldn't get default commandline! %s" % ret
|
||||
return 1
|
||||
except xmlrpclib.Fault, x:
|
||||
print "XMLRPC Fault getting commandline:\n %s" % x
|
||||
return 1
|
||||
|
||||
# Use a timeout function for probing the event queue to find out if we
|
||||
# have a message waiting for us.
|
||||
gobject.timeout_add (200,
|
||||
event_handle_idle_func,
|
||||
eventHandler,
|
||||
running_build)
|
||||
|
||||
gtk.main()
162
bitbake-dev/lib/bb/ui/knotty.py
Normal file
@@ -0,0 +1,162 @@
#
|
||||
# BitBake (No)TTY UI Implementation
|
||||
#
|
||||
# Handling output to TTYs or files (no TTY)
|
||||
#
|
||||
# Copyright (C) 2006-2007 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import os
|
||||
|
||||
import sys
|
||||
import itertools
|
||||
import xmlrpclib
|
||||
|
||||
parsespin = itertools.cycle( r'|/-\\' )
|
||||
|
||||
def init(server, eventHandler):
|
||||
|
||||
# Get values of variables which control our output
|
||||
includelogs = server.runCommand(["getVariable", "BBINCLUDELOGS"])
|
||||
loglines = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
|
||||
|
||||
try:
|
||||
cmdline = server.runCommand(["getCmdLineAction"])
|
||||
#print cmdline
|
||||
if not cmdline:
|
||||
return 1
|
||||
ret = server.runCommand(cmdline)
|
||||
if ret != True:
|
||||
print "Couldn't get default commandline! %s" % ret
|
||||
return 1
|
||||
except xmlrpclib.Fault, x:
|
||||
print "XMLRPC Fault getting commandline:\n %s" % x
|
||||
return 1
|
||||
|
||||
shutdown = 0
|
||||
return_value = 0
|
||||
while True:
|
||||
try:
|
||||
event = eventHandler.waitEvent(0.25)
|
||||
if event is None:
|
||||
continue
|
||||
#print event
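# Editorial note (inferred from the handlers below, not part of the original
# file): each event arrives as a two-element sequence, where event[0] is the
# event class name as a string, e.g. "bb.msg.MsgNote", and event[1] is a dict
# of its attributes, e.g. {'_message': 'some note text'}.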
|
||||
if event[0].startswith('bb.msg.MsgPlain'):
|
||||
print event[1]['_message']
|
||||
continue
|
||||
if event[0].startswith('bb.msg.MsgDebug'):
|
||||
print 'DEBUG: ' + event[1]['_message']
|
||||
continue
|
||||
if event[0].startswith('bb.msg.MsgNote'):
|
||||
print 'NOTE: ' + event[1]['_message']
|
||||
continue
|
||||
if event[0].startswith('bb.msg.MsgWarn'):
|
||||
print 'WARNING: ' + event[1]['_message']
|
||||
continue
|
||||
if event[0].startswith('bb.msg.MsgError'):
|
||||
return_value = 1
|
||||
print 'ERROR: ' + event[1]['_message']
|
||||
continue
|
||||
if event[0].startswith('bb.msg.MsgFatal'):
|
||||
return_value = 1
|
||||
print 'FATAL: ' + event[1]['_message']
|
||||
break
|
||||
if event[0].startswith('bb.build.TaskFailed'):
|
||||
return_value = 1
|
||||
logfile = event[1]['logfile']
|
||||
if logfile:
|
||||
print "ERROR: Logfile of failure stored in %s." % logfile
|
||||
if 1 or includelogs:
|
||||
print "Log data follows:"
|
||||
f = open(logfile, "r")
|
||||
lines = []
|
||||
while True:
|
||||
l = f.readline()
|
||||
if l == '':
|
||||
break
|
||||
l = l.rstrip()
|
||||
if loglines:
|
||||
lines.append(' | %s' % l)
|
||||
if len(lines) > int(loglines):
|
||||
lines.pop(0)
|
||||
else:
|
||||
print '| %s' % l
|
||||
f.close()
|
||||
if lines:
|
||||
for line in lines:
|
||||
print line
|
||||
if event[0].startswith('bb.build.Task'):
|
||||
print "NOTE: %s" % event[1]['_message']
|
||||
continue
|
||||
if event[0].startswith('bb.event.ParseProgress'):
|
||||
x = event[1]['sofar']
|
||||
y = event[1]['total']
|
||||
if os.isatty(sys.stdout.fileno()):
|
||||
sys.stdout.write("\rNOTE: Handling BitBake files: %s (%04d/%04d) [%2d %%]" % ( parsespin.next(), x, y, x*100/y ) )
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
if x == 1:
|
||||
sys.stdout.write("Parsing .bb files, please wait...")
|
||||
sys.stdout.flush()
|
||||
if x == y:
|
||||
sys.stdout.write("done.")
|
||||
sys.stdout.flush()
|
||||
if x == y:
|
||||
print("\nParsing finished. %d cached, %d parsed, %d skipped, %d masked, %d errors."
|
||||
% ( event[1]['cached'], event[1]['parsed'], event[1]['skipped'], event[1]['masked'], event[1]['errors']))
|
||||
continue
|
||||
|
||||
if event[0] == 'bb.command.CookerCommandCompleted':
|
||||
break
|
||||
if event[0] == 'bb.command.CookerCommandSetExitCode':
|
||||
return_value = event[1]['exitcode']
|
||||
continue
|
||||
if event[0] == 'bb.command.CookerCommandFailed':
|
||||
return_value = 1
|
||||
print "Command execution failed: %s" % event[1]['error']
|
||||
break
|
||||
if event[0] == 'bb.cooker.CookerExit':
|
||||
break
|
||||
|
||||
# ignore
|
||||
if event[0].startswith('bb.event.BuildStarted'):
|
||||
continue
|
||||
if event[0].startswith('bb.event.BuildCompleted'):
|
||||
continue
|
||||
if event[0].startswith('bb.event.MultipleProviders'):
|
||||
continue
|
||||
if event[0].startswith('bb.runqueue.runQueue'):
|
||||
continue
|
||||
if event[0].startswith('bb.event.StampUpdate'):
|
||||
continue
|
||||
if event[0].startswith('bb.event.ConfigParsed'):
|
||||
continue
|
||||
if event[0].startswith('bb.event.RecipeParsed'):
|
||||
continue
|
||||
print "Unknown Event: %s" % event
|
||||
|
||||
except KeyboardInterrupt:
|
||||
if shutdown == 2:
|
||||
print "\nThird Keyboard Interrupt, exit.\n"
|
||||
break
|
||||
if shutdown == 1:
|
||||
print "\nSecond Keyboard Interrupt, stopping...\n"
|
||||
server.runCommand(["stateStop"])
|
||||
if shutdown == 0:
|
||||
print "\nKeyboard Interrupt, closing down...\n"
|
||||
server.runCommand(["stateShutdown"])
|
||||
shutdown = shutdown + 1
|
||||
pass
|
||||
return return_value
335
bitbake-dev/lib/bb/ui/ncurses.py
Normal file
@@ -0,0 +1,335 @@
#
|
||||
# BitBake Curses UI Implementation
|
||||
#
|
||||
# Implements an ncurses frontend for the BitBake utility.
|
||||
#
|
||||
# Copyright (C) 2006 Michael 'Mickey' Lauer
|
||||
# Copyright (C) 2006-2007 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""
|
||||
We have the following windows:
|
||||
|
||||
1.) Main Window: Shows what we are ultimately building and how far we are. Includes status bar
|
||||
2.) Thread Activity Window: Shows one status line for every concurrent bitbake thread.
|
||||
3.) Command Line Window: Contains an interactive command line where you can interact w/ Bitbake.
|
||||
|
||||
Basic window layout is like that:
|
||||
|
||||
|---------------------------------------------------------|
|
||||
| <Main Window> | <Thread Activity Window> |
|
||||
| | 0: foo do_compile complete|
|
||||
| Building Gtk+-2.6.10 | 1: bar do_patch complete |
|
||||
| Status: 60% | ... |
|
||||
| | ... |
|
||||
| | ... |
|
||||
|---------------------------------------------------------|
|
||||
|<Command Line Window> |
|
||||
|>>> which virtual/kernel |
|
||||
|openzaurus-kernel |
|
||||
|>>> _ |
|
||||
|---------------------------------------------------------|
|
||||
|
||||
"""
|
||||
|
||||
import os, sys, curses, itertools, time
|
||||
import bb
|
||||
import xmlrpclib
|
||||
from bb import ui
|
||||
from bb.ui import uihelper
|
||||
|
||||
parsespin = itertools.cycle( r'|/-\\' )
|
||||
|
||||
X = 0
|
||||
Y = 1
|
||||
WIDTH = 2
|
||||
HEIGHT = 3
|
||||
|
||||
MAXSTATUSLENGTH = 32
|
||||
|
||||
class NCursesUI:
|
||||
"""
|
||||
NCurses UI Class
|
||||
"""
|
||||
class Window:
|
||||
"""Base Window Class"""
|
||||
def __init__( self, x, y, width, height, fg=curses.COLOR_BLACK, bg=curses.COLOR_WHITE ):
|
||||
self.win = curses.newwin( height, width, y, x )
|
||||
self.dimensions = ( x, y, width, height )
|
||||
"""
|
||||
if curses.has_colors():
|
||||
color = 1
|
||||
curses.init_pair( color, fg, bg )
|
||||
self.win.bkgdset( ord(' '), curses.color_pair(color) )
|
||||
else:
|
||||
self.win.bkgdset( ord(' '), curses.A_BOLD )
|
||||
"""
|
||||
self.erase()
|
||||
self.setScrolling()
|
||||
self.win.noutrefresh()
|
||||
|
||||
def erase( self ):
|
||||
self.win.erase()
|
||||
|
||||
def setScrolling( self, b = True ):
|
||||
self.win.scrollok( b )
|
||||
self.win.idlok( b )
|
||||
|
||||
def setBoxed( self ):
|
||||
self.boxed = True
|
||||
self.win.box()
|
||||
self.win.noutrefresh()
|
||||
|
||||
def setText( self, x, y, text, *args ):
|
||||
self.win.addstr( y, x, text, *args )
|
||||
self.win.noutrefresh()
|
||||
|
||||
def appendText( self, text, *args ):
|
||||
self.win.addstr( text, *args )
|
||||
self.win.noutrefresh()
|
||||
|
||||
def drawHline( self, y ):
|
||||
self.win.hline( y, 0, curses.ACS_HLINE, self.dimensions[WIDTH] )
|
||||
self.win.noutrefresh()
|
||||
|
||||
class DecoratedWindow( Window ):
|
||||
"""Base class for windows with a box and a title bar"""
|
||||
def __init__( self, title, x, y, width, height, fg=curses.COLOR_BLACK, bg=curses.COLOR_WHITE ):
|
||||
NCursesUI.Window.__init__( self, x+1, y+3, width-2, height-4, fg, bg )
|
||||
self.decoration = NCursesUI.Window( x, y, width, height, fg, bg )
|
||||
self.decoration.setBoxed()
|
||||
self.decoration.win.hline( 2, 1, curses.ACS_HLINE, width-2 )
|
||||
self.setTitle( title )
|
||||
|
||||
def setTitle( self, title ):
|
||||
self.decoration.setText( 1, 1, title.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
|
||||
|
||||
#-------------------------------------------------------------------------#
|
||||
# class TitleWindow( Window ):
|
||||
#-------------------------------------------------------------------------#
|
||||
# """Title Window"""
|
||||
# def __init__( self, x, y, width, height ):
|
||||
# NCursesUI.Window.__init__( self, x, y, width, height )
|
||||
# version = bb.__version__
|
||||
# title = "BitBake %s" % version
|
||||
# credit = "(C) 2003-2007 Team BitBake"
|
||||
# #self.win.hline( 2, 1, curses.ACS_HLINE, width-2 )
|
||||
# self.win.border()
|
||||
# self.setText( 1, 1, title.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
|
||||
# self.setText( 1, 2, credit.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
|
||||
|
||||
#-------------------------------------------------------------------------#
|
||||
class ThreadActivityWindow( DecoratedWindow ):
|
||||
#-------------------------------------------------------------------------#
|
||||
"""Thread Activity Window"""
|
||||
def __init__( self, x, y, width, height ):
|
||||
NCursesUI.DecoratedWindow.__init__( self, "Thread Activity", x, y, width, height )
|
||||
|
||||
def setStatus( self, thread, text ):
|
||||
line = "%02d: %s" % ( thread, text )
|
||||
width = self.dimensions[WIDTH]
|
||||
if ( len(line) > width ):
|
||||
line = line[:width-3] + "..."
|
||||
else:
|
||||
line = line.ljust( width )
|
||||
self.setText( 0, thread, line )
|
||||
|
||||
#-------------------------------------------------------------------------#
|
||||
class MainWindow( DecoratedWindow ):
|
||||
#-------------------------------------------------------------------------#
|
||||
"""Main Window"""
|
||||
def __init__( self, x, y, width, height ):
|
||||
self.StatusPosition = width - MAXSTATUSLENGTH
|
||||
NCursesUI.DecoratedWindow.__init__( self, None, x, y, width, height )
|
||||
curses.nl()
|
||||
|
||||
def setTitle( self, title ):
|
||||
title = "BitBake %s" % bb.__version__
|
||||
self.decoration.setText( 2, 1, title, curses.A_BOLD )
|
||||
self.decoration.setText( self.StatusPosition - 8, 1, "Status:", curses.A_BOLD )
|
||||
|
||||
def setStatus(self, status):
|
||||
while len(status) < MAXSTATUSLENGTH:
|
||||
status = status + " "
|
||||
self.decoration.setText( self.StatusPosition, 1, status, curses.A_BOLD )
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------#
|
||||
class ShellOutputWindow( DecoratedWindow ):
|
||||
#-------------------------------------------------------------------------#
|
||||
"""Interactive Command Line Output"""
|
||||
def __init__( self, x, y, width, height ):
|
||||
NCursesUI.DecoratedWindow.__init__( self, "Command Line Window", x, y, width, height )
|
||||
|
||||
#-------------------------------------------------------------------------#
|
||||
class ShellInputWindow( Window ):
|
||||
#-------------------------------------------------------------------------#
|
||||
"""Interactive Command Line Input"""
|
||||
def __init__( self, x, y, width, height ):
|
||||
NCursesUI.Window.__init__( self, x, y, width, height )
|
||||
|
||||
# TODO: move "from curses.textpad import Textbox" to the top of the file
|
||||
# self.textbox = Textbox( self.win )
|
||||
# t = threading.Thread()
|
||||
# t.run = self.textbox.edit
|
||||
# t.start()
|
||||
|
||||
#-------------------------------------------------------------------------#
|
||||
def main(self, stdscr, server, eventHandler):
|
||||
#-------------------------------------------------------------------------#
|
||||
height, width = stdscr.getmaxyx()
|
||||
|
||||
# for now split it like that:
|
||||
# MAIN_y + THREAD_y = 2/3 screen at the top
|
||||
# MAIN_x = 2/3 left, THREAD_y = 1/3 right
|
||||
# CLI_y = 1/3 of screen at the bottom
|
||||
# CLI_x = full
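# Worked example (editorial, illustrative only): on an 80x24 terminal this
# split gives main_height = 24 / 3 * 2 = 16 and main_width = (80 / 3) * 2 = 52,
# so the thread window is 80 - 52 = 28 columns wide, the shell output window
# gets 24 - 16 - 0 - 1 = 7 rows, and the single command-line input row sits at
# the bottom of the screen.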
|
||||
|
||||
main_left = 0
|
||||
main_top = 0
|
||||
main_height = ( height / 3 * 2 )
|
||||
main_width = ( width / 3 ) * 2
|
||||
clo_left = main_left
|
||||
clo_top = main_top + main_height
|
||||
clo_height = height - main_height - main_top - 1
|
||||
clo_width = width
|
||||
cli_left = main_left
|
||||
cli_top = clo_top + clo_height
|
||||
cli_height = 1
|
||||
cli_width = width
|
||||
thread_left = main_left + main_width
|
||||
thread_top = main_top
|
||||
thread_height = main_height
|
||||
thread_width = width - main_width
|
||||
|
||||
#tw = self.TitleWindow( 0, 0, width, main_top )
|
||||
mw = self.MainWindow( main_left, main_top, main_width, main_height )
|
||||
taw = self.ThreadActivityWindow( thread_left, thread_top, thread_width, thread_height )
|
||||
clo = self.ShellOutputWindow( clo_left, clo_top, clo_width, clo_height )
|
||||
cli = self.ShellInputWindow( cli_left, cli_top, cli_width, cli_height )
|
||||
cli.setText( 0, 0, "BB>" )
|
||||
|
||||
mw.setStatus("Idle")
|
||||
|
||||
helper = uihelper.BBUIHelper()
|
||||
shutdown = 0
|
||||
|
||||
try:
|
||||
cmdline = server.runCommand(["getCmdLineAction"])
|
||||
if not cmdline:
|
||||
return
|
||||
ret = server.runCommand(cmdline)
|
||||
if ret != True:
|
||||
print "Couldn't get default commandlind! %s" % ret
|
||||
return
|
||||
except xmlrpclib.Fault, x:
|
||||
print "XMLRPC Fault getting commandline:\n %s" % x
|
||||
return
|
||||
|
||||
exitflag = False
|
||||
while not exitflag:
|
||||
try:
|
||||
event = eventHandler.waitEvent(0.25)
|
||||
if not event:
|
||||
continue
|
||||
helper.eventHandler(event)
|
||||
#mw.appendText("%s\n" % event[0])
|
||||
if event[0].startswith('bb.build.Task'):
|
||||
mw.appendText("NOTE: %s\n" % event[1]['_message'])
|
||||
if event[0].startswith('bb.msg.MsgDebug'):
|
||||
mw.appendText('DEBUG: ' + event[1]['_message'] + '\n')
|
||||
if event[0].startswith('bb.msg.MsgNote'):
|
||||
mw.appendText('NOTE: ' + event[1]['_message'] + '\n')
|
||||
if event[0].startswith('bb.msg.MsgWarn'):
|
||||
mw.appendText('WARNING: ' + event[1]['_message'] + '\n')
|
||||
if event[0].startswith('bb.msg.MsgError'):
|
||||
mw.appendText('ERROR: ' + event[1]['_message'] + '\n')
|
||||
if event[0].startswith('bb.msg.MsgFatal'):
|
||||
mw.appendText('FATAL: ' + event[1]['_message'] + '\n')
|
||||
if event[0].startswith('bb.event.ParseProgress'):
|
||||
x = event[1]['sofar']
|
||||
y = event[1]['total']
|
||||
if x == y:
|
||||
mw.setStatus("Idle")
|
||||
mw.appendText("Parsing finished. %d cached, %d parsed, %d skipped, %d masked."
|
||||
% ( event[1]['cached'], event[1]['parsed'], event[1]['skipped'], event[1]['masked'] ))
|
||||
else:
|
||||
mw.setStatus("Parsing: %s (%04d/%04d) [%2d %%]" % ( parsespin.next(), x, y, x*100/y ) )
|
||||
# if event[0].startswith('bb.build.TaskFailed'):
|
||||
# if event[1]['logfile']:
|
||||
# if data.getVar("BBINCLUDELOGS", d):
|
||||
# bb.msg.error(bb.msg.domain.Build, "log data follows (%s)" % logfile)
|
||||
# number_of_lines = data.getVar("BBINCLUDELOGS_LINES", d)
|
||||
# if number_of_lines:
|
||||
# os.system('tail -n%s %s' % (number_of_lines, logfile))
|
||||
# else:
|
||||
# f = open(logfile, "r")
|
||||
# while True:
|
||||
# l = f.readline()
|
||||
# if l == '':
|
||||
# break
|
||||
# l = l.rstrip()
|
||||
# print '| %s' % l
|
||||
# f.close()
|
||||
# else:
|
||||
# bb.msg.error(bb.msg.domain.Build, "see log in %s" % logfile)
|
||||
|
||||
if event[0] == 'bb.command.CookerCommandCompleted':
|
||||
exitflag = True
|
||||
if event[0] == 'bb.command.CookerCommandFailed':
|
||||
mw.appendText("Command execution failed: %s" % event[1]['error'])
|
||||
time.sleep(2)
|
||||
exitflag = True
|
||||
if event[0] == 'bb.cooker.CookerExit':
|
||||
exitflag = True
|
||||
|
||||
if helper.needUpdate:
|
||||
activetasks, failedtasks = helper.getTasks()
|
||||
taw.erase()
|
||||
taw.setText(0, 0, "")
|
||||
if activetasks:
|
||||
taw.appendText("Active Tasks:\n")
|
||||
for task in activetasks:
|
||||
taw.appendText(task)
|
||||
if failedtasks:
|
||||
taw.appendText("Failed Tasks:\n")
|
||||
for task in failedtasks:
|
||||
taw.appendText(task)
|
||||
|
||||
curses.doupdate()
|
||||
except KeyboardInterrupt:
|
||||
if shutdown == 2:
|
||||
mw.appendText("Third Keyboard Interrupt, exit.\n")
|
||||
exitflag = True
|
||||
if shutdown == 1:
|
||||
mw.appendText("Second Keyboard Interrupt, stopping...\n")
|
||||
server.runCommand(["stateStop"])
|
||||
if shutdown == 0:
|
||||
mw.appendText("Keyboard Interrupt, closing down...\n")
|
||||
server.runCommand(["stateShutdown"])
|
||||
shutdown = shutdown + 1
|
||||
pass
|
||||
|
||||
def init(server, eventHandler):
|
||||
if not os.isatty(sys.stdout.fileno()):
|
||||
print "FATAL: Unable to run 'ncurses' UI without a TTY."
|
||||
return
|
||||
ui = NCursesUI()
|
||||
try:
|
||||
curses.wrapper(ui.main, server, eventHandler)
|
||||
except:
|
||||
import traceback
|
||||
traceback.print_exc()
425
bitbake-dev/lib/bb/ui/puccho.py
Normal file
@@ -0,0 +1,425 @@
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2008 Intel Corporation
|
||||
#
|
||||
# Authored by Rob Bradford <rob@linux.intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import gobject
|
||||
import gtk.glade
|
||||
import threading
|
||||
import urllib2
|
||||
import os
|
||||
|
||||
from bb.ui.crumbs.buildmanager import BuildManager, BuildConfiguration
|
||||
from bb.ui.crumbs.buildmanager import BuildManagerTreeView
|
||||
|
||||
from bb.ui.crumbs.runningbuild import RunningBuild, RunningBuildTreeView
|
||||
|
||||
# The metadata loader is used by the BuildSetupDialog to download the
|
||||
# available options to populate the dialog
|
||||
class MetaDataLoader(gobject.GObject):
|
||||
""" This class provides the mechanism for loading the metadata (the
|
||||
fetching and parsing) from a given URL. The metadata encompasses details
|
||||
on what machines are available. The distribution and images available for
|
||||
the machine and the the uris to use for building the given machine."""
|
||||
__gsignals__ = {
|
||||
'success' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
'error' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_STRING,))
|
||||
}
|
||||
|
||||
# We use these little helper functions to ensure that we take the gdk lock
|
||||
# when emitting the signal. These functions are called as idles (so that
|
||||
# they happen in the gtk / main thread's main loop).
|
||||
def emit_error_signal (self, remark):
|
||||
gtk.gdk.threads_enter()
|
||||
self.emit ("error", remark)
|
||||
gtk.gdk.threads_leave()
|
||||
|
||||
def emit_success_signal (self):
|
||||
gtk.gdk.threads_enter()
|
||||
self.emit ("success")
|
||||
gtk.gdk.threads_leave()
|
||||
|
||||
def __init__ (self):
|
||||
gobject.GObject.__init__ (self)
|
||||
|
||||
class LoaderThread(threading.Thread):
|
||||
""" This class provides an asynchronous loader for the metadata (by
|
||||
using threads and signals). This is useful since the metadata may be
|
||||
at a remote URL."""
|
||||
class LoaderImportException (Exception):
|
||||
pass
|
||||
|
||||
def __init__(self, loader, url):
|
||||
threading.Thread.__init__ (self)
|
||||
self.url = url
|
||||
self.loader = loader
|
||||
|
||||
def run (self):
|
||||
result = {}
|
||||
try:
|
||||
f = urllib2.urlopen (self.url)
|
||||
|
||||
# Parse the metadata format. The format is....
|
||||
# <machine>;<default distro>|<distro>...;<default image>|<image>...;<type##url>|...
|
||||
for line in f.readlines():
|
||||
components = line.split(";")
|
||||
if (len (components) < 4):
|
||||
raise MetaDataLoader.LoaderThread.LoaderImportException
|
||||
machine = components[0]
|
||||
distros = components[1].split("|")
|
||||
images = components[2].split("|")
|
||||
urls = components[3].split("|")
|
||||
|
||||
result[machine] = (distros, images, urls)
|
||||
|
||||
# Create an object representing this *potential*
|
||||
# configuration. It can become concrete if the machine, distro
|
||||
# and image are all chosen in the UI
|
||||
configuration = BuildConfiguration()
|
||||
configuration.metadata_url = self.url
|
||||
configuration.machine_options = result
|
||||
self.loader.configuration = configuration
|
||||
|
||||
# Emit that we've actually got a configuration
|
||||
gobject.idle_add (MetaDataLoader.emit_success_signal,
|
||||
self.loader)
|
||||
|
||||
except MetaDataLoader.LoaderThread.LoaderImportException, e:
|
||||
gobject.idle_add (MetaDataLoader.emit_error_signal, self.loader,
|
||||
"Repository metadata corrupt")
|
||||
except Exception, e:
|
||||
gobject.idle_add (MetaDataLoader.emit_error_signal, self.loader,
|
||||
"Unable to download repository metadata")
|
||||
print e
|
||||
|
||||
def try_fetch_from_url (self, url):
|
||||
# Try and download the metadata. Firing a signal if successful
|
||||
thread = MetaDataLoader.LoaderThread(self, url)
|
||||
thread.start()
|
||||
|
||||
class BuildSetupDialog (gtk.Dialog):
|
||||
RESPONSE_BUILD = 1
|
||||
|
||||
# A little helper method that just sets the states on the widgets based on
|
||||
# whether we've got good metadata or not.
|
||||
def set_configurable (self, configurable):
|
||||
if (self.configurable == configurable):
|
||||
return
|
||||
|
||||
self.configurable = configurable
|
||||
for widget in self.conf_widgets:
|
||||
widget.set_sensitive (configurable)
|
||||
|
||||
if not configurable:
|
||||
self.machine_combo.set_active (-1)
|
||||
self.distribution_combo.set_active (-1)
|
||||
self.image_combo.set_active (-1)
|
||||
|
||||
# GTK widget callbacks
|
||||
def refresh_button_clicked (self, button):
|
||||
# Refresh button clicked.
|
||||
|
||||
url = self.location_entry.get_chars (0, -1)
|
||||
self.loader.try_fetch_from_url(url)
|
||||
|
||||
def repository_entry_editable_changed (self, entry):
|
||||
if (len (entry.get_chars (0, -1)) > 0):
|
||||
self.refresh_button.set_sensitive (True)
|
||||
else:
|
||||
self.refresh_button.set_sensitive (False)
|
||||
self.clear_status_message()
|
||||
|
||||
# If we were previously configurable we are no longer since the
|
||||
# location entry has been changed
|
||||
self.set_configurable (False)
|
||||
|
||||
def machine_combo_changed (self, combobox):
|
||||
active_iter = combobox.get_active_iter()
|
||||
|
||||
if not active_iter:
|
||||
return
|
||||
|
||||
model = combobox.get_model()
|
||||
|
||||
if model:
|
||||
chosen_machine = model.get (active_iter, 0)[0]
|
||||
|
||||
(distros_model, images_model) = \
|
||||
self.loader.configuration.get_distro_and_images_models (chosen_machine)
|
||||
|
||||
self.distribution_combo.set_model (distros_model)
|
||||
self.image_combo.set_model (images_model)
|
||||
|
||||
# Callbacks from the loader
|
||||
def loader_success_cb (self, loader):
|
||||
self.status_image.set_from_icon_name ("info",
|
||||
gtk.ICON_SIZE_BUTTON)
|
||||
self.status_image.show()
|
||||
self.status_label.set_label ("Repository metadata successfully downloaded")
|
||||
|
||||
# Set the models on the combo boxes based on the models generated from
|
||||
# the configuration that the loader has created
|
||||
|
||||
# We just need to set the machine here, that then determines the
|
||||
# distro and image options. Cunning huh? :-)
|
||||
|
||||
self.configuration = self.loader.configuration
|
||||
model = self.configuration.get_machines_model ()
|
||||
self.machine_combo.set_model (model)
|
||||
|
||||
self.set_configurable (True)
|
||||
|
||||
def loader_error_cb (self, loader, message):
|
||||
self.status_image.set_from_icon_name ("error",
|
||||
gtk.ICON_SIZE_BUTTON)
|
||||
self.status_image.show()
|
||||
self.status_label.set_text ("Error downloading repository metadata")
|
||||
for widget in self.conf_widgets:
|
||||
widget.set_sensitive (False)
|
||||
|
||||
def clear_status_message (self):
|
||||
self.status_image.hide()
|
||||
self.status_label.set_label (
|
||||
"""<i>Enter the repository location and press _Refresh</i>""")
|
||||
|
||||
def __init__ (self):
|
||||
gtk.Dialog.__init__ (self)
|
||||
|
||||
# Cancel
|
||||
self.add_button (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
|
||||
|
||||
# Build
|
||||
button = gtk.Button ("_Build", None, True)
|
||||
image = gtk.Image ()
|
||||
image.set_from_stock (gtk.STOCK_EXECUTE,gtk.ICON_SIZE_BUTTON)
|
||||
button.set_image (image)
|
||||
self.add_action_widget (button, BuildSetupDialog.RESPONSE_BUILD)
|
||||
button.show_all ()
|
||||
|
||||
# Pull in *just* the table from the Glade XML data.
|
||||
gxml = gtk.glade.XML (os.path.dirname(__file__) + "/crumbs/puccho.glade",
|
||||
root = "build_table")
|
||||
table = gxml.get_widget ("build_table")
|
||||
self.vbox.pack_start (table, True, False, 0)
|
||||
|
||||
# Grab all the widgets that we need to turn on/off when we refresh...
|
||||
self.conf_widgets = []
|
||||
self.conf_widgets += [gxml.get_widget ("machine_label")]
|
||||
self.conf_widgets += [gxml.get_widget ("distribution_label")]
|
||||
self.conf_widgets += [gxml.get_widget ("image_label")]
|
||||
self.conf_widgets += [gxml.get_widget ("machine_combo")]
|
||||
self.conf_widgets += [gxml.get_widget ("distribution_combo")]
|
||||
self.conf_widgets += [gxml.get_widget ("image_combo")]
|
||||
|
||||
# Grab the status widgets
|
||||
self.status_image = gxml.get_widget ("status_image")
|
||||
self.status_label = gxml.get_widget ("status_label")
|
||||
|
||||
# Grab the refresh button and connect to the clicked signal
|
||||
self.refresh_button = gxml.get_widget ("refresh_button")
|
||||
self.refresh_button.connect ("clicked", self.refresh_button_clicked)
|
||||
|
||||
# Grab the location entry and connect to editable::changed
|
||||
self.location_entry = gxml.get_widget ("location_entry")
|
||||
self.location_entry.connect ("changed",
|
||||
self.repository_entry_editable_changed)
|
||||
|
||||
# Grab the machine combo and hook onto the changed signal. This then
|
||||
# allows us to populate the distro and image combos
|
||||
self.machine_combo = gxml.get_widget ("machine_combo")
|
||||
self.machine_combo.connect ("changed", self.machine_combo_changed)
|
||||
|
||||
# Setup the combo
|
||||
cell = gtk.CellRendererText()
|
||||
self.machine_combo.pack_start(cell, True)
|
||||
self.machine_combo.add_attribute(cell, 'text', 0)
|
||||
|
||||
# Grab the distro and image combos. We need these to populate with
|
||||
# models once the machine is chosen
|
||||
self.distribution_combo = gxml.get_widget ("distribution_combo")
|
||||
cell = gtk.CellRendererText()
|
||||
self.distribution_combo.pack_start(cell, True)
|
||||
self.distribution_combo.add_attribute(cell, 'text', 0)
|
||||
|
||||
self.image_combo = gxml.get_widget ("image_combo")
|
||||
cell = gtk.CellRendererText()
|
||||
self.image_combo.pack_start(cell, True)
|
||||
self.image_combo.add_attribute(cell, 'text', 0)
|
||||
|
||||
# Put the default descriptive text in the status box
|
||||
self.clear_status_message()
|
||||
|
||||
# Mark as non-configurable; this just greys out the widgets the
# user can't yet use
|
||||
self.configurable = False
|
||||
self.set_configurable(False)
|
||||
|
||||
# Show the table
|
||||
table.show_all ()
|
||||
|
||||
# The loader and some signals connected to it to update the status
|
||||
# area
|
||||
self.loader = MetaDataLoader()
|
||||
self.loader.connect ("success", self.loader_success_cb)
|
||||
self.loader.connect ("error", self.loader_error_cb)
|
||||
|
||||
def update_configuration (self):
|
||||
""" A poorly named function but it updates the internal configuration
|
||||
from the widgets. This can make that configuration concrete and can
|
||||
thus be used for building """
|
||||
# Extract the chosen machine from the combo
|
||||
model = self.machine_combo.get_model()
|
||||
active_iter = self.machine_combo.get_active_iter()
|
||||
if (active_iter):
|
||||
self.configuration.machine = model.get(active_iter, 0)[0]
|
||||
|
||||
# Extract the chosen distro from the combo
|
||||
model = self.distribution_combo.get_model()
|
||||
active_iter = self.distribution_combo.get_active_iter()
|
||||
if (active_iter):
|
||||
self.configuration.distro = model.get(active_iter, 0)[0]
|
||||
|
||||
# Extract the chosen image from the combo
|
||||
model = self.image_combo.get_model()
|
||||
active_iter = self.image_combo.get_active_iter()
|
||||
if (active_iter):
|
||||
self.configuration.image = model.get(active_iter, 0)[0]
|
||||
|
||||
# This function pulls events out of the event queue and pushes them into the
# RunningBuild, which in turn updates the progress tree view.
|
||||
#
|
||||
# TODO: Should be a method on the RunningBuild class
|
||||
def event_handle_timeout (eventHandler, build):
|
||||
# Consume as many messages as we can ...
|
||||
event = eventHandler.getEvent()
|
||||
while event:
|
||||
build.handle_event (event)
|
||||
event = eventHandler.getEvent()
|
||||
return True
|
||||
|
||||
class MainWindow (gtk.Window):
|
||||
|
||||
# Callback that gets fired when the user hits a button in the
|
||||
# BuildSetupDialog.
|
||||
def build_dialog_box_response_cb (self, dialog, response_id):
|
||||
conf = None
|
||||
if (response_id == BuildSetupDialog.RESPONSE_BUILD):
|
||||
dialog.update_configuration()
|
||||
print dialog.configuration.machine, dialog.configuration.distro, \
|
||||
dialog.configuration.image
|
||||
conf = dialog.configuration
|
||||
|
||||
dialog.destroy()
|
||||
|
||||
if conf:
|
||||
self.manager.do_build (conf)
|
||||
|
||||
def build_button_clicked_cb (self, button):
|
||||
dialog = BuildSetupDialog ()
|
||||
|
||||
# For some unknown reason Dialog.run causes nice little deadlocks ... :-(
|
||||
dialog.connect ("response", self.build_dialog_box_response_cb)
|
||||
dialog.show()
|
||||
|
||||
def __init__ (self):
|
||||
gtk.Window.__init__ (self)
|
||||
|
||||
# Pull in *just* the main vbox from the Glade XML data and then pack
|
||||
# that inside the window
|
||||
gxml = gtk.glade.XML (os.path.dirname(__file__) + "/crumbs/puccho.glade",
|
||||
root = "main_window_vbox")
|
||||
vbox = gxml.get_widget ("main_window_vbox")
|
||||
self.add (vbox)
|
||||
|
||||
# Create the tree views for the build manager view and the progress view
|
||||
self.build_manager_view = BuildManagerTreeView()
|
||||
self.running_build_view = RunningBuildTreeView()
|
||||
|
||||
# Grab the scrolled windows that we put the tree views into
|
||||
self.results_scrolledwindow = gxml.get_widget ("results_scrolledwindow")
|
||||
self.progress_scrolledwindow = gxml.get_widget ("progress_scrolledwindow")
|
||||
|
||||
# Put the tree views inside ...
|
||||
self.results_scrolledwindow.add (self.build_manager_view)
|
||||
self.progress_scrolledwindow.add (self.running_build_view)
|
||||
|
||||
# Hook up the build button...
|
||||
self.build_button = gxml.get_widget ("main_toolbutton_build")
|
||||
self.build_button.connect ("clicked", self.build_button_clicked_cb)
|
||||
|
||||
# I'm not very happy about the current ownership of the RunningBuild. I have
|
||||
# my suspicions that this object should be held by the BuildManager since we
|
||||
# care about the signals in the manager
|
||||
|
||||
def running_build_succeeded_cb (running_build, manager):
|
||||
# Notify the manager that a build has succeeded. This is necessary as part
|
||||
# of the 'hack' that we use for making the row in the model / view
|
||||
# representing the ongoing build change into a row representing the
|
||||
# completed build. Since we know only one build can be running at a time,
|
||||
# we can handle this.
|
||||
|
||||
# FIXME: Refactor all this so that the RunningBuild is owned by the
|
||||
# BuildManager. It can then hook onto the signals directly and drive
|
||||
# interesting things it cares about.
|
||||
manager.notify_build_succeeded ()
|
||||
print "build succeeded"
|
||||
|
||||
def running_build_failed_cb (running_build, manager):
|
||||
# As above
|
||||
print "build failed"
|
||||
manager.notify_build_failed ()
|
||||
|
||||
def init (server, eventHandler):
|
||||
# Initialise threading...
|
||||
gobject.threads_init()
|
||||
gtk.gdk.threads_init()
|
||||
|
||||
main_window = MainWindow ()
|
||||
main_window.show_all ()
|
||||
|
||||
# Set up the build manager stuff in general
|
||||
builds_dir = os.path.join (os.getcwd(), "results")
|
||||
manager = BuildManager (server, builds_dir)
|
||||
main_window.build_manager_view.set_model (manager.model)
|
||||
|
||||
# Do the running build setup
|
||||
running_build = RunningBuild ()
|
||||
main_window.running_build_view.set_model (running_build.model)
|
||||
running_build.connect ("build-succeeded", running_build_succeeded_cb,
|
||||
manager)
|
||||
running_build.connect ("build-failed", running_build_failed_cb, manager)
|
||||
|
||||
# We need to save the manager into the MainWindow so that the toolbar
|
||||
# button can use it.
|
||||
# FIXME: Refactor ?
|
||||
main_window.manager = manager
|
||||
|
||||
# Use a timeout function for probing the event queue to find out if we
|
||||
# have a message waiting for us.
|
||||
gobject.timeout_add (200,
|
||||
event_handle_timeout,
|
||||
eventHandler,
|
||||
running_build)
|
||||
|
||||
gtk.main()
127
bitbake-dev/lib/bb/ui/uievent.py
Normal file
@@ -0,0 +1,127 @@
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
|
||||
# Copyright (C) 2006 - 2007 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
|
||||
"""
|
||||
Use this class to fork off a thread to receive event callbacks from the bitbake
|
||||
server and queue them for the UI to process. This process must be used to avoid
|
||||
client/server deadlocks.
|
||||
"""
|
||||
|
||||
import socket, threading
|
||||
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
|
||||
|
||||
class BBUIEventQueue:
|
||||
def __init__(self, BBServer):
|
||||
|
||||
self.eventQueue = []
|
||||
self.eventQueueLock = threading.Lock()
|
||||
self.eventQueueNotify = threading.Event()
|
||||
|
||||
self.BBServer = BBServer
|
||||
|
||||
self.t = threading.Thread()
|
||||
self.t.setDaemon(True)
|
||||
self.t.run = self.startCallbackHandler
|
||||
self.t.start()
|
||||
|
||||
def getEvent(self):
|
||||
|
||||
self.eventQueueLock.acquire()
|
||||
|
||||
if len(self.eventQueue) == 0:
|
||||
self.eventQueueLock.release()
|
||||
return None
|
||||
|
||||
item = self.eventQueue.pop(0)
|
||||
|
||||
if len(self.eventQueue) == 0:
|
||||
self.eventQueueNotify.clear()
|
||||
|
||||
self.eventQueueLock.release()
|
||||
|
||||
return item
|
||||
|
||||
def waitEvent(self, delay):
|
||||
self.eventQueueNotify.wait(delay)
|
||||
return self.getEvent()
|
||||
|
||||
def queue_event(self, event):
|
||||
|
||||
self.eventQueueLock.acquire()
|
||||
self.eventQueue.append(event)
|
||||
self.eventQueueNotify.set()
|
||||
self.eventQueueLock.release()
|
||||
|
||||
def startCallbackHandler(self):
|
||||
|
||||
server = UIXMLRPCServer()
|
||||
self.host, self.port = server.socket.getsockname()
|
||||
|
||||
server.register_function( self.system_quit, "event.quit" )
|
||||
server.register_function( self.queue_event, "event.send" )
|
||||
server.socket.settimeout(1)
|
||||
|
||||
self.EventHandle = self.BBServer.registerEventHandler(self.host, self.port)
|
||||
|
||||
self.server = server
|
||||
while not server.quit:
|
||||
server.handle_request()
|
||||
server.server_close()
|
||||
|
||||
def system_quit( self ):
|
||||
"""
|
||||
Shut down the callback thread
|
||||
"""
|
||||
try:
|
||||
self.BBServer.unregisterEventHandler(self.EventHandle)
|
||||
except:
|
||||
pass
|
||||
self.server.quit = True
|
||||
|
||||
class UIXMLRPCServer (SimpleXMLRPCServer):
|
||||
|
||||
def __init__( self, interface = ("localhost", 0) ):
|
||||
self.quit = False
|
||||
SimpleXMLRPCServer.__init__( self,
|
||||
interface,
|
||||
requestHandler=SimpleXMLRPCRequestHandler,
|
||||
logRequests=False, allow_none=True)
|
||||
|
||||
def get_request(self):
|
||||
while not self.quit:
|
||||
try:
|
||||
sock, addr = self.socket.accept()
|
||||
sock.settimeout(1)
|
||||
return (sock, addr)
|
||||
except socket.timeout:
|
||||
pass
|
||||
return (None,None)
|
||||
|
||||
def close_request(self, request):
|
||||
if request is None:
|
||||
return
|
||||
SimpleXMLRPCServer.close_request(self, request)
|
||||
|
||||
def process_request(self, request, client_address):
|
||||
if request is None:
|
||||
return
|
||||
SimpleXMLRPCServer.process_request(self, request, client_address)
49
bitbake-dev/lib/bb/ui/uihelper.py
Normal file
@@ -0,0 +1,49 @@
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
|
||||
# Copyright (C) 2006 - 2007 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
class BBUIHelper:
|
||||
def __init__(self):
|
||||
self.needUpdate = False
|
||||
self.running_tasks = {}
|
||||
self.failed_tasks = {}
|
||||
|
||||
def eventHandler(self, event):
|
||||
if event[0].startswith('bb.build.TaskStarted'):
|
||||
self.running_tasks["%s %s\n" % (event[1]['_package'], event[1]['_task'])] = ""
|
||||
self.needUpdate = True
|
||||
if event[0].startswith('bb.build.TaskSucceeded'):
|
||||
del self.running_tasks["%s %s\n" % (event[1]['_package'], event[1]['_task'])]
|
||||
self.needUpdate = True
|
||||
if event[0].startswith('bb.build.TaskFailed'):
|
||||
del self.running_tasks["%s %s\n" % (event[1]['_package'], event[1]['_task'])]
|
||||
self.failed_tasks["%s %s\n" % (event[1]['_package'], event[1]['_task'])] = ""
|
||||
self.needUpdate = True
|
||||
|
||||
# Add runqueue event handling
|
||||
#if event[0].startswith('bb.runqueue.runQueueTaskCompleted'):
|
||||
# a = 1
|
||||
#if event[0].startswith('bb.runqueue.runQueueTaskStarted'):
|
||||
# a = 1
|
||||
#if event[0].startswith('bb.runqueue.runQueueTaskFailed'):
|
||||
# a = 1
|
||||
#if event[0].startswith('bb.runqueue.runQueueExitWait'):
|
||||
# a = 1
|
||||
|
||||
def getTasks(self):
|
||||
return (self.running_tasks, self.failed_tasks)
417
bitbake-dev/lib/bb/utils.py
Normal file
@@ -0,0 +1,417 @@
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake Utility Functions
|
||||
"""
|
||||
|
||||
# Copyright (C) 2004 Michael Lauer
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
digits = "0123456789"
|
||||
ascii_letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
|
||||
|
||||
import re, fcntl, os
|
||||
|
||||
def explode_version(s):
|
||||
r = []
|
||||
alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$')
|
||||
numeric_regexp = re.compile('^(\d+)(.*)$')
|
||||
while (s != ''):
|
||||
if s[0] in digits:
|
||||
m = numeric_regexp.match(s)
|
||||
r.append(int(m.group(1)))
|
||||
s = m.group(2)
|
||||
continue
|
||||
if s[0] in ascii_letters:
|
||||
m = alpha_regexp.match(s)
|
||||
r.append(m.group(1))
|
||||
s = m.group(2)
|
||||
continue
|
||||
s = s[1:]
|
||||
return r
|
||||
|
||||
def vercmp_part(a, b):
|
||||
va = explode_version(a)
|
||||
vb = explode_version(b)
|
||||
while True:
|
||||
if va == []:
|
||||
ca = None
|
||||
else:
|
||||
ca = va.pop(0)
|
||||
if vb == []:
|
||||
cb = None
|
||||
else:
|
||||
cb = vb.pop(0)
|
||||
if ca == None and cb == None:
|
||||
return 0
|
||||
if ca > cb:
|
||||
return 1
|
||||
if ca < cb:
|
||||
return -1
|
||||
|
||||
def vercmp(ta, tb):
|
||||
(ea, va, ra) = ta
|
||||
(eb, vb, rb) = tb
|
||||
|
||||
r = int(ea)-int(eb)
|
||||
if (r == 0):
|
||||
r = vercmp_part(va, vb)
|
||||
if (r == 0):
|
||||
r = vercmp_part(ra, rb)
|
||||
return r
|
||||
|
||||
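A usage sketch for the version helpers above, assuming BitBake's lib directory is on sys.path so the module imports as bb.utils; the version strings are invented.

    from bb.utils import explode_version, vercmp

    # A version string is split into alternating numeric and alphabetic parts.
    assert explode_version("1.2rc3") == [1, 2, "rc", 3]

    # vercmp() compares (epoch, version, revision) tuples; numeric parts are
    # compared as integers, so "1.10" sorts after "1.2".
    assert vercmp(("0", "1.2", "r0"), ("0", "1.10", "r0")) < 0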
def explode_deps(s):
|
||||
"""
|
||||
Take an RDEPENDS style string of format:
|
||||
"DEPEND1 (optional version) DEPEND2 (optional version) ..."
|
||||
and return a list of dependencies.
|
||||
Version information is ignored.
|
||||
"""
|
||||
r = []
|
||||
l = s.split()
|
||||
flag = False
|
||||
for i in l:
|
||||
if i[0] == '(':
|
||||
flag = True
|
||||
#j = []
|
||||
if not flag:
|
||||
r.append(i)
|
||||
#else:
|
||||
# j.append(i)
|
||||
if flag and i.endswith(')'):
|
||||
flag = False
|
||||
# Ignore version
|
||||
#r[-1] += ' ' + ' '.join(j)
|
||||
return r
|
||||
|
||||
def explode_dep_versions(s):
|
||||
"""
|
||||
Take an RDEPENDS style string of format:
|
||||
"DEPEND1 (optional version) DEPEND2 (optional version) ..."
|
||||
and return a dictionary of dependencies and versions.
|
||||
"""
|
||||
r = {}
|
||||
l = s.split()
|
||||
lastdep = None
|
||||
lastver = ""
|
||||
inversion = False
|
||||
for i in l:
|
||||
if i[0] == '(':
|
||||
inversion = True
|
||||
lastver = i[1:] or ""
|
||||
#j = []
|
||||
elif inversion and i.endswith(')'):
|
||||
inversion = False
|
||||
lastver = lastver + " " + (i[:-1] or "")
|
||||
r[lastdep] = lastver
|
||||
elif not inversion:
|
||||
r[i] = None
|
||||
lastdep = i
|
||||
lastver = ""
|
||||
elif inversion:
|
||||
lastver = lastver + " " + i
|
||||
|
||||
return r
|
||||
|
||||
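A sketch of the two RDEPENDS parsers above on an invented dependency string (import path bb.utils assumed):

    from bb.utils import explode_deps, explode_dep_versions

    deps = "glibc (>= 2.5) update-rc.d virtual/kernel"
    assert explode_deps(deps) == ["glibc", "update-rc.d", "virtual/kernel"]
    assert explode_dep_versions(deps) == {"glibc": ">= 2.5",
                                          "update-rc.d": None,
                                          "virtual/kernel": None}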
def _print_trace(body, line):
|
||||
"""
|
||||
Print the Environment of a Text Body
|
||||
"""
|
||||
import bb
|
||||
|
||||
# print the environment of the method
|
||||
bb.msg.error(bb.msg.domain.Util, "Printing the environment of the function")
|
||||
min_line = max(1,line-4)
|
||||
max_line = min(line+4,len(body)-1)
|
||||
for i in range(min_line,max_line+1):
|
||||
bb.msg.error(bb.msg.domain.Util, "\t%.4d:%s" % (i, body[i-1]) )
|
||||
|
||||
|
||||
def better_compile(text, file, realfile):
|
||||
"""
|
||||
A better compile method. This method
|
||||
will print the offending lines.
|
||||
"""
|
||||
try:
|
||||
return compile(text, file, "exec")
|
||||
except Exception, e:
|
||||
import bb,sys
|
||||
|
||||
# split the text into lines again
|
||||
body = text.split('\n')
|
||||
bb.msg.error(bb.msg.domain.Util, "Error in compiling python function in: ", realfile)
|
||||
bb.msg.error(bb.msg.domain.Util, "The lines resulting into this error were:")
|
||||
bb.msg.error(bb.msg.domain.Util, "\t%d:%s:'%s'" % (e.lineno, e.__class__.__name__, body[e.lineno-1]))
|
||||
|
||||
_print_trace(body, e.lineno)
|
||||
|
||||
# exit now
|
||||
sys.exit(1)
|
||||
|
||||
def better_exec(code, context, text, realfile):
|
||||
"""
|
||||
Similar to better_compile, better_exec will
|
||||
print the lines that are responsible for the
|
||||
error.
|
||||
"""
|
||||
import bb,sys
|
||||
try:
|
||||
exec code in context
|
||||
except:
|
||||
(t,value,tb) = sys.exc_info()
|
||||
|
||||
if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
|
||||
raise
|
||||
|
||||
# print the Header of the Error Message
|
||||
bb.msg.error(bb.msg.domain.Util, "Error in executing python function in: %s" % realfile)
|
||||
bb.msg.error(bb.msg.domain.Util, "Exception:%s Message:%s" % (t,value) )
|
||||
|
||||
# let us find the line number now
|
||||
while tb.tb_next:
|
||||
tb = tb.tb_next
|
||||
|
||||
import traceback
|
||||
line = traceback.tb_lineno(tb)
|
||||
|
||||
_print_trace( text.split('\n'), line )
|
||||
|
||||
raise
|
||||
|
||||
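A sketch of the compile/exec helpers above; the function body and file names are placeholders. On a syntax error better_compile() prints the offending lines and exits, and better_exec() re-raises after printing a trace.

    from bb.utils import better_compile, better_exec

    text = "x = 1 + 1\n"
    code = better_compile(text, "<anonymous>", "example.bb")
    context = {}
    better_exec(code, context, text, "example.bb")
    assert context["x"] == 2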
def Enum(*names):
|
||||
"""
|
||||
A simple class to give Enum support
|
||||
"""
|
||||
|
||||
assert names, "Empty enums are not supported"
|
||||
|
||||
class EnumClass(object):
|
||||
__slots__ = names
|
||||
def __iter__(self): return iter(constants)
|
||||
def __len__(self): return len(constants)
|
||||
def __getitem__(self, i): return constants[i]
|
||||
def __repr__(self): return 'Enum' + str(names)
|
||||
def __str__(self): return 'enum ' + str(constants)
|
||||
|
||||
class EnumValue(object):
|
||||
__slots__ = ('__value')
|
||||
def __init__(self, value): self.__value = value
|
||||
Value = property(lambda self: self.__value)
|
||||
EnumType = property(lambda self: EnumType)
|
||||
def __hash__(self): return hash(self.__value)
|
||||
def __cmp__(self, other):
|
||||
# C fans might want to remove the following assertion
|
||||
# to make all enums comparable by ordinal value {;))
|
||||
assert self.EnumType is other.EnumType, "Only values from the same enum are comparable"
|
||||
return cmp(self.__value, other.__value)
|
||||
def __invert__(self): return constants[maximum - self.__value]
|
||||
def __nonzero__(self): return bool(self.__value)
|
||||
def __repr__(self): return str(names[self.__value])
|
||||
|
||||
maximum = len(names) - 1
|
||||
constants = [None] * len(names)
|
||||
for i, each in enumerate(names):
|
||||
val = EnumValue(i)
|
||||
setattr(EnumClass, each, val)
|
||||
constants[i] = val
|
||||
constants = tuple(constants)
|
||||
EnumType = EnumClass()
|
||||
return EnumType
|
||||
|
||||
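A sketch of the Enum helper above (Python 2 semantics, matching the module: comparison goes through __cmp__); the enum names are arbitrary.

    from bb.utils import Enum

    Priority = Enum("LOW", "MEDIUM", "HIGH")
    assert Priority.LOW < Priority.HIGH        # ordered by declaration position
    assert repr(Priority.HIGH) == "HIGH"       # values print as their names
    assert len(Priority) == 3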
def lockfile(name):
|
||||
"""
|
||||
Use the file 'name' as a lock file; return when the lock has been acquired.
|
||||
Returns a variable to pass to unlockfile().
|
||||
"""
|
||||
path = os.path.dirname(name)
|
||||
if not os.path.isdir(path):
|
||||
import bb, sys
|
||||
bb.msg.error(bb.msg.domain.Util, "Error, lockfile path does not exist!: %s" % path)
|
||||
sys.exit(1)
|
||||
|
||||
while True:
|
||||
# If we leave the lockfiles lying around there is no problem
|
||||
# but we should clean up after ourselves. This gives potential
|
||||
# for races though. To work around this, when we acquire the lock
|
||||
# we check the file we locked was still the lock file on disk.
|
||||
# by comparing inode numbers. If they don't match or the lockfile
|
||||
# no longer exists, we start again.
|
||||
|
||||
# This implementation is unfair since the last person to request the
|
||||
# lock is the most likely to win it.
|
||||
|
||||
try:
|
||||
lf = open(name, "a+")
|
||||
fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
|
||||
statinfo = os.fstat(lf.fileno())
|
||||
if os.path.exists(lf.name):
|
||||
statinfo2 = os.stat(lf.name)
|
||||
if statinfo.st_ino == statinfo2.st_ino:
|
||||
return lf
|
||||
# File no longer exists or changed, retry
|
||||
lf.close()
|
||||
except Exception, e:
|
||||
continue
|
||||
|
||||
def unlockfile(lf):
|
||||
"""
|
||||
Unlock a file locked using lockfile()
|
||||
"""
|
||||
os.unlink(lf.name)
|
||||
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
|
||||
lf.close()
|
||||
|
||||
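A usage sketch for the locking helpers above; the lock file path is arbitrary and its parent directory must already exist (lockfile() exits otherwise).

    from bb.utils import lockfile, unlockfile

    lock = lockfile("/tmp/example.lock")   # blocks until the exclusive lock is held
    try:
        pass                               # critical section goes here
    finally:
        unlockfile(lock)                   # removes the lock file and releases the flock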
def md5_file(filename):
|
||||
"""
|
||||
Return the hex string representation of the MD5 checksum of filename.
|
||||
"""
|
||||
try:
|
||||
import hashlib
|
||||
m = hashlib.md5()
|
||||
except ImportError:
|
||||
import md5
|
||||
m = md5.new()
|
||||
|
||||
for line in open(filename):
|
||||
m.update(line)
|
||||
return m.hexdigest()
|
||||
|
||||
def sha256_file(filename):
|
||||
"""
|
||||
Return the hex string representation of the 256-bit SHA checksum of
|
||||
filename. On Python 2.4 this will return None, so callers will need to
|
||||
handle that by either skipping SHA checks, or running a standalone sha256sum
|
||||
binary.
|
||||
"""
|
||||
try:
|
||||
import hashlib
|
||||
except ImportError:
|
||||
return None
|
||||
|
||||
s = hashlib.sha256()
|
||||
for line in open(filename):
|
||||
s.update(line)
|
||||
return s.hexdigest()
|
||||
|
||||
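A sketch of the checksum helpers above; the path is a placeholder for any readable file.

    from bb.utils import md5_file, sha256_file

    md5sum = md5_file("/etc/hostname")     # hex digest string
    sha = sha256_file("/etc/hostname")     # None when hashlib is missing (Python 2.4)
    if sha is None:
        pass                               # fall back to an external sha256sum, as the docstring suggests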
def preserved_envvars_list():
|
||||
return [
|
||||
'BBPATH',
|
||||
'BB_PRESERVE_ENV',
|
||||
'BB_ENV_WHITELIST',
|
||||
'BB_ENV_EXTRAWHITE',
|
||||
'COLORTERM',
|
||||
'DBUS_SESSION_BUS_ADDRESS',
|
||||
'DESKTOP_SESSION',
|
||||
'DESKTOP_STARTUP_ID',
|
||||
'DISPLAY',
|
||||
'GNOME_KEYRING_PID',
|
||||
'GNOME_KEYRING_SOCKET',
|
||||
'GPG_AGENT_INFO',
|
||||
'GTK_RC_FILES',
|
||||
'HOME',
|
||||
'LANG',
|
||||
'LOGNAME',
|
||||
'PATH',
|
||||
'PWD',
|
||||
'SESSION_MANAGER',
|
||||
'SHELL',
|
||||
'SSH_AUTH_SOCK',
|
||||
'TERM',
|
||||
'USER',
|
||||
'USERNAME',
|
||||
'_',
|
||||
'XAUTHORITY',
|
||||
'XDG_DATA_DIRS',
|
||||
'XDG_SESSION_COOKIE',
|
||||
]
|
||||
|
||||
def filter_environment(good_vars):
|
||||
"""
|
||||
Create a pristine environment for bitbake. This will remove variables that
|
||||
are not known and may influence the build in a negative way.
|
||||
"""
|
||||
|
||||
import bb
|
||||
|
||||
removed_vars = []
|
||||
for key in os.environ.keys():
|
||||
if key in good_vars:
|
||||
continue
|
||||
|
||||
removed_vars.append(key)
|
||||
os.unsetenv(key)
|
||||
del os.environ[key]
|
||||
|
||||
if len(removed_vars):
|
||||
bb.debug(1, "Removed the following variables from the environment:", ",".join(removed_vars))
|
||||
|
||||
return removed_vars
|
||||
|
||||
def clean_environment():
|
||||
"""
|
||||
Clean up any spurious environment variables. This will remove any
|
||||
variables the user hasn't chosen to preserve.
|
||||
"""
|
||||
if 'BB_PRESERVE_ENV' not in os.environ:
|
||||
if 'BB_ENV_WHITELIST' in os.environ:
|
||||
good_vars = os.environ['BB_ENV_WHITELIST'].split()
|
||||
else:
|
||||
good_vars = preserved_envvars_list()
|
||||
if 'BB_ENV_EXTRAWHITE' in os.environ:
|
||||
good_vars.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
|
||||
filter_environment(good_vars)
|
||||
|
||||
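A sketch of the whitelist behaviour described above; MY_PROXY_SETTING is an invented variable name, and note that clean_environment() really does strip os.environ for the current process.

    import os
    from bb.utils import clean_environment

    os.environ["BB_ENV_EXTRAWHITE"] = "MY_PROXY_SETTING"   # extend the built-in whitelist
    clean_environment()                                     # everything not whitelisted is removed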
def empty_environment():
|
||||
"""
|
||||
Remove all variables from the environment.
|
||||
"""
|
||||
for s in os.environ.keys():
|
||||
os.unsetenv(s)
|
||||
del os.environ[s]
|
||||
|
||||
def build_environment(d):
|
||||
"""
|
||||
Build an environment from all exported variables.
|
||||
"""
|
||||
import bb
|
||||
for var in bb.data.keys(d):
|
||||
export = bb.data.getVarFlag(var, "export", d)
|
||||
if export:
|
||||
os.environ[var] = bb.data.getVar(var, d, True)
|
||||
|
||||
def prunedir(topdir):
|
||||
# Delete everything reachable from the directory named in 'topdir'.
|
||||
# CAUTION: This is dangerous!
|
||||
for root, dirs, files in os.walk(topdir, topdown=False):
|
||||
for name in files:
|
||||
os.remove(os.path.join(root, name))
|
||||
for name in dirs:
|
||||
if os.path.islink(os.path.join(root, name)):
|
||||
os.remove(os.path.join(root, name))
|
||||
else:
|
||||
os.rmdir(os.path.join(root, name))
|
||||
os.rmdir(topdir)
|
||||
|
||||
#
|
||||
# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
|
||||
# but that's possibly insane and suffixes is probably going to be small
|
||||
#
|
||||
def prune_suffix(var, suffixes, d):
|
||||
# See if var ends with any of the suffixes listed and
|
||||
# remove it if found
|
||||
for suffix in suffixes:
|
||||
if var.endswith(suffix):
|
||||
return var.replace(suffix, "")
|
||||
return var
|
||||
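A sketch of prune_suffix() above; the datastore argument is unused by this implementation, so None is passed.

    from bb.utils import prune_suffix

    assert prune_suffix("busybox-native", ["-native", "-cross"], None) == "busybox"
    assert prune_suffix("busybox", ["-native", "-cross"], None) == "busybox"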
145 bitbake-dev/lib/bb/xmlrpcserver.py (Normal file)
@@ -0,0 +1,145 @@
|
||||
#
|
||||
# BitBake XMLRPC Server
|
||||
#
|
||||
# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
|
||||
# Copyright (C) 2006 - 2008 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""
|
||||
This module implements an xmlrpc server for BitBake.
|
||||
|
||||
Use this by deriving a class from BitBakeXMLRPCServer and then adding
|
||||
methods which you want to "export" via XMLRPC. If the methods have the
|
||||
prefix xmlrpc_, then registering those function will happen automatically,
|
||||
if not, you need to call register_function.
|
||||
|
||||
Use register_idle_function() to add a function which the xmlrpc server
|
||||
calls from within server_forever when no requests are pending. Make sure
|
||||
that those functions are non-blocking or else you will introduce latency
|
||||
in the server's main loop.
|
||||
"""
|
||||
|
||||
import bb
|
||||
import xmlrpclib
|
||||
|
||||
DEBUG = False
|
||||
|
||||
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
|
||||
import inspect, select
|
||||
|
||||
class BitBakeServerCommands():
|
||||
def __init__(self, server, cooker):
|
||||
self.cooker = cooker
|
||||
self.server = server
|
||||
|
||||
def registerEventHandler(self, host, port):
|
||||
"""
|
||||
Register a remote UI Event Handler
|
||||
"""
|
||||
s = xmlrpclib.Server("http://%s:%d" % (host, port), allow_none=True)
|
||||
return bb.event.register_UIHhandler(s)
|
||||
|
||||
def unregisterEventHandler(self, handlerNum):
|
||||
"""
|
||||
Unregister a remote UI Event Handler
|
||||
"""
|
||||
return bb.event.unregister_UIHhandler(handlerNum)
|
||||
|
||||
def runCommand(self, command):
|
||||
"""
|
||||
Run a cooker command on the server
|
||||
"""
|
||||
return self.cooker.command.runCommand(command)
|
||||
|
||||
def terminateServer(self):
|
||||
"""
|
||||
Trigger the server to quit
|
||||
"""
|
||||
self.server.quit = True
|
||||
print "Server (cooker) exitting"
|
||||
return
|
||||
|
||||
def ping(self):
|
||||
"""
|
||||
Dummy method which can be used to check the server is still alive
|
||||
"""
|
||||
return True
|
||||
|
||||
class BitBakeXMLRPCServer(SimpleXMLRPCServer):
|
||||
# remove this when you're done with debugging
|
||||
# allow_reuse_address = True
|
||||
|
||||
def __init__(self, cooker, interface = ("localhost", 0)):
|
||||
"""
|
||||
Constructor
|
||||
"""
|
||||
SimpleXMLRPCServer.__init__(self, interface,
|
||||
requestHandler=SimpleXMLRPCRequestHandler,
|
||||
logRequests=False, allow_none=True)
|
||||
self._idlefuns = {}
|
||||
self.host, self.port = self.socket.getsockname()
|
||||
#self.register_introspection_functions()
|
||||
commands = BitBakeServerCommands(self, cooker)
|
||||
self.autoregister_all_functions(commands, "")
|
||||
|
||||
def autoregister_all_functions(self, context, prefix):
|
||||
"""
|
||||
Convenience method for registering all functions in the scope
|
||||
of this class that start with a common prefix
|
||||
"""
|
||||
methodlist = inspect.getmembers(context, inspect.ismethod)
|
||||
for name, method in methodlist:
|
||||
if name.startswith(prefix):
|
||||
self.register_function(method, name[len(prefix):])
|
||||
|
||||
def register_idle_function(self, function, data):
|
||||
"""Register a function to be called while the server is idle"""
|
||||
assert callable(function)
|
||||
self._idlefuns[function] = data
|
||||
|
||||
def serve_forever(self):
|
||||
"""
|
||||
Serve Requests. Overloaded to honor a quit command
|
||||
"""
|
||||
self.quit = False
|
||||
while not self.quit:
|
||||
#print "Idle queue length %s" % len(self._idlefuns)
|
||||
if len(self._idlefuns) == 0:
|
||||
self.timeout = None
|
||||
else:
|
||||
self.timeout = 0
|
||||
self.handle_request()
|
||||
#print "Idle timeout, running idle functions"
|
||||
for function, data in self._idlefuns.items():
|
||||
try:
|
||||
retval = function(self, data, False)
|
||||
if not retval:
|
||||
del self._idlefuns[function]
|
||||
except SystemExit:
|
||||
raise
|
||||
except:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
pass
|
||||
|
||||
# Tell idle functions we're exiting
|
||||
for function, data in self._idlefuns.items():
|
||||
try:
|
||||
retval = function(self, data, True)
|
||||
except:
|
||||
pass
|
||||
|
||||
self.server_close()
|
||||
return
|
||||
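A usage sketch for the server class above, following the pattern in the module docstring (Python 2, matching the module). FakeCooker is an illustrative stand-in for bb.cooker.BBCooker, and the idle-callback name is invented.

    from bb.xmlrpcserver import BitBakeXMLRPCServer

    class FakeCooker:
        """Minimal stand-in for bb.cooker.BBCooker (illustration only)."""
        class command:
            @staticmethod
            def runCommand(command):
                return None

    server = BitBakeXMLRPCServer(FakeCooker())      # binds to localhost on a random port

    def heartbeat(server, data, abort):
        # idle callback: return a true value to stay registered, false to be dropped
        return False
    server.register_idle_function(heartbeat, None)

    print "XMLRPC interface at http://%s:%d" % (server.host, server.port)
    server.serve_forever()                          # runs until terminateServer() is called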
@@ -1,8 +1,8 @@
|
||||
Tim Ansell <mithro@mithis.net>
|
||||
Phil Blundell <pb@handhelds.org>
|
||||
Seb Frankengul <seb@frankengul.org>
|
||||
Holger Freyther <holger@moiji-mobile.com>
|
||||
Marcin Juszkiewicz <marcin@juszkiewicz.com.pl>
|
||||
Holger Freyther <zecke@handhelds.org>
|
||||
Marcin Juszkiewicz <marcin@haerwu.biz>
|
||||
Chris Larson <kergoth@handhelds.org>
|
||||
Ulrich Luckas <luckas@musoft.de>
|
||||
Mickey Lauer <mickey@Vanille.de>
|
||||
|
||||
@@ -1,99 +1,8 @@
|
||||
Changes in Bitbake 1.9.x:
|
||||
- Add PE (Package Epoch) support from Philipp Zabel (pH5)
|
||||
- Treat python functions the same as shell functions for logging
|
||||
- Use TMPDIR/anonfunc as a __anonfunc temp directory (T)
|
||||
- Catch truncated cache file errors
|
||||
- Allow operations other than assignment on flag variables
|
||||
- Add code to handle inter-task dependencies
|
||||
- Fix cache errors when generation dotGraphs
|
||||
- Make sure __inherit_cache is updated before calling include() (from Michael Krelin)
|
||||
- Fix bug when target was in ASSUME_PROVIDED (#2236)
|
||||
- Raise ParseError for filenames with multiple underscores instead of infinitely looping (#2062)
|
||||
- Fix invalid regexp in BBMASK error handling (missing import) (#1124)
|
||||
- Promote certain warnings from debug to note 2 level
|
||||
- Update manual
|
||||
- Correctly redirect stdin when forking
|
||||
- If parsing errors are found, exit, too many users miss the errors
|
||||
- Remove spurious PREFERRED_PROVIDER warnings
|
||||
- svn fetcher: Add _buildsvncommand function
|
||||
- Improve certain error messages
|
||||
- Rewrite svn fetcher to make adding extra operations easier
|
||||
as part of future SRCDATE="now" fixes
|
||||
(requires new FETCHCMD_svn definition in bitbake.conf)
|
||||
- Change SVNDIR layout to be more unique (fixes #2644 and #2624)
|
||||
- Add ConfigParsed Event after configuration parsing is complete
|
||||
- Add SRCREV support for svn fetcher
|
||||
- data.emit_var() - only call getVar if we need the variable
|
||||
- Stop generating the A variable (seems to be legacy code)
|
||||
- Make sure intertask depends get processed correcting in recursive depends
|
||||
- Add pn-PN to overrides when evaluating PREFERRED_VERSION
|
||||
- Improve the progress indicator by skipping tasks that have
|
||||
already run before starting the build rather than during it
|
||||
- Add profiling option (-P)
|
||||
- Add BB_SRCREV_POLICY variable (clear or cache) to control SRCREV cache
|
||||
- Add SRCREV_FORMAT support
|
||||
- Fix local fetcher's localpath return values
|
||||
- Apply OVERRIDES before performing immediate expansions
|
||||
- Allow the -b -e option combination to take regular expressions
|
||||
- Fix handling of variables with expansion in the name using _append/_prepend
|
||||
e.g. RRECOMMENDS_${PN}_append_xyz = "abc"
|
||||
- Add plain message function to bb.msg
|
||||
- Sort the list of providers before processing so dependency problems are
|
||||
reproducible rather than effectively random
|
||||
- Fix/improve bitbake -s output
|
||||
- Add locking for fetchers so only one tries to fetch a given file at a given time
|
||||
- Fix int(0)/None confusion in runqueue.py which causes random gaps in dependency chains
|
||||
- Expand data in addtasks
|
||||
- Print the list of missing DEPENDS,RDEPENDS for the "No buildable providers available for required...."
|
||||
error message.
|
||||
- Rework add_task to be more efficient (6% speedup, 7% number of function calls reduction)
|
||||
- Sort digraph output to make builds more reproducible
|
||||
- Split expandKeys into two for loops to benefit from the expand_cache (12% speedup)
|
||||
- runqueue.py: Fix idepends handling to avoid dependency errors
|
||||
- Clear the terminal TOSTOP flag if set (and warn the user)
|
||||
- Fix regression from r653 and make SRCDATE/CVSDATE work for packages again
|
||||
- Fix a bug in bb.decodeurl where http://some.where.com/somefile.tgz decoded to host="" (#1530)
|
||||
- Warn about malformed PREFERRED_PROVIDERS (#1072)
|
||||
- Add support for BB_NICE_LEVEL option (#1627)
|
||||
- Psyco is used only on x86 as there is no support for other architectures.
|
||||
- Sort initial providers list by default preference (#1145, #2024)
|
||||
- Improve provider sorting so preferred versions have preference over latest versions (#768)
|
||||
- Detect builds of tasks with overlapping providers and warn (will become a fatal error) (#1359)
|
||||
- Add MULTI_PROVIDER_WHITELIST variable to allow known safe multiple providers to be listed
|
||||
- Handle paths in svn fetcher module parameter
|
||||
- Support the syntax "export VARIABLE"
|
||||
- Add bzr fetcher
|
||||
- Add support for cleaning directories before a task in the form:
|
||||
do_taskname[cleandirs] = "dir"
|
||||
- bzr fetcher tweaks from Robert Schuster (#2913)
|
||||
- Add mercurial (hg) fetcher from Robert Schuster (#2913)
|
||||
- Don't add duplicates to BBPATH
|
||||
- Fix preferred_version return values (providers.py)
|
||||
- Fix 'depends' flag splitting
|
||||
- Fix unexport handling (#3135)
|
||||
- Add bb.copyfile function similar to bb.movefile (and improve movefile error reporting)
|
||||
- Allow multiple options for deptask flag
|
||||
- Use git-fetch instead of git-pull removing any need for merges when
|
||||
fetching (we don't care about the index). Fixes fetch errors.
|
||||
- Add BB_GENERATE_MIRROR_TARBALLS option, set to 0 to make git fetches
|
||||
faster at the expense of not creating mirror tarballs.
|
||||
- SRCREV handling updates, improvements and fixes from Poky
|
||||
- Add bb.utils.lockfile() and bb.utils.unlockfile() from Poky
|
||||
- Add support for task selfstamp and lockfiles flags
|
||||
- Disable task number acceleration since it can allow the tasks to run
|
||||
out of sequence
|
||||
- Improve runqueue code comments
|
||||
- Add task scheduler abstraction and some example schedulers
|
||||
- Improve circular dependency chain debugging code and user feedback
|
||||
- Don't give a stacktrace for invalid tasks, have a user friendly message (#3431)
|
||||
- Add support for "-e target" (#3432)
|
||||
- Fix shell showdata command (#3259)
|
||||
- Fix shell data updating problems (#1880)
|
||||
- Properly raise errors for invalid source URI protocols
|
||||
- Change the wget fetcher failure handling to avoid lockfile problems
|
||||
- Add support for branches in git fetcher (Otavio Salvador, Michael Lauer)
|
||||
- Make taskdata and runqueue errors more user friendly
|
||||
- Add norecurse and fullpath options to cvs fetcher
|
||||
Changes in BitBake 1.8.x:
|
||||
- Add bb.utils.prune_suffix function
|
||||
|
||||
Changes in BitBake 1.8.12:
|
||||
- Fix -f (force) in conjunction with -b
|
||||
- Fix exit code for build failures in --continue mode
|
||||
- Fix git branch tags fetching
|
||||
- Change parseConfigurationFile so it works on real data, not a copy
|
||||
@@ -118,10 +27,8 @@ Changes in Bitbake 1.9.x:
|
||||
how extensively stamps are looked at for validity
|
||||
- When handling build target failures make sure idepends are checked and
|
||||
failed where needed. Fixes --continue mode crashes.
|
||||
- Fix -f (force) in conjunction with -b
|
||||
- Fix problems with recrdeptask handling where some idepends weren't handled
|
||||
correctly.
|
||||
- Handle exit codes correctly (from pH5)
|
||||
- Work around refs/HEAD issues with git over http (#3410)
|
||||
- Add proxy support to the CVS fetcher (from Cyril Chemparathy)
|
||||
- Improve runfetchcmd so errors are seen and various GIT variables are exported
|
||||
@@ -137,8 +44,9 @@ Changes in Bitbake 1.9.x:
|
||||
- Add PERSISTENT_DIR to store the PersistData in a persistent
|
||||
directory != the cache dir.
|
||||
- Add md5 and sha256 checksum generation functions to utils.py
|
||||
- Make sure Build Completed events are generated even when tasks fail
|
||||
- Correctly handle '-' characters in class names (#2958)
|
||||
- Make sure expandKeys has been called on the data dictionary before running tasks
|
||||
- Make sure expandKeys has been called on the data dictonary before running tasks
|
||||
- Correctly add a task override in the form task-TASKNAME.
|
||||
- Revert the '-' character fix in class names since it breaks things
|
||||
- When a regexp fails to compile for PACKAGES_DYNAMIC, print a more useful error (#4444)
|
||||
@@ -155,7 +63,6 @@ Changes in Bitbake 1.9.x:
|
||||
used instead of the internal bitbake one. Alternatively, BB_ENV_EXTRAWHITE can be used
|
||||
to extend the internal whitelist.
|
||||
- Perforce fetcher fix to use commandline options instead of being overriden by the environment
|
||||
- bb.utils.prunedir can cope with symlinks to directoriees without exceptions
|
||||
- use @rev when doing a svn checkout
|
||||
- Add osc fetcher (from Joshua Lock in Poky)
|
||||
- When SRCREV autorevisioning for a recipe is in use, don't cache the recipe
|
||||
@@ -169,15 +76,109 @@ Changes in Bitbake 1.9.x:
|
||||
proxies to work better. (from Poky)
|
||||
- Also allow user and pswd options in SRC_URIs globally (from Poky)
|
||||
- Improve proxy handling when using mirrors (from Poky)
|
||||
- Add bb.utils.prune_suffix function
|
||||
- Fix hg checkouts of specific revisions (from Poky)
|
||||
- Fix wget fetching of urls with parameters specified (from Poky)
|
||||
- Add username handling to git fetcher (from Poky)
|
||||
- Set HOME environmental variable when running fetcher commands (from Poky)
|
||||
- Make sure allowed variables inherited from the environment are exported again (from Poky)
|
||||
- When running a stage task in bbshell, run populate_staging, not the stage task (from Poky)
|
||||
- Fix + character escaping from PACKAGES_DYNAMIC (thanks Otavio Salvador)
|
||||
- Addition of BBCLASSEXTEND support for allowing one recipe to provide multiple targets (from Poky)
|
||||
|
||||
Changes in BitBake 1.8.10:
|
||||
- Psyco is available only for x86 - do not use it on other architectures.
|
||||
- Fix a bug in bb.decodeurl where http://some.where.com/somefile.tgz decoded to host="" (#1530)
|
||||
- Warn about malformed PREFERRED_PROVIDERS (#1072)
|
||||
- Add support for BB_NICE_LEVEL option (#1627)
|
||||
- Sort initial providers list by default preference (#1145, #2024)
|
||||
- Improve provider sorting so preferred versions have preference over latest versions (#768)
|
||||
- Detect builds of tasks with overlapping providers and warn (will become a fatal error) (#1359)
|
||||
- Add MULTI_PROVIDER_WHITELIST variable to allow known safe multiple providers to be listed
|
||||
- Handle paths in svn fetcher module parameter
|
||||
- Support the syntax "export VARIABLE"
|
||||
- Add bzr fetcher
|
||||
- Add support for cleaning directories before a task in the form:
|
||||
do_taskname[cleandirs] = "dir"
|
||||
- bzr fetcher tweaks from Robert Schuster (#2913)
|
||||
- Add mercurial (hg) fetcher from Robert Schuster (#2913)
|
||||
- Fix bogus preferred_version return values
|
||||
- Fix 'depends' flag splitting
|
||||
- Fix unexport handling (#3135)
|
||||
- Add bb.copyfile function similar to bb.movefile (and improve movefile error reporting)
|
||||
- Allow multiple options for deptask flag
|
||||
- Use git-fetch instead of git-pull removing any need for merges when
|
||||
fetching (we don't care about the index). Fixes fetch errors.
|
||||
- Add BB_GENERATE_MIRROR_TARBALLS option, set to 0 to make git fetches
|
||||
faster at the expense of not creating mirror tarballs.
|
||||
- SRCREV handling updates, improvements and fixes from Poky
|
||||
- Add bb.utils.lockfile() and bb.utils.unlockfile() from Poky
|
||||
- Add support for task selfstamp and lockfiles flags
|
||||
- Disable task number acceleration since it can allow the tasks to run
|
||||
out of sequence
|
||||
- Improve runqueue code comments
|
||||
- Add task scheduler abstraction and some example schedulers
|
||||
- Improve circular dependency chain debugging code and user feedback
|
||||
- Don't give a stacktrace for invalid tasks, have a user friendly message (#3431)
|
||||
- Add support for "-e target" (#3432)
|
||||
- Fix shell showdata command (#3259)
|
||||
- Fix shell data updating problems (#1880)
|
||||
- Properly raise errors for invalid source URI protocols
|
||||
- Change the wget fetcher failure handling to avoid lockfile problems
|
||||
- Add git branch support
|
||||
- Add support for branches in git fetcher (Otavio Salvador, Michael Lauer)
|
||||
- Make taskdata and runqueue errors more user friendly
|
||||
- Add norecurse and fullpath options to cvs fetcher
|
||||
- bb.utils.prunedir can cope with symlinks to directories without exceptions
|
||||
|
||||
Changes in Bitbake 1.8.8:
|
||||
- Rewrite svn fetcher to make adding extra operations easier
|
||||
as part of future SRCDATE="now" fixes
|
||||
(requires new FETCHCMD_svn definition in bitbake.conf)
|
||||
- Change SVNDIR layout to be more unique (fixes #2644 and #2624)
|
||||
- Import persistent data store from trunk
|
||||
- Sync fetcher code with that in trunk, adding SRCREV support for svn
|
||||
- Add ConfigParsed Event after configuration parsing is complete
|
||||
- data.emit_var() - only call getVar if we need the variable
|
||||
- Stop generating the A variable (seems to be legacy code)
|
||||
- Make sure intertask depends get processed correcting in recursive depends
|
||||
- Add pn-PN to overrides when evaluating PREFERRED_VERSION
|
||||
- Improve the progress indicator by skipping tasks that have
|
||||
already run before starting the build rather than during it
|
||||
- Add profiling option (-P)
|
||||
- Add BB_SRCREV_POLICY variable (clear or cache) to control SRCREV cache
|
||||
- Add SRCREV_FORMAT support
|
||||
- Fix local fetcher's localpath return values
|
||||
- Apply OVERRIDES before performing immediate expansions
|
||||
- Allow the -b -e option combination to take regular expressions
|
||||
- Add plain message function to bb.msg
|
||||
- Sort the list of providers before processing so dependency problems are
|
||||
reproducible rather than effectively random
|
||||
- Add locking for fetchers so only one tries to fetch a given file at a given time
|
||||
- Fix int(0)/None confusion in runqueue.py which causes random gaps in dependency chains
|
||||
- Fix handling of variables with expansion in the name using _append/_prepend
|
||||
e.g. RRECOMMENDS_${PN}_append_xyz = "abc"
|
||||
- Expand data in addtasks
|
||||
- Print the list of missing DEPENDS,RDEPENDS for the "No buildable providers available for required...."
|
||||
error message.
|
||||
- Rework add_task to be more efficient (6% speedup, 7% number of function calls reduction)
|
||||
- Sort digraph output to make builds more reproducible
|
||||
- Split expandKeys into two for loops to benefit from the expand_cache (12% speedup)
|
||||
- runqueue.py: Fix idepends handling to avoid dependency errors
|
||||
- Clear the terminal TOSTOP flag if set (and warn the user)
|
||||
- Fix regression from r653 and make SRCDATE/CVSDATE work for packages again
|
||||
|
||||
Changes in Bitbake 1.8.6:
|
||||
- Correctly redirect stdin when forking
|
||||
- If parsing errors are found, exit, too many users miss the errors
|
||||
- Remove spurious PREFERRED_PROVIDER warnings
|
||||
|
||||
Changes in Bitbake 1.8.4:
|
||||
- Make sure __inherit_cache is updated before calling include() (from Michael Krelin)
|
||||
- Fix bug when target was in ASSUME_PROVIDED (#2236)
|
||||
- Raise ParseError for filenames with multiple underscores instead of infinitely looping (#2062)
|
||||
- Fix invalid regexp in BBMASK error handling (missing import) (#1124)
|
||||
- Don't run build sanity checks on incomplete builds
|
||||
- Promote certain warnings from debug to note 2 level
|
||||
- Update manual
|
||||
|
||||
Changes in Bitbake 1.8.2:
|
||||
- Catch truncated cache file errors
|
||||
- Add PE (Package Epoch) support from Philipp Zabel (pH5)
|
||||
- Add code to handle inter-task dependencies
|
||||
- Allow operations other than assignment on flag variables
|
||||
- Fix cache errors when generation dotGraphs
|
||||
|
||||
Changes in Bitbake 1.8.0:
|
||||
- Release 1.7.x as a stable series
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
BitBake is licensed under the GNU General Public License version 2.0. See COPYING for further details.
|
||||
|
||||
The following external components are distributed with this software:
|
||||
|
||||
* The Toaster Simple UI application is based upon the Django project template, the files of which are covered by the BSD license and are copyright (c) Django Software
|
||||
Foundation and individual contributors.
|
||||
|
||||
* Twitter Bootstrap (including Glyphicons), redistributed under the MIT license
|
||||
* jQuery is redistributed under the MIT license.
|
||||
|
||||
* Twitter typeahead.js redistributed under the MIT license. Note that the JS source has one small modification, so the full unminified file is currently included to make it obvious where this is.
|
||||
|
||||
* jsrender is redistributed under the MIT license.
|
||||
|
||||
* QUnit is redistributed under the MIT license.
|
||||
53 bitbake/MANIFEST (Normal file)
@@ -0,0 +1,53 @@
|
||||
AUTHORS
|
||||
COPYING
|
||||
ChangeLog
|
||||
MANIFEST
|
||||
setup.py
|
||||
bin/bitdoc
|
||||
bin/bbimage
|
||||
bin/bitbake
|
||||
lib/bb/__init__.py
|
||||
lib/bb/build.py
|
||||
lib/bb/cache.py
|
||||
lib/bb/cooker.py
|
||||
lib/bb/COW.py
|
||||
lib/bb/data.py
|
||||
lib/bb/data_smart.py
|
||||
lib/bb/event.py
|
||||
lib/bb/fetch/__init__.py
|
||||
lib/bb/fetch/bzr.py
|
||||
lib/bb/fetch/cvs.py
|
||||
lib/bb/fetch/git.py
|
||||
lib/bb/fetch/hg.py
|
||||
lib/bb/fetch/local.py
|
||||
lib/bb/fetch/osc.py
|
||||
lib/bb/fetch/perforce.py
|
||||
lib/bb/fetch/ssh.py
|
||||
lib/bb/fetch/svk.py
|
||||
lib/bb/fetch/svn.py
|
||||
lib/bb/fetch/wget.py
|
||||
lib/bb/manifest.py
|
||||
lib/bb/methodpool.py
|
||||
lib/bb/msg.py
|
||||
lib/bb/parse/__init__.py
|
||||
lib/bb/parse/parse_py/__init__.py
|
||||
lib/bb/parse/parse_py/BBHandler.py
|
||||
lib/bb/parse/parse_py/ConfHandler.py
|
||||
lib/bb/persist_data.py
|
||||
lib/bb/providers.py
|
||||
lib/bb/runqueue.py
|
||||
lib/bb/shell.py
|
||||
lib/bb/taskdata.py
|
||||
lib/bb/utils.py
|
||||
setup.py
|
||||
doc/COPYING.GPL
|
||||
doc/COPYING.MIT
|
||||
doc/bitbake.1
|
||||
doc/manual/html.css
|
||||
doc/manual/Makefile
|
||||
doc/manual/usermanual.xml
|
||||
contrib/bbdev.sh
|
||||
contrib/vim/syntax/bitbake.vim
|
||||
contrib/vim/ftdetect/bitbake.vim
|
||||
conf/bitbake.conf
|
||||
classes/base.bbclass
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
@@ -22,35 +22,125 @@
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import os
|
||||
import sys
|
||||
import sys, os, getopt, re, time, optparse
|
||||
sys.path.insert(0,os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
|
||||
import bb
|
||||
from bb import cooker
|
||||
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
|
||||
'lib'))
|
||||
try:
|
||||
import bb
|
||||
except RuntimeError as exc:
|
||||
sys.exit(str(exc))
|
||||
__version__ = "1.8.13"
|
||||
|
||||
from bb import cookerdata
|
||||
from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
|
||||
#============================================================================#
|
||||
# BBOptions
|
||||
#============================================================================#
|
||||
class BBConfiguration( object ):
|
||||
"""
|
||||
Manages build options and configurations for one run
|
||||
"""
|
||||
def __init__( self, options ):
|
||||
for key, val in options.__dict__.items():
|
||||
setattr( self, key, val )
|
||||
|
||||
if sys.getfilesystemencoding() != "utf-8":
|
||||
sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.")
|
||||
|
||||
__version__ = "1.31.1"
|
||||
#============================================================================#
|
||||
# main
|
||||
#============================================================================#
|
||||
|
||||
def main():
|
||||
parser = optparse.OptionParser( version = "BitBake Build Tool Core version %s, %%prog version %s" % ( bb.__version__, __version__ ),
|
||||
usage = """%prog [options] [package ...]
|
||||
|
||||
Executes the specified task (default is 'build') for a given set of BitBake files.
|
||||
It expects that BBFILES is defined, which is a space separated list of files to
|
||||
be executed. BBFILES does support wildcards.
|
||||
Default BBFILES are the .bb files in the current directory.""" )
|
||||
|
||||
parser.add_option( "-b", "--buildfile", help = "execute the task against this .bb file, rather than a package from BBFILES.",
|
||||
action = "store", dest = "buildfile", default = None )
|
||||
|
||||
parser.add_option( "-k", "--continue", help = "continue as much as possible after an error. While the target that failed, and those that depend on it, cannot be remade, the other dependencies of these targets can be processed all the same.",
|
||||
action = "store_false", dest = "abort", default = True )
|
||||
|
||||
parser.add_option( "-a", "--tryaltconfigs", help = "continue with builds by trying to use alternative providers where possible.",
|
||||
action = "store_true", dest = "tryaltconfigs", default = False )
|
||||
|
||||
parser.add_option( "-f", "--force", help = "force run of specified cmd, regardless of stamp status",
|
||||
action = "store_true", dest = "force", default = False )
|
||||
|
||||
parser.add_option( "-i", "--interactive", help = "drop into the interactive mode also called the BitBake shell.",
|
||||
action = "store_true", dest = "interactive", default = False )
|
||||
|
||||
parser.add_option( "-c", "--cmd", help = "Specify task to execute. Note that this only executes the specified task for the providee and the packages it depends on, i.e. 'compile' does not implicitly call stage for the dependencies (IOW: use only if you know what you are doing). Depending on the base.bbclass a listtasks tasks is defined and will show available tasks",
|
||||
action = "store", dest = "cmd" )
|
||||
|
||||
parser.add_option( "-r", "--read", help = "read the specified file before bitbake.conf",
|
||||
action = "append", dest = "file", default = [] )
|
||||
|
||||
parser.add_option( "-v", "--verbose", help = "output more chit-chat to the terminal",
|
||||
action = "store_true", dest = "verbose", default = False )
|
||||
|
||||
parser.add_option( "-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
|
||||
action = "count", dest="debug", default = 0)
|
||||
|
||||
parser.add_option( "-n", "--dry-run", help = "don't execute, just go through the motions",
|
||||
action = "store_true", dest = "dry_run", default = False )
|
||||
|
||||
parser.add_option( "-p", "--parse-only", help = "quit after parsing the BB files (developers only)",
|
||||
action = "store_true", dest = "parse_only", default = False )
|
||||
|
||||
parser.add_option( "-d", "--disable-psyco", help = "disable using the psyco just-in-time compiler (not recommended)",
|
||||
action = "store_true", dest = "disable_psyco", default = False )
|
||||
|
||||
parser.add_option( "-s", "--show-versions", help = "show current and preferred versions of all packages",
|
||||
action = "store_true", dest = "show_versions", default = False )
|
||||
|
||||
parser.add_option( "-e", "--environment", help = "show the global or per-package environment (this is what used to be bbread)",
|
||||
action = "store_true", dest = "show_environment", default = False )
|
||||
|
||||
parser.add_option( "-g", "--graphviz", help = "emit the dependency trees of the specified packages in the dot syntax",
|
||||
action = "store_true", dest = "dot_graph", default = False )
|
||||
|
||||
parser.add_option( "-I", "--ignore-deps", help = """Stop processing at the given list of dependencies when generating dependency graphs. This can help to make the graph more appealing""",
|
||||
action = "append", dest = "ignored_dot_deps", default = [] )
|
||||
|
||||
parser.add_option( "-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
|
||||
action = "append", dest = "debug_domains", default = [] )
|
||||
|
||||
parser.add_option( "-P", "--profile", help = "profile the command and print a report",
|
||||
action = "store_true", dest = "profile", default = False )
|
||||
|
||||
parser.add_option( "", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not",
|
||||
action = "store_true", dest = "revisions_changed", default = False )
|
||||
|
||||
options, args = parser.parse_args(sys.argv)
|
||||
|
||||
configuration = BBConfiguration(options)
|
||||
configuration.pkgs_to_build = []
|
||||
configuration.pkgs_to_build.extend(args[1:])
|
||||
|
||||
cooker = bb.cooker.BBCooker(configuration)
|
||||
|
||||
# Clear away any spurious environment variables. But don't wipe the
|
||||
# environment totally.
|
||||
bb.utils.clean_environment()
|
||||
|
||||
cooker.parseConfiguration()
|
||||
|
||||
if configuration.profile:
|
||||
try:
|
||||
import cProfile as profile
|
||||
except:
|
||||
import profile
|
||||
|
||||
profile.runctx("cooker.cook()", globals(), locals(), "profile.log")
|
||||
import pstats
|
||||
p = pstats.Stats('profile.log')
|
||||
p.sort_stats('time')
|
||||
p.print_stats()
|
||||
p.print_callers()
|
||||
p.sort_stats('cumulative')
|
||||
p.print_stats()
|
||||
else:
|
||||
cooker.cook()
|
||||
|
||||
if __name__ == "__main__":
|
||||
if __version__ != bb.__version__:
|
||||
sys.exit("Bitbake core version and program version mismatch!")
|
||||
try:
|
||||
sys.exit(bitbake_main(BitBakeConfigParameters(sys.argv),
|
||||
cookerdata.CookerConfiguration()))
|
||||
except BBMainException as err:
|
||||
sys.exit(err)
|
||||
except bb.BBHandledException:
|
||||
sys.exit(1)
|
||||
except Exception:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||
main()
|
||||
|
||||
@@ -1,138 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# bitbake-diffsigs
|
||||
# BitBake task signature data comparison utility
|
||||
#
|
||||
# Copyright (C) 2012-2013 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
import fnmatch
|
||||
import optparse
|
||||
import logging
|
||||
import pickle
|
||||
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
|
||||
|
||||
import bb.tinfoil
|
||||
import bb.siggen
|
||||
|
||||
def logger_create(name, output=sys.stderr):
|
||||
logger = logging.getLogger(name)
|
||||
console = logging.StreamHandler(output)
|
||||
format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
|
||||
if output.isatty():
|
||||
format.enable_color()
|
||||
console.setFormatter(format)
|
||||
logger.addHandler(console)
|
||||
logger.setLevel(logging.INFO)
|
||||
return logger
|
||||
|
||||
logger = logger_create('bitbake-diffsigs')
|
||||
|
||||
def find_compare_task(bbhandler, pn, taskname):
|
||||
""" Find the most recent signature files for the specified PN/task and compare them """
|
||||
|
||||
def get_hashval(siginfo):
|
||||
if siginfo.endswith('.siginfo'):
|
||||
return siginfo.rpartition(':')[2].partition('_')[0]
|
||||
else:
|
||||
return siginfo.rpartition('.')[2]
|
||||
|
||||
if not hasattr(bb.siggen, 'find_siginfo'):
|
||||
logger.error('Metadata does not support finding signature data files')
|
||||
sys.exit(1)
|
||||
|
||||
if not taskname.startswith('do_'):
|
||||
taskname = 'do_%s' % taskname
|
||||
|
||||
filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data)
|
||||
latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-3:]
|
||||
if not latestfiles:
|
||||
logger.error('No sigdata files found matching %s %s' % (pn, taskname))
|
||||
sys.exit(1)
|
||||
elif len(latestfiles) < 2:
|
||||
logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (pn, taskname))
|
||||
sys.exit(1)
|
||||
else:
|
||||
# It's possible that latestfiles contain 3 elements and the first two have the same hash value.
|
||||
# In this case, we delete the second element.
|
||||
# The above case is actually the most common one. Because we may have sigdata file and siginfo
|
||||
# file having the same hash value. Comparing such two files makes no sense.
|
||||
if len(latestfiles) == 3:
|
||||
hash0 = get_hashval(latestfiles[0])
|
||||
hash1 = get_hashval(latestfiles[1])
|
||||
if hash0 == hash1:
|
||||
latestfiles.pop(1)
|
||||
|
||||
# Define recursion callback
|
||||
def recursecb(key, hash1, hash2):
|
||||
hashes = [hash1, hash2]
|
||||
hashfiles = bb.siggen.find_siginfo(key, None, hashes, bbhandler.config_data)
|
||||
|
||||
recout = []
|
||||
if len(hashfiles) == 2:
|
||||
out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb)
|
||||
recout.extend(list(' ' + l for l in out2))
|
||||
else:
|
||||
recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
|
||||
|
||||
return recout
|
||||
|
||||
# Recurse into signature comparison
|
||||
output = bb.siggen.compare_sigfiles(latestfiles[0], latestfiles[1], recursecb)
|
||||
if output:
|
||||
print('\n'.join(output))
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
|
||||
parser = optparse.OptionParser(
|
||||
description = "Compares siginfo/sigdata files written out by BitBake",
|
||||
usage = """
|
||||
%prog -t recipename taskname
|
||||
%prog sigdatafile1 sigdatafile2
|
||||
%prog sigdatafile1""")
|
||||
|
||||
parser.add_option("-t", "--task",
|
||||
help = "find the signature data files for last two runs of the specified task and compare them",
|
||||
action="store", dest="taskargs", nargs=2, metavar='recipename taskname')
|
||||
|
||||
options, args = parser.parse_args(sys.argv)
|
||||
|
||||
if options.taskargs:
|
||||
with bb.tinfoil.Tinfoil() as tinfoil:
|
||||
tinfoil.prepare(config_only=True)
|
||||
find_compare_task(tinfoil, options.taskargs[0], options.taskargs[1])
|
||||
else:
|
||||
if len(args) == 1:
|
||||
parser.print_help()
|
||||
else:
|
||||
try:
|
||||
if len(args) == 2:
|
||||
output = bb.siggen.dump_sigfile(sys.argv[1])
|
||||
else:
|
||||
output = bb.siggen.compare_sigfiles(sys.argv[1], sys.argv[2])
|
||||
except IOError as e:
|
||||
logger.error(str(e))
|
||||
sys.exit(1)
|
||||
except (pickle.UnpicklingError, EOFError):
|
||||
logger.error('Invalid signature data - ensure you are specifying sigdata/siginfo files')
|
||||
sys.exit(1)
|
||||
|
||||
if output:
|
||||
print('\n'.join(output))
|
||||
@@ -1,65 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# bitbake-dumpsig
|
||||
# BitBake task signature dump utility
|
||||
#
|
||||
# Copyright (C) 2013 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
import optparse
|
||||
import logging
|
||||
import pickle
|
||||
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
|
||||
|
||||
import bb.siggen
|
||||
|
||||
def logger_create(name, output=sys.stderr):
|
||||
logger = logging.getLogger(name)
|
||||
console = logging.StreamHandler(output)
|
||||
format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
|
||||
if output.isatty():
|
||||
format.enable_color()
|
||||
console.setFormatter(format)
|
||||
logger.addHandler(console)
|
||||
logger.setLevel(logging.INFO)
|
||||
return logger
|
||||
|
||||
logger = logger_create('bitbake-dumpsig')
|
||||
|
||||
parser = optparse.OptionParser(
|
||||
description = "Dumps siginfo/sigdata files written out by BitBake",
|
||||
usage = """
|
||||
%prog sigdatafile""")
|
||||
|
||||
options, args = parser.parse_args(sys.argv)
|
||||
|
||||
if len(args) == 1:
|
||||
parser.print_help()
|
||||
else:
|
||||
try:
|
||||
output = bb.siggen.dump_sigfile(args[1])
|
||||
except IOError as e:
|
||||
logger.error(str(e))
|
||||
sys.exit(1)
|
||||
except (pickle.UnpicklingError, EOFError):
|
||||
logger.error('Invalid signature data - ensure you are specifying a sigdata/siginfo file')
|
||||
sys.exit(1)
|
||||
|
||||
if output:
|
||||
print('\n'.join(output))
|
||||
@@ -1,129 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# This script has subcommands which operate against your bitbake layers, either
|
||||
# displaying useful information, or acting against them.
|
||||
# See the help output for details on available commands.
|
||||
|
||||
# Copyright (C) 2011 Mentor Graphics Corporation
|
||||
# Copyright (C) 2011-2015 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import argparse
|
||||
|
||||
bindir = os.path.dirname(__file__)
|
||||
topdir = os.path.dirname(bindir)
|
||||
sys.path[0:0] = [os.path.join(topdir, 'lib')]
|
||||
|
||||
import bb.tinfoil
|
||||
|
||||
|
||||
def tinfoil_init(parserecipes):
|
||||
import bb.tinfoil
|
||||
tinfoil = bb.tinfoil.Tinfoil(tracking=True)
|
||||
tinfoil.prepare(not parserecipes)
|
||||
tinfoil.logger.setLevel(logger.getEffectiveLevel())
|
||||
return tinfoil
|
||||
|
||||
|
||||
def logger_create(name, output=sys.stderr):
|
||||
logger = logging.getLogger(name)
|
||||
loggerhandler = logging.StreamHandler(output)
|
||||
loggerhandler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
|
||||
logger.addHandler(loggerhandler)
|
||||
logger.setLevel(logging.INFO)
|
||||
return logger
|
||||
|
||||
def logger_setup_color(logger, color='auto'):
|
||||
from bb.msg import BBLogFormatter
|
||||
console = logging.StreamHandler(sys.stdout)
|
||||
formatter = BBLogFormatter("%(levelname)s: %(message)s")
|
||||
console.setFormatter(formatter)
|
||||
logger.handlers = [console]
|
||||
if color == 'always' or (color == 'auto' and console.stream.isatty()):
|
||||
formatter.enable_color()
|
||||
|
||||
|
||||
logger = logger_create('bitbake-layers', sys.stdout)
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="BitBake layers utility",
|
||||
epilog="Use %(prog)s <subcommand> --help to get help on a specific command",
|
||||
add_help=False)
|
||||
parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
|
||||
parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
|
||||
parser.add_argument('--color', choices=['auto', 'always', 'never'], default='auto', help='Colorize output (where %(metavar)s is %(choices)s)', metavar='COLOR')
|
||||
|
||||
global_args, unparsed_args = parser.parse_known_args()
|
||||
|
||||
# Help is added here rather than via add_help=True, as we don't want it to
|
||||
# be handled by parse_known_args()
|
||||
parser.add_argument('-h', '--help', action='help', default=argparse.SUPPRESS,
|
||||
help='show this help message and exit')
|
||||
subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
|
||||
subparsers.required = True
|
||||
|
||||
if global_args.debug:
|
||||
logger.setLevel(logging.DEBUG)
|
||||
elif global_args.quiet:
|
||||
logger.setLevel(logging.ERROR)
|
||||
|
||||
logger_setup_color(logger, global_args.color)
|
||||
|
||||
plugins = []
|
||||
tinfoil = tinfoil_init(False)
|
||||
try:
|
||||
for path in ([topdir] +
|
||||
tinfoil.config_data.getVar('BBPATH', True).split(':')):
|
||||
pluginpath = os.path.join(path, 'lib', 'bblayers')
|
||||
bb.utils.load_plugins(logger, plugins, pluginpath)
|
||||
|
||||
registered = False
|
||||
for plugin in plugins:
|
||||
if hasattr(plugin, 'register_commands'):
|
||||
registered = True
|
||||
plugin.register_commands(subparsers)
|
||||
if hasattr(plugin, 'tinfoil_init'):
|
||||
plugin.tinfoil_init(tinfoil)
|
||||
|
||||
if not registered:
|
||||
logger.error("No commands registered - missing plugins?")
|
||||
sys.exit(1)
|
||||
|
||||
args = parser.parse_args(unparsed_args, namespace=global_args)
|
||||
|
||||
if getattr(args, 'parserecipes', False):
|
||||
tinfoil.config_data.disableTracking()
|
||||
tinfoil.parseRecipes()
|
||||
tinfoil.config_data.enableTracking()
|
||||
|
||||
return args.func(args)
|
||||
finally:
|
||||
tinfoil.shutdown()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
ret = main()
|
||||
except bb.BBHandledException:
|
||||
ret = 1
|
||||
except Exception:
|
||||
ret = 1
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
sys.exit(ret)
|
||||
@@ -1,55 +0,0 @@
#!/usr/bin/env python3
import os
import sys,logging
import optparse

sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),'lib'))

import prserv
import prserv.serv

__version__="1.0.0"

PRHOST_DEFAULT='0.0.0.0'
PRPORT_DEFAULT=8585

def main():
    parser = optparse.OptionParser(
        version="Bitbake PR Service Core version %s, %%prog version %s" % (prserv.__version__, __version__),
        usage = "%prog < --start | --stop > [options]")

    parser.add_option("-f", "--file", help="database filename(default: prserv.sqlite3)", action="store",
                      dest="dbfile", type="string", default="prserv.sqlite3")
    parser.add_option("-l", "--log", help="log filename(default: prserv.log)", action="store",
                      dest="logfile", type="string", default="prserv.log")
    parser.add_option("--loglevel", help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG",
                      action = "store", type="string", dest="loglevel", default = "INFO")
    parser.add_option("--start", help="start daemon",
                      action="store_true", dest="start")
    parser.add_option("--stop", help="stop daemon",
                      action="store_true", dest="stop")
    parser.add_option("--host", help="ip address to bind", action="store",
                      dest="host", type="string", default=PRHOST_DEFAULT)
    parser.add_option("--port", help="port number(default: 8585)", action="store",
                      dest="port", type="int", default=PRPORT_DEFAULT)

    options, args = parser.parse_args(sys.argv)
    prserv.init_logger(os.path.abspath(options.logfile),options.loglevel)

    if options.start:
        ret=prserv.serv.start_daemon(options.dbfile, options.host, options.port,os.path.abspath(options.logfile))
    elif options.stop:
        ret=prserv.serv.stop_daemon(options.host, options.port)
    else:
        ret=parser.print_help()
    return ret

if __name__ == "__main__":
    try:
        ret = main()
    except Exception:
        ret = 1
        import traceback
        traceback.print_exc()
    sys.exit(ret)

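For reference, the option parser shown above implies the standalone PR server was driven roughly as follows; this is only a sketch based on the defaults in the script (host, port, database and log paths are illustrative, not prescribed anywhere in this change):

    $ bitbake-prserv --start --host 0.0.0.0 --port 8585 --file prserv.sqlite3 --log prserv.log
    $ bitbake-prserv --stop --host 0.0.0.0 --port 8585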
@@ -1,72 +0,0 @@
#!/usr/bin/env python3
#
# Copyright (C) 2012 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys, logging
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))

import unittest
try:
    import bb
except RuntimeError as exc:
    sys.exit(str(exc))

tests = ["bb.tests.codeparser",
         "bb.tests.cow",
         "bb.tests.data",
         "bb.tests.fetch",
         "bb.tests.parse",
         "bb.tests.utils"]

for t in tests:
    t = '.'.join(t.split('.')[:3])
    __import__(t)


# Set-up logging
class StdoutStreamHandler(logging.StreamHandler):
    """Special handler so that unittest is able to capture stdout"""
    def __init__(self):
        # Override __init__() because we don't want to set self.stream here
        logging.Handler.__init__(self)

    @property
    def stream(self):
        # We want to dynamically write wherever sys.stdout is pointing to
        return sys.stdout


handler = StdoutStreamHandler()
bb.logger.addHandler(handler)
bb.logger.setLevel(logging.DEBUG)


ENV_HELP = """\
Environment variables:
  BB_SKIP_NETTESTS      set to 'yes' in order to skip tests using network
                        connection
  BB_TMPDIR_NOCLEAN     set to 'yes' to preserve test tmp directories
"""

class main(unittest.main):
    def _print_help(self, *args, **kwargs):
        super(main, self)._print_help(*args, **kwargs)
        print(ENV_HELP)


if __name__ == '__main__':
    main(defaultTest=tests, buffer=True)
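As the ENV_HELP text above indicates, this selftest runner behaves like an ordinary unittest entry point; a sketch of typical invocations, assuming it is run from a bitbake checkout (the module name and environment settings here are illustrative):

    $ BB_SKIP_NETTESTS=yes ./bin/bitbake-selftest
    $ ./bin/bitbake-selftest bb.tests.fetch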
@@ -1,464 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
|
||||
from bb import fetch2
|
||||
import logging
|
||||
import bb
|
||||
import select
|
||||
import errno
|
||||
import signal
|
||||
import pickle
|
||||
from multiprocessing import Lock
|
||||
|
||||
if sys.getfilesystemencoding() != "utf-8":
|
||||
sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.")
|
||||
|
||||
# Users shouldn't be running this code directly
|
||||
if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
|
||||
print("bitbake-worker is meant for internal execution by bitbake itself, please don't use it standalone.")
|
||||
sys.exit(1)
|
||||
|
||||
profiling = False
|
||||
if sys.argv[1].startswith("decafbadbad"):
|
||||
profiling = True
|
||||
try:
|
||||
import cProfile as profile
|
||||
except:
|
||||
import profile
|
||||
|
||||
# Unbuffer stdout to avoid log truncation in the event
|
||||
# of an unorderly exit as well as to provide timely
|
||||
# updates to log files for use with tail
|
||||
try:
|
||||
if sys.stdout.name == '<stdout>':
|
||||
import fcntl
|
||||
fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL)
|
||||
fl |= os.O_SYNC
|
||||
fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl)
|
||||
#sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
|
||||
except:
|
||||
pass
|
||||
|
||||
logger = logging.getLogger("BitBake")
|
||||
|
||||
worker_pipe = sys.stdout.fileno()
|
||||
bb.utils.nonblockingfd(worker_pipe)
|
||||
# Need to guard against multiprocessing being used in child processes
|
||||
# and multiple processes trying to write to the parent at the same time
|
||||
worker_pipe_lock = None
|
||||
|
||||
handler = bb.event.LogHandler()
|
||||
logger.addHandler(handler)
|
||||
|
||||
if 0:
|
||||
# Code to write out a log file of all events passing through the worker
|
||||
logfilename = "/tmp/workerlogfile"
|
||||
format_str = "%(levelname)s: %(message)s"
|
||||
conlogformat = bb.msg.BBLogFormatter(format_str)
|
||||
consolelog = logging.FileHandler(logfilename)
|
||||
bb.msg.addDefaultlogFilter(consolelog)
|
||||
consolelog.setFormatter(conlogformat)
|
||||
logger.addHandler(consolelog)
|
||||
|
||||
worker_queue = b""
|
||||
|
||||
def worker_fire(event, d):
|
||||
data = b"<event>" + pickle.dumps(event) + b"</event>"
|
||||
worker_fire_prepickled(data)
|
||||
|
||||
def worker_fire_prepickled(event):
|
||||
global worker_queue
|
||||
|
||||
worker_queue = worker_queue + event
|
||||
worker_flush()
|
||||
|
||||
def worker_flush():
|
||||
global worker_queue, worker_pipe
|
||||
|
||||
if not worker_queue:
|
||||
return
|
||||
|
||||
try:
|
||||
written = os.write(worker_pipe, worker_queue)
|
||||
worker_queue = worker_queue[written:]
|
||||
except (IOError, OSError) as e:
|
||||
if e.errno != errno.EAGAIN and e.errno != errno.EPIPE:
|
||||
raise
|
||||
|
||||
def worker_child_fire(event, d):
|
||||
global worker_pipe
|
||||
global worker_pipe_lock
|
||||
|
||||
data = b"<event>" + pickle.dumps(event) + b"</event>"
|
||||
try:
|
||||
worker_pipe_lock.acquire()
|
||||
worker_pipe.write(data)
|
||||
worker_pipe_lock.release()
|
||||
except IOError:
|
||||
sigterm_handler(None, None)
|
||||
raise
|
||||
|
||||
bb.event.worker_fire = worker_fire
|
||||
|
||||
lf = None
|
||||
#lf = open("/tmp/workercommandlog", "w+")
|
||||
def workerlog_write(msg):
|
||||
if lf:
|
||||
lf.write(msg)
|
||||
lf.flush()
|
||||
|
||||
def sigterm_handler(signum, frame):
|
||||
signal.signal(signal.SIGTERM, signal.SIG_DFL)
|
||||
os.killpg(0, signal.SIGTERM)
|
||||
sys.exit()
|
||||
|
||||
def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, appends, taskdepdata, quieterrors=False):
|
||||
# We need to setup the environment BEFORE the fork, since
|
||||
# a fork() or exec*() activates PSEUDO...
|
||||
|
||||
envbackup = {}
|
||||
fakeenv = {}
|
||||
umask = None
|
||||
|
||||
taskdep = workerdata["taskdeps"][fn]
|
||||
if 'umask' in taskdep and taskname in taskdep['umask']:
|
||||
# umask might come in as a number or text string..
|
||||
try:
|
||||
umask = int(taskdep['umask'][taskname],8)
|
||||
except TypeError:
|
||||
umask = taskdep['umask'][taskname]
|
||||
|
||||
# We can't use the fakeroot environment in a dry run as it possibly hasn't been built
|
||||
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not cfg.dry_run:
|
||||
envvars = (workerdata["fakerootenv"][fn] or "").split()
|
||||
for key, value in (var.split('=') for var in envvars):
|
||||
envbackup[key] = os.environ.get(key)
|
||||
os.environ[key] = value
|
||||
fakeenv[key] = value
|
||||
|
||||
fakedirs = (workerdata["fakerootdirs"][fn] or "").split()
|
||||
for p in fakedirs:
|
||||
bb.utils.mkdirhier(p)
|
||||
logger.debug(2, 'Running %s:%s under fakeroot, fakedirs: %s' %
|
||||
(fn, taskname, ', '.join(fakedirs)))
|
||||
else:
|
||||
envvars = (workerdata["fakerootnoenv"][fn] or "").split()
|
||||
for key, value in (var.split('=') for var in envvars):
|
||||
envbackup[key] = os.environ.get(key)
|
||||
os.environ[key] = value
|
||||
fakeenv[key] = value
|
||||
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
|
||||
try:
|
||||
pipein, pipeout = os.pipe()
|
||||
pipein = os.fdopen(pipein, 'rb', 4096)
|
||||
pipeout = os.fdopen(pipeout, 'wb', 0)
|
||||
pid = os.fork()
|
||||
except OSError as e:
|
||||
logger.critical("fork failed: %d (%s)" % (e.errno, e.strerror))
|
||||
sys.exit(1)
|
||||
|
||||
if pid == 0:
|
||||
def child():
|
||||
global worker_pipe
|
||||
global worker_pipe_lock
|
||||
pipein.close()
|
||||
|
||||
signal.signal(signal.SIGTERM, sigterm_handler)
|
||||
# Let SIGHUP exit as SIGTERM
|
||||
signal.signal(signal.SIGHUP, sigterm_handler)
|
||||
bb.utils.signal_on_parent_exit("SIGTERM")
|
||||
|
||||
# Save out the PID so that the event can include it the
|
||||
# events
|
||||
bb.event.worker_pid = os.getpid()
|
||||
bb.event.worker_fire = worker_child_fire
|
||||
worker_pipe = pipeout
|
||||
worker_pipe_lock = Lock()
|
||||
|
||||
# Make the child the process group leader and ensure no
|
||||
# child process will be controlled by the current terminal
|
||||
# This ensures signals sent to the controlling terminal like Ctrl+C
|
||||
# don't stop the child processes.
|
||||
os.setsid()
|
||||
# No stdin
|
||||
newsi = os.open(os.devnull, os.O_RDWR)
|
||||
os.dup2(newsi, sys.stdin.fileno())
|
||||
|
||||
if umask:
|
||||
os.umask(umask)
|
||||
|
||||
try:
|
||||
bb_cache = bb.cache.NoCache(databuilder)
|
||||
(realfn, virtual, mc) = bb.cache.virtualfn2realfn(fn)
|
||||
the_data = databuilder.mcdata[mc]
|
||||
the_data.setVar("BB_WORKERCONTEXT", "1")
|
||||
the_data.setVar("BB_TASKDEPDATA", taskdepdata)
|
||||
the_data.setVar("BUILDNAME", workerdata["buildname"])
|
||||
the_data.setVar("DATE", workerdata["date"])
|
||||
the_data.setVar("TIME", workerdata["time"])
|
||||
bb.parse.siggen.set_taskdata(workerdata["sigdata"])
|
||||
ret = 0
|
||||
|
||||
the_data = bb_cache.loadDataFull(fn, appends)
|
||||
the_data.setVar('BB_TASKHASH', workerdata["runq_hash"][task])
|
||||
|
||||
bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN", True), taskname.replace("do_", "")))
|
||||
|
||||
# exported_vars() returns a generator which *cannot* be passed to os.environ.update()
|
||||
# successfully. We also need to unset anything from the environment which shouldn't be there
|
||||
exports = bb.data.exported_vars(the_data)
|
||||
|
||||
bb.utils.empty_environment()
|
||||
for e, v in exports:
|
||||
os.environ[e] = v
|
||||
|
||||
for e in fakeenv:
|
||||
os.environ[e] = fakeenv[e]
|
||||
the_data.setVar(e, fakeenv[e])
|
||||
the_data.setVarFlag(e, 'export', "1")
|
||||
|
||||
task_exports = the_data.getVarFlag(taskname, 'exports', True)
|
||||
if task_exports:
|
||||
for e in task_exports.split():
|
||||
the_data.setVarFlag(e, 'export', '1')
|
||||
v = the_data.getVar(e, True)
|
||||
if v is not None:
|
||||
os.environ[e] = v
|
||||
|
||||
if quieterrors:
|
||||
the_data.setVarFlag(taskname, "quieterrors", "1")
|
||||
|
||||
except Exception as exc:
|
||||
if not quieterrors:
|
||||
logger.critical(str(exc))
|
||||
os._exit(1)
|
||||
try:
|
||||
if cfg.dry_run:
|
||||
return 0
|
||||
return bb.build.exec_task(fn, taskname, the_data, cfg.profile)
|
||||
except:
|
||||
os._exit(1)
|
||||
if not profiling:
|
||||
os._exit(child())
|
||||
else:
|
||||
profname = "profile-%s.log" % (fn.replace("/", "-") + "-" + taskname)
|
||||
prof = profile.Profile()
|
||||
try:
|
||||
ret = profile.Profile.runcall(prof, child)
|
||||
finally:
|
||||
prof.dump_stats(profname)
|
||||
bb.utils.process_profilelog(profname)
|
||||
os._exit(ret)
|
||||
else:
|
||||
for key, value in iter(envbackup.items()):
|
||||
if value is None:
|
||||
del os.environ[key]
|
||||
else:
|
||||
os.environ[key] = value
|
||||
|
||||
return pid, pipein, pipeout
|
||||
|
||||
class runQueueWorkerPipe():
|
||||
"""
|
||||
Abstraction for a pipe between a worker thread and the worker server
|
||||
"""
|
||||
def __init__(self, pipein, pipeout):
|
||||
self.input = pipein
|
||||
if pipeout:
|
||||
pipeout.close()
|
||||
bb.utils.nonblockingfd(self.input)
|
||||
self.queue = b""
|
||||
|
||||
def read(self):
|
||||
start = len(self.queue)
|
||||
try:
|
||||
self.queue = self.queue + (self.input.read(102400) or b"")
|
||||
except (OSError, IOError) as e:
|
||||
if e.errno != errno.EAGAIN:
|
||||
raise
|
||||
|
||||
end = len(self.queue)
|
||||
index = self.queue.find(b"</event>")
|
||||
while index != -1:
|
||||
worker_fire_prepickled(self.queue[:index+8])
|
||||
self.queue = self.queue[index+8:]
|
||||
index = self.queue.find(b"</event>")
|
||||
return (end > start)
|
||||
|
||||
def close(self):
|
||||
while self.read():
|
||||
continue
|
||||
if len(self.queue) > 0:
|
||||
print("Warning, worker child left partial message: %s" % self.queue)
|
||||
self.input.close()
|
||||
|
||||
normalexit = False
|
||||
|
||||
class BitbakeWorker(object):
|
||||
def __init__(self, din):
|
||||
self.input = din
|
||||
bb.utils.nonblockingfd(self.input)
|
||||
self.queue = b""
|
||||
self.cookercfg = None
|
||||
self.databuilder = None
|
||||
self.data = None
|
||||
self.build_pids = {}
|
||||
self.build_pipes = {}
|
||||
|
||||
signal.signal(signal.SIGTERM, self.sigterm_exception)
|
||||
# Let SIGHUP exit as SIGTERM
|
||||
signal.signal(signal.SIGHUP, self.sigterm_exception)
|
||||
if "beef" in sys.argv[1]:
|
||||
bb.utils.set_process_name("Worker (Fakeroot)")
|
||||
else:
|
||||
bb.utils.set_process_name("Worker")
|
||||
|
||||
def sigterm_exception(self, signum, stackframe):
|
||||
if signum == signal.SIGTERM:
|
||||
bb.warn("Worker received SIGTERM, shutting down...")
|
||||
elif signum == signal.SIGHUP:
|
||||
bb.warn("Worker received SIGHUP, shutting down...")
|
||||
self.handle_finishnow(None)
|
||||
signal.signal(signal.SIGTERM, signal.SIG_DFL)
|
||||
os.kill(os.getpid(), signal.SIGTERM)
|
||||
|
||||
def serve(self):
|
||||
while True:
|
||||
(ready, _, _) = select.select([self.input] + [i.input for i in self.build_pipes.values()], [] , [], 1)
|
||||
if self.input in ready:
|
||||
try:
|
||||
r = self.input.read()
|
||||
if len(r) == 0:
|
||||
# EOF on pipe, server must have terminated
|
||||
self.sigterm_exception(signal.SIGTERM, None)
|
||||
self.queue = self.queue + r
|
||||
except (OSError, IOError):
|
||||
pass
|
||||
if len(self.queue):
|
||||
self.handle_item(b"cookerconfig", self.handle_cookercfg)
|
||||
self.handle_item(b"workerdata", self.handle_workerdata)
|
||||
self.handle_item(b"runtask", self.handle_runtask)
|
||||
self.handle_item(b"finishnow", self.handle_finishnow)
|
||||
self.handle_item(b"ping", self.handle_ping)
|
||||
self.handle_item(b"quit", self.handle_quit)
|
||||
|
||||
for pipe in self.build_pipes:
|
||||
self.build_pipes[pipe].read()
|
||||
if len(self.build_pids):
|
||||
self.process_waitpid()
|
||||
worker_flush()
|
||||
|
||||
|
||||
def handle_item(self, item, func):
|
||||
if self.queue.startswith(b"<" + item + b">"):
|
||||
index = self.queue.find(b"</" + item + b">")
|
||||
while index != -1:
|
||||
func(self.queue[(len(item) + 2):index])
|
||||
self.queue = self.queue[(index + len(item) + 3):]
|
||||
index = self.queue.find(b"</" + item + b">")
|
||||
|
||||
def handle_cookercfg(self, data):
|
||||
self.cookercfg = pickle.loads(data)
|
||||
self.databuilder = bb.cookerdata.CookerDataBuilder(self.cookercfg, worker=True)
|
||||
self.databuilder.parseBaseConfiguration()
|
||||
self.data = self.databuilder.data
|
||||
|
||||
def handle_workerdata(self, data):
|
||||
self.workerdata = pickle.loads(data)
|
||||
bb.msg.loggerDefaultDebugLevel = self.workerdata["logdefaultdebug"]
|
||||
bb.msg.loggerDefaultVerbose = self.workerdata["logdefaultverbose"]
|
||||
bb.msg.loggerVerboseLogs = self.workerdata["logdefaultverboselogs"]
|
||||
bb.msg.loggerDefaultDomains = self.workerdata["logdefaultdomain"]
|
||||
for mc in self.databuilder.mcdata:
|
||||
self.databuilder.mcdata[mc].setVar("PRSERV_HOST", self.workerdata["prhost"])
|
||||
|
||||
def handle_ping(self, _):
|
||||
workerlog_write("Handling ping\n")
|
||||
|
||||
logger.warning("Pong from bitbake-worker!")
|
||||
|
||||
def handle_quit(self, data):
|
||||
workerlog_write("Handling quit\n")
|
||||
|
||||
global normalexit
|
||||
normalexit = True
|
||||
sys.exit(0)
|
||||
|
||||
def handle_runtask(self, data):
|
||||
fn, task, taskname, quieterrors, appends, taskdepdata = pickle.loads(data)
|
||||
workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
|
||||
|
||||
pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, appends, taskdepdata, quieterrors)
|
||||
|
||||
self.build_pids[pid] = task
|
||||
self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)
|
||||
|
||||
def process_waitpid(self):
|
||||
"""
|
||||
Return none is there are no processes awaiting result collection, otherwise
|
||||
collect the process exit codes and close the information pipe.
|
||||
"""
|
||||
try:
|
||||
pid, status = os.waitpid(-1, os.WNOHANG)
|
||||
if pid == 0 or os.WIFSTOPPED(status):
|
||||
return None
|
||||
except OSError:
|
||||
return None
|
||||
|
||||
workerlog_write("Exit code of %s for pid %s\n" % (status, pid))
|
||||
|
||||
if os.WIFEXITED(status):
|
||||
status = os.WEXITSTATUS(status)
|
||||
elif os.WIFSIGNALED(status):
|
||||
# Per shell conventions for $?, when a process exits due to
|
||||
# a signal, we return an exit code of 128 + SIGNUM
|
||||
status = 128 + os.WTERMSIG(status)
|
||||
|
||||
task = self.build_pids[pid]
|
||||
del self.build_pids[pid]
|
||||
|
||||
self.build_pipes[pid].close()
|
||||
del self.build_pipes[pid]
|
||||
|
||||
worker_fire_prepickled(b"<exitcode>" + pickle.dumps((task, status)) + b"</exitcode>")
|
||||
|
||||
def handle_finishnow(self, _):
|
||||
if self.build_pids:
|
||||
logger.info("Sending SIGTERM to remaining %s tasks", len(self.build_pids))
|
||||
for k, v in iter(self.build_pids.items()):
|
||||
try:
|
||||
os.kill(-k, signal.SIGTERM)
|
||||
os.waitpid(-1, 0)
|
||||
except:
|
||||
pass
|
||||
for pipe in self.build_pipes:
|
||||
self.build_pipes[pipe].read()
|
||||
|
||||
try:
|
||||
worker = BitbakeWorker(os.fdopen(sys.stdin.fileno(), 'rb'))
|
||||
if not profiling:
|
||||
worker.serve()
|
||||
else:
|
||||
profname = "profile-worker.log"
|
||||
prof = profile.Profile()
|
||||
try:
|
||||
profile.Profile.runcall(prof, worker.serve)
|
||||
finally:
|
||||
prof.dump_stats(profname)
|
||||
bb.utils.process_profilelog(profname)
|
||||
except BaseException as e:
|
||||
if not normalexit:
|
||||
import traceback
|
||||
sys.stderr.write(traceback.format_exc())
|
||||
sys.stderr.write(str(e))
|
||||
while len(worker_queue):
|
||||
worker_flush()
|
||||
workerlog_write("exitting")
|
||||
sys.exit(0)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
@@ -20,7 +20,7 @@
|
||||
import optparse, os, sys
|
||||
|
||||
# bitbake
|
||||
sys.path.append(os.path.join(os.path.dirname(os.path.dirname(__file__), 'lib'))
|
||||
sys.path.append(os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
|
||||
import bb
|
||||
import bb.parse
|
||||
from string import split, join
|
||||
@@ -48,7 +48,7 @@ class HTMLFormatter:
|
||||
From pydoc... almost identical at least
|
||||
"""
|
||||
while pairs:
|
||||
(a, b) = pairs[0]
|
||||
(a,b) = pairs[0]
|
||||
text = join(split(text, a), b)
|
||||
pairs = pairs[1:]
|
||||
return text
|
||||
@@ -87,7 +87,7 @@ class HTMLFormatter:
|
||||
|
||||
return txt + ",".join(txts)
|
||||
|
||||
def groups(self, item):
|
||||
def groups(self,item):
|
||||
"""
|
||||
Create HTML to link to related groups
|
||||
"""
|
||||
@@ -99,12 +99,12 @@ class HTMLFormatter:
|
||||
txt = "<p><b>See also:</b><br>"
|
||||
txts = []
|
||||
for group in item.groups():
|
||||
txts.append( """<a href="group%s.html">%s</a> """ % (group, group) )
|
||||
txts.append( """<a href="group%s.html">%s</a> """ % (group,group) )
|
||||
|
||||
return txt + ",".join(txts)
|
||||
|
||||
|
||||
def createKeySite(self, item):
|
||||
def createKeySite(self,item):
|
||||
"""
|
||||
Create a site for a key. It contains the header/navigator, a heading,
|
||||
the description, links to related keys and to the groups.
|
||||
@@ -149,7 +149,8 @@ class HTMLFormatter:
|
||||
"""
|
||||
|
||||
groups = ""
|
||||
sorted_groups = sorted(doc.groups())
|
||||
sorted_groups = doc.groups()
|
||||
sorted_groups.sort()
|
||||
for group in sorted_groups:
|
||||
groups += """<a href="group%s.html">%s</a><br>""" % (group, group)
|
||||
|
||||
@@ -184,7 +185,8 @@ class HTMLFormatter:
|
||||
Create Overview of all avilable keys
|
||||
"""
|
||||
keys = ""
|
||||
sorted_keys = sorted(doc.doc_keys())
|
||||
sorted_keys = doc.doc_keys()
|
||||
sorted_keys.sort()
|
||||
for key in sorted_keys:
|
||||
keys += """<a href="key%s.html">%s</a><br>""" % (key, key)
|
||||
|
||||
@@ -212,7 +214,7 @@ class HTMLFormatter:
|
||||
description += "<h2 Description of Grozp %s</h2>" % gr
|
||||
description += _description
|
||||
|
||||
items.sort(lambda x, y:cmp(x.name(), y.name()))
|
||||
items.sort(lambda x,y:cmp(x.name(),y.name()))
|
||||
for group in items:
|
||||
groups += """<a href="key%s.html">%s</a><br>""" % (group.name(), group.name())
|
||||
|
||||
@@ -341,7 +343,7 @@ class DocumentationItem:
|
||||
def addGroup(self, group):
|
||||
self._groups.append(group)
|
||||
|
||||
def addRelation(self, relation):
|
||||
def addRelation(self,relation):
|
||||
self._related.append(relation)
|
||||
|
||||
def sort(self):
|
||||
@@ -394,7 +396,7 @@ class Documentation:
|
||||
"""
|
||||
return self.__groups.keys()
|
||||
|
||||
def group_content(self, group_name):
|
||||
def group_content(self,group_name):
|
||||
"""
|
||||
Return a list of keys/names that are in a specefic
|
||||
group or the empty list
|
||||
@@ -410,7 +412,7 @@ def parse_cmdline(args):
|
||||
Parse the CMD line and return the result as a n-tuple
|
||||
"""
|
||||
|
||||
parser = optparse.OptionParser( version = "Bitbake Documentation Tool Core version %s, %%prog version %s" % (bb.__version__, __version__))
|
||||
parser = optparse.OptionParser( version = "Bitbake Documentation Tool Core version %s, %%prog version %s" % (bb.__version__,__version__))
|
||||
usage = """%prog [options]
|
||||
|
||||
Create a set of html pages (documentation) for a bitbake.conf....
|
||||
@@ -426,12 +428,13 @@ Create a set of html pages (documentation) for a bitbake.conf....
|
||||
parser.add_option( "-D", "--debug", help = "Increase the debug level",
|
||||
action = "count", dest = "debug", default = 0 )
|
||||
|
||||
parser.add_option( "-v", "--verbose", help = "output more chit-char to the terminal",
|
||||
parser.add_option( "-v","--verbose", help = "output more chit-char to the terminal",
|
||||
action = "store_true", dest = "verbose", default = False )
|
||||
|
||||
options, args = parser.parse_args( sys.argv )
|
||||
|
||||
bb.msg.init_msgconfig(options.verbose, options.debug)
|
||||
|
||||
if options.debug:
|
||||
bb.msg.set_debug_level(options.debug)
|
||||
|
||||
return options.config, options.output
|
||||
|
||||
@@ -440,7 +443,7 @@ def main():
|
||||
The main Method
|
||||
"""
|
||||
|
||||
(config_file, output_dir) = parse_cmdline( sys.argv )
|
||||
(config_file,output_dir) = parse_cmdline( sys.argv )
|
||||
|
||||
# right to let us load the file now
|
||||
try:
|
||||
@@ -450,8 +453,6 @@ def main():
|
||||
except bb.parse.ParseError:
|
||||
bb.fatal( "Unable to parse %s" % config_file )
|
||||
|
||||
if isinstance(documentation, dict):
|
||||
documentation = documentation[""]
|
||||
|
||||
# Assuming we've the file loaded now, we will initialize the 'tree'
|
||||
doc = Documentation()
|
||||
@@ -462,7 +463,7 @@ def main():
|
||||
state_group = 2
|
||||
|
||||
for key in bb.data.keys(documentation):
|
||||
data = documentation.getVarFlag(key, "doc", False)
|
||||
data = bb.data.getVarFlag(key, "doc", documentation)
|
||||
if not data:
|
||||
continue
|
||||
|
||||
@@ -496,7 +497,7 @@ def main():
|
||||
doc.insert_doc_item(doc_ins)
|
||||
|
||||
# let us create the HTML now
|
||||
bb.utils.mkdirhier(output_dir)
|
||||
bb.mkdirhier(output_dir)
|
||||
os.chdir(output_dir)
|
||||
|
||||
# Let us create the sites now. We do it in the following order
|
||||
|
||||
@@ -1,265 +0,0 @@
|
||||
#!/bin/echo ERROR: This script needs to be sourced. Please run as .
|
||||
|
||||
# toaster - shell script to start Toaster
|
||||
|
||||
# Copyright (C) 2013-2015 Intel Corp.
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see http://www.gnu.org/licenses/.
|
||||
|
||||
HELP="
|
||||
Usage: source toaster start|stop [webport=<address:port>] [noweb]
|
||||
Optional arguments:
|
||||
[noweb] Setup the environment for building with toaster but don't start the development server
|
||||
[webport] Set the development server (default: localhost:8000)
|
||||
"
|
||||
|
||||
webserverKillAll()
|
||||
{
|
||||
local pidfile
|
||||
for pidfile in ${BUILDDIR}/.toastermain.pid ${BUILDDIR}/.runbuilds.pid; do
|
||||
if [ -f ${pidfile} ]; then
|
||||
pid=`cat ${pidfile}`
|
||||
while kill -0 $pid 2>/dev/null; do
|
||||
kill -SIGTERM -$pid 2>/dev/null
|
||||
sleep 1
|
||||
done
|
||||
rm ${pidfile}
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
webserverStartAll()
|
||||
{
|
||||
# do not start if toastermain points to a valid process
|
||||
if ! cat "${BUILDDIR}/.toastermain.pid" 2>/dev/null | xargs -I{} kill -0 {} ; then
|
||||
retval=1
|
||||
rm "${BUILDDIR}/.toastermain.pid"
|
||||
fi
|
||||
|
||||
retval=0
|
||||
# you can always add a superuser later via
|
||||
# ../bitbake/lib/toaster/manage.py createsuperuser --username=<ME>
|
||||
$MANAGE migrate --noinput || retval=1
|
||||
|
||||
if [ $retval -eq 1 ]; then
|
||||
echo "Failed migrations, aborting system start" 1>&2
|
||||
return $retval
|
||||
fi
|
||||
# Make sure that checksettings can pick up any value for TEMPLATECONF
|
||||
export TEMPLATECONF
|
||||
$MANAGE checksettings --traceback || retval=1
|
||||
|
||||
if [ $retval -eq 1 ]; then
|
||||
printf "\nError while checking settings; aborting\n"
|
||||
return $retval
|
||||
fi
|
||||
|
||||
echo "Starting webserver..."
|
||||
|
||||
$MANAGE runserver "$ADDR_PORT" \
|
||||
</dev/null >>${BUILDDIR}/toaster_web.log 2>&1 \
|
||||
& echo $! >${BUILDDIR}/.toastermain.pid
|
||||
|
||||
sleep 1
|
||||
|
||||
if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
|
||||
retval=1
|
||||
rm "${BUILDDIR}/.toastermain.pid"
|
||||
else
|
||||
echo "Toaster development webserver started at http://$ADDR_PORT"
|
||||
echo -e "\nYou can now run 'bitbake <target>' on the command line and monitor your build in Toaster.\nYou can also use a Toaster project to configure and run a build.\n"
|
||||
fi
|
||||
|
||||
return $retval
|
||||
}
|
||||
|
||||
INSTOPSYSTEM=0
|
||||
|
||||
# define the stop command
|
||||
stop_system()
|
||||
{
|
||||
# prevent reentry
|
||||
if [ $INSTOPSYSTEM -eq 1 ]; then return; fi
|
||||
INSTOPSYSTEM=1
|
||||
webserverKillAll
|
||||
# unset exported variables
|
||||
unset TOASTER_DIR
|
||||
unset BITBAKE_UI
|
||||
unset BBBASEDIR
|
||||
trap - SIGHUP
|
||||
#trap - SIGCHLD
|
||||
INSTOPSYSTEM=0
|
||||
}
|
||||
|
||||
verify_prereq() {
|
||||
# Verify Django version
|
||||
reqfile=$(python3 -c "import os; print(os.path.realpath('$BBBASEDIR/toaster-requirements.txt'))")
|
||||
exp='s/Django\([><=]\+\)\([^,]\+\),\([><=]\+\)\(.\+\)/'
|
||||
exp=$exp'import sys,django;version=django.get_version().split(".");'
|
||||
exp=$exp'sys.exit(not (version \1 "\2".split(".") and version \3 "\4".split(".")))/p'
|
||||
if ! sed -n "$exp" $reqfile | python3 - ; then
|
||||
req=`grep ^Django $reqfile`
|
||||
echo "This program needs $req"
|
||||
echo "Please install with pip install -r $reqfile"
|
||||
return 2
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
# read command line parameters
|
||||
if [ -n "$BASH_SOURCE" ] ; then
|
||||
TOASTER=${BASH_SOURCE}
|
||||
elif [ -n "$ZSH_NAME" ] ; then
|
||||
TOASTER=${(%):-%x}
|
||||
else
|
||||
TOASTER=$0
|
||||
fi
|
||||
|
||||
export BBBASEDIR=`dirname $TOASTER`/..
|
||||
MANAGE="python3 $BBBASEDIR/lib/toaster/manage.py"
|
||||
OEROOT=`dirname $TOASTER`/../..
|
||||
|
||||
# this is the configuraton file we are using for toaster
|
||||
# we are using the same logic that oe-setup-builddir uses
|
||||
# (based on TEMPLATECONF and .templateconf) to determine
|
||||
# which toasterconf.json to use.
|
||||
# note: There are a number of relative path assumptions
|
||||
# in the local layers that currently make using an arbitrary
|
||||
# toasterconf.json difficult.
|
||||
|
||||
. $OEROOT/.templateconf
|
||||
if [ -n "$TEMPLATECONF" ]; then
|
||||
if [ ! -d "$TEMPLATECONF" ]; then
|
||||
# Allow TEMPLATECONF=meta-xyz/conf as a shortcut
|
||||
if [ -d "$OEROOT/$TEMPLATECONF" ]; then
|
||||
TEMPLATECONF="$OEROOT/$TEMPLATECONF"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# this defines the dir toaster will use for
|
||||
# 1) clones of layers (in _toaster_clones )
|
||||
# 2) the build dir (in build)
|
||||
# 3) the sqlite db if that is being used.
|
||||
# 4) pid's we need to clean up on exit/shutdown
|
||||
# note: for future. in order to make this an arbitrary directory, we need to
|
||||
# make sure that the toaster.sqlite file doesn't default to `pwd` like it currently does.
|
||||
export TOASTER_DIR=`pwd`
|
||||
|
||||
WEBSERVER=1
|
||||
ADDR_PORT="localhost:8000"
|
||||
unset CMD
|
||||
for param in $*; do
|
||||
case $param in
|
||||
noweb )
|
||||
WEBSERVER=0
|
||||
;;
|
||||
start )
|
||||
CMD=$param
|
||||
;;
|
||||
stop )
|
||||
CMD=$param
|
||||
;;
|
||||
webport=*)
|
||||
ADDR_PORT="${param#*=}"
|
||||
# Split the addr:port string
|
||||
ADDR=`echo $ADDR_PORT | cut -f 1 -d ':'`
|
||||
PORT=`echo $ADDR_PORT | cut -f 2 -d ':'`
|
||||
# If only a port has been speified then set address to localhost.
|
||||
if [ $ADDR = $PORT ] ; then
|
||||
ADDR_PORT="localhost:$PORT"
|
||||
fi
|
||||
;;
|
||||
--help)
|
||||
echo "$HELP"
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
echo "$HELP"
|
||||
return 1
|
||||
;;
|
||||
|
||||
esac
|
||||
done
|
||||
|
||||
if [ `basename \"$0\"` = `basename \"${TOASTER}\"` ]; then
|
||||
echo "Error: This script needs to be sourced. Please run as . $TOASTER"
|
||||
return 1
|
||||
fi
|
||||
|
||||
verify_prereq || return 1
|
||||
|
||||
# We make sure we're running in the current shell and in a good environment
|
||||
if [ -z "$BUILDDIR" ] || ! which bitbake >/dev/null 2>&1 ; then
|
||||
echo "Error: Build environment is not setup or bitbake is not in path." 1>&2
|
||||
return 2
|
||||
fi
|
||||
|
||||
# this defines the dir toaster will use for
|
||||
# 1) clones of layers (in _toaster_clones )
|
||||
# 2) the build dir (in build)
|
||||
# 3) the sqlite db if that is being used.
|
||||
# 4) pid's we need to clean up on exit/shutdown
|
||||
# note: for future. in order to make this an arbitrary directory, we need to
|
||||
# make sure that the toaster.sqlite file doesn't default to `pwd`
|
||||
# like it currently does.
|
||||
export TOASTER_DIR=`dirname $BUILDDIR`
|
||||
|
||||
# Determine the action. If specified by arguments, fine, if not, toggle it
|
||||
if [ "$CMD" = "start" ] ; then
|
||||
if [ -n "$BBSERVER" ]; then
|
||||
echo " Toaster is already running. Exiting..."
|
||||
return 1
|
||||
fi
|
||||
elif [ "$CMD" = "" ]; then
|
||||
echo "No command specified"
|
||||
echo "$HELP"
|
||||
return 1
|
||||
fi
|
||||
|
||||
echo "The system will $CMD."
|
||||
|
||||
# Execute the commands
|
||||
|
||||
case $CMD in
|
||||
start )
|
||||
# check if addr:port is not in use
|
||||
if [ "$CMD" == 'start' ]; then
|
||||
if [ $WEBSERVER -gt 0 ]; then
|
||||
$MANAGE checksocket "$ADDR_PORT" || return 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Create configuration file
|
||||
conf=${BUILDDIR}/conf/local.conf
|
||||
line='INHERIT+="toaster buildhistory"'
|
||||
grep -q "$line" $conf || echo $line >> $conf
|
||||
|
||||
if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
|
||||
echo "Failed ${CMD}."
|
||||
return 4
|
||||
fi
|
||||
export BITBAKE_UI='toasterui'
|
||||
$MANAGE runbuilds & echo $! >${BUILDDIR}/.runbuilds.pid
|
||||
# set fail safe stop system on terminal exit
|
||||
trap stop_system SIGHUP
|
||||
echo "Successful ${CMD}."
|
||||
return 0
|
||||
;;
|
||||
stop )
|
||||
stop_system
|
||||
echo "Successful ${CMD}."
|
||||
;;
|
||||
esac
|
||||
@@ -1,126 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# Copyright (C) 2014 Alex Damian
|
||||
#
|
||||
# This file re-uses code spread throughout other Bitbake source files.
|
||||
# As such, all other copyrights belong to their own right holders.
|
||||
#
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""
|
||||
This command takes a filename as a single parameter. The filename is read
|
||||
as a build eventlog, and the ToasterUI is used to process events in the file
|
||||
and log data in the database
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import pickle
|
||||
import codecs
|
||||
|
||||
from collections import namedtuple
|
||||
|
||||
# mangle syspath to allow easy import of modules
|
||||
from os.path import join, dirname, abspath
|
||||
sys.path.insert(0, join(dirname(dirname(abspath(__file__))), 'lib'))
|
||||
|
||||
import bb.cooker
|
||||
from bb.ui import toasterui
|
||||
|
||||
class EventPlayer:
|
||||
"""Emulate a connection to a bitbake server."""
|
||||
|
||||
def __init__(self, eventfile, variables):
|
||||
self.eventfile = eventfile
|
||||
self.variables = variables
|
||||
self.eventmask = []
|
||||
|
||||
def waitEvent(self, _timeout):
|
||||
"""Read event from the file."""
|
||||
line = self.eventfile.readline().strip()
|
||||
if not line:
|
||||
return
|
||||
try:
|
||||
event_str = json.loads(line)['vars'].encode('utf-8')
|
||||
event = pickle.loads(codecs.decode(event_str, 'base64'))
|
||||
event_name = "%s.%s" % (event.__module__, event.__class__.__name__)
|
||||
if event_name not in self.eventmask:
|
||||
return
|
||||
return event
|
||||
except ValueError as err:
|
||||
print("Failed loading ", line)
|
||||
raise err
|
||||
|
||||
def runCommand(self, command_line):
|
||||
"""Emulate running a command on the server."""
|
||||
name = command_line[0]
|
||||
|
||||
if name == "getVariable":
|
||||
var_name = command_line[1]
|
||||
variable = self.variables.get(var_name)
|
||||
if variable:
|
||||
return variable['v'], None
|
||||
return None, "Missing variable %s" % var_name
|
||||
|
||||
elif name == "getAllKeysWithFlags":
|
||||
dump = {}
|
||||
flaglist = command_line[1]
|
||||
for key, val in self.variables.items():
|
||||
try:
|
||||
if not key.startswith("__"):
|
||||
dump[key] = {
|
||||
'v': val['v'],
|
||||
'history' : val['history'],
|
||||
}
|
||||
for flag in flaglist:
|
||||
dump[key][flag] = val[flag]
|
||||
except Exception as err:
|
||||
print(err)
|
||||
return (dump, None)
|
||||
|
||||
elif name == 'setEventMask':
|
||||
self.eventmask = command_line[-1]
|
||||
return True, None
|
||||
|
||||
else:
|
||||
raise Exception("Command %s not implemented" % command_line[0])
|
||||
|
||||
def getEventHandle(self):
|
||||
"""
|
||||
This method is called by toasterui.
|
||||
The return value is passed to self.runCommand but not used there.
|
||||
"""
|
||||
pass
|
||||
|
||||
def main(argv):
|
||||
with open(argv[-1]) as eventfile:
|
||||
# load variables from the first line
|
||||
variables = json.loads(eventfile.readline().strip())['allvariables']
|
||||
|
||||
params = namedtuple('ConfigParams', ['observe_only'])(True)
|
||||
player = EventPlayer(eventfile, variables)
|
||||
|
||||
return toasterui.main(player, player, params)
|
||||
|
||||
# run toaster ui on our mock bitbake class
|
||||
if __name__ == "__main__":
|
||||
if len(sys.argv) != 2:
|
||||
print("Usage: %s <event file>" % os.path.basename(sys.argv[0]))
|
||||
sys.exit(1)
|
||||
|
||||
sys.exit(main(sys.argv))
|
||||
@@ -1,68 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# Copyright (C) 2012 Wind River Systems, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
#
|
||||
# This is used for dumping the bb_cache.dat, the output format is:
|
||||
# recipe_path PN PV PACKAGES
|
||||
#
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
# For importing bb.cache
|
||||
sys.path.insert(0, os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../lib'))
|
||||
from bb.cache import CoreRecipeInfo
|
||||
|
||||
import pickle as pickle
|
||||
|
||||
def main(argv=None):
|
||||
"""
|
||||
Get the mapping for the target recipe.
|
||||
"""
|
||||
if len(argv) != 1:
|
||||
print("Error, need one argument!", file=sys.stderr)
|
||||
return 2
|
||||
|
||||
cachefile = argv[0]
|
||||
|
||||
with open(cachefile, "rb") as cachefile:
|
||||
pickled = pickle.Unpickler(cachefile)
|
||||
while cachefile:
|
||||
try:
|
||||
key = pickled.load()
|
||||
val = pickled.load()
|
||||
except Exception:
|
||||
break
|
||||
if isinstance(val, CoreRecipeInfo) and (not val.skipped):
|
||||
pn = val.pn
|
||||
# Filter out the native recipes.
|
||||
if key.startswith('virtual:native:') or pn.endswith("-native"):
|
||||
continue
|
||||
|
||||
# 1.0 is the default version for a no PV recipe.
|
||||
if "pv" in val.__dict__:
|
||||
pv = val.pv
|
||||
else:
|
||||
pv = "1.0"
|
||||
|
||||
print("%s %s %s %s" % (key, pn, pv, ' '.join(val.packages)))
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
|
||||
@@ -1,24 +1,4 @@
" Vim filetype detection file
" Language:     BitBake
" Author:       Ricardo Salveti <rsalveti@rsalveti.net>
" Copyright:    Copyright (C) 2008 Ricardo Salveti <rsalveti@rsalveti.net>
" Licence:      You may redistribute this under the same terms as Vim itself
"
" This sets up the syntax highlighting for BitBake files, like .bb, .bbclass and .inc

if &compatible || version < 600
    finish
endif

" .bb, .bbappend and .bbclass
au BufNewFile,BufRead *.{bb,bbappend,bbclass} set filetype=bitbake

" .inc
au BufNewFile,BufRead *.inc set filetype=bitbake

" .conf
au BufNewFile,BufRead *.conf
    \ if (match(expand("%:p:h"), "conf") > 0) |
    \     set filetype=bitbake |
    \ endif

au BufNewFile,BufRead *.bb setfiletype bitbake
au BufNewFile,BufRead *.bbclass setfiletype bitbake
au BufNewFile,BufRead *.inc setfiletype bitbake
" au BufNewFile,BufRead *.conf setfiletype bitbake
|
||||
@@ -1,2 +0,0 @@
|
||||
set sts=4 sw=4 et
|
||||
set cms=#%s
|
||||
@@ -1,84 +0,0 @@
|
||||
" Vim plugin file
|
||||
" Purpose: Create a template for new bb files
|
||||
" Author: Ricardo Salveti <rsalveti@gmail.com>
|
||||
" Copyright: Copyright (C) 2008 Ricardo Salveti <rsalveti@gmail.com>
|
||||
"
|
||||
" This file is licensed under the MIT license, see COPYING.MIT in
|
||||
" this source distribution for the terms.
|
||||
"
|
||||
" Based on the gentoo-syntax package
|
||||
"
|
||||
" Will try to use git to find the user name and email
|
||||
|
||||
if &compatible || v:version < 600
|
||||
finish
|
||||
endif
|
||||
|
||||
fun! <SID>GetUserName()
|
||||
let l:user_name = system("git config --get user.name")
|
||||
if v:shell_error
|
||||
return "Unknown User"
|
||||
else
|
||||
return substitute(l:user_name, "\n", "", "")
|
||||
endfun
|
||||
|
||||
fun! <SID>GetUserEmail()
|
||||
let l:user_email = system("git config --get user.email")
|
||||
if v:shell_error
|
||||
return "unknow@user.org"
|
||||
else
|
||||
return substitute(l:user_email, "\n", "", "")
|
||||
endfun
|
||||
|
||||
fun! BBHeader()
|
||||
let l:current_year = strftime("%Y")
|
||||
let l:user_name = <SID>GetUserName()
|
||||
let l:user_email = <SID>GetUserEmail()
|
||||
0 put ='# Copyright (C) ' . l:current_year .
|
||||
\ ' ' . l:user_name . ' <' . l:user_email . '>'
|
||||
put ='# Released under the MIT license (see COPYING.MIT for the terms)'
|
||||
$
|
||||
endfun
|
||||
|
||||
fun! NewBBTemplate()
|
||||
let l:paste = &paste
|
||||
set nopaste
|
||||
|
||||
" Get the header
|
||||
call BBHeader()
|
||||
|
||||
" New the bb template
|
||||
put ='DESCRIPTION = \"\"'
|
||||
put ='HOMEPAGE = \"\"'
|
||||
put ='LICENSE = \"\"'
|
||||
put ='SECTION = \"\"'
|
||||
put ='DEPENDS = \"\"'
|
||||
put =''
|
||||
put ='SRC_URI = \"\"'
|
||||
|
||||
" Go to the first place to edit
|
||||
0
|
||||
/^DESCRIPTION =/
|
||||
exec "normal 2f\""
|
||||
|
||||
if paste == 1
|
||||
set paste
|
||||
endif
|
||||
endfun
|
||||
|
||||
if !exists("g:bb_create_on_empty")
|
||||
let g:bb_create_on_empty = 1
|
||||
endif
|
||||
|
||||
" disable in case of vimdiff
|
||||
if v:progname =~ "vimdiff"
|
||||
let g:bb_create_on_empty = 0
|
||||
endif
|
||||
|
||||
augroup NewBB
|
||||
au BufNewFile *.bb
|
||||
\ if g:bb_create_on_empty |
|
||||
\ call NewBBTemplate() |
|
||||
\ endif
|
||||
augroup END
|
||||
|
||||
@@ -1,126 +1,120 @@
|
||||
" Vim syntax file
|
||||
" Language: BitBake bb/bbclasses/inc
|
||||
" Author: Chris Larson <kergoth@handhelds.org>
|
||||
" Ricardo Salveti <rsalveti@rsalveti.net>
|
||||
" Copyright: Copyright (C) 2004 Chris Larson <kergoth@handhelds.org>
|
||||
" Copyright (C) 2008 Ricardo Salveti <rsalveti@rsalveti.net>
|
||||
"
|
||||
" Copyright (C) 2004 Chris Larson <kergoth@handhelds.org>
|
||||
" This file is licensed under the MIT license, see COPYING.MIT in
|
||||
" this source distribution for the terms.
|
||||
"
|
||||
" Syntax highlighting for bb, bbclasses and inc files.
|
||||
" Language: BitBake
|
||||
" Maintainer: Chris Larson <kergoth@handhelds.org>
|
||||
" Filenames: *.bb, *.bbclass
|
||||
|
||||
if version < 600
|
||||
syntax clear
|
||||
elseif exists("b:current_syntax")
|
||||
finish
|
||||
endif
|
||||
|
||||
syn case match
|
||||
|
||||
|
||||
" Catch incorrect syntax (only matches if nothing else does)
|
||||
"
|
||||
" It's an entirely new type, just has specific syntax in shell and python code
|
||||
syn match bbUnmatched "."
|
||||
|
||||
if &compatible || v:version < 600
|
||||
finish
|
||||
endif
|
||||
if exists("b:current_syntax")
|
||||
finish
|
||||
endif
|
||||
|
||||
" Other
|
||||
|
||||
syn match bbComment "^#.*$" display contains=bbTodo
|
||||
syn keyword bbTodo TODO FIXME XXX contained
|
||||
syn match bbDelimiter "[(){}=]" contained
|
||||
syn match bbQuote /['"]/ contained
|
||||
syn match bbArrayBrackets "[\[\]]" contained
|
||||
|
||||
|
||||
" BitBake strings
|
||||
|
||||
syn match bbContinue "\\$"
|
||||
syn region bbString matchgroup=bbQuote start=/"/ skip=/\\$/ excludenl end=/"/ contained keepend contains=bbTodo,bbContinue,bbVarDeref
|
||||
syn region bbString matchgroup=bbQuote start=/'/ skip=/\\$/ excludenl end=/'/ contained keepend contains=bbTodo,bbContinue,bbVarDeref
|
||||
|
||||
|
||||
" BitBake variable metadata
|
||||
|
||||
syn keyword bbExportFlag export contained nextgroup=bbIdentifier skipwhite
|
||||
syn match bbVarDeref "${[a-zA-Z0-9\-_\.]\+}" contained
|
||||
syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.]\+\(_[${}a-zA-Z0-9\-_\.]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
|
||||
|
||||
syn match bbIdentifier "[a-zA-Z0-9\-_\.]\+" display contained
|
||||
"syn keyword bbVarEq = display contained nextgroup=bbVarValue
|
||||
syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)" contained nextgroup=bbVarValue
|
||||
syn match bbVarValue ".*$" contained contains=bbString,bbVarDeref
|
||||
|
||||
|
||||
" BitBake variable metadata flags
|
||||
syn match bbVarFlagDef "^\([a-zA-Z0-9\-_\.]\+\)\(\[[a-zA-Z0-9\-_\.]\+\]\)\@=" contains=bbIdentifier nextgroup=bbVarFlagFlag
|
||||
syn region bbVarFlagFlag matchgroup=bbArrayBrackets start="\[" end="\]\s*\(=\)\@=" keepend excludenl contained contains=bbIdentifier nextgroup=bbVarEq
|
||||
"syn match bbVarFlagFlag "\[\([a-zA-Z0-9\-_\.]\+\)\]\s*\(=\)\@=" contains=bbIdentifier nextgroup=bbVarEq
|
||||
|
||||
|
||||
" Functions!
|
||||
syn match bbFunction "\h\w*" display contained
|
||||
|
||||
|
||||
" BitBake python metadata
|
||||
syn include @python syntax/python.vim
|
||||
if exists("b:current_syntax")
|
||||
unlet b:current_syntax
|
||||
endif
|
||||
|
||||
" BitBake syntax
|
||||
syn keyword bbPythonFlag python contained nextgroup=bbFunction
|
||||
syn match bbPythonFuncDef "^\(python\s\+\)\(\w\+\)\?\(\s*()\s*\)\({\)\@=" contains=bbPythonFlag,bbFunction,bbDelimiter nextgroup=bbPythonFuncRegion skipwhite
|
||||
syn region bbPythonFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" keepend contained contains=@python
|
||||
"hi def link bbPythonFuncRegion Comment
|
||||
|
||||
" Matching case
|
||||
syn case match
|
||||
|
||||
" Indicates the error when nothing is matched
|
||||
syn match bbUnmatched "."
|
||||
|
||||
" Comments
|
||||
syn cluster bbCommentGroup contains=bbTodo,@Spell
|
||||
syn keyword bbTodo COMBAK FIXME TODO XXX contained
|
||||
syn match bbComment "#.*$" contains=@bbCommentGroup
|
||||
|
||||
" String helpers
|
||||
syn match bbQuote +['"]+ contained
|
||||
syn match bbDelimiter "[(){}=]" contained
|
||||
syn match bbArrayBrackets "[\[\]]" contained
|
||||
|
||||
" BitBake strings
|
||||
syn match bbContinue "\\$"
|
||||
syn region bbString matchgroup=bbQuote start=+"+ skip=+\\$+ end=+"+ contained contains=bbTodo,bbContinue,bbVarDeref,bbVarPyValue,@Spell
|
||||
syn region bbString matchgroup=bbQuote start=+'+ skip=+\\$+ end=+'+ contained contains=bbTodo,bbContinue,bbVarDeref,bbVarPyValue,@Spell
|
||||
|
||||
" Vars definition
|
||||
syn match bbExport "^export" nextgroup=bbIdentifier skipwhite
|
||||
syn keyword bbExportFlag export contained nextgroup=bbIdentifier skipwhite
|
||||
syn match bbIdentifier "[a-zA-Z0-9\-_\.\/\+]\+" display contained
|
||||
syn match bbVarDeref "${[a-zA-Z0-9\-_\.\/\+]\+}" contained
|
||||
syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)" contained nextgroup=bbVarValue
|
||||
syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.\/\+]\+\(_[${}a-zA-Z0-9\-_\.\/\+]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
|
||||
syn match bbVarValue ".*$" contained contains=bbString,bbVarDeref,bbVarPyValue
|
||||
syn region bbVarPyValue start=+${@+ skip=+\\$+ end=+}+ contained contains=@python
|
||||
|
||||
" Vars metadata flags
|
||||
syn match bbVarFlagDef "^\([a-zA-Z0-9\-_\.]\+\)\(\[[a-zA-Z0-9\-_\.]\+\]\)\@=" contains=bbIdentifier nextgroup=bbVarFlagFlag
|
||||
syn region bbVarFlagFlag matchgroup=bbArrayBrackets start="\[" end="\]\s*\(=\|+=\|=+\|?=\)\@=" contained contains=bbIdentifier nextgroup=bbVarEq
|
||||
|
||||
" Includes and requires
|
||||
syn keyword bbInclude inherit include require contained
|
||||
syn match bbIncludeRest ".*$" contained contains=bbString,bbVarDeref
|
||||
syn match bbIncludeLine "^\(inherit\|include\|require\)\s\+" contains=bbInclude nextgroup=bbIncludeRest
|
||||
|
||||
" Add taks and similar
|
||||
syn keyword bbStatement addtask addhandler after before EXPORT_FUNCTIONS contained
|
||||
syn match bbStatementRest ".*$" skipwhite contained contains=bbStatement
|
||||
syn match bbStatementLine "^\(addtask\|addhandler\|after\|before\|EXPORT_FUNCTIONS\)\s\+" contains=bbStatement nextgroup=bbStatementRest
|
||||
|
||||
" OE Important Functions
|
||||
syn keyword bbOEFunctions do_fetch do_unpack do_patch do_configure do_compile do_stage do_install do_package contained
|
||||
|
||||
" Generic Functions
|
||||
syn match bbFunction "\h[0-9A-Za-z_-]*" display contained contains=bbOEFunctions
|
||||
|
||||
" BitBake shell metadata
|
||||
syn include @shell syntax/sh.vim
|
||||
if exists("b:current_syntax")
|
||||
unlet b:current_syntax
|
||||
endif
|
||||
syn keyword bbShFakeRootFlag fakeroot contained
|
||||
syn match bbShFuncDef "^\(fakeroot\s*\)\?\([0-9A-Za-z_${}-]\+\)\(python\)\@<!\(\s*()\s*\)\({\)\@=" contains=bbShFakeRootFlag,bbFunction,bbVarDeref,bbDelimiter nextgroup=bbShFuncRegion skipwhite
|
||||
syn region bbShFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" contained contains=@shell
|
||||
|
||||
" Python value inside shell functions
|
||||
syn region shDeref start=+${@+ skip=+\\$+ excludenl end=+}+ contained contains=@python
|
||||
syn keyword bbFakerootFlag fakeroot contained nextgroup=bbFunction
|
||||
syn match bbShellFuncDef "^\(fakeroot\s*\)\?\(\w\+\)\(python\)\@<!\(\s*()\s*\)\({\)\@=" contains=bbFakerootFlag,bbFunction,bbDelimiter nextgroup=bbShellFuncRegion skipwhite
|
||||
syn region bbShellFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" keepend contained contains=@shell
|
||||
"hi def link bbShellFuncRegion Comment
|
||||
|
||||
" BitBake python metadata
|
||||
syn keyword bbPyFlag python contained
|
||||
syn match bbPyFuncDef "^\(python\s\+\)\([0-9A-Za-z_${}-]\+\)\?\(\s*()\s*\)\({\)\@=" contains=bbPyFlag,bbFunction,bbVarDeref,bbDelimiter nextgroup=bbPyFuncRegion skipwhite
|
||||
syn region bbPyFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" contained contains=@python
|
||||
|
||||
" BitBake 'def'd python functions
|
||||
syn keyword bbPyDef def contained
|
||||
syn region bbPyDefRegion start='^\(def\s\+\)\([0-9A-Za-z_-]\+\)\(\s*(.*)\s*\):\s*$' end='^\(\s\|$\)\@!' contains=@python
|
||||
syn keyword bbDef def contained
|
||||
syn region bbDefRegion start='^def\s\+\w\+\s*([^)]*)\s*:\s*$' end='^\(\s\|$\)\@!' contains=@python
|
||||
|
||||
" Highlighting Definitions
|
||||
hi def link bbUnmatched Error
|
||||
hi def link bbInclude Include
|
||||
hi def link bbTodo Todo
|
||||
hi def link bbComment Comment
|
||||
hi def link bbQuote String
|
||||
hi def link bbString String
|
||||
hi def link bbDelimiter Keyword
|
||||
hi def link bbArrayBrackets Statement
|
||||
hi def link bbContinue Special
|
||||
hi def link bbExport Type
|
||||
hi def link bbExportFlag Type
|
||||
hi def link bbIdentifier Identifier
|
||||
hi def link bbVarDeref PreProc
|
||||
hi def link bbVarDef Identifier
|
||||
hi def link bbVarValue String
|
||||
hi def link bbShFakeRootFlag Type
|
||||
hi def link bbFunction Function
|
||||
hi def link bbPyFlag Type
|
||||
hi def link bbPyDef Statement
|
||||
hi def link bbStatement Statement
|
||||
hi def link bbStatementRest Identifier
|
||||
hi def link bbOEFunctions Special
|
||||
hi def link bbVarPyValue PreProc
|
||||
|
||||
" BitBake statements
|
||||
syn keyword bbStatement include inherit require addtask addhandler EXPORT_FUNCTIONS display contained
|
||||
syn match bbStatementLine "^\(include\|inherit\|require\|addtask\|addhandler\|EXPORT_FUNCTIONS\)\s\+" contains=bbStatement nextgroup=bbStatementRest
|
||||
syn match bbStatementRest ".*$" contained contains=bbString,bbVarDeref
|
||||
|
||||
" Highlight
|
||||
"
|
||||
hi def link bbArrayBrackets Statement
|
||||
hi def link bbUnmatched Error
|
||||
hi def link bbVarDeref String
|
||||
hi def link bbContinue Special
|
||||
hi def link bbDef Statement
|
||||
hi def link bbPythonFlag Type
|
||||
hi def link bbExportFlag Type
|
||||
hi def link bbFakerootFlag Type
|
||||
hi def link bbStatement Statement
|
||||
hi def link bbString String
|
||||
hi def link bbTodo Todo
|
||||
hi def link bbComment Comment
|
||||
hi def link bbOperator Operator
|
||||
hi def link bbError Error
|
||||
hi def link bbFunction Function
|
||||
hi def link bbDelimiter Delimiter
|
||||
hi def link bbIdentifier Identifier
|
||||
hi def link bbVarEq Operator
|
||||
hi def link bbQuote String
|
||||
hi def link bbVarValue String
|
||||
|
||||
let b:current_syntax = "bb"
|
||||
|
||||
@@ -1,91 +0,0 @@
|
||||
# This is a single Makefile to handle all generated BitBake documents.
|
||||
# The Makefile needs to live in the documentation directory and all figures used
|
||||
# in any manuals must be .PNG files and live in the individual book's figures
|
||||
# directory.
|
||||
#
|
||||
# The Makefile has these targets:
|
||||
#
|
||||
# pdf: generates a PDF version of a manual.
|
||||
# html: generates an HTML version of a manual.
|
||||
# tarball: creates a tarball for the doc files.
|
||||
# validate: validates
|
||||
# clean: removes files
|
||||
#
|
||||
# The Makefile generates an HTML version of every document. The
|
||||
# variable DOC indicates the folder name for a given manual.
|
||||
#
|
||||
# To build a manual, you must invoke 'make' with the DOC argument.
|
||||
#
|
||||
# Examples:
|
||||
#
|
||||
# make DOC=bitbake-user-manual
|
||||
# make pdf DOC=bitbake-user-manual
|
||||
#
|
||||
# The first example generates the HTML version of the User Manual.
|
||||
# The second example generates the PDF version of the User Manual.
|
||||
#
|
||||
|
||||
ifeq ($(DOC),bitbake-user-manual)
|
||||
XSLTOPTS = --stringparam html.stylesheet bitbake-user-manual-style.css \
|
||||
--stringparam chapter.autolabel 1 \
|
||||
--stringparam section.autolabel 1 \
|
||||
--stringparam section.label.includes.component.label 1 \
|
||||
--xinclude
|
||||
ALLPREQ = html tarball
|
||||
TARFILES = bitbake-user-manual-style.css bitbake-user-manual.html figures/bitbake-title.png
|
||||
MANUALS = $(DOC)/$(DOC).html
|
||||
FIGURES = figures
|
||||
STYLESHEET = $(DOC)/*.css
|
||||
|
||||
endif
|
||||
|
||||
##
|
||||
# These URI should be rewritten by your distribution's xml catalog to
|
||||
# match your localy installed XSL stylesheets.
|
||||
XSL_BASE_URI = http://docbook.sourceforge.net/release/xsl/current
|
||||
XSL_XHTML_URI = $(XSL_BASE_URI)/xhtml/docbook.xsl
|
||||
|
||||
all: $(ALLPREQ)
|
||||
|
||||
pdf:
|
||||
ifeq ($(DOC),bitbake-user-manual)
|
||||
@echo " "
|
||||
@echo "********** Building."$(DOC)
|
||||
@echo " "
|
||||
cd $(DOC); ../tools/docbook-to-pdf $(DOC).xml ../template; cd ..
|
||||
endif
|
||||
|
||||
html:
|
||||
ifeq ($(DOC),bitbake-user-manual)
|
||||
# See http://www.sagehill.net/docbookxsl/HtmlOutput.html
|
||||
@echo " "
|
||||
@echo "******** Building "$(DOC)
|
||||
@echo " "
|
||||
cd $(DOC); xsltproc $(XSLTOPTS) -o $(DOC).html $(DOC)-customization.xsl $(DOC).xml; cd ..
|
||||
endif
|
||||
|
||||
tarball: html
|
||||
@echo " "
|
||||
@echo "******** Creating Tarball of document files"
|
||||
@echo " "
|
||||
cd $(DOC); tar -cvzf $(DOC).tgz $(TARFILES); cd ..
|
||||
|
||||
validate:
|
||||
cd $(DOC); xmllint --postvalid --xinclude --noout $(DOC).xml; cd ..
|
||||
|
||||
publish:
|
||||
@if test -f $(DOC)/$(DOC).html; \
|
||||
then \
|
||||
echo " "; \
|
||||
echo "******** Publishing "$(DOC)".html"; \
|
||||
echo " "; \
|
||||
scp -r $(MANUALS) $(STYLESHEET) docs.yp:/var/www/www.yoctoproject.org-docs/$(VER)/$(DOC); \
|
||||
cd $(DOC); scp -r $(FIGURES) docs.yp:/var/www/www.yoctoproject.org-docs/$(VER)/$(DOC); \
|
||||
else \
|
||||
echo " "; \
|
||||
echo $(DOC)".html missing. Generate the file first then try again."; \
|
||||
echo " "; \
|
||||
fi
|
||||
|
||||
clean:
|
||||
rm -rf $(MANUALS); rm $(DOC)/$(DOC).tgz;
|
||||
@@ -1,39 +0,0 @@
|
||||
Documentation
|
||||
=============
|
||||
|
||||
This is the directory that contains the BitBake documentation.
|
||||
|
||||
Manual Organization
|
||||
===================
|
||||
|
||||
Folders exist for individual manuals as follows:
|
||||
|
||||
* bitbake-user-manual - The BitBake User Manual
|
||||
|
||||
Each folder is self-contained regarding content and figures.
|
||||
|
||||
If you want to find HTML versions of the BitBake manuals on the web,
|
||||
go to http://www.openembedded.org/wiki/Documentation.
|
||||
|
||||
Makefile
|
||||
========
|
||||
|
||||
The Makefile processes manual directories to create HTML, PDF,
|
||||
tarballs, etc. Details on how the Makefile works are documented
|
||||
inside the Makefile. See that file for more information.
|
||||
|
||||
To build a manual, you run the make command and pass it the name
|
||||
of the folder containing the manual's contents.
|
||||
For example, the following command run from the documentation directory
|
||||
creates an HTML version of the BitBake User Manual (use the "pdf" target for a PDF version).
|
||||
The DOC variable specifies the manual you are making:
|
||||
|
||||
$ make DOC=bitbake-user-manual
|
||||
|
||||
template
|
||||
========
|
||||
Contains various templates, fonts, and some old PNG files.
|
||||
|
||||
tools
|
||||
=====
|
||||
Contains a tool to convert the DocBook files to PDF format.
|
||||
@@ -1,29 +0,0 @@
|
||||
<?xml version='1.0'?>
|
||||
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns="http://www.w3.org/1999/xhtml" xmlns:fo="http://www.w3.org/1999/XSL/Format" version="1.0">
|
||||
|
||||
<xsl:import href="http://downloads.yoctoproject.org/mirror/docbook-mirror/docbook-xsl-1.76.1/xhtml/docbook.xsl" />
|
||||
|
||||
<!--
|
||||
|
||||
<xsl:import href="../template/1.76.1/docbook-xsl-1.76.1/xhtml/docbook.xsl" />
|
||||
|
||||
<xsl:import href="http://docbook.sourceforge.net/release/xsl/1.76.1/xhtml/docbook.xsl" />
|
||||
|
||||
-->
|
||||
|
||||
<xsl:include href="../template/permalinks.xsl"/>
|
||||
<xsl:include href="../template/section.title.xsl"/>
|
||||
<xsl:include href="../template/component.title.xsl"/>
|
||||
<xsl:include href="../template/division.title.xsl"/>
|
||||
<xsl:include href="../template/formal.object.heading.xsl"/>
|
||||
<xsl:include href="../template/gloss-permalinks.xsl"/>
|
||||
|
||||
<xsl:param name="html.stylesheet" select="'user-manual-style.css'" />
|
||||
<xsl:param name="chapter.autolabel" select="1" />
|
||||
<xsl:param name="section.autolabel" select="1" />
|
||||
<xsl:param name="section.label.includes.component.label" select="1" />
|
||||
<xsl:param name="appendix.autolabel">A</xsl:param>
|
||||
|
||||
<!-- <xsl:param name="generate.toc" select="'article nop'"></xsl:param> -->
|
||||
|
||||
</xsl:stylesheet>
|
||||
@@ -1,932 +0,0 @@
|
||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
|
||||
<chapter id="bitbake-user-manual-execution">
|
||||
<title>Execution</title>
|
||||
|
||||
<para>
|
||||
The primary purpose for running BitBake is to produce some kind
|
||||
of output such as a single installable package, a kernel, a software
|
||||
development kit, or even a full, board-specific bootable Linux image,
|
||||
complete with bootloader, kernel, and root filesystem.
|
||||
Of course, you can execute the <filename>bitbake</filename>
|
||||
command with options that cause it to execute single tasks,
|
||||
compile single recipe files, capture or clear data, or simply
|
||||
return information about the execution environment.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
This chapter describes BitBake's execution process from start
|
||||
to finish when you use it to create an image.
|
||||
The execution process is launched using the following command
|
||||
form:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake <replaceable>target</replaceable>
|
||||
</literallayout>
|
||||
For information on the BitBake command and its options,
|
||||
see
|
||||
"<link linkend='bitbake-user-manual-command'>The BitBake Command</link>"
|
||||
section.
|
||||
<note>
|
||||
<para>
|
||||
Prior to executing BitBake, you should take advantage of available
|
||||
parallel thread execution on your build host by setting the
|
||||
<link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
|
||||
variable in your project's <filename>local.conf</filename>
|
||||
configuration file.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
A common method to determine this value for your build host is to run
|
||||
the following:
|
||||
<literallayout class='monospaced'>
|
||||
$ grep processor /proc/cpuinfo
|
||||
</literallayout>
|
||||
This command returns the number of processors, which takes into
|
||||
account hyper-threading.
|
||||
Thus, a quad-core build host with hyper-threading most likely
|
||||
shows eight processors, which is the value you would then assign to
|
||||
<filename>BB_NUMBER_THREADS</filename>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
A possibly simpler solution is that some Linux distributions
|
||||
(e.g. Debian and Ubuntu) provide the <filename>ncpus</filename> command.
|
||||
</para>
|
||||
</note>
|
||||
</para>
|
||||
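<para>
As a minimal, hypothetical <filename>local.conf</filename> sketch for the
quad-core, hyper-threaded build host described in the note above, the
assignment might look like the following (the value "8" is only an
assumption based on that example host):
<literallayout class='monospaced'>
BB_NUMBER_THREADS ?= "8"
</literallayout>
</para>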
|
||||
<section id='parsing-the-base-configuration-metadata'>
|
||||
<title>Parsing the Base Configuration Metadata</title>
|
||||
|
||||
<para>
|
||||
The first thing BitBake does is parse base configuration
|
||||
metadata.
|
||||
Base configuration metadata consists of your project's
|
||||
<filename>bblayers.conf</filename> file to determine what
|
||||
layers BitBake needs to recognize, all necessary
|
||||
<filename>layer.conf</filename> files (one from each layer),
|
||||
and <filename>bitbake.conf</filename>.
|
||||
The data itself is of various types:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>Recipes:</emphasis>
|
||||
Details about particular pieces of software.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Class Data:</emphasis>
|
||||
An abstraction of common build information
|
||||
(e.g. how to build a Linux kernel).
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Configuration Data:</emphasis>
|
||||
Machine-specific settings, policy decisions,
|
||||
and so forth.
|
||||
Configuration data acts as the glue to bind everything
|
||||
together.</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The <filename>layer.conf</filename> files are used to
|
||||
construct key variables such as
|
||||
<link linkend='var-BBPATH'><filename>BBPATH</filename></link>
|
||||
and
|
||||
<link linkend='var-BBFILES'><filename>BBFILES</filename></link>.
|
||||
<filename>BBPATH</filename> is used to search for
|
||||
configuration and class files under the
|
||||
<filename>conf</filename> and <filename>classes</filename>
|
||||
directories, respectively.
|
||||
<filename>BBFILES</filename> is used to locate both recipe
|
||||
and recipe append files
|
||||
(<filename>.bb</filename> and <filename>.bbappend</filename>).
|
||||
If there is no <filename>bblayers.conf</filename> file,
|
||||
it is assumed the user has set the <filename>BBPATH</filename>
|
||||
and <filename>BBFILES</filename> directly in the environment.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Next, the <filename>bitbake.conf</filename> file is located
|
||||
using the <filename>BBPATH</filename> variable that was
|
||||
just constructed.
|
||||
The <filename>bitbake.conf</filename> file may also include other
|
||||
configuration files using the
|
||||
<filename>include</filename> or
|
||||
<filename>require</filename> directives.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Prior to parsing configuration files, BitBake looks
|
||||
at certain variables, including:
|
||||
<itemizedlist>
|
||||
<listitem><para>
|
||||
<link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
<link linkend='var-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
<link linkend='var-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
<link linkend='var-BB_ORIGENV'><filename>BB_ORIGENV</filename></link>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
<link linkend='var-BITBAKE_UI'><filename>BITBAKE_UI</filename></link>
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
The first four variables in this list relate to how BitBake treats shell
|
||||
environment variables during task execution.
|
||||
By default, BitBake cleans the environment variables and provides tight
|
||||
control over the shell execution environment.
|
||||
However, using these first four variables, you can
|
||||
control which environment variables BitBake
|
||||
allows into the shell environment during the
|
||||
execution of tasks.
|
||||
See the
|
||||
"<link linkend='passing-information-into-the-build-task-environment'>Passing Information Into the Build Task Environment</link>"
|
||||
section and the information about these variables in the
|
||||
variable glossary for more information on how they work and
|
||||
on how to use them.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The base configuration metadata is global
|
||||
and therefore affects all recipes and tasks that are executed.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
BitBake first searches the current working directory for an
|
||||
optional <filename>conf/bblayers.conf</filename> configuration file.
|
||||
This file is expected to contain a
|
||||
<link linkend='var-BBLAYERS'><filename>BBLAYERS</filename></link>
|
||||
variable that is a space-delimited list of 'layer' directories.
|
||||
Recall that if BitBake cannot find a <filename>bblayers.conf</filename>
|
||||
file, then it is assumed the user has set the <filename>BBPATH</filename>
|
||||
and <filename>BBFILES</filename> variables directly in the environment.
|
||||
</para>
|
||||
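<para>
As an illustration only (the layer paths below are hypothetical), a
minimal <filename>conf/bblayers.conf</filename> might contain little more
than the <filename>BBLAYERS</filename> list itself:
<literallayout class='monospaced'>
BBLAYERS = " \
/home/user/layers/meta-core \
/home/user/layers/meta-custom \
"
</literallayout>
</para>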
|
||||
<para>
|
||||
For each directory (layer) in this list, a <filename>conf/layer.conf</filename>
|
||||
file is located and parsed with the
|
||||
<link linkend='var-LAYERDIR'><filename>LAYERDIR</filename></link>
|
||||
variable being set to the directory where the layer was found.
|
||||
The idea is these files automatically set up
|
||||
<link linkend='var-BBPATH'><filename>BBPATH</filename></link>
|
||||
and other variables correctly for a given build directory.
|
||||
</para>
|
||||
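<para>
As a sketch of how this works (the recipe glob patterns are only an
example layout, not a requirement), a layer's
<filename>conf/layer.conf</filename> typically extends
<filename>BBPATH</filename> and <filename>BBFILES</filename> using
<filename>LAYERDIR</filename>:
<literallayout class='monospaced'>
BBPATH .= ":${LAYERDIR}"
BBFILES += "${LAYERDIR}/recipes-*/*/*.bb \
${LAYERDIR}/recipes-*/*/*.bbappend"
</literallayout>
</para>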
|
||||
<para>
|
||||
BitBake then expects to find the <filename>conf/bitbake.conf</filename>
|
||||
file somewhere in the user-specified <filename>BBPATH</filename>.
|
||||
That configuration file generally has include directives to pull
|
||||
in any other metadata such as files specific to the architecture,
|
||||
the machine, the local environment, and so forth.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Only variable definitions and include directives are allowed
|
||||
in BitBake <filename>.conf</filename> files.
|
||||
Some variables directly influence BitBake's behavior.
|
||||
These variables might have been set from the environment
|
||||
depending on the environment variables previously
|
||||
mentioned or set in the configuration files.
|
||||
The
|
||||
"<link linkend='ref-variables-glos'>Variables Glossary</link>"
|
||||
chapter presents a full list of variables.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
After parsing configuration files, BitBake uses its rudimentary
|
||||
inheritance mechanism, which is through class files, to inherit
|
||||
some standard classes.
|
||||
BitBake parses a class when the inherit directive responsible
|
||||
for getting that class is encountered.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The <filename>base.bbclass</filename> file is always included.
|
||||
Other classes that are specified in the configuration using the
|
||||
<link linkend='var-INHERIT'><filename>INHERIT</filename></link>
|
||||
variable are also included.
|
||||
BitBake searches for class files in a
|
||||
<filename>classes</filename> subdirectory under
|
||||
the paths in <filename>BBPATH</filename> in the same way as
|
||||
configuration files.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
A good way to get an idea of the configuration files and
|
||||
the class files used in your execution environment is to
|
||||
run the following BitBake command:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake -e > mybb.log
|
||||
</literallayout>
|
||||
Examining the top of the <filename>mybb.log</filename>
|
||||
shows you the many configuration files and class files
|
||||
used in your execution environment.
|
||||
</para>
|
||||
|
||||
<note>
|
||||
<para>
|
||||
You need to be aware of how BitBake parses curly braces.
|
||||
If a recipe uses a closing curly brace within the function and
|
||||
the character has no leading spaces, BitBake produces a parsing
|
||||
error.
|
||||
If you use a pair of curly braces in a shell function, the
|
||||
closing curly brace must not be located at the start of the line
|
||||
without leading spaces.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Here is an example that causes BitBake to produce a parsing
|
||||
error:
|
||||
<literallayout class='monospaced'>
|
||||
fakeroot create_shar() {
|
||||
cat << "EOF" > ${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.sh
|
||||
usage()
|
||||
{
|
||||
echo "test"
|
||||
###### The following "}" at the start of the line causes a parsing error ######
|
||||
}
|
||||
EOF
|
||||
}
|
||||
</literallayout>
|
||||
Writing the recipe this way avoids the error:
|
||||
<literallayout class='monospaced'>
|
||||
fakeroot create_shar() {
|
||||
cat << "EOF" > ${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.sh
|
||||
usage()
|
||||
{
|
||||
echo "test"
|
||||
######The following "}" with a leading space at the start of the line avoids the error ######
|
||||
}
|
||||
EOF
|
||||
}
|
||||
</literallayout>
|
||||
</para>
|
||||
</note>
|
||||
</section>
|
||||
|
||||
<section id='locating-and-parsing-recipes'>
|
||||
<title>Locating and Parsing Recipes</title>
|
||||
|
||||
<para>
|
||||
During the configuration phase, BitBake will have set
|
||||
<link linkend='var-BBFILES'><filename>BBFILES</filename></link>.
|
||||
BitBake now uses it to construct a list of recipes to parse,
|
||||
along with any append files (<filename>.bbappend</filename>)
|
||||
to apply.
|
||||
<filename>BBFILES</filename> is a space-separated list of
|
||||
available files and supports wildcards.
|
||||
An example would be:
|
||||
<literallayout class='monospaced'>
|
||||
BBFILES = "/path/to/bbfiles/*.bb /path/to/appends/*.bbappend"
|
||||
</literallayout>
|
||||
BitBake parses each recipe and append file located
|
||||
with <filename>BBFILES</filename> and stores the values of
|
||||
various variables into the datastore.
|
||||
<note>
|
||||
Append files are applied in the order they are encountered in
|
||||
<filename>BBFILES</filename>.
|
||||
</note>
|
||||
For each file, a fresh copy of the base configuration is
|
||||
made, then the recipe is parsed line by line.
|
||||
Any inherit statements cause BitBake to find and
|
||||
then parse class files (<filename>.bbclass</filename>)
|
||||
using
|
||||
<link linkend='var-BBPATH'><filename>BBPATH</filename></link>
|
||||
as the search path.
|
||||
Finally, BitBake parses in order any append files found in
|
||||
<filename>BBFILES</filename>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
One common convention is to use the recipe filename to define
|
||||
pieces of metadata.
|
||||
For example, in <filename>bitbake.conf</filename> the recipe
|
||||
name and version are used to set the variables
|
||||
<link linkend='var-PN'><filename>PN</filename></link> and
|
||||
<link linkend='var-PV'><filename>PV</filename></link>:
|
||||
<literallayout class='monospaced'>
|
||||
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
|
||||
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
|
||||
</literallayout>
|
||||
In this example, a recipe called "something_1.2.3.bb" would set
|
||||
<filename>PN</filename> to "something" and
|
||||
<filename>PV</filename> to "1.2.3".
|
||||
</para>
|
||||
|
||||
<para>
|
||||
By the time parsing is complete for a recipe, BitBake
|
||||
has a list of tasks that the recipe defines and a set of
|
||||
data consisting of keys and values as well as
|
||||
dependency information about the tasks.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
BitBake does not need all of this information.
|
||||
It only needs a small subset of the information to make
|
||||
decisions about the recipe.
|
||||
Consequently, BitBake caches the values in which it is
|
||||
interested and does not store the rest of the information.
|
||||
Experience has shown it is faster to re-parse the metadata than to
|
||||
try and write it out to the disk and then reload it.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Where possible, subsequent BitBake commands reuse this cache of
|
||||
recipe information.
|
||||
The validity of this cache is determined by first computing a
|
||||
checksum of the base configuration data (see
|
||||
<link linkend='var-BB_HASHCONFIG_WHITELIST'><filename>BB_HASHCONFIG_WHITELIST</filename></link>)
|
||||
and then checking if the checksum matches.
|
||||
If that checksum matches what is in the cache and the recipe
|
||||
and class files have not changed, BitBake is able to use
|
||||
the cache.
|
||||
BitBake then reloads the cached information about the recipe
|
||||
instead of reparsing it from scratch.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Recipe file collections exist to allow the user to
|
||||
have multiple repositories of
|
||||
<filename>.bb</filename> files that contain the same
|
||||
exact package.
|
||||
For example, one could easily use them to make one's
|
||||
own local copy of an upstream repository, but with
|
||||
custom modifications that one does not want upstream.
|
||||
Here is an example:
|
||||
<literallayout class='monospaced'>
|
||||
BBFILES = "/stuff/openembedded/*/*.bb /stuff/openembedded.modified/*/*.bb"
|
||||
BBFILE_COLLECTIONS = "upstream local"
|
||||
BBFILE_PATTERN_upstream = "^/stuff/openembedded/"
|
||||
BBFILE_PATTERN_local = "^/stuff/openembedded.modified/"
|
||||
BBFILE_PRIORITY_upstream = "5"
|
||||
BBFILE_PRIORITY_local = "10"
|
||||
</literallayout>
|
||||
<note>
|
||||
The layers mechanism is now the preferred method of collecting
|
||||
code.
|
||||
While the collections code remains, its main use is to set layer
|
||||
priorities and to deal with overlap (conflicts) between layers.
|
||||
</note>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='bb-bitbake-providers'>
|
||||
<title>Providers</title>
|
||||
|
||||
<para>
|
||||
Assuming BitBake has been instructed to execute a target
|
||||
and that all the recipe files have been parsed, BitBake
|
||||
starts to figure out how to build the target.
|
||||
BitBake looks through the <filename>PROVIDES</filename> list
|
||||
for each of the recipes.
|
||||
A <filename>PROVIDES</filename> list is the list of names by which
|
||||
the recipe can be known.
|
||||
Each recipe's <filename>PROVIDES</filename> list is created
|
||||
implicitly through the recipe's
|
||||
<link linkend='var-PN'><filename>PN</filename></link> variable
|
||||
and explicitly through the recipe's
|
||||
<link linkend='var-PROVIDES'><filename>PROVIDES</filename></link>
|
||||
variable, which is optional.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
When a recipe uses <filename>PROVIDES</filename>, that recipe's
|
||||
functionality can be found under an alternative name or names other
|
||||
than the implicit <filename>PN</filename> name.
|
||||
As an example, suppose a recipe named <filename>keyboard_1.0.bb</filename>
|
||||
contained the following:
|
||||
<literallayout class='monospaced'>
|
||||
PROVIDES += "fullkeyboard"
|
||||
</literallayout>
|
||||
The <filename>PROVIDES</filename> list for this recipe becomes
|
||||
"keyboard", which is implicit, and "fullkeyboard", which is explicit.
|
||||
Consequently, the functionality found in
|
||||
<filename>keyboard_1.0.bb</filename> can be found under two
|
||||
different names.
|
||||
</para>
|
||||
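<para>
Another recipe can then depend on the explicit name rather than on
<filename>keyboard</filename> itself, and configuration can state which
recipe satisfies that name if several recipes provide it.
The following is only a sketch using the hypothetical names from the
example above; <filename>PREFERRED_PROVIDER</filename> is described in
the next section:
<literallayout class='monospaced'>
DEPENDS += "fullkeyboard"
PREFERRED_PROVIDER_fullkeyboard = "keyboard"
</literallayout>
</para>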
</section>
|
||||
|
||||
<section id='bb-bitbake-preferences'>
|
||||
<title>Preferences</title>
|
||||
|
||||
<para>
|
||||
The <filename>PROVIDES</filename> list is only part of the solution
|
||||
for figuring out a target's recipes.
|
||||
Because targets might have multiple providers, BitBake needs
|
||||
to prioritize providers by determining provider preferences.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
A common example in which a target has multiple providers
|
||||
is "virtual/kernel", which is on the
|
||||
<filename>PROVIDES</filename> list for each kernel recipe.
|
||||
Each machine often selects the best kernel provider by using a
|
||||
line similar to the following in the machine configuration file:
|
||||
<literallayout class='monospaced'>
|
||||
PREFERRED_PROVIDER_virtual/kernel = "linux-yocto"
|
||||
</literallayout>
|
||||
The default
|
||||
<link linkend='var-PREFERRED_PROVIDER'><filename>PREFERRED_PROVIDER</filename></link>
|
||||
is the provider with the same name as the target.
|
||||
BitBake iterates through each target it needs to build and
|
||||
resolves them and their dependencies using this process.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Understanding how providers are chosen is made complicated by the fact
|
||||
that multiple versions might exist for a given provider.
|
||||
BitBake defaults to the highest version of a provider.
|
||||
Version comparisons are made using the same method as Debian.
|
||||
You can use the
|
||||
<link linkend='var-PREFERRED_VERSION'><filename>PREFERRED_VERSION</filename></link>
|
||||
variable to specify a particular version.
|
||||
You can influence the order by using the
|
||||
<link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
|
||||
variable.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
By default, files have a preference of "0".
|
||||
Setting <filename>DEFAULT_PREFERENCE</filename> to "-1" makes the
|
||||
recipe unlikely to be used unless it is explicitly referenced.
|
||||
Setting <filename>DEFAULT_PREFERENCE</filename> to "1" makes it
|
||||
likely the recipe is used.
|
||||
<filename>PREFERRED_VERSION</filename> overrides any
|
||||
<filename>DEFAULT_PREFERENCE</filename> setting.
|
||||
<filename>DEFAULT_PREFERENCE</filename> is often used to mark newer
|
||||
and more experimental recipe versions until they have undergone
|
||||
sufficient testing to be considered stable.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
When there are multiple “versions” of a given recipe,
|
||||
BitBake defaults to selecting the most recent
|
||||
version, unless otherwise specified.
|
||||
If the recipe in question has a
|
||||
<link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
|
||||
set lower than the other recipes (default is 0), then
|
||||
it will not be selected.
|
||||
This allows the person or persons maintaining
|
||||
the repository of recipe files to specify
|
||||
their preference for the default selected version.
|
||||
Additionally, the user can specify their preferred version.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
If the first recipe is named <filename>a_1.1.bb</filename>, then the
|
||||
<link linkend='var-PN'><filename>PN</filename></link> variable
|
||||
will be set to “a”, and the
|
||||
<link linkend='var-PV'><filename>PV</filename></link>
|
||||
variable will be set to 1.1.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Thus, if a recipe named <filename>a_1.2.bb</filename> exists, BitBake
|
||||
will choose 1.2 by default.
|
||||
However, if you define the following variable in a
|
||||
<filename>.conf</filename> file that BitBake parses, you
|
||||
can change that preference:
|
||||
<literallayout class='monospaced'>
|
||||
PREFERRED_VERSION_a = "1.1"
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
<note>
|
||||
<para>
|
||||
It is common for a recipe to provide two versions -- a stable,
|
||||
numbered (and preferred) version, and a version that is
|
||||
automatically checked out from a source code repository that
|
||||
is considered more "bleeding edge" but can be selected only
|
||||
explicitly.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
For example, in the OpenEmbedded codebase, there is a standard,
|
||||
versioned recipe file for BusyBox,
|
||||
<filename>busybox_1.22.1.bb</filename>,
|
||||
but there is also a Git-based version,
|
||||
<filename>busybox_git.bb</filename>, which explicitly contains the line
|
||||
<literallayout class='monospaced'>
|
||||
DEFAULT_PREFERENCE = "-1"
|
||||
</literallayout>
|
||||
to ensure that the numbered, stable version is always preferred
|
||||
unless the developer selects otherwise.
|
||||
</para>
|
||||
</note>
|
||||
</section>
|
||||
|
||||
<section id='bb-bitbake-dependencies'>
|
||||
<title>Dependencies</title>
|
||||
|
||||
<para>
|
||||
Each target BitBake builds consists of multiple tasks such as
|
||||
<filename>fetch</filename>, <filename>unpack</filename>,
|
||||
<filename>patch</filename>, <filename>configure</filename>,
|
||||
and <filename>compile</filename>.
|
||||
For best performance on multi-core systems, BitBake considers each
|
||||
task as an independent
|
||||
entity with its own set of dependencies.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Dependencies are defined through several variables.
|
||||
You can find information about variables BitBake uses in
|
||||
the <link linkend='ref-variables-glos'>Variables Glossary</link>
|
||||
near the end of this manual.
|
||||
At a basic level, it is sufficient to know that BitBake uses the
|
||||
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link> and
|
||||
<link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link> variables when
|
||||
calculating dependencies.
|
||||
</para>
|
||||
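<para>
As a brief, hypothetical illustration, a recipe that needs
<filename>zlib</filename> present at build time and
<filename>bash</filename> present on the target at runtime might state
the following (the <filename>RDEPENDS_${PN}</filename> override form
follows OpenEmbedded convention; see the variable glossary for exact
semantics):
<literallayout class='monospaced'>
DEPENDS = "zlib"
RDEPENDS_${PN} = "bash"
</literallayout>
</para>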
|
||||
<para>
|
||||
For more information on how BitBake handles dependencies, see the
|
||||
"<link linkend='dependencies'>Dependencies</link>" section.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='ref-bitbake-tasklist'>
|
||||
<title>The Task List</title>
|
||||
|
||||
<para>
|
||||
Based on the generated list of providers and the dependency information,
|
||||
BitBake can now calculate exactly what tasks it needs to run and in what
|
||||
order it needs to run them.
|
||||
The
|
||||
"<link linkend='executing-tasks'>Executing Tasks</link>" section has more
|
||||
information on how BitBake chooses which task to execute next.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The build now starts with BitBake forking off threads up to the limit set in the
|
||||
<link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
|
||||
variable.
|
||||
BitBake continues to fork threads as long as there are tasks ready to run,
|
||||
those tasks have all their dependencies met, and the thread threshold has not been
|
||||
exceeded.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
It is worth noting that you can greatly speed up the build time by properly setting
|
||||
the <filename>BB_NUMBER_THREADS</filename> variable.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
As each task completes, a timestamp is written to the directory specified by the
|
||||
<link linkend='var-STAMP'><filename>STAMP</filename></link> variable.
|
||||
On subsequent runs, BitBake looks in the build directory within
|
||||
<filename>tmp/stamps</filename> and does not rerun
|
||||
tasks that are already completed unless a timestamp is found to be invalid.
|
||||
Currently, invalid timestamps are only considered on a per
|
||||
recipe file basis.
|
||||
So, for example, if the configure stamp has a timestamp greater than the
|
||||
compile timestamp for a given target, then the compile task would rerun.
|
||||
Running the compile task again, however, has no effect on other providers
|
||||
that depend on that target.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The exact format of the stamps is partly configurable.
|
||||
In modern versions of BitBake, a hash is appended to the
|
||||
stamp so that if the configuration changes, the stamp becomes
|
||||
invalid and the task is automatically rerun.
|
||||
This hash, or signature used, is governed by the signature policy
|
||||
that is configured (see the
|
||||
"<link linkend='checksums'>Checksums (Signatures)</link>"
|
||||
section for information).
|
||||
It is also possible to append extra metadata to the stamp using
|
||||
the <filename>[stamp-extra-info]</filename> task flag.
|
||||
For example, OpenEmbedded uses this flag to make some tasks machine-specific.
|
||||
</para>
|
||||
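<para>
For example, a recipe or class could fold the machine name into a task's
stamp with a line such as the following (a sketch only; both the task
name and the choice of metadata are illustrative):
<literallayout class='monospaced'>
do_deploy[stamp-extra-info] = "${MACHINE}"
</literallayout>
</para>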
|
||||
<note>
|
||||
Some tasks are marked as "nostamp" tasks.
|
||||
No timestamp file is created when these tasks are run.
|
||||
Consequently, "nostamp" tasks are always rerun.
|
||||
</note>
|
||||
|
||||
<para>
|
||||
For more information on tasks, see the
|
||||
"<link linkend='tasks'>Tasks</link>" section.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='executing-tasks'>
|
||||
<title>Executing Tasks</title>
|
||||
|
||||
<para>
|
||||
Tasks can be either a shell task or a Python task.
|
||||
For shell tasks, BitBake writes a shell script to
|
||||
<filename>${</filename><link linkend='var-T'><filename>T</filename></link><filename>}/run.do_taskname.pid</filename>
|
||||
and then executes the script.
|
||||
The generated shell script contains all the exported variables,
|
||||
and the shell functions with all variables expanded.
|
||||
Output from the shell script goes to the file
|
||||
<filename>${T}/log.do_taskname.pid</filename>.
|
||||
Looking at the expanded shell functions in the run file and
|
||||
the output in the log files is a useful debugging technique.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
For Python tasks, BitBake executes the task internally and logs
|
||||
information to the controlling terminal.
|
||||
Future versions of BitBake will write the functions to files
|
||||
similar to the way shell tasks are handled.
|
||||
Logging will be handled in a way similar to shell tasks as well.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The order in which BitBake runs the tasks is controlled by its
|
||||
task scheduler.
|
||||
It is possible to configure the scheduler and define custom
|
||||
implementations for specific use cases.
|
||||
For more information, see these variables that control the
|
||||
behavior:
|
||||
<itemizedlist>
|
||||
<listitem><para>
|
||||
<link linkend='var-BB_SCHEDULER'><filename>BB_SCHEDULER</filename></link>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
<link linkend='var-BB_SCHEDULERS'><filename>BB_SCHEDULERS</filename></link>
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
It is possible to have functions run before and after a task's main
|
||||
function.
|
||||
This is done using the <filename>[prefuncs]</filename>
|
||||
and <filename>[postfuncs]</filename> flags of the task
|
||||
that lists the functions to run.
|
||||
</para>
|
||||
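<para>
For example, hypothetical helper functions could be attached around
<filename>do_compile</filename> as follows (the function names here are
placeholders for functions defined elsewhere in the metadata):
<literallayout class='monospaced'>
do_compile[prefuncs] += "my_check_sources"
do_compile[postfuncs] += "my_report_results"
</literallayout>
</para>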
</section>
|
||||
|
||||
<section id='checksums'>
|
||||
<title>Checksums (Signatures)</title>
|
||||
|
||||
<para>
|
||||
A checksum is a unique signature of a task's inputs.
|
||||
The signature of a task can be used to determine if a task
|
||||
needs to be run.
|
||||
Because it is a change in a task's inputs that triggers running
|
||||
the task, BitBake needs to detect all the inputs to a given task.
|
||||
For shell tasks, this turns out to be fairly easy because
|
||||
BitBake generates a "run" shell script for each task and
|
||||
it is possible to create a checksum that gives you a good idea of when
|
||||
the task's data changes.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
To complicate the problem, some things should not be included in
|
||||
the checksum.
|
||||
First, there is the actual specific build path of a given task -
|
||||
the working directory.
|
||||
It does not matter if the working directory changes because it should not
|
||||
affect the output for target packages.
|
||||
The simplistic approach for excluding the working directory is to set
|
||||
it to some fixed value and create the checksum for the "run" script.
|
||||
BitBake goes one step better and uses the
|
||||
<link linkend='var-BB_HASHBASE_WHITELIST'><filename>BB_HASHBASE_WHITELIST</filename></link>
|
||||
variable to define a list of variables that should never be included
|
||||
when generating the signatures.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Another problem results from the "run" scripts containing functions that
|
||||
might or might not get called.
|
||||
The incremental build solution contains code that figures out dependencies
|
||||
between shell functions.
|
||||
This code is used to prune the "run" scripts down to the minimum set,
|
||||
thereby alleviating this problem and making the "run" scripts much more
|
||||
readable as a bonus.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
So far we have solutions for shell scripts.
|
||||
What about Python tasks?
|
||||
The same approach applies even though these tasks are more difficult.
|
||||
The process needs to figure out what variables a Python function accesses
|
||||
and what functions it calls.
|
||||
Again, the incremental build solution contains code that first figures out
|
||||
the variable and function dependencies, and then creates a checksum for the data
|
||||
used as the input to the task.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Like the working directory case, situations exist where dependencies
|
||||
should be ignored.
|
||||
For these cases, you can instruct the build process to ignore a dependency
|
||||
by using a line like the following:
|
||||
<literallayout class='monospaced'>
|
||||
PACKAGE_ARCHS[vardepsexclude] = "MACHINE"
|
||||
</literallayout>
|
||||
This example ensures that the <filename>PACKAGE_ARCHS</filename> variable does not
|
||||
depend on the value of <filename>MACHINE</filename>, even if it does reference it.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Equally, there are cases where we need to add dependencies BitBake
|
||||
is not able to find.
|
||||
You can accomplish this by using a line like the following:
|
||||
<literallayout class='monospaced'>
|
||||
PACKAGE_ARCHS[vardeps] = "MACHINE"
|
||||
</literallayout>
|
||||
This example explicitly adds the <filename>MACHINE</filename> variable as a
|
||||
dependency for <filename>PACKAGE_ARCHS</filename>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Consider a case with in-line Python, for example, where BitBake is not
|
||||
able to figure out dependencies.
|
||||
When running in debug mode (i.e. using <filename>-DDD</filename>), BitBake
|
||||
produces output when it discovers something for which it cannot figure out
|
||||
dependencies.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Thus far, this section has limited discussion to the direct inputs into a task.
|
||||
Information based on direct inputs is referred to as the "basehash" in the
|
||||
code.
|
||||
However, there is still the question of a task's indirect inputs - the
|
||||
things that were already built and present in the build directory.
|
||||
The checksum (or signature) for a particular task needs to add the hashes
|
||||
of all the tasks on which the particular task depends.
|
||||
Choosing which dependencies to add is a policy decision.
|
||||
However, the effect is to generate a master checksum that combines the basehash
|
||||
and the hashes of the task's dependencies.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
At the code level, there are a variety of ways both the basehash and the
|
||||
dependent task hashes can be influenced.
|
||||
Within the BitBake configuration file, we can give BitBake some extra information
|
||||
to help it construct the basehash.
|
||||
The following statement effectively results in a list of global variable
|
||||
dependency excludes - variables never included in any checksum.
|
||||
This example uses variables from OpenEmbedded to help illustrate
|
||||
the concept:
|
||||
<literallayout class='monospaced'>
|
||||
BB_HASHBASE_WHITELIST ?= "TMPDIR FILE PATH PWD BB_TASKHASH BBPATH DL_DIR \
|
||||
SSTATE_DIR THISDIR FILESEXTRAPATHS FILE_DIRNAME HOME LOGNAME SHELL TERM \
|
||||
USER FILESPATH STAGING_DIR_HOST STAGING_DIR_TARGET COREBASE PRSERV_HOST \
|
||||
PRSERV_DUMPDIR PRSERV_DUMPFILE PRSERV_LOCKDOWN PARALLEL_MAKE \
|
||||
CCACHE_DIR EXTERNAL_TOOLCHAIN CCACHE CCACHE_DISABLE LICENSE_PATH SDKPKGSUFFIX"
|
||||
</literallayout>
|
||||
The previous example excludes the work directory, which is part of
|
||||
<filename>TMPDIR</filename>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The rules for deciding which hashes of dependent tasks to include through
|
||||
dependency chains are more complex and are generally accomplished with a
|
||||
Python function.
|
||||
The code in <filename>meta/lib/oe/sstatesig.py</filename> shows two examples
|
||||
of this and also illustrates how you can insert your own policy into the system
|
||||
if so desired.
|
||||
This file defines the two basic signature generators OpenEmbedded Core
|
||||
uses: "OEBasic" and "OEBasicHash".
|
||||
By default, there is a dummy "noop" signature handler enabled in BitBake.
|
||||
This means that behavior is unchanged from previous versions.
|
||||
<filename>OE-Core</filename> uses the "OEBasicHash" signature handler by default
|
||||
through this setting in the <filename>bitbake.conf</filename> file:
|
||||
<literallayout class='monospaced'>
|
||||
BB_SIGNATURE_HANDLER ?= "OEBasicHash"
|
||||
</literallayout>
|
||||
The "OEBasicHash" <filename>BB_SIGNATURE_HANDLER</filename> is the same as the
|
||||
"OEBasic" version but adds the task hash to the stamp files.
|
||||
This results in any metadata change that changes the task hash, automatically
|
||||
causing the task to be run again.
|
||||
This removes the need to bump
|
||||
<link linkend='var-PR'><filename>PR</filename></link>
|
||||
values, and changes to metadata automatically ripple across the build.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
It is also worth noting that the end result of these signature generators is to
|
||||
make some dependency and hash information available to the build.
|
||||
This information includes:
|
||||
<itemizedlist>
|
||||
<listitem><para><filename>BB_BASEHASH_task-</filename><replaceable>taskname</replaceable>:
|
||||
The base hashes for each task in the recipe.
|
||||
</para></listitem>
|
||||
<listitem><para><filename>BB_BASEHASH_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
|
||||
The base hashes for each dependent task.
|
||||
</para></listitem>
|
||||
<listitem><para><filename>BBHASHDEPS_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
|
||||
The task dependencies for each task.
|
||||
</para></listitem>
|
||||
<listitem><para><filename>BB_TASKHASH</filename>:
|
||||
The hash of the currently running task.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
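<para>
As a small illustration of how this information can be consumed from the
metadata (the task name is hypothetical), a Python task can simply read
the variables from the datastore:
<literallayout class='monospaced'>
python do_print_taskhash () {
    # Log the hash of the currently running task.
    bb.note("Task hash: %s" % d.getVar("BB_TASKHASH", True))
}
addtask print_taskhash after do_compile before do_build
</literallayout>
</para>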
|
||||
<para>
|
||||
It is worth noting that BitBake's "-S" option lets you
|
||||
debug BitBake's processing of signatures.
|
||||
The options passed to -S allow different debugging modes
|
||||
to be used, either using BitBake's own debug functions
|
||||
or possibly those defined in the metadata/signature handler
|
||||
itself.
|
||||
The simplest parameter to pass is "none", which causes a
|
||||
set of signature information to be written out into
|
||||
<filename>STAMPS_DIR</filename>
|
||||
corresponding to the targets specified.
|
||||
The other currently available parameter is "printdiff",
|
||||
which causes BitBake to try to establish the closest
|
||||
signature match it can (e.g. in the sstate cache) and then
|
||||
run <filename>bitbake-diffsigs</filename> over the matches
|
||||
to determine the stamps and delta where these two
|
||||
stamp trees diverge.
|
||||
<note>
|
||||
It is likely that future versions of BitBake will
|
||||
provide other signature handlers triggered through
|
||||
additional "-S" parameters.
|
||||
</note>
|
||||
</para>
|
||||
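<para>
For example, the following commands write out signature data and attempt
a signature comparison, respectively, for a hypothetical target named
<filename>mytarget</filename> (the target name is a placeholder):
<literallayout class='monospaced'>
$ bitbake -S none mytarget
$ bitbake -S printdiff mytarget
</literallayout>
</para>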
|
||||
<para>
|
||||
You can find more information on checksum metadata in the
|
||||
"<link linkend='task-checksums-and-setscene'>Task Checksums and Setscene</link>"
|
||||
section.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='setscene'>
|
||||
<title>Setscene</title>
|
||||
|
||||
<para>
|
||||
The setscene process enables BitBake to handle "pre-built" artifacts.
|
||||
The ability to handle and reuse these artifacts allows BitBake
|
||||
the luxury of not having to build something from scratch every time.
|
||||
Instead, BitBake can use, when possible, existing build artifacts.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
BitBake needs to have reliable data indicating whether or not an
|
||||
artifact is compatible.
|
||||
Signatures, described in the previous section, provide an ideal
|
||||
way of representing whether an artifact is compatible.
|
||||
If a signature is the same, an object can be reused.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
If an object can be reused, the problem then becomes how to
|
||||
replace a given task or set of tasks with the pre-built artifact.
|
||||
BitBake solves the problem with the "setscene" process.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
When BitBake is asked to build a given target, before building anything,
|
||||
it first asks whether cached information is available for any of the
|
||||
targets it's building, or any of the intermediate targets.
|
||||
If cached information is available, BitBake uses this information instead of
|
||||
running the main tasks.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
BitBake first calls the function defined by the
|
||||
<link linkend='var-BB_HASHCHECK_FUNCTION'><filename>BB_HASHCHECK_FUNCTION</filename></link>
|
||||
variable with a list of tasks and corresponding
|
||||
hashes it wants to build.
|
||||
This function is designed to be fast and returns a list
|
||||
of the tasks for which it believes it can obtain artifacts.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Next, for each of the tasks that were returned as possibilities,
|
||||
BitBake executes a setscene version of the task that the possible
|
||||
artifact covers.
|
||||
Setscene versions of a task have the string "_setscene" appended to the
|
||||
task name.
|
||||
So, for example, the task with the name <filename>xxx</filename> has
|
||||
a setscene task named <filename>xxx_setscene</filename>.
|
||||
The setscene version of the task executes and provides the necessary
|
||||
artifacts returning either success or failure.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
As previously mentioned, an artifact can cover more than one task.
|
||||
For example, it is pointless to obtain a compiler if you
|
||||
already have the compiled binary.
|
||||
To handle this, BitBake calls the
|
||||
<link linkend='var-BB_SETSCENE_DEPVALID'><filename>BB_SETSCENE_DEPVALID</filename></link>
|
||||
function for each successful setscene task to know whether or not it needs
|
||||
to obtain the dependencies of that task.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Finally, after all the setscene tasks have executed, BitBake calls the
|
||||
function listed in
|
||||
<link linkend='var-BB_SETSCENE_VERIFY_FUNCTION'><filename>BB_SETSCENE_VERIFY_FUNCTION</filename></link>
|
||||
with the list of tasks BitBake thinks have been "covered".
|
||||
The metadata can then ensure that this list is correct and can
|
||||
inform BitBake that it wants specific tasks to be run regardless
|
||||
of the setscene result.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
You can find more information on setscene metadata in the
|
||||
"<link linkend='task-checksums-and-setscene'>Task Checksums and Setscene</link>"
|
||||
section.
|
||||
</para>
|
||||
</section>
|
||||
</chapter>
|
||||
@@ -1,824 +0,0 @@
|
||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
|
||||
<chapter>
|
||||
<title>File Download Support</title>
|
||||
|
||||
<para>
|
||||
BitBake's fetch module is a standalone piece of library code
|
||||
that deals with the intricacies of downloading source code
|
||||
and files from remote systems.
|
||||
Fetching source code is one of the cornerstones of building software.
|
||||
As such, this module forms an important part of BitBake.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The current fetch module is called "fetch2" and refers to the
|
||||
fact that it is the second major version of the API.
|
||||
The original version is obsolete and has been removed from the codebase.
|
||||
Thus, in all cases, "fetch" refers to "fetch2" in this
|
||||
manual.
|
||||
</para>
|
||||
|
||||
<section id='the-download-fetch'>
|
||||
<title>The Download (Fetch)</title>
|
||||
|
||||
<para>
|
||||
BitBake takes several steps when fetching source code or files.
|
||||
The fetcher codebase deals with two distinct processes in order:
|
||||
obtaining the files from somewhere (cached or otherwise)
|
||||
and then unpacking those files into a specific location and
|
||||
perhaps in a specific way.
|
||||
Getting and unpacking the files is often optionally followed
|
||||
by patching.
|
||||
Patching, however, is not covered by this module.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The code to execute the first part of this process, a fetch,
|
||||
looks something like the following:
|
||||
<literallayout class='monospaced'>
|
||||
src_uri = (d.getVar('SRC_URI', True) or "").split()
|
||||
fetcher = bb.fetch2.Fetch(src_uri, d)
|
||||
fetcher.download()
|
||||
</literallayout>
|
||||
This code sets up an instance of the fetch class.
|
||||
The instance uses a space-separated list of URLs from the
|
||||
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
|
||||
variable and then calls the <filename>download</filename>
|
||||
method to download the files.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The instantiation of the fetch class is usually followed by:
|
||||
<literallayout class='monospaced'>
|
||||
rootdir = l.getVar('WORKDIR', True)
|
||||
fetcher.unpack(rootdir)
|
||||
</literallayout>
|
||||
This code unpacks the downloaded files to the
|
||||
directory specified by <filename>WORKDIR</filename>.
|
||||
<note>
|
||||
For convenience, the naming in these examples matches
|
||||
the variables used by OpenEmbedded.
|
||||
If you want to see the above code in action, examine
|
||||
the OpenEmbedded class file <filename>base.bbclass</filename>.
|
||||
</note>
|
||||
The <filename>SRC_URI</filename> and <filename>WORKDIR</filename>
|
||||
variables are not hardcoded into the fetcher, since those fetcher
|
||||
methods can be (and are) called with different variable names.
|
||||
In OpenEmbedded for example, the shared state (sstate) code uses
|
||||
the fetch module to fetch the sstate files.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
When the <filename>download()</filename> method is called,
|
||||
BitBake tries to resolve the URLs by looking for source files
|
||||
in a specific search order:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>Pre-mirror Sites:</emphasis>
|
||||
BitBake first uses pre-mirrors to try and find source files.
|
||||
These locations are defined using the
|
||||
<link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>
|
||||
variable.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Source URI:</emphasis>
|
||||
If pre-mirrors fail, BitBake uses the original URL (e.g. from
|
||||
<filename>SRC_URI</filename>).
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Mirror Sites:</emphasis>
|
||||
If fetch failures occur, BitBake next uses mirror locations as
|
||||
defined by the
|
||||
<link linkend='var-MIRRORS'><filename>MIRRORS</filename></link>
|
||||
variable.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
For each URL passed to the fetcher, the fetcher
|
||||
calls the submodule that handles that particular URL type.
|
||||
This behavior can be the source of some confusion when you
|
||||
are providing URLs for the <filename>SRC_URI</filename>
|
||||
variable.
|
||||
Consider the following two URLs:
|
||||
<literallayout class='monospaced'>
|
||||
http://git.yoctoproject.org/git/poky;protocol=git
|
||||
git://git.yoctoproject.org/git/poky;protocol=http
|
||||
</literallayout>
|
||||
In the former case, the URL is passed to the
|
||||
<filename>wget</filename> fetcher, which does not
|
||||
understand "git".
|
||||
Therefore, the latter case is the correct form since the
|
||||
Git fetcher does know how to use HTTP as a transport.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Here are some examples that show commonly used mirror
|
||||
definitions:
|
||||
<literallayout class='monospaced'>
|
||||
PREMIRRORS ?= "\
|
||||
bzr://.*/.* http://somemirror.org/sources/ \n \
|
||||
cvs://.*/.* http://somemirror.org/sources/ \n \
|
||||
git://.*/.* http://somemirror.org/sources/ \n \
|
||||
hg://.*/.* http://somemirror.org/sources/ \n \
|
||||
osc://.*/.* http://somemirror.org/sources/ \n \
|
||||
p4://.*/.* http://somemirror.org/sources/ \n \
|
||||
svn://.*/.* http://somemirror.org/sources/ \n"
|
||||
|
||||
MIRRORS =+ "\
|
||||
ftp://.*/.* http://somemirror.org/sources/ \n \
|
||||
http://.*/.* http://somemirror.org/sources/ \n \
|
||||
https://.*/.* http://somemirror.org/sources/ \n"
|
||||
</literallayout>
|
||||
It is useful to note that BitBake supports
|
||||
cross-URLs.
|
||||
It is possible to mirror a Git repository on an HTTP
|
||||
server as a tarball.
|
||||
This is what the <filename>git://</filename> mapping in
|
||||
the previous example does.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Since network accesses are slow, BitBake maintains a
|
||||
cache of files downloaded from the network.
|
||||
Any source files that are not local (i.e.
|
||||
downloaded from the Internet) are placed into the download
|
||||
directory, which is specified by the
|
||||
<link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
|
||||
variable.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
File integrity is of key importance for reproducing builds.
|
||||
For non-local archive downloads, the fetcher code can verify
|
||||
SHA-256 and MD5 checksums to ensure the archives have been
|
||||
downloaded correctly.
|
||||
You can specify these checksums by using the
|
||||
<filename>SRC_URI</filename> variable with the appropriate
|
||||
varflags as follows:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI[md5sum] = "<replaceable>value</replaceable>"
|
||||
SRC_URI[sha256sum] = "<replaceable>value</replaceable>"
|
||||
</literallayout>
|
||||
You can also specify the checksums as parameters on the
|
||||
<filename>SRC_URI</filename> as shown below:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "http://example.com/foobar.tar.bz2;md5sum=4a8e0f237e961fd7785d19d07fdb994d"
|
||||
</literallayout>
|
||||
If multiple URIs exist, you can specify the checksums either
|
||||
directly as in the previous example, or you can name the URLs.
|
||||
The following syntax shows how you name the URIs:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "http://example.com/foobar.tar.bz2;name=foo"
|
||||
SRC_URI[foo.md5sum] = "4a8e0f237e961fd7785d19d07fdb994d"
|
||||
</literallayout>
|
||||
After a file has been downloaded and has had its checksum checked,
|
||||
a ".done" stamp is placed in <filename>DL_DIR</filename>.
|
||||
BitBake uses this stamp during subsequent builds to avoid
|
||||
downloading or comparing a checksum for the file again.
|
||||
<note>
|
||||
It is assumed that local storage is safe from data corruption.
|
||||
If this were not the case, there would be bigger issues to worry about.
|
||||
</note>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
If
|
||||
<link linkend='var-BB_STRICT_CHECKSUM'><filename>BB_STRICT_CHECKSUM</filename></link>
|
||||
is set, any download without a checksum triggers an
|
||||
error message.
|
||||
The
|
||||
<link linkend='var-BB_NO_NETWORK'><filename>BB_NO_NETWORK</filename></link>
|
||||
variable can be used to make any attempted network access a fatal
|
||||
error, which is useful for checking that mirrors are complete
|
||||
as well as other things.
|
||||
</para>
|
||||
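<para>
For example, a configuration that insists on checksums and forbids all
network access might contain lines such as the following (shown as a
sketch; set these only when every needed source is already mirrored or
present in <filename>DL_DIR</filename>):
<literallayout class='monospaced'>
BB_STRICT_CHECKSUM = "1"
BB_NO_NETWORK = "1"
</literallayout>
</para>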
</section>
|
||||
|
||||
<section id='bb-the-unpack'>
|
||||
<title>The Unpack</title>
|
||||
|
||||
<para>
|
||||
The unpack process usually immediately follows the download.
|
||||
For all URLs except Git URLs, BitBake uses the common
|
||||
<filename>unpack</filename> method.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
A number of parameters exist that you can specify within the
|
||||
URL to govern the behavior of the unpack stage:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>unpack:</emphasis>
|
||||
Controls whether the URL components are unpacked.
|
||||
If set to "1", which is the default, the components
|
||||
are unpacked.
|
||||
If set to "0", the unpack stage leaves the file alone.
|
||||
This parameter is useful when you want an archive to be
|
||||
copied in and not be unpacked.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>dos:</emphasis>
|
||||
Applies to <filename>.zip</filename> and
|
||||
<filename>.jar</filename> files and specifies whether to
|
||||
use DOS line ending conversion on text files.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>basepath:</emphasis>
|
||||
Instructs the unpack stage to strip the specified
|
||||
directories from the source path when unpacking.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>subdir:</emphasis>
|
||||
Unpacks the specific URL to the specified subdirectory
|
||||
within the root directory.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
The unpack call automatically decompresses and extracts files
|
||||
with ".Z", ".z", ".gz", ".xz", ".zip", ".jar", ".ipk", ".rpm".
|
||||
".srpm", ".deb" and ".bz2" extensions as well as various combinations
|
||||
of tarball extensions.
|
||||
</para>
|
||||
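<para>
For example, the parameters above are passed directly on the URL as
shown in the following sketch (the URLs and subdirectory name are
hypothetical):
<literallayout class='monospaced'>
SRC_URI = "http://example.com/prebuilt-blob.tar.gz;unpack=0"
SRC_URI = "http://example.com/sources.tar.gz;subdir=mysources"
</literallayout>
</para>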
|
||||
<para>
|
||||
As mentioned, the Git fetcher has its own unpack method that
|
||||
is optimized to work with Git trees.
|
||||
Basically, this method works by cloning the tree into the final
|
||||
directory.
|
||||
The process is completed using references so that there is
|
||||
only one central copy of the Git metadata needed.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='bb-fetchers'>
|
||||
<title>Fetchers</title>
|
||||
|
||||
<para>
|
||||
As mentioned earlier, the URL prefix determines which
|
||||
fetcher submodule BitBake uses.
|
||||
Each submodule can support different URL parameters,
|
||||
which are described in the following sections.
|
||||
</para>
|
||||
|
||||
<section id='local-file-fetcher'>
|
||||
<title>Local file fetcher (<filename>file://</filename>)</title>
|
||||
|
||||
<para>
|
||||
This submodule handles URLs that begin with
|
||||
<filename>file://</filename>.
|
||||
The filename you specify within the URL can be
|
||||
either an absolute or relative path to a file.
|
||||
If the filename is relative, the contents of the
|
||||
<link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>
|
||||
variable are used in the same way
|
||||
<filename>PATH</filename> is used to find executables.
|
||||
Failing that,
|
||||
<link linkend='var-FILESDIR'><filename>FILESDIR</filename></link>
|
||||
is used to find the appropriate relative file.
|
||||
<note>
|
||||
<filename>FILESDIR</filename> is deprecated and can
|
||||
be replaced with <filename>FILESPATH</filename>.
|
||||
Because <filename>FILESDIR</filename> is likely to be
|
||||
removed, you should not use this variable in any new code.
|
||||
</note>
|
||||
If the file cannot be found, it is assumed that it is available in
|
||||
<link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
|
||||
by the time the <filename>download()</filename> method is called.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
If you specify a directory, the entire directory is
|
||||
unpacked.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Here are a couple of example URLs, the first relative and
|
||||
the second absolute:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "file://relativefile.patch"
|
||||
SRC_URI = "file:///Users/ich/very_important_software"
|
||||
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='http-ftp-fetcher'>
|
||||
<title>HTTP/FTP wget fetcher (<filename>http://</filename>, <filename>ftp://</filename>, <filename>https://</filename>)</title>
|
||||
|
||||
<para>
|
||||
This fetcher obtains files from web and FTP servers.
|
||||
Internally, the fetcher uses the wget utility.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The executable and parameters used are specified by the
|
||||
<filename>FETCHCMD_wget</filename> variable, which defaults
|
||||
to sensible values.
|
||||
The fetcher supports a parameter "downloadfilename" that
|
||||
allows the name of the downloaded file to be specified.
|
||||
Specifying the name of the downloaded file is useful
|
||||
for avoiding collisions in
|
||||
<link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
|
||||
when dealing with multiple files that have the same name.
|
||||
</para>
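<para>
    For example, the following hypothetical URL (the host and path are
    placeholders) would otherwise be saved as
    <filename>v1.0.tar.gz</filename> in <filename>DL_DIR</filename>,
    so "downloadfilename" is used to store it under a more unique name:
    <literallayout class='monospaced'>
SRC_URI = "http://example.com/myproject/archive/v1.0.tar.gz;downloadfilename=myproject-1.0.tar.gz"
    </literallayout>
</para>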
<para>
|
||||
Some example URLs are as follows:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "http://oe.handhelds.org/not_there.aac"
|
||||
SRC_URI = "ftp://oe.handhelds.org/not_there_as_well.aac"
|
||||
SRC_URI = "ftp://you@oe.handhelds.org/home/you/secret.plan"
|
||||
</literallayout>
|
||||
</para>
|
||||
<note>
|
||||
Because URL parameters are delimited by semi-colons, this can
|
||||
introduce ambiguity when parsing URLs that also contain semi-colons,
|
||||
for example:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git;a=snapshot;h=a5dd47"
|
||||
</literallayout>
|
||||
Such URLs should be modified by replacing semi-colons with '&amp;' characters:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&a=snapshot&h=a5dd47"
|
||||
</literallayout>
|
||||
In most cases this should work. Treating semi-colons and '&' in queries
|
||||
identically is recommended by the World Wide Web Consortium (W3C).
|
||||
Note that due to the nature of the URL, you may have to specify the name
|
||||
of the downloaded file as well:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&a=snapshot&h=a5dd47;downloadfilename=myfile.bz2"
|
||||
</literallayout>
|
||||
</note>
|
||||
</section>
|
||||
|
||||
<section id='cvs-fetcher'>
|
||||
<title>CVS fetcher (<filename>cvs://</filename>)</title>
|
||||
|
||||
<para>
|
||||
This submodule handles checking out files from the
|
||||
CVS version control system.
|
||||
You can configure it using a number of different variables:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis><filename>FETCHCMD_cvs</filename>:</emphasis>
|
||||
The name of the executable to use when running
|
||||
the <filename>cvs</filename> command.
|
||||
This name is usually "cvs".
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>SRCDATE</filename>:</emphasis>
|
||||
The date to use when fetching the CVS source code.
|
||||
A special value of "now" causes the checkout to
|
||||
be updated on every build.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>:</emphasis>
|
||||
Specifies where a temporary checkout is saved.
|
||||
The location is often <filename>DL_DIR/cvs</filename>.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>CVS_PROXY_HOST</filename>:</emphasis>
|
||||
The name to use as a "proxy=" parameter to the
|
||||
<filename>cvs</filename> command.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>CVS_PROXY_PORT</filename>:</emphasis>
|
||||
The port number to use as a "proxyport=" parameter to
|
||||
the <filename>cvs</filename> command.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
As well as the standard username and password URL syntax,
|
||||
you can also configure the fetcher with various URL parameters:
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The supported parameters are as follows:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>"method":</emphasis>
|
||||
The protocol over which to communicate with the CVS server.
|
||||
By default, this protocol is "pserver".
|
||||
If "method" is set to "ext", BitBake examines the
|
||||
"rsh" parameter and sets <filename>CVS_RSH</filename>.
|
||||
You can use "dir" for local directories.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"module":</emphasis>
|
||||
Specifies the module to check out.
|
||||
You must supply this parameter.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"tag":</emphasis>
|
||||
Describes which CVS TAG should be used for
|
||||
the checkout.
|
||||
By default, the TAG is empty.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"date":</emphasis>
|
||||
Specifies a date.
|
||||
If no "date" is specified, the
|
||||
<link linkend='var-SRCDATE'><filename>SRCDATE</filename></link>
|
||||
of the configuration is used to checkout a specific date.
|
||||
The special value of "now" causes the checkout to be
|
||||
updated on every build.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"localdir":</emphasis>
|
||||
Used to rename the module.
|
||||
Effectively, you are renaming the output directory
|
||||
to which the module is unpacked.
|
||||
You are forcing the module into a special
|
||||
directory relative to
|
||||
<link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"rsh"</emphasis>
|
||||
Used in conjunction with the "method" parameter.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"scmdata":</emphasis>
|
||||
Causes the CVS metadata to be maintained in the tarball
|
||||
the fetcher creates when set to "keep".
|
||||
The tarball is expanded into the work directory.
|
||||
By default, the CVS metadata is removed.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"fullpath":</emphasis>
|
||||
Controls whether the resulting checkout is at the
|
||||
module level, which is the default, or is at deeper
|
||||
paths.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"norecurse":</emphasis>
|
||||
Causes the fetcher to check out only the specified
directory, without recursing into any subdirectories.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"port":</emphasis>
|
||||
The port to use when connecting to the CVS server.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
Some example URLs are as follows:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "cvs://CVSROOT;module=mymodule;tag=some-version;method=ext"
|
||||
SRC_URI = "cvs://CVSROOT;module=mymodule;date=20060126;localdir=usethat"
|
||||
</literallayout>
|
||||
</para>
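<para>
    The "rsh" parameter mentioned above is only consulted when "method"
    is set to "ext".
    A hypothetical URL combining the two (the server and module names
    are placeholders) might look as follows:
    <literallayout class='monospaced'>
SRC_URI = "cvs://anonymous@cvs.example.org/cvsroot;module=mymodule;method=ext;rsh=ssh"
    </literallayout>
</para>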
</section>
|
||||
|
||||
<section id='svn-fetcher'>
|
||||
<title>Subversion (SVN) Fetcher (<filename>svn://</filename>)</title>
|
||||
|
||||
<para>
|
||||
This fetcher submodule fetches code from the
|
||||
Subversion source control system.
|
||||
The executable used is specified by
|
||||
<filename>FETCHCMD_svn</filename>, which defaults
|
||||
to "svn".
|
||||
The fetcher's temporary working directory is set by
|
||||
<link linkend='var-SVNDIR'><filename>SVNDIR</filename></link>,
|
||||
which is usually <filename>DL_DIR/svn</filename>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The supported parameters are as follows:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>"module":</emphasis>
|
||||
The name of the svn module to checkout.
|
||||
You must provide this parameter.
|
||||
You can think of this parameter as the top-level
|
||||
directory of the repository data you want.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"protocol":</emphasis>
|
||||
The protocol to use, which defaults to "svn".
|
||||
Other options are "svn+ssh" and "rsh".
|
||||
For "rsh", the "rsh" parameter is also used.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"rev":</emphasis>
|
||||
The revision of the source code to checkout.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"date":</emphasis>
|
||||
The date of the source code to checkout.
|
||||
Checking out by a specific revision is generally much safer
than checking out by date, because revisions do not involve
timezones and are therefore more deterministic.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"scmdata":</emphasis>
|
||||
Causes the ".svn" directories to be available during
|
||||
compile-time when set to "keep".
|
||||
By default, these directories are removed.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"transportuser":</emphasis>
|
||||
When required, sets the username for the transport.
|
||||
By default, this parameter is empty.
|
||||
The transport username is different than the username
|
||||
used in the main URL, which is passed to the subversion
|
||||
command.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
Following are two examples using svn:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "svn://svn.oe.handhelds.org/svn;module=vip;proto=http;rev=667"
|
||||
SRC_URI = "svn://svn.oe.handhelds.org/svn/;module=opie;proto=svn+ssh;date=20060126"
|
||||
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='git-fetcher'>
|
||||
<title>Git Fetcher (<filename>git://</filename>)</title>
|
||||
|
||||
<para>
|
||||
This fetcher submodule fetches code from the Git
|
||||
source control system.
|
||||
The fetcher works by creating a bare clone of the
|
||||
remote into
|
||||
<link linkend='var-GITDIR'><filename>GITDIR</filename></link>,
|
||||
which is usually <filename>DL_DIR/git2</filename>.
|
||||
This bare clone is then cloned into the work directory during the
|
||||
unpack stage when a specific tree is checked out.
|
||||
This is done using alternates and by reference to
|
||||
minimize the amount of duplicate data on the disk and
|
||||
make the unpack process fast.
|
||||
The executable used can be set with
|
||||
<filename>FETCHCMD_git</filename>.
|
||||
</para>
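<para>
    Conceptually, the fetch and unpack stages behave much like the
    following simplified Git commands (the repository URL and directory
    layout are illustrative, and these are not the exact commands
    BitBake runs):
    <literallayout class='monospaced'>
# Fetch stage: one bare clone per upstream repository
git clone --bare git://example.org/myproject.git ${DL_DIR}/git2/example.org.myproject

# Unpack stage: a lightweight local clone of the bare copy into the
# work directory, sharing its object store rather than duplicating it
git clone -s ${DL_DIR}/git2/example.org.myproject ${WORKDIR}/git
    </literallayout>
</para>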
<para>
|
||||
This fetcher supports the following parameters:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>"protocol":</emphasis>
|
||||
The protocol used to fetch the files.
|
||||
The default is "git" when a hostname is set.
|
||||
If a hostname is not set, the Git protocol is "file".
|
||||
You can also use "http", "https", "ssh" and "rsync".
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"nocheckout":</emphasis>
|
||||
When set to "1", tells the fetcher not to check out
source code when unpacking.
|
||||
Set this option for the URL where there is a custom
|
||||
routine to checkout code.
|
||||
The default is "0".
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"rebaseable":</emphasis>
|
||||
Indicates that the upstream Git repository can be rebased.
|
||||
You should set this parameter to "1" if
|
||||
revisions can become detached from branches.
|
||||
In this case, a source mirror tarball is created per
revision, which is less efficient.
|
||||
Rebasing the upstream Git repository could cause the
|
||||
current revision to disappear from the upstream repository.
|
||||
This option reminds the fetcher to preserve the local cache
|
||||
carefully for future use.
|
||||
The default value for this parameter is "0".
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"nobranch":</emphasis>
|
||||
When set to "1", tells the fetcher not to check
SHA validation for the branch.
|
||||
The default is "0".
|
||||
Set this option if the recipe refers to a commit that is
valid for a tag rather than for a branch.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"bareclone":</emphasis>
|
||||
Tells the fetcher to create a bare clone of the remote in the
destination directory without checking out a working tree.
|
||||
Only the raw Git metadata is provided.
|
||||
This parameter implies the "nocheckout" parameter as well.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"branch":</emphasis>
|
||||
The branch(es) of the Git tree to clone.
|
||||
If unset, this is assumed to be "master".
|
||||
The number of branch parameters must match the number of
|
||||
name parameters.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"rev":</emphasis>
|
||||
The revision to use for the checkout.
|
||||
The default is "master".
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"tag":</emphasis>
|
||||
Specifies a tag to use for the checkout.
|
||||
To correctly resolve tags, BitBake must access the
|
||||
network.
|
||||
For that reason, tags are often not used.
|
||||
As far as Git is concerned, the "tag" parameter behaves
|
||||
effectively the same as the "rev" parameter.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"subpath":</emphasis>
|
||||
Limits the checkout to a specific subpath of the tree.
|
||||
By default, the whole tree is checked out.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>"destsuffix":</emphasis>
|
||||
The name of the path in which to place the checkout.
|
||||
By default, the path is <filename>git/</filename>.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
Here are some example URLs:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "git://git.oe.handhelds.org/git/vip.git;tag=version-1"
|
||||
SRC_URI = "git://git.oe.handhelds.org/git/vip.git;protocol=http"
|
||||
</literallayout>
|
||||
</para>
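<para>
    In a typical recipe, the Git fetcher is combined with
    <link linkend='var-SRCREV'><filename>SRCREV</filename></link> to
    pin the revision that is checked out.
    A hypothetical example (the host, path, and branch name are
    placeholders) follows:
    <literallayout class='monospaced'>
SRC_URI = "git://git.example.org/myproject.git;protocol=https;branch=stable"
SRCREV = "${AUTOREV}"
    </literallayout>
    Replacing "${AUTOREV}" with a specific commit hash makes the build
    reproducible.
</para>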
</section>
|
||||
|
||||
<section id='gitsm-fetcher'>
|
||||
<title>Git Submodule Fetcher (<filename>gitsm://</filename>)</title>
|
||||
|
||||
<para>
|
||||
This fetcher submodule inherits from the
|
||||
<link linkend='git-fetcher'>Git fetcher</link> and extends
|
||||
that fetcher's behavior by fetching a repository's submodules.
|
||||
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
|
||||
is passed to the Git fetcher as described in the
|
||||
"<link linkend='git-fetcher'>Git Fetcher (<filename>git://</filename>)</link>"
|
||||
section.
|
||||
<note>
|
||||
<title>Notes and Warnings</title>
|
||||
<para>
|
||||
You must clean a recipe when switching between
|
||||
'<filename>git://</filename>' and
|
||||
'<filename>gitsm://</filename>' URLs.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The Git Submodules fetcher is not a complete fetcher
|
||||
implementation.
|
||||
The fetcher has known issues where it does not use the
|
||||
normal source mirroring infrastructure properly.
|
||||
</para>
|
||||
</note>
|
||||
</para>
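<para>
    Usage mirrors the Git fetcher.
    For example, a hypothetical repository that contains submodules
    (the host and path are placeholders) could be fetched as follows:
    <literallayout class='monospaced'>
SRC_URI = "gitsm://git.example.org/myproject.git;protocol=https;branch=master"
    </literallayout>
</para>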
</section>
|
||||
|
||||
<section id='clearcase-fetcher'>
|
||||
<title>ClearCase Fetcher (<filename>ccrc://</filename>)</title>
|
||||
|
||||
<para>
|
||||
This fetcher submodule fetches code from a
|
||||
<ulink url='http://en.wikipedia.org/wiki/Rational_ClearCase'>ClearCase</ulink>
|
||||
repository.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
To use this fetcher, make sure your recipe has proper
|
||||
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>,
|
||||
<link linkend='var-SRCREV'><filename>SRCREV</filename></link>, and
|
||||
<link linkend='var-PV'><filename>PV</filename></link> settings.
|
||||
Here is an example:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
|
||||
SRCREV = "EXAMPLE_CLEARCASE_TAG"
|
||||
PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
|
||||
</literallayout>
|
||||
The fetcher uses the <filename>rcleartool</filename> or
|
||||
<filename>cleartool</filename> remote client, depending on
|
||||
which one is available.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Following are options for the <filename>SRC_URI</filename>
|
||||
statement:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis><filename>vob</filename></emphasis>:
|
||||
The name, which must include the
leading "/" character, of the ClearCase VOB.
|
||||
This option is required.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>module</filename></emphasis>:
|
||||
The module, which must include the
leading "/" character, in the selected VOB.
|
||||
<note>
|
||||
The <filename>module</filename> and <filename>vob</filename>
|
||||
options are combined to create the <filename>load</filename> rule in
|
||||
the view config spec.
|
||||
As an example, consider the <filename>vob</filename> and
|
||||
<filename>module</filename> values from the
|
||||
<filename>SRC_URI</filename> statement at the start of this section.
|
||||
Combining those values results in the following:
|
||||
<literallayout class='monospaced'>
|
||||
load /example_vob/example_module
|
||||
</literallayout>
|
||||
</note>
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>proto</filename></emphasis>:
|
||||
The protocol, which can be either <filename>http</filename> or
|
||||
<filename>https</filename>.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
By default, the fetcher creates a configuration specification.
|
||||
If you want this specification written to an area other than the default,
|
||||
use the <filename>CCASE_CUSTOM_CONFIG_SPEC</filename> variable
|
||||
in your recipe to define where the specification is written.
|
||||
<note>
|
||||
The <filename>SRCREV</filename> value loses its functionality if you
|
||||
specify this variable.
|
||||
However, <filename>SRCREV</filename> is still used to label the
|
||||
archive after a fetch even though it does not define what is
|
||||
fetched.
|
||||
</note>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Here are a couple of other behaviors worth mentioning:
|
||||
<itemizedlist>
|
||||
<listitem><para>
|
||||
When using <filename>cleartool</filename>, the login of
|
||||
<filename>cleartool</filename> is handled by the system.
|
||||
The login requires no special steps.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
In order to use <filename>rcleartool</filename> with authenticated
|
||||
users, an "rcleartool login" is necessary before using the fetcher.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='perforce-fetcher'>
|
||||
<title>Perforce Fetcher (<filename>p4://</filename>)</title>
|
||||
|
||||
<para>
|
||||
This fetcher submodule fetches code from the
|
||||
<ulink url='https://www.perforce.com/'>Perforce</ulink>
|
||||
source control system.
|
||||
The executable used is specified by
|
||||
<filename>FETCHCMD_p4</filename>, which defaults
|
||||
to "p4".
|
||||
The fetcher's temporary working directory is set by
|
||||
<link linkend='var-P4DIR'><filename>P4DIR</filename></link>,
|
||||
which defaults to "DL_DIR/p4".
|
||||
</para>
|
||||
|
||||
<para>
|
||||
To use this fetcher, make sure your recipe has proper
|
||||
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>,
|
||||
<link linkend='var-SRCREV'><filename>SRCREV</filename></link>, and
|
||||
<link linkend='var-PV'><filename>PV</filename></link> values.
|
||||
The p4 executable is able to use the config file defined by your
|
||||
system's <filename>P4CONFIG</filename> environment variable in
|
||||
order to define the Perforce server URL and port, username, and
|
||||
password if you do not wish to keep those values in a recipe
|
||||
itself.
|
||||
If you choose not to use <filename>P4CONFIG</filename>,
|
||||
or to explicitly set variables that <filename>P4CONFIG</filename>
|
||||
can contain, you can specify the <filename>P4PORT</filename> value,
|
||||
which is the server's URL and port number, and you can
|
||||
specify a username and password directly in your recipe within
|
||||
<filename>SRC_URI</filename>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Here is an example that relies on <filename>P4CONFIG</filename>
|
||||
to specify the server URL and port, username, and password, and
|
||||
fetches the Head Revision:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "p4://example-depot/main/source/..."
|
||||
SRCREV = "${AUTOREV}"
|
||||
PV = "p4-${SRCPV}"
|
||||
S = "${WORKDIR}/p4"
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Here is an example that specifies the server URL and port,
|
||||
username, and password, and fetches a Revision based on a Label:
|
||||
<literallayout class='monospaced'>
|
||||
P4PORT = "tcp:p4server.example.net:1666"
|
||||
SRC_URI = "p4://user:passwd@example-depot/main/source/..."
|
||||
SRCREV = "release-1.0"
|
||||
PV = "p4-${SRCPV}"
|
||||
S = "${WORKDIR}/p4"
|
||||
</literallayout>
|
||||
<note>
|
||||
You should always set <filename>S</filename>
|
||||
to <filename>"${WORKDIR}/p4"</filename> in your recipe.
|
||||
</note>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='other-fetchers'>
|
||||
<title>Other Fetchers</title>
|
||||
|
||||
<para>
|
||||
Fetch submodules also exist for the following:
|
||||
<itemizedlist>
|
||||
<listitem><para>
|
||||
Bazaar (<filename>bzr://</filename>)
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Trees using Git Annex (<filename>gitannex://</filename>)
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Secure FTP (<filename>sftp://</filename>)
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Secure Shell (<filename>ssh://</filename>)
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Repo (<filename>repo://</filename>)
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
OSC (<filename>osc://</filename>)
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Mercurial (<filename>hg://</filename>)
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
No documentation currently exists for these lesser used
|
||||
fetcher submodules.
|
||||
However, you might find the code helpful and readable.
|
||||
</para>
|
||||
</section>
|
||||
</section>
|
||||
|
||||
<section id='auto-revisions'>
|
||||
<title>Auto Revisions</title>
|
||||
|
||||
<para>
|
||||
<filename>AUTOREV</filename> and <filename>SRCREV_FORMAT</filename>
still need to be documented fully here.
Briefly, setting
<link linkend='var-SRCREV'><filename>SRCREV</filename></link> to
"${AUTOREV}" causes BitBake to use the latest available upstream
revision on each build, while <filename>SRCREV_FORMAT</filename>
describes how the individual revisions are combined into a single
source revision when <filename>SRC_URI</filename> contains more than
one SCM URL, using the "name" parameter of each URL as a placeholder.
|
||||
</para>
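<para>
    As a brief, hypothetical sketch (the URLs and names are
    placeholders), a recipe tracking the latest revisions of two
    repositories might contain:
    <literallayout class='monospaced'>
SRC_URI = "git://git.example.org/liba.git;name=liba \
           git://git.example.org/libb.git;name=libb"
SRCREV_liba = "${AUTOREV}"
SRCREV_libb = "${AUTOREV}"
SRCREV_FORMAT = "liba_libb"
    </literallayout>
</para>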
</section>
|
||||
</chapter>
|
||||
@@ -1,505 +0,0 @@
|
||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
|
||||
<appendix id='hello-world-example'>
|
||||
<title>Hello World Example</title>
|
||||
|
||||
<section id='bitbake-hello-world'>
|
||||
<title>BitBake Hello World</title>
|
||||
|
||||
<para>
|
||||
The simplest example commonly used to demonstrate any new
|
||||
programming language or tool is the
|
||||
"<ulink url="http://en.wikipedia.org/wiki/Hello_world_program">Hello World</ulink>"
|
||||
example.
|
||||
This appendix demonstrates, in tutorial form, Hello
|
||||
World within the context of BitBake.
|
||||
The tutorial describes how to create a new project
|
||||
and the applicable metadata files necessary to allow
|
||||
BitBake to build it.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='example-obtaining-bitbake'>
|
||||
<title>Obtaining BitBake</title>
|
||||
|
||||
<para>
|
||||
See the
|
||||
"<link linkend='obtaining-bitbake'>Obtaining BitBake</link>"
|
||||
section for information on how to obtain BitBake.
|
||||
Once you have the source code on your machine, the BitBake directory
|
||||
appears as follows:
|
||||
<literallayout class='monospaced'>
|
||||
$ ls -al
|
||||
total 100
|
||||
drwxrwxr-x. 9 wmat wmat 4096 Jan 31 13:44 .
|
||||
drwxrwxr-x. 3 wmat wmat 4096 Feb 4 10:45 ..
|
||||
-rw-rw-r--. 1 wmat wmat 365 Nov 26 04:55 AUTHORS
|
||||
drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 bin
|
||||
drwxrwxr-x. 4 wmat wmat 4096 Jan 31 13:44 build
|
||||
-rw-rw-r--. 1 wmat wmat 16501 Nov 26 04:55 ChangeLog
|
||||
drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 classes
|
||||
drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 conf
|
||||
drwxrwxr-x. 3 wmat wmat 4096 Nov 26 04:55 contrib
|
||||
-rw-rw-r--. 1 wmat wmat 17987 Nov 26 04:55 COPYING
|
||||
drwxrwxr-x. 3 wmat wmat 4096 Nov 26 04:55 doc
|
||||
-rw-rw-r--. 1 wmat wmat 69 Nov 26 04:55 .gitignore
|
||||
-rw-rw-r--. 1 wmat wmat 849 Nov 26 04:55 HEADER
|
||||
drwxrwxr-x. 5 wmat wmat 4096 Jan 31 13:44 lib
|
||||
-rw-rw-r--. 1 wmat wmat 195 Nov 26 04:55 MANIFEST.in
|
||||
-rw-rw-r--. 1 wmat wmat 2887 Nov 26 04:55 TODO
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
At this point, you should have BitBake cloned to
|
||||
a directory that matches the previous listing except for
|
||||
dates and user names.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='setting-up-the-bitbake-environment'>
|
||||
<title>Setting Up the BitBake Environment</title>
|
||||
|
||||
<para>
|
||||
First, you need to be sure that you can run BitBake.
|
||||
Set your working directory to where your local BitBake
|
||||
files are and run the following command:
|
||||
<literallayout class='monospaced'>
|
||||
$ ./bin/bitbake --version
|
||||
BitBake Build Tool Core version 1.23.0, bitbake version 1.23.0
|
||||
</literallayout>
|
||||
The console output tells you what version you are running.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The recommended method to run BitBake is from a directory of your
|
||||
choice.
|
||||
To be able to run BitBake from any directory, you need to add the
|
||||
executable binary to your shell's environment
|
||||
<filename>PATH</filename> variable.
|
||||
First, look at your current <filename>PATH</filename> variable
|
||||
by entering the following:
|
||||
<literallayout class='monospaced'>
|
||||
$ echo $PATH
|
||||
</literallayout>
|
||||
Next, add the directory location for the BitBake binary to the
|
||||
<filename>PATH</filename>.
|
||||
Here is an example that adds the
|
||||
<filename>/home/scott-lenovo/bitbake/bin</filename> directory
|
||||
to the front of the <filename>PATH</filename> variable:
|
||||
<literallayout class='monospaced'>
|
||||
$ export PATH=/home/scott-lenovo/bitbake/bin:$PATH
|
||||
</literallayout>
|
||||
You should now be able to enter the <filename>bitbake</filename>
|
||||
command from the command line while working from any directory.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='the-hello-world-example'>
|
||||
<title>The Hello World Example</title>
|
||||
|
||||
<para>
|
||||
The overall goal of this exercise is to build a
|
||||
complete "Hello World" example utilizing task and layer
|
||||
concepts.
|
||||
Because this is how modern projects such as OpenEmbedded and
|
||||
the Yocto Project utilize BitBake, the example
|
||||
provides an excellent starting point for understanding
|
||||
BitBake.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
To help you understand how to use BitBake to build targets,
|
||||
the example starts with nothing but the <filename>bitbake</filename>
|
||||
command, which causes BitBake to fail and report problems.
|
||||
The example progresses by adding pieces to the build to
|
||||
eventually conclude with a working, minimal "Hello World"
|
||||
example.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
While every attempt is made to explain what is happening during
|
||||
the example, the descriptions cannot cover everything.
|
||||
You can find further information throughout this manual.
|
||||
Also, you can actively participate in the
|
||||
<ulink url='http://lists.openembedded.org/mailman/listinfo/bitbake-devel'></ulink>
|
||||
discussion mailing list about the BitBake build tool.
|
||||
</para>
|
||||
|
||||
<note>
|
||||
This example was inspired by and drew heavily from these sources:
|
||||
<itemizedlist>
|
||||
<listitem><para>
|
||||
<ulink url="http://www.mail-archive.com/yocto@yoctoproject.org/msg09379.html">Mailing List post - The BitBake equivalent of "Hello, World!"</ulink>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
<ulink url="http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/">Hambedded Linux blog post - From Bitbake Hello World to an Image</ulink>
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</note>
|
||||
|
||||
<para>
|
||||
As stated earlier, the goal of this example
|
||||
is to eventually compile "Hello World".
|
||||
However, it is unknown what BitBake needs and what you have
|
||||
to provide in order to achieve that goal.
|
||||
Recall that BitBake utilizes three types of metadata files:
|
||||
<link linkend='configuration-files'>Configuration Files</link>,
|
||||
<link linkend='classes'>Classes</link>, and
|
||||
<link linkend='recipes'>Recipes</link>.
|
||||
But where do they go?
|
||||
How does BitBake find them?
|
||||
BitBake's error messaging helps you answer these types of questions
|
||||
and helps you better understand exactly what is going on.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Following is the complete "Hello World" example.
|
||||
</para>
|
||||
|
||||
<orderedlist>
|
||||
<listitem><para><emphasis>Create a Project Directory:</emphasis>
|
||||
First, set up a directory for the "Hello World" project.
|
||||
Here is how you can do so in your home directory:
|
||||
<literallayout class='monospaced'>
|
||||
$ mkdir ~/hello
|
||||
$ cd ~/hello
|
||||
</literallayout>
|
||||
This is the directory that BitBake will use to do all of
|
||||
its work.
|
||||
You can use this directory to keep all the metafiles needed
|
||||
by BitBake.
|
||||
Having a project directory is a good way to isolate your
|
||||
project.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Run BitBake:</emphasis>
|
||||
At this point, you have nothing but a project directory.
|
||||
Run the <filename>bitbake</filename> command and see what
|
||||
it does:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake
|
||||
The BBPATH variable is not set and bitbake did not
|
||||
find a conf/bblayers.conf file in the expected location.
|
||||
Maybe you accidentally invoked bitbake from the wrong directory?
|
||||
DEBUG: Removed the following variables from the environment:
|
||||
GNOME_DESKTOP_SESSION_ID, XDG_CURRENT_DESKTOP,
|
||||
GNOME_KEYRING_CONTROL, DISPLAY, SSH_AGENT_PID, LANG, no_proxy,
|
||||
XDG_SESSION_PATH, XAUTHORITY, SESSION_MANAGER, SHLVL,
|
||||
MANDATORY_PATH, COMPIZ_CONFIG_PROFILE, WINDOWID, EDITOR,
|
||||
GPG_AGENT_INFO, SSH_AUTH_SOCK, GDMSESSION, GNOME_KEYRING_PID,
|
||||
XDG_SEAT_PATH, XDG_CONFIG_DIRS, LESSOPEN, DBUS_SESSION_BUS_ADDRESS,
|
||||
_, XDG_SESSION_COOKIE, DESKTOP_SESSION, LESSCLOSE, DEFAULTS_PATH,
|
||||
UBUNTU_MENUPROXY, OLDPWD, XDG_DATA_DIRS, COLORTERM, LS_COLORS
|
||||
</literallayout>
|
||||
The majority of this output is specific to environment variables
|
||||
that are not directly relevant to BitBake.
|
||||
However, the very first message regarding the
|
||||
<filename>BBPATH</filename> variable and the
|
||||
<filename>conf/bblayers.conf</filename> file
|
||||
is relevant.</para>
|
||||
<para>
|
||||
When you run BitBake, it begins looking for metadata files.
|
||||
The
|
||||
<link linkend='var-BBPATH'><filename>BBPATH</filename></link>
|
||||
variable is what tells BitBake where to look for those files.
|
||||
<filename>BBPATH</filename> is not set and you need to set it.
|
||||
Without <filename>BBPATH</filename>, BitBake cannot
|
||||
find any configuration files (<filename>.conf</filename>)
|
||||
or recipe files (<filename>.bb</filename>) at all.
|
||||
BitBake also cannot find the <filename>bitbake.conf</filename>
|
||||
file.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Setting <filename>BBPATH</filename>:</emphasis>
|
||||
For this example, you can set <filename>BBPATH</filename>
|
||||
in the same manner that you set <filename>PATH</filename>
|
||||
earlier in the appendix.
|
||||
You should realize, though, that it is much more flexible to set the
|
||||
<filename>BBPATH</filename> variable up in a configuration
|
||||
file for each project.</para>
|
||||
<para>From your shell, enter the following commands to set and
|
||||
export the <filename>BBPATH</filename> variable:
|
||||
<literallayout class='monospaced'>
|
||||
$ BBPATH="<replaceable>projectdirectory</replaceable>"
|
||||
$ export BBPATH
|
||||
</literallayout>
|
||||
Use your actual project directory in the command.
|
||||
BitBake uses that directory to find the metadata it needs for
|
||||
your project.
|
||||
<note>
|
||||
When specifying your project directory, do not use the
|
||||
tilde ("~") character as BitBake does not expand that character
|
||||
as the shell would.
|
||||
</note>
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Run BitBake:</emphasis>
|
||||
Now that you have <filename>BBPATH</filename> defined, run
|
||||
the <filename>bitbake</filename> command again:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake
|
||||
ERROR: Traceback (most recent call last):
|
||||
File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 163, in wrapped
|
||||
return func(fn, *args)
|
||||
File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 173, in parse_config_file
|
||||
return bb.parse.handle(fn, data, include)
|
||||
File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 99, in handle
|
||||
return h['handle'](fn, data, include)
|
||||
File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/ConfHandler.py", line 120, in handle
|
||||
abs_fn = resolve_file(fn, data)
|
||||
File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 117, in resolve_file
|
||||
raise IOError("file %s not found in %s" % (fn, bbpath))
|
||||
IOError: file conf/bitbake.conf not found in /home/scott-lenovo/hello
|
||||
|
||||
ERROR: Unable to parse conf/bitbake.conf: file conf/bitbake.conf not found in /home/scott-lenovo/hello
|
||||
</literallayout>
|
||||
This sample output shows that BitBake could not find the
|
||||
<filename>conf/bitbake.conf</filename> file in the project
|
||||
directory.
|
||||
This file is the first thing BitBake must find in order
|
||||
to build a target.
|
||||
And, since the project directory for this example is
|
||||
empty, you need to provide a <filename>conf/bitbake.conf</filename>
|
||||
file.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Creating <filename>conf/bitbake.conf</filename>:</emphasis>
|
||||
The <filename>conf/bitbake.conf</filename> includes a number of
|
||||
configuration variables BitBake uses for metadata and recipe
|
||||
files.
|
||||
For this example, you need to create the file in your project directory
|
||||
and define some key BitBake variables.
|
||||
For more information on the <filename>bitbake.conf</filename>,
|
||||
see
|
||||
<ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#an-overview-of-bitbakeconf'></ulink>
|
||||
</para>
|
||||
<para>Use the following commands to create the <filename>conf</filename>
|
||||
directory in the project directory:
|
||||
<literallayout class='monospaced'>
|
||||
$ mkdir conf
|
||||
</literallayout>
|
||||
From within the <filename>conf</filename> directory, use
|
||||
some editor to create the <filename>bitbake.conf</filename>
|
||||
so that it contains the following:
|
||||
<literallayout class='monospaced'>
|
||||
TMPDIR = "${<link linkend='var-TOPDIR'>TOPDIR</link>}/tmp"
|
||||
<link linkend='var-CACHE'>CACHE</link> = "${TMPDIR}/cache"
|
||||
<link linkend='var-STAMP'>STAMP</link> = "${TMPDIR}/stamps"
|
||||
<link linkend='var-T'>T</link> = "${TMPDIR}/work"
|
||||
<link linkend='var-B'>B</link> = "${TMPDIR}"
|
||||
</literallayout>
|
||||
The <filename>TMPDIR</filename> variable establishes a directory
|
||||
that BitBake uses for build output and intermediate files (other
|
||||
than the cached information used by the
|
||||
<link linkend='setscene'>Setscene</link> process).
|
||||
Here, the <filename>TMPDIR</filename> directory is set to
|
||||
<filename>hello/tmp</filename>.
|
||||
<note><title>Tip</title>
|
||||
You can always safely delete the <filename>tmp</filename>
|
||||
directory in order to rebuild a BitBake target.
|
||||
The build process creates the directory for you
|
||||
when you run BitBake.
|
||||
</note></para>
|
||||
<para>For information about each of the other variables defined in this
|
||||
example, click on the links to take you to the definitions in
|
||||
the glossary.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Run BitBake:</emphasis>
|
||||
After making sure that the <filename>conf/bitbake.conf</filename>
|
||||
file exists, you can run the <filename>bitbake</filename>
|
||||
command again:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake
|
||||
ERROR: Traceback (most recent call last):
|
||||
File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 163, in wrapped
|
||||
return func(fn, *args)
|
||||
File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 177, in _inherit
|
||||
bb.parse.BBHandler.inherit(bbclass, "configuration INHERITs", 0, data)
|
||||
File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/BBHandler.py", line 92, in inherit
|
||||
include(fn, file, lineno, d, "inherit")
|
||||
File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/ConfHandler.py", line 100, in include
|
||||
raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
|
||||
ParseError: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass
|
||||
|
||||
ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass
|
||||
</literallayout>
|
||||
In the sample output, BitBake could not find the
|
||||
<filename>classes/base.bbclass</filename> file.
|
||||
You need to create that file next.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Creating <filename>classes/base.bbclass</filename>:</emphasis>
|
||||
BitBake uses class files to provide common code and functionality.
|
||||
The minimally required class for BitBake is the
|
||||
<filename>classes/base.bbclass</filename> file.
|
||||
The <filename>base</filename> class is implicitly inherited by
|
||||
every recipe.
|
||||
BitBake looks for the class in the <filename>classes</filename>
|
||||
directory of the project (i.e. <filename>hello/classes</filename>
|
||||
in this example).
|
||||
</para>
|
||||
<para>Create the <filename>classes</filename> directory as follows:
|
||||
<literallayout class='monospaced'>
|
||||
$ cd $HOME/hello
|
||||
$ mkdir classes
|
||||
</literallayout>
|
||||
Move to the <filename>classes</filename> directory and then
|
||||
create the <filename>base.bbclass</filename> file by inserting
|
||||
this single line:
|
||||
<literallayout class='monospaced'>
|
||||
addtask build
|
||||
</literallayout>
|
||||
The minimal task that BitBake runs is the
|
||||
<filename>do_build</filename> task.
|
||||
This is all the example needs in order to build the project.
|
||||
Of course, the <filename>base.bbclass</filename> can have much
|
||||
more depending on which build environments BitBake is
|
||||
supporting.
|
||||
For more information on the <filename>base.bbclass</filename> file,
|
||||
you can look at
|
||||
<ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#tasks'></ulink>.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Run BitBake:</emphasis>
|
||||
After making sure that the <filename>classes/base.bbclass</filename>
|
||||
file exists, you can run the <filename>bitbake</filename>
|
||||
command again:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake
|
||||
Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.
|
||||
</literallayout>
|
||||
BitBake is finally reporting no errors.
|
||||
However, you can see that it really does not have anything
|
||||
to do.
|
||||
You need to create a recipe that gives BitBake something to do.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Creating a Layer:</emphasis>
|
||||
While it is not really necessary for such a small example,
|
||||
it is good practice to create a layer in which to keep your
|
||||
code separate from the general metadata used by BitBake.
|
||||
Thus, this example creates and uses a layer called "mylayer".
|
||||
<note>
|
||||
You can find additional information on adding a layer at
|
||||
<ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#adding-an-example-layer'></ulink>.
|
||||
</note>
|
||||
</para>
|
||||
<para>Minimally, you need a recipe file and a layer configuration
|
||||
file in your layer.
|
||||
The configuration file needs to be in the <filename>conf</filename>
|
||||
directory inside the layer.
|
||||
Use these commands to set up the layer and the <filename>conf</filename>
|
||||
directory:
|
||||
<literallayout class='monospaced'>
|
||||
$ cd $HOME
|
||||
$ mkdir mylayer
|
||||
$ cd mylayer
|
||||
$ mkdir conf
|
||||
</literallayout>
|
||||
Move to the <filename>conf</filename> directory and create a
|
||||
<filename>layer.conf</filename> file that has the following:
|
||||
<literallayout class='monospaced'>
|
||||
BBPATH .= ":${<link linkend='var-LAYERDIR'>LAYERDIR</link>}"
|
||||
|
||||
<link linkend='var-BBFILES'>BBFILES</link> += "${LAYERDIR}/*.bb"
|
||||
|
||||
<link linkend='var-BBFILE_COLLECTIONS'>BBFILE_COLLECTIONS</link> += "mylayer"
|
||||
<link linkend='var-BBFILE_PATTERN'>BBFILE_PATTERN_mylayer</link> := "^${LAYERDIR_RE}/"
|
||||
</literallayout>
|
||||
For information on these variables, click the links
|
||||
to go to the definitions in the glossary.</para>
|
||||
<para>You need to create the recipe file next.
|
||||
Inside your layer at the top-level, use an editor and create
|
||||
a recipe file named <filename>printhello.bb</filename> that
|
||||
has the following:
|
||||
<literallayout class='monospaced'>
|
||||
<link linkend='var-DESCRIPTION'>DESCRIPTION</link> = "Prints Hello World"
|
||||
<link linkend='var-PN'>PN</link> = 'printhello'
|
||||
<link linkend='var-PV'>PV</link> = '1'
|
||||
|
||||
python do_build() {
|
||||
bb.plain("********************");
|
||||
bb.plain("* *");
|
||||
bb.plain("* Hello, World! *");
|
||||
bb.plain("* *");
|
||||
bb.plain("********************");
|
||||
}
|
||||
</literallayout>
|
||||
The recipe file simply provides a description of the
|
||||
recipe, the name, version, and the <filename>do_build</filename>
|
||||
task, which prints out "Hello World" to the console.
|
||||
For more information on these variables, follow the links
|
||||
to the glossary.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Run BitBake With a Target:</emphasis>
|
||||
Now that a BitBake target exists, run the command and provide
|
||||
that target:
|
||||
<literallayout class='monospaced'>
|
||||
$ cd $HOME/hello
|
||||
$ bitbake printhello
|
||||
ERROR: no recipe files to build, check your BBPATH and BBFILES?
|
||||
|
||||
Summary: There was 1 ERROR message shown, returning a non-zero exit code.
|
||||
</literallayout>
|
||||
We have created the layer with the recipe and the layer
|
||||
configuration file but it still seems that BitBake cannot
|
||||
find the recipe.
|
||||
BitBake needs a <filename>conf/bblayers.conf</filename> that
|
||||
lists the layers for the project.
|
||||
Without this file, BitBake cannot find the recipe.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Creating <filename>conf/bblayers.conf</filename>:</emphasis>
|
||||
BitBake uses the <filename>conf/bblayers.conf</filename> file
|
||||
to locate layers needed for the project.
|
||||
This file must reside in the <filename>conf</filename> directory
|
||||
of the project (i.e. <filename>hello/conf</filename> for this
|
||||
example).</para>
|
||||
<para>Set your working directory to the <filename>hello/conf</filename>
|
||||
directory and then create the <filename>bblayers.conf</filename>
|
||||
file so that it contains the following:
|
||||
<literallayout class='monospaced'>
|
||||
BBLAYERS ?= " \
|
||||
/home/<replaceable>you</replaceable>/mylayer \
|
||||
"
|
||||
</literallayout>
|
||||
You need to provide your own information for
|
||||
<filename>you</filename> in the file.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Run BitBake With a Target:</emphasis>
|
||||
Now that you have supplied the <filename>bblayers.conf</filename>
|
||||
file, run the <filename>bitbake</filename> command and provide
|
||||
the target:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake printhello
|
||||
Parsing recipes: 100% |##################################################################################|
|
||||
Time: 00:00:00
|
||||
Parsing of 1 .bb files complete (0 cached, 1 parsed). 1 targets, 0 skipped, 0 masked, 0 errors.
|
||||
NOTE: Resolving any missing task queue dependencies
|
||||
NOTE: Preparing RunQueue
|
||||
NOTE: Executing RunQueue Tasks
|
||||
********************
|
||||
* *
|
||||
* Hello, World! *
|
||||
* *
|
||||
********************
|
||||
NOTE: Tasks Summary: Attempted 1 tasks of which 0 didn't need to be rerun and all succeeded.
|
||||
</literallayout>
|
||||
BitBake finds the <filename>printhello</filename> recipe and
|
||||
successfully runs the task.
|
||||
<note>
|
||||
After the first execution, re-running
|
||||
<filename>bitbake printhello</filename> again will not
|
||||
result in a BitBake run that prints the same console
|
||||
output.
|
||||
The reason for this is that the first time the
|
||||
<filename>printhello.bb</filename> recipe's
|
||||
<filename>do_build</filename> task executes
|
||||
successfully, BitBake writes a stamp file for the task.
|
||||
Thus, the next time you attempt to run the task
|
||||
using that same <filename>bitbake</filename> command,
|
||||
BitBake notices the stamp and therefore determines
|
||||
that the task does not need to be re-run.
|
||||
If you delete the <filename>tmp</filename> directory
|
||||
or run <filename>bitbake -c clean printhello</filename>
|
||||
and then re-run the build, the "Hello, World!" message will
|
||||
be printed again.
|
||||
</note>
|
||||
</para></listitem>
|
||||
</orderedlist>
|
||||
</section>
|
||||
</appendix>
|
||||
@@ -1,710 +0,0 @@
|
||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
|
||||
<chapter id="bitbake-user-manual-intro">
|
||||
<title>Overview</title>
|
||||
|
||||
<para>
|
||||
Welcome to the BitBake User Manual.
|
||||
This manual provides information on the BitBake tool.
|
||||
The information attempts to be as independent as possible regarding
|
||||
systems that use BitBake, such as OpenEmbedded and the
|
||||
Yocto Project.
|
||||
In some cases, scenarios or examples within the context of
|
||||
a build system are used in the manual to help with understanding.
|
||||
For these cases, the manual clearly states the context.
|
||||
</para>
|
||||
|
||||
<section id="intro">
|
||||
<title>Introduction</title>
|
||||
|
||||
<para>
|
||||
Fundamentally, BitBake is a generic task execution
|
||||
engine that allows shell and Python tasks to be run
|
||||
efficiently and in parallel while working within
|
||||
complex inter-task dependency constraints.
|
||||
One of BitBake's main users, OpenEmbedded, takes this core
|
||||
and builds embedded Linux software stacks using
|
||||
a task-oriented approach.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Conceptually, BitBake is similar to GNU Make in
|
||||
some regards but has significant differences:
|
||||
<itemizedlist>
|
||||
<listitem><para>
|
||||
BitBake executes tasks according to provided
|
||||
metadata that builds up the tasks.
|
||||
Metadata is stored in recipe (<filename>.bb</filename>)
|
||||
and related recipe "append" (<filename>.bbappend</filename>)
|
||||
files, configuration (<filename>.conf</filename>) and
|
||||
underlying include (<filename>.inc</filename>) files, and
|
||||
in class (<filename>.bbclass</filename>) files.
|
||||
The metadata provides
|
||||
BitBake with instructions on what tasks to run and
|
||||
the dependencies between those tasks.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
BitBake includes a fetcher library for obtaining source
|
||||
code from various places such as local files, source control
|
||||
systems, or websites.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
The instructions for each unit to be built (e.g. a piece
|
||||
of software) are known as "recipe" files and
|
||||
contain all the information about the unit
|
||||
(dependencies, source file locations, checksums, description
|
||||
and so on).
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
BitBake includes a client/server abstraction and can
|
||||
be used from a command line or used as a service over
|
||||
XML-RPC and has several different user interfaces.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id="history-and-goals">
|
||||
<title>History and Goals</title>
|
||||
|
||||
<para>
|
||||
BitBake was originally a part of the OpenEmbedded project.
|
||||
It was inspired by the Portage package management system
|
||||
used by the Gentoo Linux distribution.
|
||||
On December 7, 2004, OpenEmbedded project team member
|
||||
Chris Larson split the project into two distinct pieces:
|
||||
<itemizedlist>
|
||||
<listitem><para>BitBake, a generic task executor</para></listitem>
|
||||
<listitem><para>OpenEmbedded, a metadata set utilized by
|
||||
BitBake</para></listitem>
|
||||
</itemizedlist>
|
||||
Today, BitBake is the primary basis of the
|
||||
<ulink url="http://www.openembedded.org/">OpenEmbedded</ulink>
|
||||
project, which is being used to build and maintain Linux
|
||||
distributions such as the
|
||||
<ulink url='http://www.angstrom-distribution.org/'>Angstrom Distribution</ulink>,
|
||||
and which is also being used as the build tool for Linux projects
|
||||
such as the
|
||||
<ulink url='http://www.yoctoproject.org'>Yocto Project</ulink>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Prior to BitBake, no other build tool adequately met the needs of
|
||||
an aspiring embedded Linux distribution.
|
||||
All of the build systems used by traditional desktop Linux
|
||||
distributions lacked important functionality, and none of the
|
||||
ad hoc Buildroot-based systems, prevalent in the
|
||||
embedded space, were scalable or maintainable.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Some important original goals for BitBake were:
|
||||
<itemizedlist>
|
||||
<listitem><para>
|
||||
Handle cross-compilation.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Handle inter-package dependencies (build time on
|
||||
target architecture, build time on native
|
||||
architecture, and runtime).
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Support running any number of tasks within a given
|
||||
package, including, but not limited to, fetching
|
||||
upstream sources, unpacking them, patching them,
|
||||
configuring them, and so forth.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Be Linux distribution agnostic for both build and
|
||||
target systems.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Be architecture agnostic.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Support multiple build and target operating systems
|
||||
(e.g. Cygwin, the BSDs, and so forth).
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Be self contained, rather than tightly
|
||||
integrated into the build machine's root
|
||||
filesystem.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Handle conditional metadata on the target architecture,
|
||||
operating system, distribution, and machine.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Make it easy to use the tools to supply local metadata
and packages against which to operate.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Make it easy to use BitBake to collaborate between
multiple projects for their builds.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Provide an inheritance mechanism to share
|
||||
common metadata between many packages.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
Over time it became apparent that some further requirements
|
||||
were necessary:
|
||||
<itemizedlist>
|
||||
<listitem><para>
|
||||
Handle variants of a base recipe (e.g. native, sdk,
|
||||
and multilib).
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Split metadata into layers and allow layers
|
||||
to enhance or override other layers.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Allow representation of a given set of input variables
|
||||
to a task as a checksum.
|
||||
Based on that checksum, allow acceleration of builds
|
||||
with prebuilt components.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
BitBake satisfies all the original requirements and many more
|
||||
with extensions being made to the basic functionality to
|
||||
reflect the additional requirements.
|
||||
Flexibility and power have always been the priorities.
|
||||
BitBake is highly extensible and supports embedded Python code and
|
||||
execution of any arbitrary tasks.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id="Concepts">
|
||||
<title>Concepts</title>
|
||||
|
||||
<para>
|
||||
BitBake is a program written in the Python language.
|
||||
At the highest level, BitBake interprets metadata, decides
|
||||
what tasks are required to run, and executes those tasks.
|
||||
Similar to GNU Make, BitBake controls how software is
|
||||
built.
|
||||
GNU Make achieves its control through "makefiles", while
|
||||
BitBake uses "recipes".
|
||||
</para>
|
||||
|
||||
<para>
|
||||
BitBake extends the capabilities of a simple
|
||||
tool like GNU Make by allowing for the definition of much more
|
||||
complex tasks, such as assembling entire embedded Linux
|
||||
distributions.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The remainder of this section introduces several concepts
|
||||
that should be understood in order to better leverage
|
||||
the power of BitBake.
|
||||
</para>
|
||||
|
||||
<section id='recipes'>
|
||||
<title>Recipes</title>
|
||||
|
||||
<para>
|
||||
BitBake Recipes, which are denoted by the file extension
|
||||
<filename>.bb</filename>, are the most basic metadata files.
|
||||
These recipe files provide BitBake with the following:
|
||||
<itemizedlist>
|
||||
<listitem><para>Descriptive information about the
|
||||
package (author, homepage, license, and so on)</para></listitem>
|
||||
<listitem><para>The version of the recipe</para></listitem>
|
||||
<listitem><para>Existing dependencies (both build
|
||||
and runtime dependencies)</para></listitem>
|
||||
<listitem><para>Where the source code resides and
|
||||
how to fetch it</para></listitem>
|
||||
<listitem><para>Whether the source code requires
|
||||
any patches, where to find them, and how to apply
|
||||
them</para></listitem>
|
||||
<listitem><para>How to configure and compile the
|
||||
source code</para></listitem>
|
||||
<listitem><para>Where on the target machine to install the
|
||||
package or packages created</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
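<para>
    A very small, hypothetical recipe illustrating several of these
    pieces of information (the project name and URL are placeholders)
    might look like the following:
    <literallayout class='monospaced'>
DESCRIPTION = "Example library (hypothetical)"
PV = "1.0"
DEPENDS = "zlib"
SRC_URI = "http://example.com/downloads/examplelib-${PV}.tar.gz"
    </literallayout>
</para>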
<para>
|
||||
Within the context of BitBake, or any project utilizing BitBake
|
||||
as its build system, files with the <filename>.bb</filename>
|
||||
extension are referred to as recipes.
|
||||
<note>
|
||||
The term "package" is also commonly used to describe recipes.
|
||||
However, since the same word is used to describe packaged
|
||||
output from a project, it is best to maintain a single
|
||||
descriptive term - "recipes".
|
||||
Put another way, a single "recipe" file is quite capable
|
||||
of generating a number of related but separately installable
|
||||
"packages".
|
||||
In fact, that ability is fairly common.
|
||||
</note>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='configuration-files'>
|
||||
<title>Configuration Files</title>
|
||||
|
||||
<para>
|
||||
Configuration files, which are denoted by the
|
||||
<filename>.conf</filename> extension, define
|
||||
various configuration variables that govern the project's build
|
||||
process.
|
||||
These files fall into several areas that define
|
||||
machine configuration options, distribution configuration
|
||||
options, compiler tuning options, general common
|
||||
configuration options, and user configuration options.
|
||||
The main configuration file is the sample
|
||||
<filename>bitbake.conf</filename> file, which is
|
||||
located within the BitBake source tree
|
||||
<filename>conf</filename> directory.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='classes'>
|
||||
<title>Classes</title>
|
||||
|
||||
<para>
|
||||
Class files, which are denoted by the
|
||||
<filename>.bbclass</filename> extension, contain
|
||||
information that is useful to share between metadata files.
|
||||
The BitBake source tree currently comes with one class metadata file
|
||||
called <filename>base.bbclass</filename>.
|
||||
You can find this file in the
|
||||
<filename>classes</filename> directory.
|
||||
The <filename>base.bbclass</filename> class file is special since it
|
||||
is always included automatically for all recipes
|
||||
and classes.
|
||||
This class contains definitions for standard basic tasks such
|
||||
as fetching, unpacking, configuring (empty by default),
|
||||
compiling (runs any Makefile present), installing (empty by
|
||||
default) and packaging (empty by default).
|
||||
These tasks are often overridden or extended by other classes
|
||||
added during the project development process.
|
||||
</para>
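<para>
The sketch below is illustrative rather than an excerpt from
<filename>base.bbclass</filename>; it shows the general shape of a
class file: a shared task definition plus an addtask directive that
hooks the task into the build:
<literallayout class='monospaced'>
     # Hypothetical example.bbclass
     do_report() {
         echo "building ${PN} version ${PV}"
     }
     addtask report before do_build
</literallayout>
</para>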
|
||||
</section>
|
||||
|
||||
<section id='layers'>
|
||||
<title>Layers</title>
|
||||
|
||||
<para>
|
||||
Layers allow you to isolate different types of
|
||||
customizations from each other.
|
||||
While you might find it tempting to keep everything in one layer
|
||||
when working on a single project, the more modularly you organize
|
||||
your metadata, the easier it is to cope with future changes.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
To illustrate how you can use layers to keep things modular,
|
||||
consider customizations you might make to support a specific target machine.
|
||||
These types of customizations typically reside in a special layer,
|
||||
rather than a general layer, called a Board Support Package (BSP)
|
||||
Layer.
|
||||
Furthermore, the machine customizations should be isolated from
|
||||
recipes and metadata that support a new GUI environment, for
|
||||
example.
|
||||
This situation gives you a couple of layers: one for the machine
|
||||
configurations and one for the GUI environment.
|
||||
It is important to understand, however, that the BSP layer can still
|
||||
make machine-specific additions to recipes within
|
||||
the GUI environment layer without polluting the GUI layer itself
|
||||
with those machine-specific changes.
|
||||
You can accomplish this through a recipe that is a BitBake append
|
||||
(<filename>.bbappend</filename>) file.
|
||||
</para>
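<para>
Purely as an illustration, the two layers described above might be
laid out along the following lines (all of the names here are
hypothetical):
<literallayout class='monospaced'>
     meta-example-bsp/
         conf/layer.conf
         conf/machine/example-machine.conf
         recipes-graphics/gui-app/gui-app_%.bbappend

     meta-example-gui/
         conf/layer.conf
         recipes-graphics/gui-app/gui-app_1.0.bb
</literallayout>
The append file in the BSP layer carries the machine-specific changes
to the GUI recipe, so those changes never need to live in the GUI
layer itself.
</para>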
|
||||
</section>
|
||||
|
||||
<section id='append-bbappend-files'>
|
||||
<title>Append Files</title>
|
||||
|
||||
<para>
|
||||
Append files, which are files that have the
|
||||
<filename>.bbappend</filename> file extension, extend or
|
||||
override information in an existing recipe file.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
BitBake expects every append file to have a corresponding recipe file.
|
||||
Furthermore, the append file and corresponding recipe file
|
||||
must use the same root filename.
|
||||
The filenames can differ only in the file type suffix used
|
||||
(e.g. <filename>formfactor_0.0.bb</filename> and
|
||||
<filename>formfactor_0.0.bbappend</filename>).
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Information in append files extends or
|
||||
overrides the information in the underlying,
|
||||
similarly-named recipe files.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
When you name an append file, you can use the
|
||||
wildcard character (%) to allow for matching recipe names.
|
||||
For example, suppose you have an append file named
|
||||
as follows:
|
||||
<literallayout class='monospaced'>
|
||||
busybox_1.21.%.bbappend
|
||||
</literallayout>
|
||||
That append file would match any <filename>busybox_1.21.x.bb</filename>
|
||||
version of the recipe.
|
||||
So, the append file would match the following recipe names:
|
||||
<literallayout class='monospaced'>
|
||||
busybox_1.21.1.bb
|
||||
busybox_1.21.2.bb
|
||||
busybox_1.21.3.bb
|
||||
</literallayout>
|
||||
If the <filename>busybox</filename> recipe was updated to
|
||||
<filename>busybox_1.3.0.bb</filename>, the append name would not
|
||||
match.
|
||||
However, if you named the append file
|
||||
<filename>busybox_1.%.bbappend</filename>, then you would have a match.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
In the most general case, you could name the append file something as
|
||||
simple as <filename>busybox_%.bbappend</filename> to be entirely
|
||||
version independent.
|
||||
</para>
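<para>
As a sketch of what such a file might contain, the following append
adds a patch to the corresponding recipe.
The patch name is hypothetical, and the FILESEXTRAPATHS idiom shown
is the one commonly used in OpenEmbedded-based metadata:
<literallayout class='monospaced'>
     # Hypothetical busybox_1.21.%.bbappend
     FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
     SRC_URI += "file://example-fix.patch"
</literallayout>
</para>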
|
||||
</section>
|
||||
</section>
|
||||
|
||||
<section id='obtaining-bitbake'>
|
||||
<title>Obtaining BitBake</title>
|
||||
|
||||
<para>
|
||||
You can obtain BitBake several different ways:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>Cloning BitBake:</emphasis>
|
||||
Using Git to clone the BitBake source code repository
|
||||
is the recommended method for obtaining BitBake.
|
||||
Cloning the repository makes it easy to get bug fixes
|
||||
and have access to stable branches and the master
|
||||
branch.
|
||||
Once you have cloned BitBake, you should use
|
||||
the latest stable
|
||||
branch for development since the master branch is for
|
||||
BitBake development and might contain less stable changes.
|
||||
</para>
|
||||
<para>You usually need a version of BitBake
|
||||
that matches the metadata you are using.
|
||||
The metadata is generally backwards compatible but
|
||||
not forward compatible.</para>
|
||||
<para>Here is an example that clones the BitBake repository:
|
||||
<literallayout class='monospaced'>
|
||||
$ git clone git://git.openembedded.org/bitbake
|
||||
</literallayout>
|
||||
This command clones the BitBake Git repository into a
|
||||
directory called <filename>bitbake</filename>.
|
||||
Alternatively, you can
|
||||
designate a directory after the
|
||||
<filename>git clone</filename> command
|
||||
if you want to call the new directory something
|
||||
other than <filename>bitbake</filename>.
|
||||
Here is an example that names the directory
|
||||
<filename>bbdev</filename>:
|
||||
<literallayout class='monospaced'>
|
||||
$ git clone git://git.openembedded.org/bitbake bbdev
|
||||
</literallayout></para></listitem>
|
||||
<listitem><para><emphasis>Installation using your Distribution
|
||||
Package Management System:</emphasis>
|
||||
This method is not
|
||||
recommended because the BitBake version that is
|
||||
provided by your distribution, in most cases,
|
||||
is several
|
||||
releases behind a snapshot of the BitBake repository.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Taking a snapshot of BitBake:</emphasis>
|
||||
Downloading a snapshot of BitBake from the
|
||||
source code repository gives you access to a known
|
||||
branch or release of BitBake.
|
||||
<note>
|
||||
Cloning the Git repository, as described earlier,
|
||||
is the preferred method for getting BitBake.
|
||||
Cloning the repository makes it easier to update as
|
||||
patches are added to the stable branches.
|
||||
</note></para>
|
||||
<para>The following example downloads a snapshot of
|
||||
BitBake version 1.17.0:
|
||||
<literallayout class='monospaced'>
|
||||
$ wget http://git.openembedded.org/bitbake/snapshot/bitbake-1.17.0.tar.gz
|
||||
$ tar zxpvf bitbake-1.17.0.tar.gz
|
||||
</literallayout>
|
||||
After extraction of the tarball using the tar utility,
|
||||
you have a directory entitled
|
||||
<filename>bitbake-1.17.0</filename>.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Using the BitBake that Comes With Your
|
||||
Build Checkout:</emphasis>
|
||||
A final possibility for getting a copy of BitBake is that it
|
||||
already comes with your checkout of a larger BitBake-based build
|
||||
system, such as Poky or Yocto Project.
|
||||
Rather than manually checking out individual layers and
|
||||
gluing them together yourself, you can check
|
||||
out an entire build system.
|
||||
The checkout will already include a version of BitBake that
|
||||
has been thoroughly tested for compatibility with the other
|
||||
components.
|
||||
For information on how to check out a particular BitBake-based
|
||||
build system, consult that build system's supporting documentation.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id="bitbake-user-manual-command">
|
||||
<title>The BitBake Command</title>
|
||||
|
||||
<para>
|
||||
The <filename>bitbake</filename> command is the primary interface
|
||||
to the BitBake tool.
|
||||
This section presents the BitBake command syntax and provides
|
||||
several execution examples.
|
||||
</para>
|
||||
|
||||
<section id='usage-and-syntax'>
|
||||
<title>Usage and syntax</title>
|
||||
|
||||
<para>
|
||||
Following is the usage and syntax for BitBake:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake -h
|
||||
Usage: bitbake [options] [recipename/target recipe:do_task ...]
|
||||
|
||||
Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
|
||||
It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
|
||||
will provide the layer, BBFILES and other configuration information.
|
||||
|
||||
Options:
|
||||
--version show program's version number and exit
|
||||
-h, --help show this help message and exit
|
||||
-b BUILDFILE, --buildfile=BUILDFILE
|
||||
Execute tasks from a specific .bb recipe directly.
|
||||
WARNING: Does not handle any dependencies from other
|
||||
recipes.
|
||||
-k, --continue Continue as much as possible after an error. While the
|
||||
target that failed and anything depending on it cannot
|
||||
be built, as much as possible will be built before
|
||||
stopping.
|
||||
-a, --tryaltconfigs Continue with builds by trying to use alternative
|
||||
providers where possible.
|
||||
-f, --force Force the specified targets/task to run (invalidating
|
||||
any existing stamp file).
|
||||
-c CMD, --cmd=CMD Specify the task to execute. The exact options
|
||||
available depend on the metadata. Some examples might
|
||||
be 'compile' or 'populate_sysroot' or 'listtasks' may
|
||||
give a list of the tasks available.
|
||||
-C INVALIDATE_STAMP, --clear-stamp=INVALIDATE_STAMP
|
||||
Invalidate the stamp for the specified task such as
|
||||
'compile' and then run the default task for the
|
||||
specified target(s).
|
||||
-r PREFILE, --read=PREFILE
|
||||
Read the specified file before bitbake.conf.
|
||||
-R POSTFILE, --postread=POSTFILE
|
||||
Read the specified file after bitbake.conf.
|
||||
-v, --verbose Output more log message data to the terminal.
|
||||
-D, --debug Increase the debug level. You can specify this more
|
||||
than once.
|
||||
-n, --dry-run Don't execute, just go through the motions.
|
||||
-S SIGNATURE_HANDLER, --dump-signatures=SIGNATURE_HANDLER
|
||||
Dump out the signature construction information, with
|
||||
no task execution. The SIGNATURE_HANDLER parameter is
|
||||
passed to the handler. Two common values are none and
|
||||
printdiff but the handler may define more/less. none
|
||||
means only dump the signature, printdiff means compare
|
||||
the dumped signature with the cached one.
|
||||
-p, --parse-only Quit after parsing the BB recipes.
|
||||
-s, --show-versions Show current and preferred versions of all recipes.
|
||||
-e, --environment Show the global or per-recipe environment complete
|
||||
with information about where variables were
|
||||
set/changed.
|
||||
-g, --graphviz Save dependency tree information for the specified
|
||||
targets in the dot syntax.
|
||||
-I EXTRA_ASSUME_PROVIDED, --ignore-deps=EXTRA_ASSUME_PROVIDED
|
||||
Assume these dependencies don't exist and are already
|
||||
provided (equivalent to ASSUME_PROVIDED). Useful to
|
||||
make dependency graphs more appealing
|
||||
-l DEBUG_DOMAINS, --log-domains=DEBUG_DOMAINS
|
||||
Show debug logging for the specified logging domains
|
||||
-P, --profile Profile the command and save reports.
|
||||
-u UI, --ui=UI The user interface to use (depexp, goggle, hob, knotty
|
||||
or ncurses - default knotty).
|
||||
-t SERVERTYPE, --servertype=SERVERTYPE
|
||||
Choose which server type to use (process or xmlrpc -
|
||||
default process).
|
||||
--token=XMLRPCTOKEN Specify the connection token to be used when
|
||||
connecting to a remote server.
|
||||
--revisions-changed Set the exit code depending on whether upstream
|
||||
floating revisions have changed or not.
|
||||
--server-only Run bitbake without a UI, only starting a server
|
||||
(cooker) process.
|
||||
-B BIND, --bind=BIND The name/address for the bitbake server to bind to.
|
||||
--no-setscene Do not run any setscene tasks. sstate will be ignored
|
||||
and everything needed, built.
|
||||
--remote-server=REMOTE_SERVER
|
||||
Connect to the specified server.
|
||||
-m, --kill-server Terminate the remote server.
|
||||
--observe-only Connect to a server as an observing-only client.
|
||||
--status-only Check the status of the remote bitbake server.
|
||||
-w WRITEEVENTLOG, --write-log=WRITEEVENTLOG
|
||||
Writes the event log of the build to a bitbake event
|
||||
json file. Use '' (empty string) to assign the name
|
||||
automatically.
|
||||
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='bitbake-examples'>
|
||||
<title>Examples</title>
|
||||
|
||||
<para>
|
||||
This section presents some examples showing how to use BitBake.
|
||||
</para>
|
||||
|
||||
<section id='example-executing-a-task-against-a-single-recipe'>
|
||||
<title>Executing a Task Against a Single Recipe</title>
|
||||
|
||||
<para>
|
||||
Executing tasks for a single recipe file is relatively simple.
|
||||
You specify the file in question, and BitBake parses
|
||||
it and executes the specified task.
|
||||
If you do not specify a task, BitBake executes the default
|
||||
task, which is "build".
|
||||
BitBake obeys inter-task dependencies when doing
|
||||
so.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The following command runs the build task, which is
|
||||
the default task, on the <filename>foo_1.0.bb</filename>
|
||||
recipe file:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake -b foo_1.0.bb
|
||||
</literallayout>
|
||||
The following command runs the clean task on the
|
||||
<filename>foo.bb</filename> recipe file:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake -b foo.bb -c clean
|
||||
</literallayout>
|
||||
<note>
|
||||
The "-b" option explicitly does not handle recipe
|
||||
dependencies.
|
||||
Other than for debugging purposes, it is instead
|
||||
recommended that you use the syntax presented in the
|
||||
next section.
|
||||
</note>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='executing-tasks-against-a-set-of-recipe-files'>
|
||||
<title>Executing Tasks Against a Set of Recipe Files</title>
|
||||
|
||||
<para>
|
||||
There are a number of additional complexities introduced
|
||||
when one wants to manage multiple <filename>.bb</filename>
|
||||
files.
|
||||
Clearly there needs to be a way to tell BitBake what
|
||||
files are available and, of those, which you
|
||||
want to execute.
|
||||
There also needs to be a way for each recipe
|
||||
to express its dependencies, both for build-time and
|
||||
runtime.
|
||||
There must be a way for you to express recipe preferences
|
||||
when multiple recipes provide the same functionality, or when
|
||||
there are multiple versions of a recipe.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The <filename>bitbake</filename> command, when not using
|
||||
"--buildfile" or "-b" only accepts a "PROVIDES".
|
||||
You cannot provide anything else.
|
||||
By default, a recipe file generally "PROVIDES" its
|
||||
"packagename" as shown in the following example:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake foo
|
||||
</literallayout>
|
||||
This next example "PROVIDES" the package name and also uses
|
||||
the "-c" option to tell BitBake to just execute the
|
||||
<filename>do_clean</filename> task:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake -c clean foo
|
||||
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='executing-a-list-of-task-and-recipe-combinations'>
|
||||
<title>Executing a List of Task and Recipe Combinations</title>
|
||||
|
||||
<para>
|
||||
The BitBake command line supports specifying different
|
||||
tasks for individual targets when you specify multiple
|
||||
targets.
|
||||
For example, suppose you had two targets (or recipes)
|
||||
<filename>myfirstrecipe</filename> and
|
||||
<filename>mysecondrecipe</filename> and you needed
|
||||
BitBake to run <filename>taskA</filename> for the first
|
||||
recipe and <filename>taskB</filename> for the second
|
||||
recipe:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake myfirstrecipe:do_taskA mysecondrecipe:do_taskB
|
||||
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='generating-dependency-graphs'>
|
||||
<title>Generating Dependency Graphs</title>
|
||||
|
||||
<para>
|
||||
BitBake is able to generate dependency graphs using
|
||||
the <filename>dot</filename> syntax.
|
||||
You can convert these graphs into images using the
|
||||
<filename>dot</filename> tool from
|
||||
<ulink url='http://www.graphviz.org'>Graphviz</ulink>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
When you generate a dependency graph, BitBake writes four files
|
||||
to the current working directory:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis><filename>package-depends.dot</filename>:</emphasis>
|
||||
Shows BitBake's knowledge of dependencies between
|
||||
runtime targets.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>pn-depends.dot</filename>:</emphasis>
|
||||
Shows dependencies between build-time targets
|
||||
(i.e. recipes).
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>task-depends.dot</filename>:</emphasis>
|
||||
Shows dependencies between tasks.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>pn-buildlist</filename>:</emphasis>
|
||||
Shows a simple list of targets that are to be built.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
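<para>
For example, assuming the <filename>dot</filename> tool is installed,
a command such as the following renders the task dependency graph as
a PNG image:
<literallayout class='monospaced'>
     $ dot -Tpng -o task-depends.png task-depends.dot
</literallayout>
</para>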
|
||||
|
||||
<para>
|
||||
To omit common dependencies from the graph, use the "-I"
|
||||
option; BitBake then leaves those dependencies out of the graph.
|
||||
Leaving this information out can produce more readable graphs.
|
||||
This way, you can remove from the graph
|
||||
<filename>DEPENDS</filename> from inherited classes
|
||||
such as <filename>base.bbclass</filename>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Here are two examples that create dependency graphs.
|
||||
The second example omits dependencies that are common in OpenEmbedded from
|
||||
the graph:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake -g foo
|
||||
|
||||
$ bitbake -g -I virtual/kernel -I eglibc foo
|
||||
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
</section>
|
||||
</section>
|
||||
</chapter>
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,984 +0,0 @@
|
||||
/*
|
||||
Generic XHTML / DocBook XHTML CSS Stylesheet.
|
||||
|
||||
Browser wrangling and typographic design by
|
||||
Oyvind Kolas / pippin@gimp.org
|
||||
|
||||
Customised for Poky by
|
||||
Matthew Allum / mallum@o-hand.com
|
||||
|
||||
Thanks to:
|
||||
Liam R. E. Quin
|
||||
William Skaggs
|
||||
Jakub Steiner
|
||||
|
||||
Structure
|
||||
---------
|
||||
|
||||
The stylesheet is divided into the following sections:
|
||||
|
||||
Positioning
|
||||
Margins, paddings, width, font-size, clearing.
|
||||
Decorations
|
||||
Borders, style
|
||||
Colors
|
||||
Colors
|
||||
Graphics
|
||||
Graphical backgrounds
|
||||
Nasty IE tweaks
|
||||
Workarounds needed to make it work in internet explorer,
|
||||
currently makes the stylesheet non validating, but up until
|
||||
this point it is validating.
|
||||
Mozilla extensions
|
||||
Transparency for footer
|
||||
Rounded corners on boxes
|
||||
|
||||
*/
|
||||
|
||||
|
||||
/*************** /
|
||||
/ Positioning /
|
||||
/ ***************/
|
||||
|
||||
body {
|
||||
font-family: Verdana, Sans, sans-serif;
|
||||
|
||||
min-width: 640px;
|
||||
width: 80%;
|
||||
margin: 0em auto;
|
||||
padding: 2em 5em 5em 5em;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
h1,h2,h3,h4,h5,h6,h7 {
|
||||
font-family: Arial, Sans;
|
||||
color: #00557D;
|
||||
clear: both;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 2em;
|
||||
text-align: left;
|
||||
padding: 0em 0em 0em 0em;
|
||||
margin: 2em 0em 0em 0em;
|
||||
}
|
||||
|
||||
h2.subtitle {
|
||||
margin: 0.10em 0em 3.0em 0em;
|
||||
padding: 0em 0em 0em 0em;
|
||||
font-size: 1.8em;
|
||||
padding-left: 20%;
|
||||
font-weight: normal;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
h2 {
|
||||
margin: 2em 0em 0.66em 0em;
|
||||
padding: 0.5em 0em 0em 0em;
|
||||
font-size: 1.5em;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
h3.subtitle {
|
||||
margin: 0em 0em 1em 0em;
|
||||
padding: 0em 0em 0em 0em;
|
||||
font-size: 142.14%;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
h3 {
|
||||
margin: 1em 0em 0.5em 0em;
|
||||
padding: 1em 0em 0em 0em;
|
||||
font-size: 140%;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
h4 {
|
||||
margin: 1em 0em 0.5em 0em;
|
||||
padding: 1em 0em 0em 0em;
|
||||
font-size: 120%;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
h5 {
|
||||
margin: 1em 0em 0.5em 0em;
|
||||
padding: 1em 0em 0em 0em;
|
||||
font-size: 110%;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
h6 {
|
||||
margin: 1em 0em 0em 0em;
|
||||
padding: 1em 0em 0em 0em;
|
||||
font-size: 110%;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.authorgroup {
|
||||
background-color: transparent;
|
||||
background-repeat: no-repeat;
|
||||
padding-top: 256px;
|
||||
background-image: url("figures/bitbake-title.png");
|
||||
background-position: left top;
|
||||
margin-top: -256px;
|
||||
padding-right: 50px;
|
||||
margin-left: 0px;
|
||||
text-align: right;
|
||||
width: 740px;
|
||||
}
|
||||
|
||||
h3.author {
|
||||
margin: 0em 0em 0em 0em;
|
||||
padding: 0em 0em 0em 0em;
|
||||
font-weight: normal;
|
||||
font-size: 100%;
|
||||
color: #333;
|
||||
clear: both;
|
||||
}
|
||||
|
||||
.author tt.email {
|
||||
font-size: 66%;
|
||||
}
|
||||
|
||||
.titlepage hr {
|
||||
width: 0em;
|
||||
clear: both;
|
||||
}
|
||||
|
||||
.revhistory {
|
||||
padding-top: 2em;
|
||||
clear: both;
|
||||
}
|
||||
|
||||
.toc,
|
||||
.list-of-tables,
|
||||
.list-of-examples,
|
||||
.list-of-figures {
|
||||
padding: 1.33em 0em 2.5em 0em;
|
||||
color: #00557D;
|
||||
}
|
||||
|
||||
.toc p,
|
||||
.list-of-tables p,
|
||||
.list-of-figures p,
|
||||
.list-of-examples p {
|
||||
padding: 0em 0em 0em 0em;
|
||||
padding: 0em 0em 0.3em;
|
||||
margin: 1.5em 0em 0em 0em;
|
||||
}
|
||||
|
||||
.toc p b,
|
||||
.list-of-tables p b,
|
||||
.list-of-figures p b,
|
||||
.list-of-examples p b{
|
||||
font-size: 100.0%;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.toc dl,
|
||||
.list-of-tables dl,
|
||||
.list-of-figures dl,
|
||||
.list-of-examples dl {
|
||||
margin: 0em 0em 0.5em 0em;
|
||||
padding: 0em 0em 0em 0em;
|
||||
}
|
||||
|
||||
.toc dt {
|
||||
margin: 0em 0em 0em 0em;
|
||||
padding: 0em 0em 0em 0em;
|
||||
}
|
||||
|
||||
.toc dd {
|
||||
margin: 0em 0em 0em 2.6em;
|
||||
padding: 0em 0em 0em 0em;
|
||||
}
|
||||
|
||||
div.glossary dl,
|
||||
div.variablelist dl {
|
||||
}
|
||||
|
||||
.glossary dl dt,
|
||||
.variablelist dl dt,
|
||||
.variablelist dl dt span.term {
|
||||
font-weight: normal;
|
||||
width: 20em;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
.variablelist dl dt {
|
||||
margin-top: 0.5em;
|
||||
}
|
||||
|
||||
.glossary dl dd,
|
||||
.variablelist dl dd {
|
||||
margin-top: -1em;
|
||||
margin-left: 25.5em;
|
||||
}
|
||||
|
||||
.glossary dd p,
|
||||
.variablelist dd p {
|
||||
margin-top: 0em;
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
|
||||
div.calloutlist table td {
|
||||
padding: 0em 0em 0em 0em;
|
||||
margin: 0em 0em 0em 0em;
|
||||
}
|
||||
|
||||
div.calloutlist table td p {
|
||||
margin-top: 0em;
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
div p.copyright {
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
div.legalnotice p.legalnotice-title {
|
||||
margin-bottom: 0em;
|
||||
}
|
||||
|
||||
p {
|
||||
line-height: 1.5em;
|
||||
margin-top: 0em;
|
||||
|
||||
}
|
||||
|
||||
dl {
|
||||
padding-top: 0em;
|
||||
}
|
||||
|
||||
hr {
|
||||
border: solid 1px;
|
||||
}
|
||||
|
||||
|
||||
.mediaobject,
|
||||
.mediaobjectco {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
img {
|
||||
border: none;
|
||||
}
|
||||
|
||||
ul {
|
||||
padding: 0em 0em 0em 1.5em;
|
||||
}
|
||||
|
||||
ul li {
|
||||
padding: 0em 0em 0em 0em;
|
||||
}
|
||||
|
||||
ul li p {
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
table {
|
||||
width :100%;
|
||||
}
|
||||
|
||||
th {
|
||||
padding: 0.25em;
|
||||
text-align: left;
|
||||
font-weight: normal;
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
td {
|
||||
padding: 0.25em;
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
p a[id] {
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
display: inline;
|
||||
background-image: none;
|
||||
}
|
||||
|
||||
a {
|
||||
text-decoration: underline;
|
||||
color: #444;
|
||||
}
|
||||
|
||||
pre {
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
a:hover {
|
||||
text-decoration: underline;
|
||||
/*font-weight: bold;*/
|
||||
}
|
||||
|
||||
/* This style defines how the permalink character
|
||||
appears by itself and when hovered over with
|
||||
the mouse. */
|
||||
|
||||
[alt='Permalink'] { color: #eee; }
|
||||
[alt='Permalink']:hover { color: black; }
|
||||
|
||||
|
||||
div.informalfigure,
|
||||
div.informalexample,
|
||||
div.informaltable,
|
||||
div.figure,
|
||||
div.table,
|
||||
div.example {
|
||||
margin: 1em 0em;
|
||||
padding: 1em;
|
||||
page-break-inside: avoid;
|
||||
}
|
||||
|
||||
|
||||
div.informalfigure p.title b,
|
||||
div.informalexample p.title b,
|
||||
div.informaltable p.title b,
|
||||
div.figure p.title b,
|
||||
div.example p.title b,
|
||||
div.table p.title b{
|
||||
padding-top: 0em;
|
||||
margin-top: 0em;
|
||||
font-size: 100%;
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
.mediaobject .caption,
|
||||
.mediaobject .caption p {
|
||||
text-align: center;
|
||||
font-size: 80%;
|
||||
padding-top: 0.5em;
|
||||
padding-bottom: 0.5em;
|
||||
}
|
||||
|
||||
.epigraph {
|
||||
padding-left: 55%;
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
.epigraph p {
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.epigraph .quote {
|
||||
font-style: italic;
|
||||
}
|
||||
.epigraph .attribution {
|
||||
font-style: normal;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
span.application {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.programlisting {
|
||||
font-family: monospace;
|
||||
font-size: 80%;
|
||||
white-space: pre;
|
||||
margin: 1.33em 0em;
|
||||
padding: 1.33em;
|
||||
}
|
||||
|
||||
.tip,
|
||||
.warning,
|
||||
.caution,
|
||||
.note {
|
||||
margin-top: 1em;
|
||||
margin-bottom: 1em;
|
||||
|
||||
}
|
||||
|
||||
/* force full width of table within div */
|
||||
.tip table,
|
||||
.warning table,
|
||||
.caution table,
|
||||
.note table {
|
||||
border: none;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
|
||||
.tip table th,
|
||||
.warning table th,
|
||||
.caution table th,
|
||||
.note table th {
|
||||
padding: 0.8em 0.0em 0.0em 0.0em;
|
||||
margin : 0em 0em 0em 0em;
|
||||
}
|
||||
|
||||
.tip p,
|
||||
.warning p,
|
||||
.caution p,
|
||||
.note p {
|
||||
margin-top: 0.5em;
|
||||
margin-bottom: 0.5em;
|
||||
padding-right: 1em;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.acronym {
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
b.keycap,
|
||||
.keycap {
|
||||
padding: 0.09em 0.3em;
|
||||
margin: 0em;
|
||||
}
|
||||
|
||||
.itemizedlist li {
|
||||
clear: none;
|
||||
}
|
||||
|
||||
.filename {
|
||||
font-size: medium;
|
||||
font-family: Courier, monospace;
|
||||
}
|
||||
|
||||
|
||||
div.navheader, div.heading{
|
||||
position: absolute;
|
||||
left: 0em;
|
||||
top: 0em;
|
||||
width: 100%;
|
||||
background-color: #cdf;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
div.navfooter, div.footing{
|
||||
position: fixed;
|
||||
left: 0em;
|
||||
bottom: 0em;
|
||||
background-color: #eee;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
|
||||
div.navheader td,
|
||||
div.navfooter td {
|
||||
font-size: 66%;
|
||||
}
|
||||
|
||||
div.navheader table th {
|
||||
/*font-family: Georgia, Times, serif;*/
|
||||
/*font-size: x-large;*/
|
||||
font-size: 80%;
|
||||
}
|
||||
|
||||
div.navheader table {
|
||||
border-left: 0em;
|
||||
border-right: 0em;
|
||||
border-top: 0em;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
div.navfooter table {
|
||||
border-left: 0em;
|
||||
border-right: 0em;
|
||||
border-bottom: 0em;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
div.navheader table td a,
|
||||
div.navfooter table td a {
|
||||
color: #777;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
/* normal text in the footer */
|
||||
div.navfooter table td {
|
||||
color: black;
|
||||
}
|
||||
|
||||
div.navheader table td a:visited,
|
||||
div.navfooter table td a:visited {
|
||||
color: #444;
|
||||
}
|
||||
|
||||
|
||||
/* links in header and footer */
|
||||
div.navheader table td a:hover,
|
||||
div.navfooter table td a:hover {
|
||||
text-decoration: underline;
|
||||
background-color: transparent;
|
||||
color: #33a;
|
||||
}
|
||||
|
||||
div.navheader hr,
|
||||
div.navfooter hr {
|
||||
display: none;
|
||||
}
|
||||
|
||||
|
||||
.qandaset tr.question td p {
|
||||
margin: 0em 0em 1em 0em;
|
||||
padding: 0em 0em 0em 0em;
|
||||
}
|
||||
|
||||
.qandaset tr.answer td p {
|
||||
margin: 0em 0em 1em 0em;
|
||||
padding: 0em 0em 0em 0em;
|
||||
}
|
||||
.answer td {
|
||||
padding-bottom: 1.5em;
|
||||
}
|
||||
|
||||
.emphasis {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
|
||||
/************* /
|
||||
/ decorations /
|
||||
/ *************/
|
||||
|
||||
.titlepage {
|
||||
}
|
||||
|
||||
.part .title {
|
||||
}
|
||||
|
||||
.subtitle {
|
||||
border: none;
|
||||
}
|
||||
|
||||
/*
|
||||
h1 {
|
||||
border: none;
|
||||
}
|
||||
|
||||
h2 {
|
||||
border-top: solid 0.2em;
|
||||
border-bottom: solid 0.06em;
|
||||
}
|
||||
|
||||
h3 {
|
||||
border-top: 0em;
|
||||
border-bottom: solid 0.06em;
|
||||
}
|
||||
|
||||
h4 {
|
||||
border: 0em;
|
||||
border-bottom: solid 0.06em;
|
||||
}
|
||||
|
||||
h5 {
|
||||
border: 0em;
|
||||
}
|
||||
*/
|
||||
|
||||
.programlisting {
|
||||
border: solid 1px;
|
||||
}
|
||||
|
||||
div.figure,
|
||||
div.table,
|
||||
div.informalfigure,
|
||||
div.informaltable,
|
||||
div.informalexample,
|
||||
div.example {
|
||||
border: 1px solid;
|
||||
}
|
||||
|
||||
|
||||
|
||||
.tip,
|
||||
.warning,
|
||||
.caution,
|
||||
.note {
|
||||
border: 1px solid;
|
||||
}
|
||||
|
||||
.tip table th,
|
||||
.warning table th,
|
||||
.caution table th,
|
||||
.note table th {
|
||||
border-bottom: 1px solid;
|
||||
}
|
||||
|
||||
.question td {
|
||||
border-top: 1px solid black;
|
||||
}
|
||||
|
||||
.answer {
|
||||
}
|
||||
|
||||
|
||||
b.keycap,
|
||||
.keycap {
|
||||
border: 1px solid;
|
||||
}
|
||||
|
||||
|
||||
div.navheader, div.heading{
|
||||
border-bottom: 1px solid;
|
||||
}
|
||||
|
||||
|
||||
div.navfooter, div.footing{
|
||||
border-top: 1px solid;
|
||||
}
|
||||
|
||||
/********* /
|
||||
/ colors /
|
||||
/ *********/
|
||||
|
||||
body {
|
||||
color: #333;
|
||||
background: white;
|
||||
}
|
||||
|
||||
a {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
a:hover {
|
||||
background-color: #dedede;
|
||||
}
|
||||
|
||||
|
||||
h1,
|
||||
h2,
|
||||
h3,
|
||||
h4,
|
||||
h5,
|
||||
h6,
|
||||
h7,
|
||||
h8 {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
hr {
|
||||
border-color: #aaa;
|
||||
}
|
||||
|
||||
|
||||
.tip, .warning, .caution, .note {
|
||||
border-color: #fff;
|
||||
}
|
||||
|
||||
|
||||
.tip table th,
|
||||
.warning table th,
|
||||
.caution table th,
|
||||
.note table th {
|
||||
border-bottom-color: #fff;
|
||||
}
|
||||
|
||||
|
||||
.warning {
|
||||
background-color: #f0f0f2;
|
||||
}
|
||||
|
||||
.caution {
|
||||
background-color: #f0f0f2;
|
||||
}
|
||||
|
||||
.tip {
|
||||
background-color: #f0f0f2;
|
||||
}
|
||||
|
||||
.note {
|
||||
background-color: #f0f0f2;
|
||||
}
|
||||
|
||||
.glossary dl dt,
|
||||
.variablelist dl dt,
|
||||
.variablelist dl dt span.term {
|
||||
color: #044;
|
||||
}
|
||||
|
||||
div.figure,
|
||||
div.table,
|
||||
div.example,
|
||||
div.informalfigure,
|
||||
div.informaltable,
|
||||
div.informalexample {
|
||||
border-color: #aaa;
|
||||
}
|
||||
|
||||
pre.programlisting {
|
||||
color: black;
|
||||
background-color: #fff;
|
||||
border-color: #aaa;
|
||||
border-width: 2px;
|
||||
}
|
||||
|
||||
.guimenu,
|
||||
.guilabel,
|
||||
.guimenuitem {
|
||||
background-color: #eee;
|
||||
}
|
||||
|
||||
|
||||
b.keycap,
|
||||
.keycap {
|
||||
background-color: #eee;
|
||||
border-color: #999;
|
||||
}
|
||||
|
||||
|
||||
div.navheader {
|
||||
border-color: black;
|
||||
}
|
||||
|
||||
|
||||
div.navfooter {
|
||||
border-color: black;
|
||||
}
|
||||
|
||||
|
||||
/*********** /
|
||||
/ graphics /
|
||||
/ ***********/
|
||||
|
||||
/*
|
||||
body {
|
||||
background-image: url("images/body_bg.jpg");
|
||||
background-attachment: fixed;
|
||||
}
|
||||
|
||||
.navheader,
|
||||
.note,
|
||||
.tip {
|
||||
background-image: url("images/note_bg.jpg");
|
||||
background-attachment: fixed;
|
||||
}
|
||||
|
||||
.warning,
|
||||
.caution {
|
||||
background-image: url("images/warning_bg.jpg");
|
||||
background-attachment: fixed;
|
||||
}
|
||||
|
||||
.figure,
|
||||
.informalfigure,
|
||||
.example,
|
||||
.informalexample,
|
||||
.table,
|
||||
.informaltable {
|
||||
background-image: url("images/figure_bg.jpg");
|
||||
background-attachment: fixed;
|
||||
}
|
||||
|
||||
*/
|
||||
h1,
|
||||
h2,
|
||||
h3,
|
||||
h4,
|
||||
h5,
|
||||
h6,
|
||||
h7{
|
||||
}
|
||||
|
||||
/*
|
||||
Example of how to stick an image as part of the title.
|
||||
|
||||
div.article .titlepage .title
|
||||
{
|
||||
background-image: url("figures/white-on-black.png");
|
||||
background-position: center;
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
*/
|
||||
|
||||
div.preface .titlepage .title,
|
||||
div.colophon .title,
|
||||
div.chapter .titlepage .title,
|
||||
div.article .titlepage .title
|
||||
{
|
||||
}
|
||||
|
||||
div.section div.section .titlepage .title,
|
||||
div.sect2 .titlepage .title {
|
||||
background: none;
|
||||
}
|
||||
|
||||
|
||||
h1.title {
|
||||
background-color: transparent;
|
||||
background-repeat: no-repeat;
|
||||
height: 256px;
|
||||
text-indent: -9000px;
|
||||
overflow:hidden;
|
||||
}
|
||||
|
||||
h2.subtitle {
|
||||
background-color: transparent;
|
||||
text-indent: -9000px;
|
||||
overflow:hidden;
|
||||
width: 0px;
|
||||
display: none;
|
||||
}
|
||||
|
||||
/*************************************** /
|
||||
/ pippin.gimp.org specific alterations /
|
||||
/ ***************************************/
|
||||
|
||||
/*
|
||||
div.heading, div.navheader {
|
||||
color: #777;
|
||||
font-size: 80%;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
text-align: left;
|
||||
position: absolute;
|
||||
top: 0px;
|
||||
left: 0px;
|
||||
width: 100%;
|
||||
height: 50px;
|
||||
background: url('/gfx/heading_bg.png') transparent;
|
||||
background-repeat: repeat-x;
|
||||
background-attachment: fixed;
|
||||
border: none;
|
||||
}
|
||||
|
||||
div.heading a {
|
||||
color: #444;
|
||||
}
|
||||
|
||||
div.footing, div.navfooter {
|
||||
border: none;
|
||||
color: #ddd;
|
||||
font-size: 80%;
|
||||
text-align:right;
|
||||
|
||||
width: 100%;
|
||||
padding-top: 10px;
|
||||
position: absolute;
|
||||
bottom: 0px;
|
||||
left: 0px;
|
||||
|
||||
background: url('/gfx/footing_bg.png') transparent;
|
||||
}
|
||||
*/
|
||||
|
||||
|
||||
|
||||
/****************** /
|
||||
/ nasty ie tweaks /
|
||||
/ ******************/
|
||||
|
||||
/*
|
||||
div.heading, div.navheader {
|
||||
width:expression(document.body.clientWidth + "px");
|
||||
}
|
||||
|
||||
div.footing, div.navfooter {
|
||||
width:expression(document.body.clientWidth + "px");
|
||||
margin-left:expression("-5em");
|
||||
}
|
||||
body {
|
||||
padding:expression("4em 5em 0em 5em");
|
||||
}
|
||||
*/
|
||||
|
||||
/**************************************** /
|
||||
/ mozilla vendor specific css extensions /
|
||||
/ ****************************************/
|
||||
/*
|
||||
div.navfooter, div.footing{
|
||||
-moz-opacity: 0.8em;
|
||||
}
|
||||
|
||||
div.figure,
|
||||
div.table,
|
||||
div.informalfigure,
|
||||
div.informaltable,
|
||||
div.informalexample,
|
||||
div.example,
|
||||
.tip,
|
||||
.warning,
|
||||
.caution,
|
||||
.note {
|
||||
-moz-border-radius: 0.5em;
|
||||
}
|
||||
|
||||
b.keycap,
|
||||
.keycap {
|
||||
-moz-border-radius: 0.3em;
|
||||
}
|
||||
*/
|
||||
|
||||
table tr td table tr td {
|
||||
display: none;
|
||||
}
|
||||
|
||||
|
||||
hr {
|
||||
display: none;
|
||||
}
|
||||
|
||||
table {
|
||||
border: 0em;
|
||||
}
|
||||
|
||||
.photo {
|
||||
float: right;
|
||||
margin-left: 1.5em;
|
||||
margin-bottom: 1.5em;
|
||||
margin-top: 0em;
|
||||
max-width: 17em;
|
||||
border: 1px solid gray;
|
||||
padding: 3px;
|
||||
background: white;
|
||||
}
|
||||
.seperator {
|
||||
padding-top: 2em;
|
||||
clear: both;
|
||||
}
|
||||
|
||||
#validators {
|
||||
margin-top: 5em;
|
||||
text-align: right;
|
||||
color: #777;
|
||||
}
|
||||
@media print {
|
||||
body {
|
||||
font-size: 8pt;
|
||||
}
|
||||
.noprint {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
.tip,
|
||||
.note {
|
||||
background: #f0f0f2;
|
||||
color: #333;
|
||||
padding: 20px;
|
||||
margin: 20px;
|
||||
}
|
||||
|
||||
.tip h3,
|
||||
.note h3 {
|
||||
padding: 0em;
|
||||
margin: 0em;
|
||||
font-size: 2em;
|
||||
font-weight: bold;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
.tip a,
|
||||
.note a {
|
||||
color: #333;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.footnote {
|
||||
font-size: small;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
/* Changes the announcement text */
|
||||
.tip h3,
|
||||
.warning h3,
|
||||
.caution h3,
|
||||
.note h3 {
|
||||
font-size:large;
|
||||
color: #00557D;
|
||||
}
|
||||
@@ -1,88 +0,0 @@
|
||||
<!DOCTYPE book PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
|
||||
<book id='bitbake-user-manual' lang='en'
|
||||
xmlns:xi="http://www.w3.org/2003/XInclude"
|
||||
xmlns="http://docbook.org/ns/docbook"
|
||||
>
|
||||
<bookinfo>
|
||||
|
||||
<mediaobject>
|
||||
<imageobject>
|
||||
<imagedata fileref='figures/bitbake-title.png'
|
||||
format='SVG'
|
||||
align='left' scalefit='1' width='100%'/>
|
||||
</imageobject>
|
||||
</mediaobject>
|
||||
|
||||
<title>
|
||||
BitBake User Manual
|
||||
</title>
|
||||
|
||||
<authorgroup>
|
||||
<author>
|
||||
<firstname>Richard Purdie, Chris Larson, and </firstname> <surname>Phil Blundell</surname>
|
||||
<affiliation>
|
||||
<orgname>BitBake Community</orgname>
|
||||
</affiliation>
|
||||
<email>bitbake-devel@lists.openembedded.org</email>
|
||||
</author>
|
||||
</authorgroup>
|
||||
|
||||
<!--
|
||||
# Add in some revision history if we want it here.
|
||||
<revhistory>
|
||||
<revision>
|
||||
<revnumber>x.x</revnumber>
|
||||
<date>dd month year</date>
|
||||
<revremark>Some relevent comment</revremark>
|
||||
</revision>
|
||||
<revision>
|
||||
<revnumber>x.x</revnumber>
|
||||
<date>dd month year</date>
|
||||
<revremark>Some relevent comment</revremark>
|
||||
</revision>
|
||||
<revision>
|
||||
<revnumber>x.x</revnumber>
|
||||
<date>dd month year</date>
|
||||
<revremark>Some relevent comment</revremark>
|
||||
</revision>
|
||||
<revision>
|
||||
<revnumber>x.x</revnumber>
|
||||
<date>dd month year</date>
|
||||
<revremark>Some relevent comment</revremark>
|
||||
</revision>
|
||||
</revhistory>
|
||||
-->
|
||||
|
||||
<copyright>
|
||||
<year>2004-2016</year>
|
||||
<holder>Richard Purdie</holder>
|
||||
<holder>Chris Larson</holder>
|
||||
<holder>and Phil Blundell</holder>
|
||||
</copyright>
|
||||
|
||||
<legalnotice>
|
||||
<para>
|
||||
This work is licensed under the Creative Commons Attribution License.
|
||||
To view a copy of this license, visit
|
||||
<ulink url="http://creativecommons.org/licenses/by/2.5/">http://creativecommons.org/licenses/by/2.5/</ulink>
|
||||
or send a letter to Creative Commons, 444 Castro Street,
|
||||
Suite 900, Mountain View, California 94041, USA.
|
||||
</para>
|
||||
</legalnotice>
|
||||
</bookinfo>
|
||||
|
||||
<xi:include href="bitbake-user-manual-intro.xml"/>
|
||||
|
||||
<xi:include href="bitbake-user-manual-execution.xml"/>
|
||||
|
||||
<xi:include href="bitbake-user-manual-metadata.xml"/>
|
||||
|
||||
<xi:include href="bitbake-user-manual-fetching.xml"/>
|
||||
|
||||
<xi:include href="bitbake-user-manual-ref-variables.xml"/>
|
||||
|
||||
<xi:include href="bitbake-user-manual-hello.xml"/>
|
||||
|
||||
</book>
|
||||
Binary file not shown.
|
@@ -32,7 +32,7 @@ command.
|
||||
\fBbitbake\fP is a program that executes the specified task (default is 'build')
|
||||
for a given set of BitBake files.
|
||||
.br
|
||||
It expects that BBFILES is defined, which is a space separated list of files to
|
||||
It expects that BBFILES is defined, which is a space seperated list of files to
|
||||
be executed. BBFILES does support wildcards.
|
||||
.br
|
||||
Default BBFILES are the .bb files in the current directory.
|
||||
@@ -67,7 +67,7 @@ drop into the interactive mode also called the BitBake shell.
|
||||
Specify task to execute. Note that this only executes the specified task for
|
||||
the providee and the packages it depends on, i.e. 'compile' does not implicitly
|
||||
call stage for the dependencies (IOW: use only if you know what you are doing).
|
||||
Depending on the base.bbclass a listtasks task is defined and will show
|
||||
Depending on the base.bbclass a listtaks tasks is defined and will show
|
||||
available tasks.
|
||||
.TP
|
||||
.B \-rFILE, \-\-read=FILE
|
||||
@@ -85,11 +85,14 @@ don't execute, just go through the motions
|
||||
.B \-p, \-\-parse-only
|
||||
quit after parsing the BB files (developers only)
|
||||
.TP
|
||||
.B \-d, \-\-disable-psyco
|
||||
disable using the psyco just-in-time compiler (not recommended)
|
||||
.TP
|
||||
.B \-s, \-\-show-versions
|
||||
show current and preferred versions of all packages
|
||||
.TP
|
||||
.B \-e, \-\-environment
|
||||
show the global or per-recipe environment (this is what used to be bbread)
|
||||
show the global or per-package environment (this is what used to be bbread)
|
||||
.TP
|
||||
.B \-g, \-\-graphviz
|
||||
emit the dependency trees of the specified packages in the dot syntax
|
||||
@@ -104,30 +107,6 @@ Show debug logging for the specified logging domains
|
||||
.B \-P, \-\-profile
|
||||
profile the command and print a report
|
||||
.TP
|
||||
.B \-uUI, \-\-ui=UI
|
||||
User interface to use. Currently, hob, depexp, goggle or ncurses can be specified as UI.
|
||||
.TP
|
||||
.B \-tSERVERTYPE, \-\-servertype=SERVERTYPE
|
||||
Choose which server to use, none, process or xmlrpc.
|
||||
.TP
|
||||
.B \-\-revisions-changed
|
||||
Set the exit code depending on whether upstream floating revisions have changed or not.
|
||||
.TP
|
||||
.B \-\-server-only
|
||||
Run bitbake without UI, the frontend can connect with bitbake server itself.
|
||||
.TP
|
||||
.B \-BBIND, \-\-bind=BIND
|
||||
The name/address for the bitbake server to bind to.
|
||||
.TP
|
||||
.B \-\-no\-setscene
|
||||
Do not run any setscene tasks, forces builds.
|
||||
|
||||
.SH ENVIRONMENT VARIABLES
|
||||
bitbake uses the following environment variables to control its
|
||||
operation:
|
||||
.TP
|
||||
.B BITBAKE_UI
|
||||
The bitbake user interface; overridden by the \fB-u\fP commandline option.
|
||||
|
||||
.SH AUTHORS
|
||||
BitBake was written by
|
||||
|
||||
56
bitbake/doc/manual/Makefile
Normal file
@@ -0,0 +1,56 @@
|
||||
topdir = .
|
||||
manual = $(topdir)/usermanual.xml
|
||||
# types = pdf txt rtf ps xhtml html man tex texi dvi
|
||||
# types = pdf txt
|
||||
types = $(xmltotypes) $(htmltypes)
|
||||
xmltotypes = pdf txt
|
||||
htmltypes = html xhtml
|
||||
htmlxsl = $(if $(filter $@,$(foreach type,$(htmltypes),$(type)-nochunks)),http://docbook.sourceforge.net/release/xsl/current/xhtml/docbook.xsl,http://docbook.sourceforge.net/release/xsl/current/$@/chunk.xsl)
|
||||
htmlcssfile = docbook.css
|
||||
htmlcss = $(topdir)/html.css
|
||||
# htmlcssfile =
|
||||
# htmlcss =
|
||||
cleanfiles = $(foreach i,$(types),$(topdir)/$(i))
|
||||
|
||||
ifdef DEBUG
|
||||
define command
|
||||
$(1)
|
||||
endef
|
||||
else
|
||||
define command
|
||||
@echo $(2) $(3) $(4)
|
||||
@$(1) >/dev/null
|
||||
endef
|
||||
endif
|
||||
|
||||
all: $(types)
|
||||
|
||||
lint: $(manual) FORCE
|
||||
$(call command,xmllint --xinclude --postvalid --noout $(manual),XMLLINT $(manual))
|
||||
|
||||
$(types) $(foreach type,$(htmltypes),$(type)-nochunks): lint FORCE
|
||||
|
||||
$(foreach type,$(htmltypes),$(type)-nochunks): $(if $(htmlcss),$(htmlcss)) $(manual)
|
||||
@mkdir -p $@
|
||||
ifdef htmlcss
|
||||
$(call command,install -m 0644 $(htmlcss) $@/$(htmlcssfile),CP $(htmlcss) $@/$(htmlcssfile))
|
||||
endif
|
||||
$(call command,xsltproc --stringparam base.dir $@/ $(if $(htmlcssfile),--stringparam html.stylesheet $(htmlcssfile)) $(htmlxsl) $(manual) > $@/index.$(patsubst %-nochunks,%,$@),XSLTPROC $@ $(manual))
|
||||
|
||||
$(htmltypes): $(if $(htmlcss),$(htmlcss)) $(manual)
|
||||
@mkdir -p $@
|
||||
ifdef htmlcss
|
||||
$(call command,install -m 0644 $(htmlcss) $@/$(htmlcssfile),CP $(htmlcss) $@/$(htmlcssfile))
|
||||
endif
|
||||
$(call command,xsltproc --stringparam base.dir $@/ $(if $(htmlcssfile),--stringparam html.stylesheet $(htmlcssfile)) $(htmlxsl) $(manual),XSLTPROC $@ $(manual))
|
||||
|
||||
$(xmltotypes): $(manual)
|
||||
$(call command,xmlto --extensions -o $(topdir)/$@ $@ $(manual),XMLTO $@ $(manual))
|
||||
|
||||
clean:
|
||||
rm -rf $(cleanfiles)
|
||||
|
||||
$(foreach i,$(types) $(foreach type,$(htmltypes),$(type)-nochunks),clean-$(i)):
|
||||
rm -rf $(patsubst clean-%,%,$@)
|
||||
|
||||
FORCE:
|
||||
519
bitbake/doc/manual/usermanual.xml
Normal file
@@ -0,0 +1,519 @@
|
||||
<?xml version="1.0"?>
|
||||
<!--
|
||||
ex:ts=4:sw=4:sts=4:et
|
||||
-*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
-->
|
||||
<!DOCTYPE book PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
||||
<book>
|
||||
<bookinfo>
|
||||
<title>BitBake User Manual</title>
|
||||
<authorgroup>
|
||||
<corpauthor>BitBake Team</corpauthor>
|
||||
</authorgroup>
|
||||
<copyright>
|
||||
<year>2004, 2005, 2006</year>
|
||||
<holder>Chris Larson</holder>
|
||||
<holder>Phil Blundell</holder>
|
||||
</copyright>
|
||||
<legalnotice>
|
||||
<para>This work is licensed under the Creative Commons Attribution License. To view a copy of this license, visit <ulink url="http://creativecommons.org/licenses/by/2.5/">http://creativecommons.org/licenses/by/2.5/</ulink> or send a letter to Creative Commons, 559 Nathan Abbott Way, Stanford, California 94305, USA.</para>
|
||||
</legalnotice>
|
||||
</bookinfo>
|
||||
<chapter>
|
||||
<title>Introduction</title>
|
||||
<section>
|
||||
<title>Overview</title>
|
||||
<para>BitBake is, at its simplest, a tool for executing
|
||||
tasks and managing metadata. As such, its similarities to GNU make and other
|
||||
build tools are readily apparent. It was inspired by Portage, the package management system used by the Gentoo Linux distribution. BitBake is the basis of the <ulink url="http://www.openembedded.org/">OpenEmbedded</ulink> project, which is being used to build and maintain a number of embedded Linux distributions, including OpenZaurus and Familiar.</para>
|
||||
</section>
|
||||
<section>
|
||||
<title>Background and Goals</title>
|
||||
<para>Prior to BitBake, no other build tool adequately met
|
||||
the needs of an aspiring embedded Linux distribution. All of the
|
||||
buildsystems used by traditional desktop Linux distributions lacked
|
||||
important functionality, and none of the ad-hoc
|
||||
<emphasis>buildroot</emphasis> systems, prevalent in the
|
||||
embedded space, were scalable or maintainable.</para>
|
||||
|
||||
<para>Some important goals for BitBake were:
|
||||
<itemizedlist>
|
||||
<listitem><para>Handle crosscompilation.</para></listitem>
|
||||
<listitem><para>Handle interpackage dependencies (build time on target architecture, build time on native architecture, and runtime).</para></listitem>
|
||||
<listitem><para>Support running any number of tasks within a given package, including, but not limited to, fetching upstream sources, unpacking them, patching them, configuring them, et cetera.</para></listitem>
|
||||
<listitem><para>Must be linux distribution agnostic (both build and target).</para></listitem>
|
||||
<listitem><para>Must be architecture agnostic</para></listitem>
|
||||
<listitem><para>Must support multiple build and target operating systems (including cygwin, the BSDs, etc).</para></listitem>
|
||||
<listitem><para>Must be able to be self contained, rather than tightly integrated into the build machine's root filesystem.</para></listitem>
|
||||
<listitem><para>There must be a way to handle conditional metadata (on target architecture, operating system, distribution, machine).</para></listitem>
|
||||
<listitem><para>It must be easy for the person using the tools to supply their own local metadata and packages to operate against.</para></listitem>
|
||||
<listitem><para>Must make it easy to collaborate
|
||||
between multiple projects using BitBake for their
|
||||
builds.</para></listitem>
|
||||
<listitem><para>Should provide an inheritance mechanism to
|
||||
share common metadata between many packages.</para></listitem>
|
||||
<listitem><para>Et cetera...</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
<para>BitBake satisfies all these and many more. Flexibility and power have always been the priorities. It is highly extensible, supporting embedded Python code and execution of any arbitrary tasks.</para>
|
||||
</section>
|
||||
</chapter>
|
||||
<chapter>
|
||||
<title>Metadata</title>
|
||||
<section>
|
||||
<title>Description</title>
|
||||
<itemizedlist>
|
||||
<para>BitBake metadata can be classified into 3 major areas:</para>
|
||||
<listitem>
|
||||
<para>Configuration Files</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>.bb Files</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>Classes</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
<para>What follows are a large number of examples of BitBake metadata. Any syntax which isn't supported in any of the aforementioned areas will be documented as such.</para>
|
||||
<section>
|
||||
<title>Basic variable setting</title>
|
||||
<para><screen><varname>VARIABLE</varname> = "value"</screen></para>
|
||||
<para>In this example, <varname>VARIABLE</varname> is <literal>value</literal>.</para>
|
||||
</section>
|
||||
<section>
|
||||
<title>Variable expansion</title>
|
||||
<para>BitBake supports variables referencing one another's contents using a syntax which is similar to shell scripting</para>
|
||||
<para><screen><varname>A</varname> = "aval"
|
||||
<varname>B</varname> = "pre${A}post"</screen></para>
|
||||
<para>This results in <varname>A</varname> containing <literal>aval</literal> and <varname>B</varname> containing <literal>preavalpost</literal>.</para>
|
||||
</section>
|
||||
<section>
|
||||
<title>Immediate variable expansion (:=)</title>
|
||||
<para>:= results in a variable's contents being expanded immediately, rather than when the variable is actually used.</para>
|
||||
<para><screen><varname>T</varname> = "123"
|
||||
<varname>A</varname> := "${B} ${A} test ${T}"
|
||||
<varname>T</varname> = "456"
|
||||
<varname>B</varname> = "${T} bval"
|
||||
|
||||
<varname>C</varname> = "cval"
|
||||
<varname>C</varname> := "${C}append"</screen></para>
|
||||
<para>In that example, <varname>A</varname> would contain <literal> test 123</literal>, <varname>B</varname> would contain <literal>456 bval</literal>, and <varname>C</varname> would be <literal>cvalappend</literal>.</para>
|
||||
</section>
|
||||
<section>
|
||||
<title>Appending (+=) and prepending (=+)</title>
|
||||
<para><screen><varname>B</varname> = "bval"
|
||||
<varname>B</varname> += "additionaldata"
|
||||
<varname>C</varname> = "cval"
|
||||
<varname>C</varname> =+ "test"</screen></para>
|
||||
<para>In this example, <varname>B</varname> is now <literal>bval additionaldata</literal> and <varname>C</varname> is <literal>test cval</literal>.</para>
|
||||
</section>
|
||||
<section>
|
||||
<title>Appending (.=) and prepending (=.) without spaces</title>
|
||||
<para><screen><varname>B</varname> = "bval"
|
||||
<varname>B</varname> .= "additionaldata"
|
||||
<varname>C</varname> = "cval"
|
||||
<varname>C</varname> =. "test"</screen></para>
|
||||
<para>In this example, <varname>B</varname> is now <literal>bvaladditionaldata</literal> and <varname>C</varname> is <literal>testcval</literal>. In contrast to the above Appending and Prepending operators no additional space
|
||||
will be introduced.</para>
|
||||
</section>
|
||||
<section>
|
||||
<title>Conditional metadata set</title>
|
||||
<para>OVERRIDES is a <quote>:</quote> separated variable containing the items for which you want to satisfy conditions. So, if you have a variable which is conditional on <quote>arm</quote>, and <quote>arm</quote> is in OVERRIDES, then the <quote>arm</quote> specific version of the variable is used rather than the non-conditional version. Example:</para>
|
||||
<para><screen><varname>OVERRIDES</varname> = "architecture:os:machine"
|
||||
<varname>TEST</varname> = "defaultvalue"
|
||||
<varname>TEST_os</varname> = "osspecificvalue"
|
||||
<varname>TEST_condnotinoverrides</varname> = "othercondvalue"</screen></para>
|
||||
<para>In this example, <varname>TEST</varname> would be <literal>osspecificvalue</literal>, due to the condition <quote>os</quote> being in <varname>OVERRIDES</varname>.</para>
|
||||
</section>
|
||||
<section>
|
||||
<title>Conditional appending</title>
|
||||
<para>BitBake also supports appending and prepending to variables based on whether something is in OVERRIDES. Example:</para>
|
||||
<para><screen><varname>DEPENDS</varname> = "glibc ncurses"
|
||||
<varname>OVERRIDES</varname> = "machine:local"
|
||||
<varname>DEPENDS_append_machine</varname> = " libmad"</screen></para>
|
||||
<para>In this example, <varname>DEPENDS</varname> is set to <literal>glibc ncurses libmad</literal>.</para>
|
||||
</section>
|
||||
<section>
|
||||
<title>Inclusion</title>
|
||||
<para>Next, there is the <literal>include</literal> directive, which causes BitBake to parse in whatever file you specify, and insert it at that location, which is not unlike <command>make</command>. However, if the path specified on the <literal>include</literal> line is a relative path, BitBake will locate the first one it can find within <envar>BBPATH</envar>.</para>
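<para>As an illustration (the path shown here is hypothetical), an include line looks like this:</para>
<para><screen>include foo/defaults.inc</screen></para>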
|
||||
</section>
|
||||
<section>
|
||||
<title>Requiring Inclusion</title>
|
||||
<para>In contrast to the <literal>include</literal> directive, <literal>require</literal> will
|
||||
raise a ParseError if the file to be included cannot be found. Otherwise it will behave just like the <literal>
|
||||
include</literal> directive.</para>
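<para>A hypothetical example, identical in form to <literal>include</literal>:</para>
<para><screen>require foo/defaults.inc</screen></para>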
|
||||
</section>
|
||||
<section>
|
||||
<title>Python variable expansion</title>
|
||||
<para><screen><varname>DATE</varname> = "${@time.strftime('%Y%m%d',time.gmtime())}"</screen></para>
|
||||
<para>This would result in the <varname>DATE</varname> variable containing today's date.</para>
|
||||
</section>
|
||||
<section>
|
||||
<title>Defining executable metadata</title>
|
||||
<para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
|
||||
<para><screen>do_mytask () {
|
||||
echo "Hello, world!"
|
||||
}</screen></para>
|
||||
<para>This is essentially identical to setting a variable, except that this variable happens to be executable shell code.</para>
|
||||
<para><screen>python do_printdate () {
|
||||
import time
|
||||
print time.strftime('%Y%m%d', time.gmtime())
|
||||
}</screen></para>
|
||||
<para>This is similar to the previous example, but it flags the function as python so that BitBake knows it is Python code.</para>
</section>

<section>
<title>Defining Python functions in the global Python namespace</title>
<para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
<para><screen>def get_depends(bb, d):
    if bb.data.getVar('SOMECONDITION', d, True):
        return "dependencywithcond"
    else:
        return "dependency"

<varname>SOMECONDITION</varname> = "1"
<varname>DEPENDS</varname> = "${@get_depends(bb, d)}"</screen></para>
<para>This would result in <varname>DEPENDS</varname> containing <literal>dependencywithcond</literal>.</para>
</section>

<section>
<title>Variable Flags</title>
<para>Variables can have associated flags which provide a way of tagging extra information onto a variable. Several flags are used internally by BitBake, but they can be used externally too if needed. The standard operations mentioned above also work on flags.</para>
<para><screen><varname>VARIABLE</varname>[<varname>SOMEFLAG</varname>] = "value"</screen></para>
<para>In this example, <varname>VARIABLE</varname> has a flag, <varname>SOMEFLAG</varname>, which is set to <literal>value</literal>.</para>
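<para>Since the standard operations also apply to flags, a flag value can, for example, be appended to in the same way as an ordinary variable. A brief sketch (the flag name is hypothetical):</para>
<para><screen><varname>VARIABLE</varname>[<varname>SOMEFLAG</varname>] += " another-value"</screen></para>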
</section>

<section>
<title>Inheritance</title>
<para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
<para>The <literal>inherit</literal> directive is a means of specifying what classes of functionality your .bb requires. It is a rudimentary form of inheritance. For example, you can easily abstract out the tasks involved in building a package that uses autoconf and automake, and put that into a bbclass for your packages to make use of. A given bbclass is located by searching for classes/filename.bbclass in <envar>BBPATH</envar>, where filename is what you inherited.</para>
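<para>For example, assuming a classes/autotools.bbclass exists somewhere in <envar>BBPATH</envar> and encapsulates the autoconf/automake handling mentioned above, a .bb file would pull all of that in with a single line:</para>
<para><screen>inherit autotools</screen></para>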
</section>

<section>
<title>Tasks</title>
<para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
<para>In BitBake, each step that needs to be run for a given .bb is known as a task. There is an <literal>addtask</literal> command to add new tasks (each task must be defined as executable metadata and its name must start with <quote>do_</quote>) and to describe intertask dependencies.</para>
<para><screen>python do_printdate () {
    import time
    print time.strftime('%Y%m%d', time.gmtime())
}

addtask printdate before do_build</screen></para>
<para>This defines the necessary python function and adds it as a task which is now a dependency of do_build (the default task). If anyone executes the do_build task, that will result in do_printdate being run first.</para>
</section>

<section>
<title>Events</title>
<para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
<para>BitBake allows the installation of event handlers. Events are triggered at certain points during operation, such as the beginning of operation against a given .bb, the start of a given task, task failure, task success, et cetera. The intent is to make it easy to do things like email notifications on build failure.</para>
<para><screen>addhandler myclass_eventhandler
python myclass_eventhandler() {
    from bb.event import NotHandled, getName
    from bb import data

    print "The name of the Event is %s" % getName(e)
    print "The file we run for is %s" % data.getVar('FILE', e.data, True)

    return NotHandled
}
</screen></para>
<para>This event handler gets called every time an event is triggered. A global variable <varname>e</varname> is defined, and <varname>e</varname>.data contains an instance of bb.data. With the getName(<varname>e</varname>) method one can get the name of the triggered event.</para>
<para>The above event handler prints the name of the event and the content of the <varname>FILE</varname> variable.</para>
</section>
</section>

<section>
<title>Dependency Handling</title>
<para>BitBake 1.7.x onwards works with the metadata at the task level, since this is optimal when dealing with multiple threads of execution. A robust method of specifying task dependencies is therefore needed.</para>
<section>
<title>Dependencies internal to the .bb file</title>
<para>Where the dependencies are internal to a given .bb file, the dependencies are handled by the previously detailed addtask directive.</para>
</section>

<section>
<title>DEPENDS</title>
<para>DEPENDS is taken to specify build time dependencies. The 'deptask' flag for tasks is used to signify the task of each item in DEPENDS which must have completed before that task can be executed.</para>
<para><screen>do_configure[deptask] = "do_populate_staging"</screen></para>
<para>means the do_populate_staging task of each item in DEPENDS must have completed before do_configure can execute.</para>
</section>

<section>
<title>RDEPENDS</title>
<para>RDEPENDS is taken to specify runtime dependencies. The 'rdeptask' flag for tasks is used to signify the task of each item in RDEPENDS which must have completed before that task can be executed.</para>
<para><screen>do_package_write[rdeptask] = "do_package"</screen></para>
<para>means the do_package task of each item in RDEPENDS must have completed before do_package_write can execute.</para>
</section>

<section>
<title>Recursive DEPENDS</title>
<para>These are specified with the 'recdeptask' flag, which is used to signify the task(s) of each item in DEPENDS which must have completed before that task can be executed. It applies recursively, so the DEPENDS of each item in the original DEPENDS must also be met, and so on.</para>
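<para>As a sketch (the task name here is hypothetical), the following would require do_populate_staging to have completed for every item in DEPENDS, and recursively for their DEPENDS as well, before do_mytask can execute:</para>
<para><screen>do_mytask[recdeptask] = "do_populate_staging"</screen></para>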
</section>

<section>
<title>Recursive RDEPENDS</title>
<para>These are specified with the 'recrdeptask' flag, which is used to signify the task(s) of each item in RDEPENDS which must have completed before that task can be executed. It applies recursively, so the RDEPENDS of each item in the original RDEPENDS must also be met, and so on. It also covers all DEPENDS first.</para>
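<para>Again as a hypothetical sketch, the following would require do_package to have completed for every item in RDEPENDS, recursively, before do_mytask can run:</para>
<para><screen>do_mytask[recrdeptask] = "do_package"</screen></para>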
</section>

<section>
<title>Inter Task</title>
<para>The 'depends' flag for tasks is a more generic form of dependency which allows an interdependency on specific tasks, rather than specifying the data in DEPENDS or RDEPENDS.</para>
<para><screen>do_patch[depends] = "quilt-native:do_populate_staging"</screen></para>
<para>means the do_populate_staging task of the target quilt-native must have completed before do_patch can execute.</para>
</section>
</section>

<section>
<title>Parsing</title>
<section>
<title>Configuration Files</title>
<para>The first of the classifications of metadata in BitBake is configuration metadata. This metadata is global, and therefore affects <emphasis>all</emphasis> packages and tasks which are executed. Currently, BitBake has hardcoded knowledge of a single configuration file. It expects to find 'conf/bitbake.conf' somewhere in the user specified <envar>BBPATH</envar>. That configuration file generally has include directives to pull in any other metadata (generally files specific to architecture, machine, <emphasis>local</emphasis> and so on).</para>
<para>Only variable definitions and include directives are allowed in .conf files.</para>
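<para>For illustration, a minimal configuration fragment might consist of nothing but simple assignments and includes; the included file name here is hypothetical, while the variables are described elsewhere in this manual:</para>
<para><screen><varname>BB_NUMBER_THREADS</varname> = "4"
<varname>BBFILES</varname> = "/path/to/bbfiles/*.bb"
include conf/site.conf</screen></para>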
</section>

<section>
<title>Classes</title>
<para>BitBake classes are our rudimentary inheritance mechanism. As briefly mentioned in the metadata introduction, they're parsed when an <literal>inherit</literal> directive is encountered, and they are located in classes/ relative to the directories in <envar>BBPATH</envar>.</para>
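<para>A class is simply ordinary executable metadata kept in its own file. As a minimal sketch (the class and task names are made up), a classes/mything.bbclass containing the following could be shared by any recipe that does <literal>inherit mything</literal>:</para>
<para><screen>do_mything () {
    echo "shared functionality"
}
addtask mything before do_build</screen></para>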
</section>

<section>
<title>.bb Files</title>
<para>A BitBake (.bb) file is a logical unit of tasks to be executed. Normally this is a package to be built. Inter-.bb dependencies are obeyed. The files themselves are located via the <varname>BBFILES</varname> variable, which is set to a space separated list of .bb files, and does handle wildcards.</para>
</section>
</section>
</chapter>

<chapter>
<title>File Download support</title>
<section>
<title>Overview</title>
<para>BitBake provides support for downloading files; this procedure is called fetching. The SRC_URI variable is normally used to tell BitBake which files to fetch. The next sections describe the available fetchers and the options they have. Each fetcher honors a set of variables and a set of per-URI parameters, separated by a <quote>;</quote>, each consisting of a key and a value. The semantics of the variables and parameters are defined by the fetcher. BitBake tries to have consistent semantics between the different fetchers.
</para>
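<para>To make the key/value parameter syntax concrete, here is a hypothetical example (the host and file do not exist); everything after the first <quote>;</quote> is a parameter handed to the fetcher, in this case the md5sum parameter described in the HTTP/FTP section below:</para>
<para><screen><varname>SRC_URI</varname> = "http://example.com/sources/somefile.tar.gz;md5sum=0123456789abcdef0123456789abcdef"</screen></para>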
</section>

<section>
<title>Local File Fetcher</title>
<para>The URN for the Local File Fetcher is <emphasis>file</emphasis>. The filename can be either absolute or relative. If the filename is relative, <varname>FILESPATH</varname> and <varname>FILESDIR</varname> will be used to find the appropriate relative file, depending on <varname>OVERRIDES</varname>. Single files and complete directories can be specified.
<screen><varname>SRC_URI</varname>= "file://relativefile.patch"
<varname>SRC_URI</varname>= "file://relativefile.patch;this=ignored"
<varname>SRC_URI</varname>= "file:///Users/ich/very_important_software"
</screen>
</para>
</section>

<section>
<title>CVS File Fetcher</title>
<para>The URN for the CVS Fetcher is <emphasis>cvs</emphasis>. This fetcher honors the variables <varname>DL_DIR</varname>, <varname>SRCDATE</varname>, <varname>FETCHCOMMAND_cvs</varname> and <varname>UPDATECOMMAND_cvs</varname>. <varname>DL_DIR</varname> specifies where a temporary checkout is saved, <varname>SRCDATE</varname> specifies which date to use when doing the fetching (the special value of "now" will cause the checkout to be updated on every build), and <varname>FETCHCOMMAND</varname> and <varname>UPDATECOMMAND</varname> specify which executables should be used when doing the CVS checkout or update.
</para>
<para>The supported parameters are <varname>module</varname>, <varname>tag</varname>, <varname>date</varname>, <varname>method</varname>, <varname>localdir</varname> and <varname>rsh</varname>. The <varname>module</varname> parameter specifies which module to check out, and <varname>tag</varname> describes which CVS TAG should be used for the checkout; by default the TAG is empty. A <varname>date</varname> can be specified to override the SRCDATE of the configuration and check out a specific date. The special value of "now" will cause the checkout to be updated on every build. <varname>method</varname> is by default <emphasis>pserver</emphasis>; if <emphasis>ext</emphasis> is used, the <varname>rsh</varname> parameter will be evaluated and <varname>CVS_RSH</varname> will be set. Finally, <varname>localdir</varname> is used to check out into a special directory relative to <varname>CVSDIR</varname>.
<screen><varname>SRC_URI</varname> = "cvs://CVSROOT;module=mymodule;tag=some-version;method=ext"
<varname>SRC_URI</varname> = "cvs://CVSROOT;module=mymodule;date=20060126;localdir=usethat"
</screen>
</para>
</section>

<section>
<title>HTTP/FTP Fetcher</title>
<para>The URNs for the HTTP/FTP fetcher are <emphasis>http</emphasis>, <emphasis>https</emphasis> and <emphasis>ftp</emphasis>. This fetcher honors the variables <varname>DL_DIR</varname>, <varname>FETCHCOMMAND_wget</varname>, <varname>PREMIRRORS</varname> and <varname>MIRRORS</varname>. <varname>DL_DIR</varname> defines where to store the fetched file, and <varname>FETCHCOMMAND</varname> contains the command used for fetching, in which <quote>${URI}</quote> and <quote>${FILES}</quote> will be replaced by the URI and basename of the file to be fetched. The <varname>PREMIRRORS</varname> will be tried first when fetching a file; if that fails, the actual URI will be tried, and finally all <varname>MIRRORS</varname> will be tried.
</para>
<para>The only supported parameter is <varname>md5sum</varname>. After a fetch, the md5sum of the file will be calculated and compared against the value given in the parameter.
</para>
<para><screen><varname>SRC_URI</varname> = "http://oe.handhelds.org/not_there.aac;md5sum=12343"
<varname>SRC_URI</varname> = "ftp://oe.handhelds.org/not_there_as_well.aac;md5sum=1234"
<varname>SRC_URI</varname> = "ftp://you@oe.handhelds.org/home/you/secret.plan;md5sum=1234"
</screen></para>
</section>

<section>
<title>SVK Fetcher</title>
<para>
<emphasis>Currently NOT supported</emphasis>
</para>
</section>

<section>
<title>SVN Fetcher</title>
<para>The URN for the SVN Fetcher is <emphasis>svn</emphasis>.
</para>
<para>This fetcher honors the variables <varname>FETCHCOMMAND_svn</varname>, <varname>DL_DIR</varname> and <varname>SRCDATE</varname>. <varname>FETCHCOMMAND</varname> contains the Subversion command, <varname>DL_DIR</varname> is the directory where tarballs will be saved, and <varname>SRCDATE</varname> specifies which date to use when doing the fetching (the special value of "now" will cause the checkout to be updated on every build).
</para>
<para>The supported parameters are <varname>proto</varname> and <varname>rev</varname>. <varname>proto</varname> is the Subversion protocol, and <varname>rev</varname> is the Subversion revision.
</para>
<para><screen><varname>SRC_URI</varname> = "svn://svn.oe.handhelds.org/svn;module=vip;proto=http;rev=667"
<varname>SRC_URI</varname> = "svn://svn.oe.handhelds.org/svn/;module=opie;proto=svn+ssh;date=20060126"
</screen></para>
</section>

<section>
<title>GIT Fetcher</title>
<para>The URN for the GIT Fetcher is <emphasis>git</emphasis>.
</para>
<para>The variables <varname>DL_DIR</varname> and <varname>GITDIR</varname> are used. <varname>DL_DIR</varname> will be used to store the checked-out version. <varname>GITDIR</varname> will be used as the base directory where the git tree is cloned to.
</para>
<para>The parameters are <emphasis>tag</emphasis> and <emphasis>protocol</emphasis>. <emphasis>tag</emphasis> is a git tag; the default is <quote>master</quote>. <emphasis>protocol</emphasis> is the git protocol to use and defaults to <quote>rsync</quote>.
</para>
<para><screen><varname>SRC_URI</varname> = "git://git.oe.handhelds.org/git/vip.git;tag=version-1"
<varname>SRC_URI</varname> = "git://git.oe.handhelds.org/git/vip.git;protocol=http"
</screen></para>
</section>

</chapter>

<chapter>
<title>Commands</title>
<section>
<title>bbread</title>
<para>bbread is a command for displaying BitBake metadata. When run with no arguments, it has the core parse 'conf/bitbake.conf', as located via BBPATH, and displays that. If you supply a file on the command line, such as a .bb, then it parses that afterwards, using the aforementioned configuration metadata.</para>
<para><emphasis>NOTE: the standalone bbread command was removed. Instead of bbread, use bitbake -e.
</emphasis></para>
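<para>In other words, to inspect the parsed metadata you would run something like the following (the package name is only an illustration); per the option summary below, the first form dumps the global environment and the second the environment of a particular package:</para>
<para><screen><prompt>$ </prompt>bitbake -e
<prompt>$ </prompt>bitbake -e somepackage</screen></para>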
</section>

<section>
<title>bitbake</title>
<section>
<title>Introduction</title>
<para>bitbake is the primary command in the system. It facilitates executing tasks in a single .bb file, or executing a given task on a set of multiple .bb files, accounting for interdependencies amongst them.</para>
</section>

<section>
<title>Usage and Syntax</title>
<para>
<screen><prompt>$ </prompt>bitbake --help
usage: bitbake [options] [package ...]

Executes the specified task (default is 'build') for a given set of BitBake files.
It expects that BBFILES is defined, which is a space seperated list of files to
be executed. BBFILES does support wildcards.
Default BBFILES are the .bb files in the current directory.

options:
  --version             show program's version number and exit
  -h, --help            show this help message and exit
  -b BUILDFILE, --buildfile=BUILDFILE
                        execute the task against this .bb file, rather than a
                        package from BBFILES.
  -k, --continue        continue as much as possible after an error. While the
                        target that failed, and those that depend on it,
                        cannot be remade, the other dependencies of these
                        targets can be processed all the same.
  -f, --force           force run of specified cmd, regardless of stamp status
  -i, --interactive     drop into the interactive mode also called the BitBake
                        shell.
  -c CMD, --cmd=CMD     Specify task to execute. Note that this only executes
                        the specified task for the providee and the packages
                        it depends on, i.e. 'compile' does not implicitly call
                        stage for the dependencies (IOW: use only if you know
                        what you are doing). Depending on the base.bbclass a
                        listtasks tasks is defined and will show available
                        tasks
  -r FILE, --read=FILE  read the specified file before bitbake.conf
  -v, --verbose         output more chit-chat to the terminal
  -D, --debug           Increase the debug level. You can specify this more
                        than once.
  -n, --dry-run         don't execute, just go through the motions
  -p, --parse-only      quit after parsing the BB files (developers only)
  -d, --disable-psyco   disable using the psyco just-in-time compiler (not
                        recommended)
  -s, --show-versions   show current and preferred versions of all packages
  -e, --environment     show the global or per-package environment (this is
                        what used to be bbread)
  -g, --graphviz        emit the dependency trees of the specified packages in
                        the dot syntax
  -I IGNORED_DOT_DEPS, --ignore-deps=IGNORED_DOT_DEPS
                        Stop processing at the given list of dependencies when
                        generating dependency graphs. This can help to make
                        the graph more appealing
  -l DEBUG_DOMAINS, --log-domains=DEBUG_DOMAINS
                        Show debug logging for the specified logging domains
  -P, --profile         profile the command and print a report

</screen>
</para>
<para>
<example>
<title>Executing a task against a single .bb</title>
<para>Executing tasks for a single file is relatively simple. You specify the file in question, and bitbake parses it and executes the specified task (or <quote>build</quote> by default). It obeys intertask dependencies when doing so.</para>
<para><quote>clean</quote> task:</para>
<para><screen><prompt>$ </prompt>bitbake -b blah_1.0.bb -c clean</screen></para>
<para><quote>build</quote> task:</para>
<para><screen><prompt>$ </prompt>bitbake -b blah_1.0.bb</screen></para>
</example>
</para>
<para>
<example>
<title>Executing tasks against a set of .bb files</title>
<para>There are a number of additional complexities introduced when one wants to manage multiple .bb files. Clearly there needs to be a way to tell bitbake what files are available, and of those, which we want to execute at this time. There also needs to be a way for each .bb to express its dependencies, both for build time and runtime. There must be a way for the user to express their preferences when multiple .bb's provide the same functionality, or when there are multiple versions of a .bb.</para>
<para>The next section, Metadata, outlines how one goes about specifying such things.</para>
<para>Note that the bitbake command, when not using --buildfile, accepts a <varname>PROVIDER</varname>, not a filename or anything else. By default, a .bb generally PROVIDES its packagename, packagename-version, and packagename-version-revision.</para>
<screen><prompt>$ </prompt>bitbake blah</screen>
<screen><prompt>$ </prompt>bitbake blah-1.0</screen>
<screen><prompt>$ </prompt>bitbake blah-1.0-r0</screen>
<screen><prompt>$ </prompt>bitbake -c clean blah</screen>
<screen><prompt>$ </prompt>bitbake virtual/whatever</screen>
<screen><prompt>$ </prompt>bitbake -c clean virtual/whatever</screen>
</example>
<example>
<title>Generating dependency graphs</title>
<para>BitBake is able to generate dependency graphs using the dot syntax. These graphs can be converted to images using the <application>dot</application> application from <ulink url="http://www.graphviz.org">graphviz</ulink>. Two files will be written into the current working directory: <emphasis>depends.dot</emphasis>, containing dependency information at the package level, and <emphasis>task-depends.dot</emphasis>, containing a breakdown of the dependencies at the task level. To omit common dependencies from the graph, pass them with <prompt>-I</prompt>; this can lead to more readable graphs. For example, <varname>DEPENDS</varname> from inherited classes such as base.bbclass can be removed from the graph this way.</para>
<screen><prompt>$ </prompt>bitbake -g blah</screen>
<screen><prompt>$ </prompt>bitbake -g -I virtual/whatever -I bloom blah</screen>
</example>
</para>
</section>
<section>
<title>Special variables</title>
<para>Certain variables affect bitbake operation:</para>
<section>
<title><varname>BB_NUMBER_THREADS</varname></title>
<para> The number of threads bitbake should run at once (default: 1).</para>
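<para>This is normally set in a configuration file. For example, to allow four tasks to run in parallel:</para>
<para><screen><varname>BB_NUMBER_THREADS</varname> = "4"</screen></para>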
</section>
</section>

<section>
<title>Metadata</title>
<para>As you may have seen in the usage information, or in the information about .bb files, the BBFILES variable is how the bitbake tool locates its files. This variable is a space separated list of files that are available, and supports wildcards.
<example>
<title>Setting BBFILES</title>
<programlisting><varname>BBFILES</varname> = "/path/to/bbfiles/*.bb"</programlisting>
</example></para>
<para>With regard to dependencies, it expects the .bb to define a <varname>DEPENDS</varname> variable, which contains a space separated list of <quote>package names</quote>, which themselves are the <varname>PN</varname> variable. The <varname>PN</varname> variable is, by default, set to a component of the .bb filename.</para>
<example>
<title>Depending on another .bb</title>
<para>a.bb:
<screen>PN = "package-a"
DEPENDS += "package-b"</screen>
</para>
<para>b.bb:
<screen>PN = "package-b"</screen>
</para>
</example>
<example>
<title>Using PROVIDES</title>
<para>This example shows the usage of the PROVIDES variable, which allows a given .bb to specify what functionality it provides.</para>
<para>package1.bb:
<screen>PROVIDES += "virtual/package"</screen>
</para>
<para>package2.bb:
<screen>DEPENDS += "virtual/package"</screen>
</para>
<para>package3.bb:
<screen>PROVIDES += "virtual/package"</screen>
</para>
<para>As you can see, here there are two different .bb's that provide the same functionality (virtual/package). Clearly, there needs to be a way for the person running bitbake to control which of those providers gets used. There is, indeed, such a way.</para>
<para>The following would go into a .conf file, to select package1:
<screen>PREFERRED_PROVIDER_virtual/package = "package1"</screen>
</para>
</example>
<example>
<title>Specifying version preference</title>
<para>When there are multiple <quote>versions</quote> of a given package, bitbake defaults to selecting the most recent version, unless otherwise specified. If the .bb in question has a <varname>DEFAULT_PREFERENCE</varname> set lower than the other .bb's (default is 0), then it will not be selected. This allows the person or persons maintaining the repository of .bb files to specify their preferences for the default selected version. In addition, the user can specify their preferences with regard to version.</para>
<para>If the first .bb is named <filename>a_1.1.bb</filename>, then the <varname>PN</varname> variable will be set to <quote>a</quote>, and the <varname>PV</varname> variable will be set to 1.1.</para>
<para>If we then have an <filename>a_1.2.bb</filename>, bitbake will choose 1.2 by default. However, if we define the following variable in a .conf that bitbake parses, we can change that.
<screen>PREFERRED_VERSION_a = "1.1"</screen>
</para>
</example>
<example>
<title>Using <quote>bbfile collections</quote></title>
<para>bbfile collections exist to allow the user to have multiple repositories of bbfiles that contain the exact same package. For example, one could easily use them to make one's own local copy of an upstream repository, but with custom modifications that one does not want upstream. Usage:</para>
<screen>BBFILES = "/stuff/openembedded/*/*.bb /stuff/openembedded.modified/*/*.bb"
BBFILE_COLLECTIONS = "upstream local"
BBFILE_PATTERN_upstream = "^/stuff/openembedded/"
BBFILE_PATTERN_local = "^/stuff/openembedded.modified/"
BBFILE_PRIORITY_upstream = "5"
BBFILE_PRIORITY_local = "10"</screen>
</example>
</section>
</section>
</chapter>
</book>
@@ -1,59 +0,0 @@
<!ENTITY DISTRO "1.4">
<!ENTITY DISTRO_NAME "tbd">
<!ENTITY YOCTO_DOC_VERSION "1.4">
<!ENTITY POKYVERSION "8.0">
<!ENTITY YOCTO_POKY "poky-&DISTRO_NAME;-&POKYVERSION;">
<!ENTITY COPYRIGHT_YEAR "2010-2013">
<!ENTITY YOCTO_DL_URL "http://downloads.yoctoproject.org">
<!ENTITY YOCTO_HOME_URL "http://www.yoctoproject.org">
<!ENTITY YOCTO_LISTS_URL "http://lists.yoctoproject.org">
<!ENTITY YOCTO_BUGZILLA_URL "http://bugzilla.yoctoproject.org">
<!ENTITY YOCTO_WIKI_URL "https://wiki.yoctoproject.org">
<!ENTITY YOCTO_AB_URL "http://autobuilder.yoctoproject.org">
<!ENTITY YOCTO_GIT_URL "http://git.yoctoproject.org">
<!ENTITY YOCTO_ADTREPO_URL "http://adtrepo.yoctoproject.org">
<!ENTITY OE_HOME_URL "http://www.openembedded.org">
<!ENTITY OE_LISTS_URL "http://lists.linuxtogo.org/cgi-bin/mailman">
<!ENTITY OE_DOCS_URL "http://docs.openembedded.org">
<!ENTITY OH_HOME_URL "http://o-hand.com">
<!ENTITY BITBAKE_HOME_URL "http://developer.berlios.de/projects/bitbake/">
<!ENTITY ECLIPSE_MAIN_URL "http://www.eclipse.org/downloads">
<!ENTITY ECLIPSE_DL_URL "http://download.eclipse.org">
<!ENTITY ECLIPSE_DL_PLUGIN_URL "&YOCTO_DL_URL;/releases/eclipse-plugin/&DISTRO;">
<!ENTITY ECLIPSE_UPDATES_URL "&ECLIPSE_DL_URL;/tm/updates/3.3">
<!ENTITY ECLIPSE_INDIGO_URL "&ECLIPSE_DL_URL;/releases/indigo">
<!ENTITY ECLIPSE_JUNO_URL "&ECLIPSE_DL_URL;/releases/juno">
<!ENTITY ECLIPSE_INDIGO_CDT_URL "&ECLIPSE_DL_URL;tools/cdt/releases/indigo">
<!ENTITY YOCTO_DOCS_URL "&YOCTO_HOME_URL;/docs">
<!ENTITY YOCTO_SOURCES_URL "&YOCTO_HOME_URL;/sources/">
<!ENTITY YOCTO_AB_PORT_URL "&YOCTO_AB_URL;:8010">
<!ENTITY YOCTO_AB_NIGHTLY_URL "&YOCTO_AB_URL;/nightly/">
<!ENTITY YOCTO_POKY_URL "&YOCTO_DL_URL;/releases/poky/">
<!ENTITY YOCTO_RELEASE_DL_URL "&YOCTO_DL_URL;/releases/yocto/yocto-&DISTRO;">
<!ENTITY YOCTO_TOOLCHAIN_DL_URL "&YOCTO_RELEASE_DL_URL;/toolchain/">
<!ENTITY YOCTO_ECLIPSE_DL_URL "&YOCTO_RELEASE_DL_URL;/eclipse-plugin/indigo;">
<!ENTITY YOCTO_ADTINSTALLER_DL_URL "&YOCTO_RELEASE_DL_URL;/adt_installer">
<!ENTITY YOCTO_POKY_DL_URL "&YOCTO_RELEASE_DL_URL;/&YOCTO_POKY;.tar.bz2">
<!ENTITY YOCTO_MACHINES_DL_URL "&YOCTO_RELEASE_DL_URL;/machines">
<!ENTITY YOCTO_QEMU_DL_URL "&YOCTO_MACHINES_DL_URL;/qemu">
<!ENTITY YOCTO_PYTHON-i686_DL_URL "&YOCTO_DL_URL;/releases/miscsupport/python-nativesdk-standalone-i686.tar.bz2">
<!ENTITY YOCTO_PYTHON-x86_64_DL_URL "&YOCTO_DL_URL;/releases/miscsupport/python-nativesdk-standalone-x86_64.tar.bz2">
<!ENTITY YOCTO_DOCS_QS_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/yocto-project-qs/yocto-project-qs.html">
<!ENTITY YOCTO_DOCS_ADT_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/adt-manual/adt-manual.html">
<!ENTITY YOCTO_DOCS_REF_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/ref-manual/ref-manual.html">
<!ENTITY YOCTO_DOCS_BSP_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/bsp-guide/bsp-guide.html">
<!ENTITY YOCTO_DOCS_DEV_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/dev-manual/dev-manual.html">
<!ENTITY YOCTO_DOCS_KERNEL_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/kernel-manual/kernel-manual.html">
<!ENTITY YOCTO_ADTPATH_DIR "/opt/poky/&DISTRO;">
<!ENTITY YOCTO_POKY_TARBALL "&YOCTO_POKY;.tar.bz2">
<!ENTITY OE_INIT_PATH "&YOCTO_POKY;/oe-init-build-env">
<!ENTITY OE_INIT_FILE "oe-init-build-env">
<!ENTITY UBUNTU_HOST_PACKAGES_ESSENTIAL "gawk wget git-core diffstat unzip texinfo \
build-essential chrpath">
<!ENTITY FEDORA_HOST_PACKAGES_ESSENTIAL "gawk make wget tar bzip2 gzip python unzip perl patch \
diffutils diffstat git cpp gcc gcc-c++ eglibc-devel texinfo chrpath \
ccache">
<!ENTITY OPENSUSE_HOST_PACKAGES_ESSENTIAL "python gcc gcc-c++ git chrpath make wget python-xml \
diffstat texinfo python-curses">
<!ENTITY CENTOS_HOST_PACKAGES_ESSENTIAL "gawk make wget tar bzip2 gzip python unzip perl patch \
diffutils diffstat git cpp gcc gcc-c++ glibc-devel texinfo chrpath">
39
bitbake/doc/template/component.title.xsl
vendored
39
bitbake/doc/template/component.title.xsl
vendored
@@ -1,39 +0,0 @@
|
||||
<xsl:stylesheet version="1.0"
|
||||
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
|
||||
xmlns:d="http://docbook.org/ns/docbook"
|
||||
xmlns="http://www.w3.org/1999/xhtml"
|
||||
exclude-result-prefixes="d">
|
||||
|
||||
<xsl:template name="component.title">
|
||||
<xsl:param name="node" select="."/>
|
||||
|
||||
<xsl:variable name="level">
|
||||
<xsl:choose>
|
||||
<xsl:when test="ancestor::d:section">
|
||||
<xsl:value-of select="count(ancestor::d:section)+1"/>
|
||||
</xsl:when>
|
||||
<xsl:when test="ancestor::d:sect5">6</xsl:when>
|
||||
<xsl:when test="ancestor::d:sect4">5</xsl:when>
|
||||
<xsl:when test="ancestor::d:sect3">4</xsl:when>
|
||||
<xsl:when test="ancestor::d:sect2">3</xsl:when>
|
||||
<xsl:when test="ancestor::d:sect1">2</xsl:when>
|
||||
<xsl:otherwise>1</xsl:otherwise>
|
||||
</xsl:choose>
|
||||
</xsl:variable>
|
||||
<xsl:element name="h{$level+1}" namespace="http://www.w3.org/1999/xhtml">
|
||||
<xsl:attribute name="class">title</xsl:attribute>
|
||||
<xsl:if test="$generate.id.attributes = 0">
|
||||
<xsl:call-template name="anchor">
|
||||
<xsl:with-param name="node" select="$node"/>
|
||||
<xsl:with-param name="conditional" select="0"/>
|
||||
</xsl:call-template>
|
||||
</xsl:if>
|
||||
<xsl:apply-templates select="$node" mode="object.title.markup">
|
||||
<xsl:with-param name="allow-anchors" select="1"/>
|
||||
</xsl:apply-templates>
|
||||
<xsl:call-template name="permalink">
|
||||
<xsl:with-param name="node" select="$node"/>
|
||||
</xsl:call-template>
|
||||
</xsl:element>
|
||||
</xsl:template>
|
||||
</xsl:stylesheet>
|
||||
64
bitbake/doc/template/db-pdf.xsl
vendored
64
bitbake/doc/template/db-pdf.xsl
vendored
@@ -1,64 +0,0 @@
|
||||
<?xml version='1.0'?>
|
||||
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns="http://www.w3.org/1999/xhtml" xmlns:fo="http://www.w3.org/1999/XSL/Format" version="1.0">
|
||||
|
||||
<xsl:import href="http://docbook.sourceforge.net/release/xsl/current/fo/docbook.xsl" />
|
||||
|
||||
<!-- check project-plan.sh for how this is generated, needed to tweak
|
||||
the cover page
|
||||
-->
|
||||
<xsl:include href="/tmp/titlepage.xsl"/>
|
||||
|
||||
<!-- To force a page break in document, i.e per section add a
|
||||
<?hard-pagebreak?> tag.
|
||||
-->
|
||||
<xsl:template match="processing-instruction('hard-pagebreak')">
|
||||
<fo:block break-before='page' />
|
||||
</xsl:template>
|
||||
|
||||
<!--Fix for defualt indent getting TOC all wierd..
|
||||
See http://sources.redhat.com/ml/docbook-apps/2005-q1/msg00455.html
|
||||
FIXME: must be a better fix
|
||||
-->
|
||||
<xsl:param name="body.start.indent" select="'0'"/>
|
||||
<!--<xsl:param name="title.margin.left" select="'0'"/>-->
|
||||
|
||||
<!-- stop long-ish header titles getting wrapped -->
|
||||
<xsl:param name="header.column.widths">1 10 1</xsl:param>
|
||||
|
||||
<!-- customise headers and footers a little -->
|
||||
|
||||
<xsl:template name="head.sep.rule">
|
||||
<xsl:if test="$header.rule != 0">
|
||||
<xsl:attribute name="border-bottom-width">0.5pt</xsl:attribute>
|
||||
<xsl:attribute name="border-bottom-style">solid</xsl:attribute>
|
||||
<xsl:attribute name="border-bottom-color">#cccccc</xsl:attribute>
|
||||
</xsl:if>
|
||||
</xsl:template>
|
||||
|
||||
<xsl:template name="foot.sep.rule">
|
||||
<xsl:if test="$footer.rule != 0">
|
||||
<xsl:attribute name="border-top-width">0.5pt</xsl:attribute>
|
||||
<xsl:attribute name="border-top-style">solid</xsl:attribute>
|
||||
<xsl:attribute name="border-top-color">#cccccc</xsl:attribute>
|
||||
</xsl:if>
|
||||
</xsl:template>
|
||||
|
||||
<xsl:attribute-set name="header.content.properties">
|
||||
<xsl:attribute name="color">#cccccc</xsl:attribute>
|
||||
</xsl:attribute-set>
|
||||
|
||||
<xsl:attribute-set name="footer.content.properties">
|
||||
<xsl:attribute name="color">#cccccc</xsl:attribute>
|
||||
</xsl:attribute-set>
|
||||
|
||||
|
||||
<!-- general settings -->
|
||||
|
||||
<xsl:param name="fop1.extensions" select="1"></xsl:param>
|
||||
<xsl:param name="paper.type" select="'A4'"></xsl:param>
|
||||
<xsl:param name="section.autolabel" select="1"></xsl:param>
|
||||
<xsl:param name="body.font.family" select="'verasans'"></xsl:param>
|
||||
<xsl:param name="title.font.family" select="'verasans'"></xsl:param>
|
||||
<xsl:param name="monospace.font.family" select="'veramono'"></xsl:param>
|
||||
|
||||
</xsl:stylesheet>
|
||||
25
bitbake/doc/template/division.title.xsl
vendored
25
bitbake/doc/template/division.title.xsl
vendored
@@ -1,25 +0,0 @@
|
||||
<xsl:stylesheet version="1.0"
|
||||
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
|
||||
xmlns:d="http://docbook.org/ns/docbook"
|
||||
xmlns="http://www.w3.org/1999/xhtml"
|
||||
exclude-result-prefixes="d">
|
||||
|
||||
<xsl:template name="division.title">
|
||||
<xsl:param name="node" select="."/>
|
||||
|
||||
<h1>
|
||||
<xsl:attribute name="class">title</xsl:attribute>
|
||||
<xsl:call-template name="anchor">
|
||||
<xsl:with-param name="node" select="$node"/>
|
||||
<xsl:with-param name="conditional" select="0"/>
|
||||
</xsl:call-template>
|
||||
<xsl:apply-templates select="$node" mode="object.title.markup">
|
||||
<xsl:with-param name="allow-anchors" select="1"/>
|
||||
</xsl:apply-templates>
|
||||
<xsl:call-template name="permalink">
|
||||
<xsl:with-param name="node" select="$node"/>
|
||||
</xsl:call-template>
|
||||
</h1>
|
||||
</xsl:template>
|
||||
</xsl:stylesheet>
|
||||
|
||||
58
bitbake/doc/template/fop-config.xml
vendored
58
bitbake/doc/template/fop-config.xml
vendored
@@ -1,58 +0,0 @@
|
||||
<fop version="1.0">
|
||||
|
||||
<!-- Strict user configuration -->
|
||||
<strict-configuration>true</strict-configuration>
|
||||
|
||||
<!-- Strict FO validation -->
|
||||
<strict-validation>true</strict-validation>
|
||||
|
||||
<!--
|
||||
Set the baseDir so common/openedhand.svg references in plans still
|
||||
work ok. Note, relative file references to current dir should still work.
|
||||
-->
|
||||
<base>../template</base>
|
||||
<font-base>../template</font-base>
|
||||
|
||||
<!-- Source resolution in dpi (dots/pixels per inch) for determining the
|
||||
size of pixels in SVG and bitmap images, default: 72dpi -->
|
||||
<!-- <source-resolution>72</source-resolution> -->
|
||||
<!-- Target resolution in dpi (dots/pixels per inch) for specifying the
|
||||
target resolution for generated bitmaps, default: 72dpi -->
|
||||
<!-- <target-resolution>72</target-resolution> -->
|
||||
|
||||
<!-- default page-height and page-width, in case
|
||||
value is specified as auto -->
|
||||
<default-page-settings height="11in" width="8.26in"/>
|
||||
|
||||
<!-- <use-cache>false</use-cache> -->
|
||||
|
||||
<renderers>
|
||||
<renderer mime="application/pdf">
|
||||
<fonts>
|
||||
<font metrics-file="VeraMono.xml"
|
||||
kerning="yes"
|
||||
embed-url="VeraMono.ttf">
|
||||
<font-triplet name="veramono" style="normal" weight="normal"/>
|
||||
</font>
|
||||
|
||||
<font metrics-file="VeraMoBd.xml"
|
||||
kerning="yes"
|
||||
embed-url="VeraMoBd.ttf">
|
||||
<font-triplet name="veramono" style="normal" weight="bold"/>
|
||||
</font>
|
||||
|
||||
<font metrics-file="Vera.xml"
|
||||
kerning="yes"
|
||||
embed-url="Vera.ttf">
|
||||
<font-triplet name="verasans" style="normal" weight="normal"/>
|
||||
<font-triplet name="verasans" style="normal" weight="bold"/>
|
||||
<font-triplet name="verasans" style="italic" weight="normal"/>
|
||||
<font-triplet name="verasans" style="italic" weight="bold"/>
|
||||
</font>
|
||||
|
||||
<auto-detect/>
|
||||
</fonts>
|
||||
</renderer>
|
||||
</renderers>
|
||||
</fop>
|
||||
|
||||
21
bitbake/doc/template/formal.object.heading.xsl
vendored
21
bitbake/doc/template/formal.object.heading.xsl
vendored
@@ -1,21 +0,0 @@
|
||||
<xsl:stylesheet version="1.0"
|
||||
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
|
||||
xmlns:d="http://docbook.org/ns/docbook"
|
||||
xmlns="http://www.w3.org/1999/xhtml"
|
||||
exclude-result-prefixes="d">
|
||||
|
||||
<xsl:template name="formal.object.heading">
|
||||
<xsl:param name="object" select="."/>
|
||||
<xsl:param name="title">
|
||||
<xsl:apply-templates select="$object" mode="object.title.markup">
|
||||
<xsl:with-param name="allow-anchors" select="1"/>
|
||||
</xsl:apply-templates>
|
||||
</xsl:param>
|
||||
<p class="title">
|
||||
<b><xsl:copy-of select="$title"/></b>
|
||||
<xsl:call-template name="permalink">
|
||||
<xsl:with-param name="node" select="$object"/>
|
||||
</xsl:call-template>
|
||||
</p>
|
||||
</xsl:template>
|
||||
</xsl:stylesheet>
|
||||
14
bitbake/doc/template/gloss-permalinks.xsl
vendored
14
bitbake/doc/template/gloss-permalinks.xsl
vendored
@@ -1,14 +0,0 @@
|
||||
<xsl:stylesheet version="1.0"
|
||||
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
|
||||
xmlns:d="http://docbook.org/ns/docbook"
|
||||
xmlns="http://www.w3.org/1999/xhtml">
|
||||
|
||||
<xsl:template match="glossentry/glossterm">
|
||||
<xsl:apply-imports/>
|
||||
<xsl:if test="$generate.permalink != 0">
|
||||
<xsl:call-template name="permalink">
|
||||
<xsl:with-param name="node" select=".."/>
|
||||
</xsl:call-template>
|
||||
</xsl:if>
|
||||
</xsl:template>
|
||||
</xsl:stylesheet>
Some files were not shown because too many files have changed in this diff.