From 972dcfcdbfe75dcfeb777150c136576cf1a71e99 Mon Sep 17 00:00:00 2001 From: Tudor Florea Date: Fri, 9 Oct 2015 22:59:03 +0200 Subject: initial commit for Enea Linux 5.0 arm Signed-off-by: Tudor Florea diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..06df61b --- /dev/null +++ b/.gitignore @@ -0,0 +1,23 @@ +*.pyc +*.pyo +/*.patch +build*/ +pyshtables.py +pstage/ +scripts/oe-git-proxy-socks +sources/ +meta-*/ +!meta-skeleton +!meta-hob +hob-image-*.bb +*.swp +*.orig +*.rej +*~ +!meta-yocto +!meta-yocto-bsp +!meta-yocto-imported +documentation/user-manual/user-manual.html +documentation/user-manual/user-manual.pdf +documentation/user-manual/user-manual.tgz +pull-*/ diff --git a/.templateconf b/.templateconf new file mode 100644 index 0000000..0650a46 --- /dev/null +++ b/.templateconf @@ -0,0 +1,2 @@ +# Template settings +TEMPLATECONF=${TEMPLATECONF:-meta-yocto/conf} diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..21fa6e6 --- /dev/null +++ b/LICENSE @@ -0,0 +1,14 @@ +Different components of OpenEmbedded are under different licenses (a mix +of MIT and GPLv2). Please see: + +meta/COPYING.GPLv2 (GPLv2) +meta/COPYING.MIT (MIT) +meta-selftest/COPYING.MIT (MIT) +meta-skeleton/COPYING.MIT (MIT) + +All metadata is MIT licensed unless otherwise stated. Source code +included in tree for individual recipes is under the LICENSE stated in +the associated recipe (.bb file) unless otherwise stated. + +License information for any other files is either explicitly stated +or defaults to GPL version 2. diff --git a/README b/README new file mode 100644 index 0000000..0a18c9c --- /dev/null +++ b/README @@ -0,0 +1,49 @@ +Poky +==== + +Poky is an integration of various components to form a complete prepackaged +build system and development environment. It features support for building +customised embedded device style images. There are reference demo images +featuring a X11/Matchbox/GTK themed UI called Sato. The system supports +cross-architecture application development using QEMU emulation and a +standalone toolchain and SDK with IDE integration. + +Additional information on the specifics of hardware that Poky supports +is available in README.hardware. Further hardware support can easily be added +in the form of layers which extend the systems capabilities in a modular way. + +As an integration layer Poky consists of several upstream projects such as +BitBake, OpenEmbedded-Core, Yocto documentation and various sources of information +e.g. for the hardware support. Poky is in turn a component of the Yocto Project. + +The Yocto Project has extensive documentation about the system including a +reference manual which can be found at: + http://yoctoproject.org/documentation + +OpenEmbedded-Core is a layer containing the core metadata for current versions +of OpenEmbedded. It is distro-less (can build a functional image with +DISTRO = "nodistro") and contains only emulated machine support. + +For information about OpenEmbedded, see the OpenEmbedded website: + http://www.openembedded.org/ + +Where to Send Patches +===================== + +As Poky is an integration repository, patches against the various components +should be sent to their respective upstreams. + +bitbake: + bitbake-devel@lists.openembedded.org + +meta-yocto: + poky@yoctoproject.org + +Most everything else should be sent to the OpenEmbedded Core mailing list. If +in doubt, check the oe-core git repository for the content you intend to modify. 
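For example (an illustrative check only, not part of the original README; the recipe path below is hypothetical), inspecting a path's recent history shows where its changes normally land:

    $ git log --oneline -3 -- meta/recipes-core/busybox

Content under meta/ generally belongs to OpenEmbedded-Core, while bitbake/ and meta-yocto/ belong to their respective upstreams.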
+Before sending, be sure the patches apply cleanly to the current oe-core git +repository. + openembedded-core@lists.openembedded.org + +Note: The scripts directory should be treated with extra care as it is a mix + of oe-core and poky-specific files. diff --git a/README.hardware b/README.hardware new file mode 100644 index 0000000..d8faaa3 --- /dev/null +++ b/README.hardware @@ -0,0 +1,499 @@ + Poky Hardware README + ==================== + +This file gives details about using Poky with the reference machines +supported out of the box. A full list of supported reference target machines +can be found by looking in the following directories: + + meta/conf/machine/ + meta-yocto-bsp/conf/machine/ + +If you are in doubt about using Poky/OpenEmbedded with your hardware, consult +the documentation for your board/device. + +Support for additional devices is normally added by creating BSP layers - for +more information please see the Yocto Board Support Package (BSP) Developer's +Guide - documentation source is in documentation/bspguide or download the PDF +from: + + http://yoctoproject.org/documentation + +Support for physical reference hardware has now been split out into a +meta-yocto-bsp layer which can be removed separately from other layers if not +needed. + + +QEMU Emulation Targets +====================== + +To simplify development, the build system supports building images to +work with the QEMU emulator in system emulation mode. Several architectures +are currently supported: + + * ARM (qemuarm) + * x86 (qemux86) + * x86-64 (qemux86-64) + * PowerPC (qemuppc) + * MIPS (qemumips) + +Use of the QEMU images is covered in the Yocto Project Reference Manual. +The appropriate MACHINE variable value corresponding to the target is given +in brackets. + + +Hardware Reference Boards +========================= + +The following boards are supported by the meta-yocto-bsp layer: + + * Texas Instruments Beaglebone (beaglebone) + * Freescale MPC8315E-RDB (mpc8315e-rdb) + +For more information see the board's section below. The appropriate MACHINE +variable value corresponding to the board is given in brackets. + +Reference Board Maintenance +=========================== + +Send pull requests, patches, comments or questions about meta-yocto-bsps to poky@yoctoproject.org + +Maintainers: Kevin Hao + Bruce Ashfield + +Consumer Devices +================ + +The following consumer devices are supported by the meta-yocto-bsp layer: + + * Intel x86 based PCs and devices (genericx86) + * Ubiquiti Networks EdgeRouter Lite (edgerouter) + +For more information see the device's section below. The appropriate MACHINE +variable value corresponding to the device is given in brackets. 
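As a quick illustration of how these MACHINE values are used (a minimal sketch, not part of the original hardware notes; substitute the machine and image names you actually need), a build for one of the listed targets typically looks like:

    $ source oe-init-build-env
    $ echo 'MACHINE = "beaglebone"' >> conf/local.conf
    $ bitbake core-image-minimal

The generated images end up under tmp/deploy/images in the build directory; the board-specific sections below describe how to get them onto the hardware.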
+ + + + Specific Hardware Documentation + =============================== + + +Intel x86 based PCs and devices (genericx86) +========================================== + +The genericx86 MACHINE is tested on the following platforms: + +Intel Xeon/Core i-Series: + + Intel Romley Server: Sandy Bridge Xeon processor, C600 PCH (Patsburg), (Canoe Pass CRB) + + Intel Romley Server: Ivy Bridge Xeon processor, C600 PCH (Patsburg), (Intel SDP S2R3) + + Intel Crystal Forest Server: Sandy Bridge Xeon processor, DH89xx PCH (Cave Creek), (Stargo CRB) + + Intel Chief River Mobile: Ivy Bridge Mobile processor, QM77 PCH (Panther Point-M), (Emerald Lake II CRB, Sabino Canyon CRB) + + Intel Huron River Mobile: Sandy Bridge processor, QM67 PCH (Cougar Point), (Emerald Lake CRB, EVOC EC7-1817LNAR board) + + Intel Calpella Platform: Core i7 processor, QM57 PCH (Ibex Peak-M), (Red Fort CRB, Emerson MATXM CORE-411-B) + + Intel Nehalem/Westmere-EP Server: Xeon 56xx/55xx processors, 5520 chipset, ICH10R IOH (82801), (Hanlan Creek CRB) + + Intel Nehalem Workstation: Xeon 56xx/55xx processors, System SC5650SCWS (Greencity CRB) + + Intel Picket Post Server: Xeon 56xx/55xx processors (Jasper Forest), 3420 chipset (Ibex Peak), (Osage CRB) + + Intel Storage Platform: Sandy Bridge Xeon processor, C600 PCH (Patsburg), (Oak Creek Canyon CRB) + + Intel Shark Bay Client Platform: Haswell processor, LynxPoint PCH, (Walnut Canyon CRB, Lava Canyon CRB, Basking Ridge CRB, Flathead Creek CRB) + + Intel Shark Bay Ultrabook Platform: Haswell ULT processor, Lynx Point-LP PCH, (WhiteTip Mountain 1 CRB) + +Intel Atom platforms: + + Intel embedded Menlow: Intel Atom Z510/530 CPU, System Controller Hub US15W (Portwell NANO-8044) + + Intel Luna Pier: Intel Atom N4xx/D5xx series CPU (aka: Pineview-D & -M), 82801HM I/O Hub (ICH8M), (Advantech AIMB-212, Moon Creek CRB) + + Intel Queens Bay platform: Intel Atom E6xx CPU (aka: Tunnel Creek), Topcliff EG20T I/O Hub (Emerson NITX-315, Crown Bay CRB, Minnow Board) + + Intel Fish River Island platform: Intel Atom E6xx CPU (aka: Tunnel Creek), Topcliff EG20T I/O Hub (Kontron KM2M806) + + Intel Cedar Trail platform: Intel Atom N2000 & D2000 series CPU (aka: Cedarview), NM10 Express Chipset (Norco kit BIS-6630, Cedar Rock CRB) + +and is likely to work on many unlisted Atom/Core/Xeon based devices. The MACHINE +type supports ethernet, wifi, sound, and Intel/vesa graphics by default in +addition to common PC input devices, busses, and so on. Note that it does not +included the binary-only graphic drivers used on some Atom platforms, for +accelerated graphics on these machines please refer to meta-intel. + +Depending on the device, it can boot from a traditional hard-disk, a USB device, +or over the network. Writing generated images to physical media is +straightforward with a caveat for USB devices. The following examples assume the +target boot device is /dev/sdb, be sure to verify this and use the correct +device as the following commands are run as root and are not reversable. + +USB Device: + 1. Build a live image. This image type consists of a simple filesystem + without a partition table, which is suitable for USB keys, and with the + default setup for the genericx86 machine, this image type is built + automatically for any image you build. For example: + + $ bitbake core-image-minimal + + 2. Use the "dd" utility to write the image to the raw block device. 
For + example: + + # dd if=core-image-minimal-genericx86.hddimg of=/dev/sdb + + If the device fails to boot with "Boot error" displayed, or apparently + stops just after the SYSLINUX version banner, it is likely the BIOS cannot + understand the physical layout of the disk (or rather it expects a + particular layout and cannot handle anything else). There are two possible + solutions to this problem: + + 1. Change the BIOS USB Device setting to HDD mode. The label will vary by + device, but the idea is to force BIOS to read the Cylinder/Head/Sector + geometry from the device. + + 2. Without such an option, the BIOS generally boots the device in USB-ZIP + mode. To write an image to a USB device that will be bootable in + USB-ZIP mode, carry out the following actions: + + a. Determine the geometry of your USB device using fdisk: + + # fdisk /dev/sdb + Command (m for help): p + + Disk /dev/sdb: 4011 MB, 4011491328 bytes + 124 heads, 62 sectors/track, 1019 cylinders, total 7834944 sectors + ... + + Command (m for help): q + + b. Configure the USB device for USB-ZIP mode: + + # mkdiskimage -4 /dev/sdb 1019 124 62 + + Where 1019, 124 and 62 are the cylinder, head and sectors/track counts + as reported by fdisk (substitute the values reported for your device). + When the operation has finished and the access LED (if any) on the + device stops flashing, remove and reinsert the device to allow the + kernel to detect the new partition layout. + + c. Copy the contents of the image to the USB-ZIP mode device: + + # mkdir /tmp/image + # mkdir /tmp/usbkey + # mount -o loop core-image-minimal-genericx86.hddimg /tmp/image + # mount /dev/sdb4 /tmp/usbkey + # cp -rf /tmp/image/* /tmp/usbkey + + d. Install the syslinux boot loader: + + # syslinux /dev/sdb4 + + e. Unmount everything: + + # umount /tmp/image + # umount /tmp/usbkey + + Install the boot device in the target board and configure the BIOS to boot + from it. + + For more details on the USB-ZIP scenario, see the syslinux documentation: + http://git.kernel.org/?p=boot/syslinux/syslinux.git;a=blob_plain;f=doc/usbkey.txt;hb=HEAD + + +Texas Instruments Beaglebone (beaglebone) +========================================= + +The Beaglebone is an ARM Cortex-A8 development board with USB, Ethernet, 2D/3D +accelerated graphics, audio, serial, JTAG, and SD/MMC. The Black adds a faster +CPU, more RAM, eMMC flash and a micro HDMI port. The beaglebone MACHINE is +tested on the following platforms: + + o Beaglebone Black A6 + o Beaglebone A6 (the original "White" model) + +The Beaglebone Black has eMMC, while the White does not. Pressing the USER/BOOT +button when powering on will temporarily change the boot order. But for the sake +of simplicity, these instructions assume you have erased the eMMC on the Black, +so its boot behavior matches that of the White and boots off of SD card. To do +this, issue the following commands from the u-boot prompt: + + # mmc dev 1 + # mmc erase 0 512 + +To further tailor these instructions for your board, please refer to the +documentation at http://www.beagleboard.org/bone and http://www.beagleboard.org/black + +From a Linux system with access to the image files perform the following steps +as root, replacing mmcblk0* with the SD card device on your machine (such as sdc +if used via a usb card reader): + + 1. 
Partition and format an SD card: + # fdisk -lu /dev/mmcblk0 + + Disk /dev/mmcblk0: 3951 MB, 3951034368 bytes + 255 heads, 63 sectors/track, 480 cylinders, total 7716864 sectors + Units = sectors of 1 * 512 = 512 bytes + + Device Boot Start End Blocks Id System + /dev/mmcblk0p1 * 63 144584 72261 c Win95 FAT32 (LBA) + /dev/mmcblk0p2 144585 465884 160650 83 Linux + + # mkfs.vfat -F 16 -n "boot" /dev/mmcblk0p1 + # mke2fs -j -L "root" /dev/mmcblk0p2 + + The following assumes the SD card partitions 1 and 2 are mounted at + /media/boot and /media/root respectively. Removing the card and reinserting + it will do just that on most modern Linux desktop environments. + + The files referenced below are made available after the build in + build/tmp/deploy/images. + + 2. Install the boot loaders + # cp MLO-beaglebone /media/boot/MLO + # cp u-boot-beaglebone.img /media/boot/u-boot.img + + 3. Install the root filesystem + # tar x -C /media/root -f core-image-$IMAGE_TYPE-beaglebone.tar.bz2 + + 4. If using core-image-base or core-image-sato images, the SD card is ready + and rootfs already contains the kernel, modules and device tree (DTB) + files necessary to be booted with U-boot's default configuration, so + skip directly to step 8. + For core-image-minimal, proceed through next steps. + + 5. If using core-image-minimal rootfs, install the modules + # tar x -C /media/root -f modules-beaglebone.tgz + + 6. If using core-image-minimal rootfs, install the kernel uImage into /boot + directory of rootfs + # cp uImage-beaglebone.bin /media/root/boot/uImage + + 7. If using core-image-minimal rootfs, also install device tree (DTB) files + into /boot directory of rootfs + # cp uImage-am335x-bone.dtb /media/root/boot/am335x-bone.dtb + # cp uImage-am335x-boneblack.dtb /media/root/boot/am335x-boneblack.dtb + + 8. Unmount the SD partitions, insert the SD card into the Beaglebone, and + boot the Beaglebone + + +Freescale MPC8315E-RDB (mpc8315e-rdb) +===================================== + +The MPC8315 PowerPC reference platform (MPC8315E-RDB) is aimed at hardware and +software development of network attached storage (NAS) and digital media server +applications. The MPC8315E-RDB features the PowerQUICC II Pro processor, which +includes a built-in security accelerator. + +(Note: you may find it easier to order MPC8315E-RDBA; this appears to be the +same board in an enclosure with accessories. In any case it is fully +compatible with the instructions given here.) + +Setup instructions +------------------ + +You will need the following: +* NFS root setup on your workstation +* TFTP server installed on your workstation +* Straight-thru 9-conductor serial cable (DB9, M/F) connected from your + PC to UART1 +* Ethernet connected to the first ethernet port on the board + +--- Preparation --- + +Note: if you have altered your board's ethernet MAC address(es) from the +defaults, or you need to do so because you want multiple boards on the same +network, then you will need to change the values in the dts file (patch +linux/arch/powerpc/boot/dts/mpc8315erdb.dts within the kernel source). If +you have left them at the factory default then you shouldn't need to do +anything here. + +--- Booting from NFS root --- + +Load the kernel and dtb (device tree blob), and boot the system as follows: + + 1. Get the kernel (uImage-mpc8315e-rdb.bin) and dtb (uImage-mpc8315e-rdb.dtb) + files from the tmp/deploy directory, and make them available on your TFTP + server. + + 2. 
+ Connect the board's first serial port to your workstation and then start up
+ your favourite serial terminal so that you will be able to interact with
+ the serial console. If you don't have a favourite, picocom is suggested:
+
+ $ picocom /dev/ttyUSB0 -b 115200
+
+ 3. Power up or reset the board and press a key on the terminal when prompted
+ to get to the U-Boot command line
+
+ 4. Set up the environment in U-Boot:
+
+ => setenv ipaddr
+ => setenv serverip
+ => setenv bootargs root=/dev/nfs rw nfsroot=: ip=:::255.255.255.0:mpc8315e:eth0:off console=ttyS0,115200
+
+ 5. Download the kernel and dtb, and boot:
+
+ => tftp 1000000 uImage-mpc8315e-rdb.bin
+ => tftp 2000000 uImage-mpc8315e-rdb.dtb
+ => bootm 1000000 - 2000000
+
+--- Booting from JFFS2 root ---
+
+ 1. First boot the board with NFS root.
+
+ 2. Erase the MTD partition which will be used as root:
+
+ $ flash_eraseall /dev/mtd3
+
+ 3. Copy the JFFS2 image to the MTD partition:
+
+ $ flashcp core-image-minimal-mpc8315e-rdb.jffs2 /dev/mtd3
+
+ 4. Then reboot the board and set up the environment in U-Boot:
+
+ => setenv bootargs root=/dev/mtdblock3 rootfstype=jffs2 console=ttyS0,115200
+
+
+Ubiquiti Networks EdgeRouter Lite (edgerouter)
+==============================================
+
+The EdgeRouter Lite is part of the EdgeMax series. It is a MIPS64 router
+(based on the Cavium Octeon processor) with 512MB of RAM, which uses an
+internal USB pendrive for storage.
+
+Setup instructions
+------------------
+
+You will need the following:
+* NFS root setup on your workstation
+* TFTP server installed on your workstation
+* RJ45 -> serial ("rollover") cable connected from your PC to the CONSOLE
+ port on the board
+* Ethernet connected to the first ethernet port on the board
+
+--- Preparation ---
+
+Build an image (e.g. core-image-minimal) using "edgerouter" as the MACHINE.
+The following instructions are based on core-image-minimal; other image
+targets can be used in the same way.
+
+--- Booting from NFS root ---
+
+Load the kernel, and boot the system as follows:
+
+ 1. Get the kernel (vmlinux) file from the tmp/deploy/images/edgerouter
+ directory, and make it available on your TFTP server.
+
+ 2. Connect the board's first serial port to your workstation and then start up
+ your favourite serial terminal so that you will be able to interact with
+ the serial console. If you don't have a favourite, picocom is suggested:
+
+ $ picocom /dev/ttyS0 -b 115200
+
+ 3. Power up or reset the board and press a key on the terminal when prompted
+ to get to the U-Boot command line
+
+ 4. Set up the environment in U-Boot:
+
+ => setenv ipaddr
+ => setenv serverip
+
+ 5. Download the kernel and boot:
+
+ => tftp $loadaddr vmlinux
+ => bootoctlinux $loadaddr coremask=0x3 root=/dev/nfs rw nfsroot=: ip=::::edgerouter:eth0:off mtdparts=phys_mapped_flash:512k(boot0),512k(boot1),64k@3072k(eeprom)
+
+--- Booting from USB root ---
+
+To boot from the USB disk, you either need to remove it from the edgerouter
+box and populate it from another computer, or use a previously booted NFS
+image and populate from the edgerouter itself.
+
+Type 1: Mounted USB disk
+------------------------
+
+To boot from the USB disk there are two available partitions on the factory
+USB storage. The rest of this guide assumes that these partitions are left
+intact. If you change the partition scheme, you must update your boot method
+appropriately.
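If you are unsure what is currently on the stick, it can be inspected from another Linux machine first (an optional check, not part of the original steps; /dev/sdb is assumed to be the USB disk):

    # fdisk -l /dev/sdb
    # blkid /dev/sdb1 /dev/sdb2

A vfat first partition and an ext3 second partition match the factory layout described next.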
+
+The standard partitions are:
+
+ - 1: vfat partition containing factory kernels
+ - 2: ext3 partition for the root filesystem.
+
+You can place the kernel on either partition 1, or partition 2, but the rootfs
+must go on partition 2 (due to its size).
+
+Note: If you place the kernel on the ext3 partition, you must re-create the
+ ext3 filesystem, since the factory u-boot can only handle 128 byte inodes and
+ cannot read the partition otherwise.
+
+Steps:
+
+ 1. Remove the USB disk from the edgerouter and insert it into a computer
+ that has access to your build artifacts.
+
+ 2. Copy the kernel image to the USB storage (assuming discovered as 'sdb' on
+ the development machine):
+
+ 2a) if booting from vfat
+
+ # mount /dev/sdb1 /mnt
+ # cp tmp/deploy/images/edgerouter/vmlinux /mnt
+ # umount /mnt
+
+ 2b) if booting from ext3
+
+ # mkfs.ext3 -I 128 /dev/sdb2
+ # mount /dev/sdb2 /mnt
+ # mkdir /mnt/boot
+ # cp tmp/deploy/images/edgerouter/vmlinux /mnt/boot
+ # umount /mnt
+
+ 3. Extract the rootfs to the USB storage ext3 partition
+
+ # mount /dev/sdb2 /mnt
+ # tar -xvjpf core-image-minimal-XXX.tar.bz2 -C /mnt
+ # umount /mnt
+
+ 4. Reboot the board and press a key on the terminal when prompted to get to the U-Boot
+ command line:
+
+ 5. Load the kernel and boot:
+
+ 5a) vfat boot
+
+ => fatload usb 0:1 $loadaddr vmlinux
+
+ 5b) ext3 boot
+
+ => ext2load usb 0:2 $loadaddr boot/vmlinux
+
+ => bootoctlinux $loadaddr coremask=0x3 root=/dev/sda2 rw rootwait mtdparts=phys_mapped_flash:512k(boot0),512k(boot1),64k@3072k(eeprom)
+
+
+Type 2: NFS
+-----------
+
+Note: If you place the kernel on the ext3 partition, you must re-create the
+ ext3 filesystem, since the factory u-boot can only handle 128 byte inodes and
+ cannot read the partition otherwise.
+
+ These boot instructions assume that you have recreated the ext3 filesystem with
+ 128 byte inodes, you have an updated uboot or you are running an image capable
+ of making the filesystem on the board itself.
+
+
+ 1. Boot from NFS root
+
+ 2. Mount the USB disk partition 2 and then extract the contents of
+ tmp/deploy/core-image-XXXX.tar.bz2 into it.
+
+ Before starting, copy core-image-minimal-xxx.tar.bz2 and vmlinux into
+ the rootfs path on your workstation.
+
+ and then,
+
+ # mount /dev/sda2 /media/sda2
+ # tar -xvjpf core-image-minimal-XXX.tar.bz2 -C /media/sda2
+ # cp vmlinux /media/sda2/boot/vmlinux
+ # umount /media/sda2
+ # reboot
+
+ 3. Reboot the board and press a key on the terminal when prompted to get to the U-Boot
+ command line:
+
+ # reboot
+
+ 4. Load the kernel and boot:
+
+ => ext2load usb 0:2 $loadaddr boot/vmlinux
+ => bootoctlinux $loadaddr coremask=0x3 root=/dev/sda2 rw rootwait mtdparts=phys_mapped_flash:512k(boot0),512k(boot1),64k@3072k(eeprom)
diff --git a/bitbake/AUTHORS b/bitbake/AUTHORS
new file mode 100644
index 0000000..91fd78f
--- /dev/null
+++ b/bitbake/AUTHORS
@@ -0,0 +1,10 @@
+Tim Ansell
+Phil Blundell
+Seb Frankengul
+Holger Freyther
+Marcin Juszkiewicz
+Chris Larson
+Ulrich Luckas
+Mickey Lauer
+Richard Purdie
+Holger Schurig
diff --git a/bitbake/COPYING b/bitbake/COPYING
new file mode 100644
index 0000000..d511905
--- /dev/null
+++ b/bitbake/COPYING
@@ -0,0 +1,339 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+ + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. 
The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. 
You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. 
If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. 
BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. 
Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + , 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. diff --git a/bitbake/ChangeLog b/bitbake/ChangeLog new file mode 100644 index 0000000..4ac2a64 --- /dev/null +++ b/bitbake/ChangeLog @@ -0,0 +1,317 @@ +Changes in Bitbake 1.9.x: + - Add PE (Package Epoch) support from Philipp Zabel (pH5) + - Treat python functions the same as shell functions for logging + - Use TMPDIR/anonfunc as a __anonfunc temp directory (T) + - Catch truncated cache file errors + - Allow operations other than assignment on flag variables + - Add code to handle inter-task dependencies + - Fix cache errors when generation dotGraphs + - Make sure __inherit_cache is updated before calling include() (from Michael Krelin) + - Fix bug when target was in ASSUME_PROVIDED (#2236) + - Raise ParseError for filenames with multiple underscores instead of infinitely looping (#2062) + - Fix invalid regexp in BBMASK error handling (missing import) (#1124) + - Promote certain warnings from debug to note 2 level + - Update manual + - Correctly redirect stdin when forking + - If parsing errors are found, exit, too many users miss the errors + - Remove supriours PREFERRED_PROVIDER warnings + - svn fetcher: Add _buildsvncommand function + - Improve certain error messages + - Rewrite svn fetcher to make adding extra operations easier + as part of future SRCDATE="now" fixes + (requires new FETCHCMD_svn definition in bitbake.conf) + - Change SVNDIR layout to be more unique (fixes #2644 and #2624) + - Add ConfigParsed Event after configuration parsing is complete + - Add SRCREV support for svn fetcher + - data.emit_var() - only call getVar if we need the variable + - Stop generating the A variable (seems to be legacy code) + - Make sure intertask depends get processed correcting in recursive depends + - Add pn-PN to overrides when evaluating PREFERRED_VERSION + - Improve the progress indicator by skipping tasks that have + already run before starting the build rather than during it + - Add profiling option (-P) + - Add BB_SRCREV_POLICY variable (clear or cache) to control SRCREV cache + - Add SRCREV_FORMAT support + - Fix local fetcher's localpath return values + - Apply OVERRIDES before performing immediate expansions + - Allow the -b -e option combination to take regular expressions + - Fix handling of variables with expansion in the name using _append/_prepend + e.g. RRECOMMENDS_${PN}_append_xyz = "abc" + - Add plain message function to bb.msg + - Sort the list of providers before processing so dependency problems are + reproducible rather than effectively random + - Fix/improve bitbake -s output + - Add locking for fetchers so only one tries to fetch a given file at a given time + - Fix int(0)/None confusion in runqueue.py which causes random gaps in dependency chains + - Expand data in addtasks + - Print the list of missing DEPENDS,RDEPENDS for the "No buildable providers available for required...." + error message. 
+ - Rework add_task to be more efficient (6% speedup, 7% number of function calls reduction) + - Sort digraph output to make builds more reproducible + - Split expandKeys into two for loops to benefit from the expand_cache (12% speedup) + - runqueue.py: Fix idepends handling to avoid dependency errors + - Clear the terminal TOSTOP flag if set (and warn the user) + - Fix regression from r653 and make SRCDATE/CVSDATE work for packages again + - Fix a bug in bb.decodeurl where http://some.where.com/somefile.tgz decoded to host="" (#1530) + - Warn about malformed PREFERRED_PROVIDERS (#1072) + - Add support for BB_NICE_LEVEL option (#1627) + - Psyco is used only on x86 as there is no support for other architectures. + - Sort initial providers list by default preference (#1145, #2024) + - Improve provider sorting so prefered versions have preference over latest versions (#768) + - Detect builds of tasks with overlapping providers and warn (will become a fatal error) (#1359) + - Add MULTI_PROVIDER_WHITELIST variable to allow known safe multiple providers to be listed + - Handle paths in svn fetcher module parameter + - Support the syntax "export VARIABLE" + - Add bzr fetcher + - Add support for cleaning directories before a task in the form: + do_taskname[cleandirs] = "dir" + - bzr fetcher tweaks from Robert Schuster (#2913) + - Add mercurial (hg) fetcher from Robert Schuster (#2913) + - Don't add duplicates to BBPATH + - Fix preferred_version return values (providers.py) + - Fix 'depends' flag splitting + - Fix unexport handling (#3135) + - Add bb.copyfile function similar to bb.movefile (and improve movefile error reporting) + - Allow multiple options for deptask flag + - Use git-fetch instead of git-pull removing any need for merges when + fetching (we don't care about the index). Fixes fetch errors. + - Add BB_GENERATE_MIRROR_TARBALLS option, set to 0 to make git fetches + faster at the expense of not creating mirror tarballs. 
+ - SRCREV handling updates, improvements and fixes from Poky + - Add bb.utils.lockfile() and bb.utils.unlockfile() from Poky + - Add support for task selfstamp and lockfiles flags + - Disable task number acceleration since it can allow the tasks to run + out of sequence + - Improve runqueue code comments + - Add task scheduler abstraction and some example schedulers + - Improve circular dependency chain debugging code and user feedback + - Don't give a stacktrace for invalid tasks, have a user friendly message (#3431) + - Add support for "-e target" (#3432) + - Fix shell showdata command (#3259) + - Fix shell data updating problems (#1880) + - Properly raise errors for invalid source URI protocols + - Change the wget fetcher failure handling to avoid lockfile problems + - Add support for branches in git fetcher (Otavio Salvador, Michael Lauer) + - Make taskdata and runqueue errors more user friendly + - Add norecurse and fullpath options to cvs fetcher + - Fix exit code for build failures in --continue mode + - Fix git branch tags fetching + - Change parseConfigurationFile so it works on real data, not a copy + - Handle 'base' inherit and all other INHERITs from parseConfigurationFile + instead of BBHandler + - Fix getVarFlags bug in data_smart + - Optmise cache handling by more quickly detecting an invalid cache, only + saving the cache when its changed, moving the cache validity check into + the parsing loop and factoring some getVar calls outside a for loop + - Cooker: Remove a debug message from the parsing loop to lower overhead + - Convert build.py exec_task to use getVarFlags + - Update shell to use cooker.buildFile + - Add StampUpdate event + - Convert -b option to use taskdata/runqueue + - Remove digraph and switch to new stamp checking code. exec_task no longer + honours dependencies + - Make fetcher timestamp updating non-fatal when permissions don't allow + updates + - Add BB_SCHEDULER variable/option ("completion" or "speed") controlling + the way bitbake schedules tasks + - Add BB_STAMP_POLICY variable/option ("perfile" or "full") controlling + how extensively stamps are looked at for validity + - When handling build target failures make sure idepends are checked and + failed where needed. Fixes --continue mode crashes. + - Fix -f (force) in conjunction with -b + - Fix problems with recrdeptask handling where some idepends weren't handled + correctly. + - Handle exit codes correctly (from pH5) + - Work around refs/HEAD issues with git over http (#3410) + - Add proxy support to the CVS fetcher (from Cyril Chemparathy) + - Improve runfetchcmd so errors are seen and various GIT variables are exported + - Add ability to fetchers to check URL validity without downloading + - Improve runtime PREFERRED_PROVIDERS warning message + - Add BB_STAMP_WHITELIST option which contains a list of stamps to ignore when + checking stamp dependencies and using a BB_STAMP_POLICY of "whitelist" + - No longer weight providers on the basis of a package being "already staged". This + leads to builds being non-deterministic. + - Flush stdout/stderr before forking to fix duplicate console output + - Make sure recrdeps tasks include all inter-task dependencies of a given fn + - Add bb.runqueue.check_stamp_fn() for use by packaged-staging + - Add PERSISTENT_DIR to store the PersistData in a persistent + directory != the cache dir. 
+ - Add md5 and sha256 checksum generation functions to utils.py + - Correctly handle '-' characters in class names (#2958) + - Make sure expandKeys has been called on the data dictionary before running tasks + - Correctly add a task override in the form task-TASKNAME. + - Revert the '-' character fix in class names since it breaks things + - When a regexp fails to compile for PACKAGES_DYNAMIC, print a more useful error (#4444) + - Allow to checkout CVS by Date and Time. Just add HHmm to the SRCDATE. + - Move prunedir function to utils.py and add explode_dep_versions function + - Raise an exception if SRCREV == 'INVALID' + - Fix hg fetcher username/password handling and fix crash + - Fix PACKAGES_DYNAMIC handling of packages with '++' in the name + - Rename __depends to __base_depends after configuration parsing so we don't + recheck the validity of the config files time after time + - Add better environmental variable handling. By default it will now only pass certain + whitelisted variables into the data store. If BB_PRESERVE_ENV is set bitbake will use + all variable from the environment. If BB_ENV_WHITELIST is set, that whitelist will be + used instead of the internal bitbake one. Alternatively, BB_ENV_EXTRAWHITE can be used + to extend the internal whitelist. + - Perforce fetcher fix to use commandline options instead of being overriden by the environment + - bb.utils.prunedir can cope with symlinks to directoriees without exceptions + - use @rev when doing a svn checkout + - Add osc fetcher (from Joshua Lock in Poky) + - When SRCREV autorevisioning for a recipe is in use, don't cache the recipe + - Add tryaltconfigs option to control whether bitbake trys using alternative providers + to fulfil failed dependencies. It defaults to off, changing the default since this + behaviour confuses many users and isn't often useful. + - Improve lock file function error handling + - Add username handling to the git fetcher (Robert Bragg) + - Add support for HTTP_PROXY and HTTP_PROXY_IGNORE variables to the wget fetcher + - Export more variables to the fetcher commands to allow ssh checkouts and checkouts through + proxies to work better. (from Poky) + - Also allow user and pswd options in SRC_URIs globally (from Poky) + - Improve proxy handling when using mirrors (from Poky) + - Add bb.utils.prune_suffix function + - Fix hg checkouts of specific revisions (from Poky) + - Fix wget fetching of urls with parameters specified (from Poky) + - Add username handling to git fetcher (from Poky) + - Set HOME environmental variable when running fetcher commands (from Poky) + - Make sure allowed variables inherited from the environment are exported again (from Poky) + - When running a stage task in bbshell, run populate_staging, not the stage task (from Poky) + - Fix + character escaping from PACKAGES_DYNAMIC (thanks Otavio Salvador) + - Addition of BBCLASSEXTEND support for allowing one recipe to provide multiple targets (from Poky) + +Changes in Bitbake 1.8.0: + - Release 1.7.x as a stable series + +Changes in BitBake 1.7.x: + - Major updates of the dependency handling and execution + of tasks. 
Code from bin/bitbake replaced with runqueue.py + and taskdata.py + - New task execution code supports multithreading with a simplistic + threading algorithm controlled by BB_NUMBER_THREADS + - Change of the SVN Fetcher to keep the checkout around + courtsey of Paul Sokolovsky (#1367) + - PATH fix to bbimage (#1108) + - Allow debug domains to be specified on the commandline (-l) + - Allow 'interactive' tasks + - Logging message improvements + - Drop now uneeded BUILD_ALL_DEPS variable + - Add support for wildcards to -b option + - Major overhaul of the fetchers making a large amount of code common + including mirroring code + - Fetchers now touch md5 stamps upon access (to show activity) + - Fix -f force option when used without -b (long standing bug) + - Add expand_cache to data_cache.py, caching expanded data (speedup) + - Allow version field in DEPENDS (ignored for now) + - Add abort flag support to the shell + - Make inherit fail if the class doesn't exist (#1478) + - Fix data.emit_env() to expand keynames as well as values + - Add ssh fetcher + - Add perforce fetcher + - Make PREFERRED_PROVIDER_foobar defaults to foobar if available + - Share the parser's mtime_cache, reducing the number of stat syscalls + - Compile all anonfuncs at once! + *** Anonfuncs must now use common spacing format *** + - Memorise the list of handlers in __BBHANDLERS and tasks in __BBTASKS + This removes 2 million function calls resulting in a 5-10% speedup + - Add manpage + - Update generateDotGraph to use taskData/runQueue improving accuracy + and also adding a task dependency graph + - Fix/standardise on GPLv2 licence + - Move most functionality from bin/bitbake to cooker.py and split into + separate funcitons + - CVS fetcher: Added support for non-default port + - Add BBINCLUDELOGS_LINES, the number of lines to read from any logfile + - Drop shebangs from lib/bb scripts + +Changes in Bitbake 1.6.0: + - Better msg handling + - COW dict implementation from Tim Ansell (mithro) leading + to better performance + - Speed up of -s + +Changes in Bitbake 1.4.4: + - SRCDATE now handling courtsey Justin Patrin + - #1017 fix to work with rm_work + +Changes in BitBake 1.4.2: + - Send logs to oe.pastebin.com instead of pastebin.com + fixes #856 + - Copy the internal bitbake data before building the + dependency graph. This fixes nano not having a + virtual/libc dependency + - Allow multiple TARBALL_STASH entries + - Cache, check if the directory exists before changing + into it + - git speedup cloning by not doing a checkout + - allow to have spaces in filenames (.conf, .bb, .bbclass) + +Changes in BitBake 1.4.0: + - Fix to check both RDEPENDS and RDEPENDS_${PN} + - Fix a RDEPENDS parsing bug in utils:explode_deps() + - Update git fetcher behaviour to match git changes + - ASSUME_PROVIDED allowed to include runtime packages + - git fetcher cleanup and efficency improvements + - Change the format of the cache + - Update usermanual to document the Fetchers + - Major changes to caching with a new strategy + giving a major performance increase when reparsing + with few data changes + +Changes in BitBake 1.3.3: + - Create a new Fetcher module to ease the + development of new Fetchers. + Issue #438 fixed by rpurdie@openedhand.com + - Make the Subversion fetcher honor the SRC Date + (CVSDATE). 
+ Issue #555 fixed by chris@openedhand.com + - Expand PREFERRED_PROVIDER properly + Issue #436 fixed by rprudie@openedhand.com + - Typo fix for Issue #531 by Philipp Zabel for the + BitBake Shell + - Introduce a new special variable SRCDATE as + a generic naming to replace CVSDATE. + - Introduce a new keyword 'required'. In contrast + to 'include' parsing will fail if a to be included + file can not be found. + - Remove hardcoding of the STAMP directory. Patch + courtsey pHilipp Zabel + - Track the RDEPENDS of each package (rpurdie@openedhand.com) + - Introduce BUILD_ALL_DEPS to build all RDEPENDS. E.g + this is used by the OpenEmbedded Meta Packages. + (rpurdie@openedhand.com). + +Changes in BitBake 1.3.2: + - reintegration of make.py into BitBake + - bbread is gone, use bitbake -e + - lots of shell updates and bugfixes + - Introduction of the .= and =. operator + - Sort variables, keys and groups in bitdoc + - Fix regression in the handling of BBCOLLECTIONS + - Update the bitbake usermanual + +Changes in BitBake 1.3.0: + - add bitbake interactive shell (bitbake -i) + - refactor bitbake utility in OO style + - kill default arguments in methods in the bb.data module + - kill default arguments in methods in the bb.fetch module + - the http/https/ftp fetcher will fail if the to be + downloaded file was not found in DL_DIR (this is needed + to avoid unpacking the sourceforge mirror page) + - Switch to a cow like data instance for persistent and non + persisting mode (called data_smart.py) + - Changed the callback of bb.make.collect_bbfiles to carry + additional parameters + - Drastically reduced the amount of needed RAM by not holding + each data instance in memory when using a cache/persistent + storage + +Changes in BitBake 1.2.1: + The 1.2.1 release is meant as a intermediate release to lay the + ground for more radical changes. The most notable changes are: + + - Do not hardcode {}, use bb.data.init() instead if you want to + get a instance of a data class + - bb.data.init() is a factory and the old bb.data methods are delegates + - Do not use deepcopy use bb.data.createCopy() instead. + - Removed default arguments in bb.fetch + diff --git a/bitbake/HEADER b/bitbake/HEADER new file mode 100644 index 0000000..9859255 --- /dev/null +++ b/bitbake/HEADER @@ -0,0 +1,19 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# +# Copyright (C) +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + diff --git a/bitbake/LICENSE b/bitbake/LICENSE new file mode 100644 index 0000000..350140c --- /dev/null +++ b/bitbake/LICENSE @@ -0,0 +1,10 @@ +BitBake is licensed under the GNU General Public License version 2.0. See COPYING for further details. 
+ +The following external components are distributed with this software: + +* The Toaster Simple UI application is based upon the Django project template, the files of which are covered by the BSD license and are copyright (c) Django Software +Foundation and individual contributors. + +* Twitter Bootstrap (including Glyphicons), redistributed under the Apache License 2.0. + +* jQuery is redistributed under the MIT license. diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake new file mode 100755 index 0000000..41cf8c8 --- /dev/null +++ b/bitbake/bin/bitbake @@ -0,0 +1,400 @@ +#!/usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# Copyright (C) 2003, 2004 Chris Larson +# Copyright (C) 2003, 2004 Phil Blundell +# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer +# Copyright (C) 2005 Holger Hans Peter Freyther +# Copyright (C) 2005 ROAD GmbH +# Copyright (C) 2006 Richard Purdie +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import os +import sys, logging +sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), + 'lib')) + +import optparse +import warnings +from traceback import format_exception +try: + import bb +except RuntimeError as exc: + sys.exit(str(exc)) +from bb import event +import bb.msg +from bb import cooker +from bb import ui +from bb import server +from bb import cookerdata + +__version__ = "1.24.0" +logger = logging.getLogger("BitBake") + +# Python multiprocessing requires /dev/shm +if not os.access('/dev/shm', os.W_OK | os.X_OK): + sys.exit("FATAL: /dev/shm does not exist or is not writable") + +# Unbuffer stdout to avoid log truncation in the event +# of an unorderly exit as well as to provide timely +# updates to log files for use with tail +try: + if sys.stdout.name == '': + sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0) +except: + pass + + +def get_ui(config): + if not config.ui: + # modify 'ui' attribute because it is also read by cooker + config.ui = os.environ.get('BITBAKE_UI', 'knotty') + + interface = config.ui + + try: + # Dynamically load the UI based on the ui name. Although we + # suggest a fixed set this allows you to have flexibility in which + # ones are available. + module = __import__("bb.ui", fromlist = [interface]) + return getattr(module, interface) + except AttributeError: + sys.exit("FATAL: Invalid user interface '%s' specified.\n" + "Valid interfaces: depexp, goggle, ncurses, hob, knotty [default]." 
% interface) + + +# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others""" +warnlog = logging.getLogger("BitBake.Warnings") +_warnings_showwarning = warnings.showwarning +def _showwarning(message, category, filename, lineno, file=None, line=None): + if file is not None: + if _warnings_showwarning is not None: + _warnings_showwarning(message, category, filename, lineno, file, line) + else: + s = warnings.formatwarning(message, category, filename, lineno) + warnlog.warn(s) + +warnings.showwarning = _showwarning +warnings.filterwarnings("ignore") +warnings.filterwarnings("default", module="($|(oe|bb)\.)") +warnings.filterwarnings("ignore", category=PendingDeprecationWarning) +warnings.filterwarnings("ignore", category=ImportWarning) +warnings.filterwarnings("ignore", category=DeprecationWarning, module="$") +warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers") + +class BitBakeConfigParameters(cookerdata.ConfigParameters): + + def parseCommandLine(self): + parser = optparse.OptionParser( + version = "BitBake Build Tool Core version %s, %%prog version %s" % (bb.__version__, __version__), + usage = """%prog [options] [recipename/target ...] + + Executes the specified task (default is 'build') for a given set of target recipes (.bb files). + It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which + will provide the layer, BBFILES and other configuration information.""") + + parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.", + action = "store", dest = "buildfile", default = None) + + parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.", + action = "store_false", dest = "abort", default = True) + + parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.", + action = "store_true", dest = "tryaltconfigs", default = False) + + parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).", + action = "store_true", dest = "force", default = False) + + parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.", + action = "store", dest = "cmd") + + parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).", + action = "store", dest = "invalidate_stamp") + + parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.", + action = "append", dest = "prefile", default = []) + + parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.", + action = "append", dest = "postfile", default = []) + + parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.", + action = "store_true", dest = "verbose", default = False) + + parser.add_option("-D", "--debug", help = "Increase the debug level. 
You can specify this more than once.", + action = "count", dest="debug", default = 0) + + parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.", + action = "store_true", dest = "dry_run", default = False) + + parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. The SIGNATURE_HANDLER parameter is passed to the handler. Two common values are none and printdiff but the handler may define more/less. none means only dump the signature, printdiff means compare the dumped signature with the cached one.", + action = "append", dest = "dump_signatures", default = [], metavar="SIGNATURE_HANDLER") + + parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.", + action = "store_true", dest = "parse_only", default = False) + + parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.", + action = "store_true", dest = "show_versions", default = False) + + parser.add_option("-e", "--environment", help = "Show the global or per-recipe environment complete with information about where variables were set/changed.", + action = "store_true", dest = "show_environment", default = False) + + parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.", + action = "store_true", dest = "dot_graph", default = False) + + parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""", + action = "append", dest = "extra_assume_provided", default = []) + + parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""", + action = "append", dest = "debug_domains", default = []) + + parser.add_option("-P", "--profile", help = "Profile the command and save reports.", + action = "store_true", dest = "profile", default = False) + + parser.add_option("-u", "--ui", help = "The user interface to use (e.g. knotty, hob, depexp).", + action = "store", dest = "ui") + + parser.add_option("-t", "--servertype", help = "Choose which server to use, process or xmlrpc.", + action = "store", dest = "servertype") + + parser.add_option("", "--token", help = "Specify the connection token to be used when connecting to a remote server.", + action = "store", dest = "xmlrpctoken") + + parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.", + action = "store_true", dest = "revisions_changed", default = False) + + parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.", + action = "store_true", dest = "server_only", default = False) + + parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.", + action = "store", dest = "bind", default = False) + + parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. 
sstate will be ignored and everything needed, built.", + action = "store_true", dest = "nosetscene", default = False) + + parser.add_option("", "--remote-server", help = "Connect to the specified server.", + action = "store", dest = "remote_server", default = False) + + parser.add_option("-m", "--kill-server", help = "Terminate the remote server.", + action = "store_true", dest = "kill_server", default = False) + + parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.", + action = "store_true", dest = "observe_only", default = False) + + parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.", + action = "store_true", dest = "status_only", default = False) + + parser.add_option("-w", "--write-log", help = "Writes the event log of the build to a bitbake event json file. Use '' (empty string) to assign the name automatically.", + action = "store", dest = "writeeventlog") + + options, targets = parser.parse_args(sys.argv) + + # some environmental variables set also configuration options + if "BBSERVER" in os.environ: + options.servertype = "xmlrpc" + options.remote_server = os.environ["BBSERVER"] + + if "BBTOKEN" in os.environ: + options.xmlrpctoken = os.environ["BBTOKEN"] + + if "BBEVENTLOG" is os.environ: + options.writeeventlog = os.environ["BBEVENTLOG"] + + # fill in proper log name if not supplied + if options.writeeventlog is not None and len(options.writeeventlog) == 0: + import datetime + options.writeeventlog = "bitbake_eventlog_%s.json" % datetime.datetime.now().strftime("%Y%m%d%H%M%S") + + # if BBSERVER says to autodetect, let's do that + if options.remote_server: + [host, port] = options.remote_server.split(":", 2) + port = int(port) + # use automatic port if port set to -1, means read it from + # the bitbake.lock file; this is a bit tricky, but we always expect + # to be in the base of the build directory if we need to have a + # chance to start the server later, anyway + if port == -1: + lock_location = "./bitbake.lock" + # we try to read the address at all times; if the server is not started, + # we'll try to start it after the first connect fails, below + try: + lf = open(lock_location, 'r') + remotedef = lf.readline() + [host, port] = remotedef.split(":") + port = int(port) + lf.close() + options.remote_server = remotedef + except Exception as e: + sys.exit("Failed to read bitbake.lock (%s), invalid port" % str(e)) + + return options, targets[1:] + + +def start_server(servermodule, configParams, configuration, features): + server = servermodule.BitBakeServer() + if configParams.bind: + (host, port) = configParams.bind.split(':') + server.initServer((host, int(port))) + configuration.interface = [ server.serverImpl.host, server.serverImpl.port ] + else: + server.initServer() + configuration.interface = [] + + try: + configuration.setServerRegIdleCallback(server.getServerIdleCB()) + + cooker = bb.cooker.BBCooker(configuration, features) + + server.addcooker(cooker) + server.saveConnectionDetails() + except Exception as e: + exc_info = sys.exc_info() + while hasattr(server, "event_queue"): + try: + import queue + except ImportError: + import Queue as queue + try: + event = server.event_queue.get(block=False) + except (queue.Empty, IOError): + break + if isinstance(event, logging.LogRecord): + logger.handle(event) + raise exc_info[1], None, exc_info[2] + server.detach() + return server + + +def main(): + + configParams = BitBakeConfigParameters() + configuration = cookerdata.CookerConfiguration() + 
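    # From here main() wires everything together: it loads the requested UI
    # module, picks a server backend ('process' by default; '--bind',
    # '--server-only' and '--remote-server' all require the 'xmlrpc' type),
    # validates the option combinations, then either starts a local cooker
    # server or connects to a remote one before handing control to the UI's
    # main loop.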
configuration.setConfigParameters(configParams) + + ui_module = get_ui(configParams) + + # Server type can be xmlrpc or process currently, if nothing is specified, + # the default server is process + if configParams.servertype: + server_type = configParams.servertype + else: + server_type = 'process' + + try: + module = __import__("bb.server", fromlist = [server_type]) + servermodule = getattr(module, server_type) + except AttributeError: + sys.exit("FATAL: Invalid server type '%s' specified.\n" + "Valid interfaces: xmlrpc, process [default]." % server_type) + + if configParams.server_only: + if configParams.servertype != "xmlrpc": + sys.exit("FATAL: If '--server-only' is defined, we must set the servertype as 'xmlrpc'.\n") + if not configParams.bind: + sys.exit("FATAL: The '--server-only' option requires a name/address to bind to with the -B option.\n") + if configParams.remote_server: + sys.exit("FATAL: The '--server-only' option conflicts with %s.\n" % + ("the BBSERVER environment variable" if "BBSERVER" in os.environ else "the '--remote-server' option" )) + + if configParams.bind and configParams.servertype != "xmlrpc": + sys.exit("FATAL: If '-B' or '--bind' is defined, we must set the servertype as 'xmlrpc'.\n") + + if configParams.remote_server and configParams.servertype != "xmlrpc": + sys.exit("FATAL: If '--remote-server' is defined, we must set the servertype as 'xmlrpc'.\n") + + if configParams.observe_only and (not configParams.remote_server or configParams.bind): + sys.exit("FATAL: '--observe-only' can only be used by UI clients connecting to a server.\n") + + if configParams.kill_server and not configParams.remote_server: + sys.exit("FATAL: '--kill-server' can only be used to terminate a remote server") + + if "BBDEBUG" in os.environ: + level = int(os.environ["BBDEBUG"]) + if level > configuration.debug: + configuration.debug = level + + bb.msg.init_msgconfig(configParams.verbose, configuration.debug, + configuration.debug_domains) + + # Ensure logging messages get sent to the UI as events + handler = bb.event.LogHandler() + if not configParams.status_only: + # In status only mode there are no logs and no UI + logger.addHandler(handler) + + # Clear away any spurious environment variables while we stoke up the cooker + cleanedvars = bb.utils.clean_environment() + + featureset = [] + if not configParams.server_only: + # Collect the feature set for the UI + featureset = getattr(ui_module, "featureSet", []) + + if not configParams.remote_server: + # we start a server with a given configuration + server = start_server(servermodule, configParams, configuration, featureset) + bb.event.ui_queue = [] + else: + # we start a stub server that is actually a XMLRPClient that connects to a real server + server = servermodule.BitBakeXMLRPCClient(configParams.observe_only, configParams.xmlrpctoken) + server.saveConnectionDetails(configParams.remote_server) + + + if not configParams.server_only: + try: + server_connection = server.establishConnection(featureset) + except Exception as e: + if configParams.kill_server: + sys.exit(0) + bb.fatal("Could not connect to server %s: %s" % (configParams.remote_server, str(e))) + + # Restore the environment in case the UI needs it + for k in cleanedvars: + os.environ[k] = cleanedvars[k] + + logger.removeHandler(handler) + + + if configParams.status_only: + server_connection.terminate() + sys.exit(0) + + if configParams.kill_server: + server_connection.connection.terminateServer() + bb.event.ui_queue = [] + sys.exit(0) + + try: + return 
ui_module.main(server_connection.connection, server_connection.events, configParams) + finally: + bb.event.ui_queue = [] + server_connection.terminate() + else: + print("server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port)) + return 0 + + return 1 + +if __name__ == "__main__": + try: + ret = main() + except bb.BBHandledException: + ret = 1 + except Exception: + ret = 1 + import traceback + traceback.print_exc() + sys.exit(ret) + diff --git a/bitbake/bin/bitbake-diffsigs b/bitbake/bin/bitbake-diffsigs new file mode 100755 index 0000000..08ae00d --- /dev/null +++ b/bitbake/bin/bitbake-diffsigs @@ -0,0 +1,122 @@ +#!/usr/bin/env python + +# bitbake-diffsigs +# BitBake task signature data comparison utility +# +# Copyright (C) 2012-2013 Intel Corporation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import os +import sys +import warnings +import fnmatch +import optparse +import logging + +sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib')) + +import bb.tinfoil +import bb.siggen + +def logger_create(name, output=sys.stderr): + logger = logging.getLogger(name) + console = logging.StreamHandler(output) + format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s") + if output.isatty(): + format.enable_color() + console.setFormatter(format) + logger.addHandler(console) + logger.setLevel(logging.INFO) + return logger + +logger = logger_create('bitbake-diffsigs') + +def find_compare_task(bbhandler, pn, taskname): + """ Find the most recent signature files for the specified PN/task and compare them """ + + if not hasattr(bb.siggen, 'find_siginfo'): + logger.error('Metadata does not support finding signature data files') + sys.exit(1) + + if not taskname.startswith('do_'): + taskname = 'do_%s' % taskname + + filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data) + latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-2:] + if not latestfiles: + logger.error('No sigdata files found matching %s %s' % (pn, taskname)) + sys.exit(1) + elif len(latestfiles) < 2: + logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (pn, taskname)) + sys.exit(1) + else: + # Define recursion callback + def recursecb(key, hash1, hash2): + hashes = [hash1, hash2] + hashfiles = bb.siggen.find_siginfo(key, None, hashes, bbhandler.config_data) + + recout = [] + if len(hashfiles) == 2: + out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb) + recout.extend(list(' ' + l for l in out2)) + else: + recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2)) + + return recout + + # Recurse into signature comparison + output = bb.siggen.compare_sigfiles(latestfiles[0], latestfiles[1], recursecb) + if output: + print '\n'.join(output) + sys.exit(0) + + + +parser = optparse.OptionParser( + description = "Compares siginfo/sigdata files 
written out by BitBake", + usage = """ + %prog -t recipename taskname + %prog sigdatafile1 sigdatafile2 + %prog sigdatafile1""") + +parser.add_option("-t", "--task", + help = "find the signature data files for last two runs of the specified task and compare them", + action="store", dest="taskargs", nargs=2, metavar='recipename taskname') + +options, args = parser.parse_args(sys.argv) + +if options.taskargs: + tinfoil = bb.tinfoil.Tinfoil() + tinfoil.prepare(config_only = True) + find_compare_task(tinfoil, options.taskargs[0], options.taskargs[1]) +else: + if len(args) == 1: + parser.print_help() + else: + import cPickle + try: + if len(args) == 2: + output = bb.siggen.dump_sigfile(sys.argv[1]) + else: + output = bb.siggen.compare_sigfiles(sys.argv[1], sys.argv[2]) + except IOError as e: + logger.error(str(e)) + sys.exit(1) + except cPickle.UnpicklingError, EOFError: + logger.error('Invalid signature data - ensure you are specifying sigdata/siginfo files') + sys.exit(1) + + if output: + print '\n'.join(output) diff --git a/bitbake/bin/bitbake-dumpsig b/bitbake/bin/bitbake-dumpsig new file mode 100755 index 0000000..656d93a --- /dev/null +++ b/bitbake/bin/bitbake-dumpsig @@ -0,0 +1,65 @@ +#!/usr/bin/env python + +# bitbake-dumpsig +# BitBake task signature dump utility +# +# Copyright (C) 2013 Intel Corporation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import os +import sys +import warnings +import optparse +import logging + +sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib')) + +import bb.siggen + +def logger_create(name, output=sys.stderr): + logger = logging.getLogger(name) + console = logging.StreamHandler(output) + format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s") + if output.isatty(): + format.enable_color() + console.setFormatter(format) + logger.addHandler(console) + logger.setLevel(logging.INFO) + return logger + +logger = logger_create('bitbake-dumpsig') + +parser = optparse.OptionParser( + description = "Dumps siginfo/sigdata files written out by BitBake", + usage = """ + %prog sigdatafile""") + +options, args = parser.parse_args(sys.argv) + +if len(args) == 1: + parser.print_help() +else: + import cPickle + try: + output = bb.siggen.dump_sigfile(args[1]) + except IOError as e: + logger.error(str(e)) + sys.exit(1) + except cPickle.UnpicklingError, EOFError: + logger.error('Invalid signature data - ensure you are specifying a sigdata/siginfo file') + sys.exit(1) + + if output: + print '\n'.join(output) diff --git a/bitbake/bin/bitbake-layers b/bitbake/bin/bitbake-layers new file mode 100755 index 0000000..9964040 --- /dev/null +++ b/bitbake/bin/bitbake-layers @@ -0,0 +1,758 @@ +#!/usr/bin/env python + +# This script has subcommands which operate against your bitbake layers, either +# displaying useful information, or acting against them. +# See the help output for details on available commands. 
+ +# Copyright (C) 2011 Mentor Graphics Corporation +# Copyright (C) 2012 Intel Corporation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import cmd +import logging +import os +import sys +import fnmatch +from collections import defaultdict +import re + +bindir = os.path.dirname(__file__) +topdir = os.path.dirname(bindir) +sys.path[0:0] = [os.path.join(topdir, 'lib')] + +import bb.cache +import bb.cooker +import bb.providers +import bb.utils +import bb.tinfoil + + +logger = logging.getLogger('BitBake') + + +def main(args): + cmds = Commands() + if args: + # Allow user to specify e.g. show-layers instead of show_layers + args = [args[0].replace('-', '_')] + args[1:] + cmds.onecmd(' '.join(args)) + else: + cmds.do_help('') + return cmds.returncode + + +class Commands(cmd.Cmd): + def __init__(self): + self.bbhandler = None + self.returncode = 0 + self.bblayers = [] + cmd.Cmd.__init__(self) + + def init_bbhandler(self, config_only = False): + if not self.bbhandler: + self.bbhandler = bb.tinfoil.Tinfoil() + self.bblayers = (self.bbhandler.config_data.getVar('BBLAYERS', True) or "").split() + self.bbhandler.prepare(config_only) + + def default(self, line): + """Handle unrecognised commands""" + sys.stderr.write("Unrecognised command or option\n") + self.do_help('') + + def do_help(self, topic): + """display general help or help on a specified command""" + if topic: + sys.stdout.write('%s: ' % topic) + cmd.Cmd.do_help(self, topic.replace('-', '_')) + else: + sys.stdout.write("usage: bitbake-layers [arguments]\n\n") + sys.stdout.write("Available commands:\n") + procnames = list(set(self.get_names())) + for procname in procnames: + if procname[:3] == 'do_': + sys.stdout.write(" %s\n" % procname[3:].replace('_', '-')) + doc = getattr(self, procname).__doc__ + if doc: + sys.stdout.write(" %s\n" % doc.splitlines()[0]) + + def do_show_layers(self, args): + """show current configured layers""" + self.init_bbhandler(config_only = True) + logger.plain("%s %s %s" % ("layer".ljust(20), "path".ljust(40), "priority")) + logger.plain('=' * 74) + for layerdir in self.bblayers: + layername = self.get_layer_name(layerdir) + layerpri = 0 + for layer, _, regex, pri in self.bbhandler.cooker.recipecache.bbfile_config_priorities: + if regex.match(os.path.join(layerdir, 'test')): + layerpri = pri + break + + logger.plain("%s %s %d" % (layername.ljust(20), layerdir.ljust(40), layerpri)) + + + def version_str(self, pe, pv, pr = None): + verstr = "%s" % pv + if pr: + verstr = "%s-%s" % (verstr, pr) + if pe: + verstr = "%s:%s" % (pe, verstr) + return verstr + + + def do_show_overlayed(self, args): + """list overlayed recipes (where the same recipe exists in another layer) + +usage: show-overlayed [-f] [-s] + +Lists the names of overlayed recipes and the available versions in each +layer, with the preferred version first. Note that skipped recipes that +are overlayed will also be listed, with a " (skipped)" suffix. 
+ +Options: + -f instead of the default formatting, list filenames of higher priority + recipes with the ones they overlay indented underneath + -s only list overlayed recipes where the version is the same +""" + self.init_bbhandler() + + show_filenames = False + show_same_ver_only = False + for arg in args.split(): + if arg == '-f': + show_filenames = True + elif arg == '-s': + show_same_ver_only = True + else: + sys.stderr.write("show-overlayed: invalid option %s\n" % arg) + self.do_help('') + return + + items_listed = self.list_recipes('Overlayed recipes', None, True, show_same_ver_only, show_filenames, True) + + # Check for overlayed .bbclass files + classes = defaultdict(list) + for layerdir in self.bblayers: + classdir = os.path.join(layerdir, 'classes') + if os.path.exists(classdir): + for classfile in os.listdir(classdir): + if os.path.splitext(classfile)[1] == '.bbclass': + classes[classfile].append(classdir) + + # Locating classes and other files is a bit more complicated than recipes - + # layer priority is not a factor; instead BitBake uses the first matching + # file in BBPATH, which is manipulated directly by each layer's + # conf/layer.conf in turn, thus the order of layers in bblayers.conf is a + # factor - however, each layer.conf is free to either prepend or append to + # BBPATH (or indeed do crazy stuff with it). Thus the order in BBPATH might + # not be exactly the order present in bblayers.conf either. + bbpath = str(self.bbhandler.config_data.getVar('BBPATH', True)) + overlayed_class_found = False + for (classfile, classdirs) in classes.items(): + if len(classdirs) > 1: + if not overlayed_class_found: + logger.plain('=== Overlayed classes ===') + overlayed_class_found = True + + mainfile = bb.utils.which(bbpath, os.path.join('classes', classfile)) + if show_filenames: + logger.plain('%s' % mainfile) + else: + # We effectively have to guess the layer here + logger.plain('%s:' % classfile) + mainlayername = '?' + for layerdir in self.bblayers: + classdir = os.path.join(layerdir, 'classes') + if mainfile.startswith(classdir): + mainlayername = self.get_layer_name(layerdir) + logger.plain(' %s' % mainlayername) + for classdir in classdirs: + fullpath = os.path.join(classdir, classfile) + if fullpath != mainfile: + if show_filenames: + print(' %s' % fullpath) + else: + print(' %s' % self.get_layer_name(os.path.dirname(classdir))) + + if overlayed_class_found: + items_listed = True; + + if not items_listed: + logger.plain('No overlayed files found.') + + + def do_show_recipes(self, args): + """list available recipes, showing the layer they are provided by + +usage: show-recipes [-f] [-m] [pnspec] + +Lists the names of overlayed recipes and the available versions in each +layer, with the preferred version first. Optionally you may specify +pnspec to match a specified recipe name (supports wildcards). Note that +skipped recipes will also be listed, with a " (skipped)" suffix. 
+ +Options: + -f instead of the default formatting, list filenames of higher priority + recipes with other available recipes indented underneath + -m only list where multiple recipes (in the same layer or different + layers) exist for the same recipe name +""" + self.init_bbhandler() + + show_filenames = False + show_multi_provider_only = False + pnspec = None + title = 'Available recipes:' + for arg in args.split(): + if arg == '-f': + show_filenames = True + elif arg == '-m': + show_multi_provider_only = True + elif not arg.startswith('-'): + pnspec = arg + title = 'Available recipes matching %s:' % pnspec + else: + sys.stderr.write("show-recipes: invalid option %s\n" % arg) + self.do_help('') + return + self.list_recipes(title, pnspec, False, False, show_filenames, show_multi_provider_only) + + + def list_recipes(self, title, pnspec, show_overlayed_only, show_same_ver_only, show_filenames, show_multi_provider_only): + pkg_pn = self.bbhandler.cooker.recipecache.pkg_pn + (latest_versions, preferred_versions) = bb.providers.findProviders(self.bbhandler.config_data, self.bbhandler.cooker.recipecache, pkg_pn) + allproviders = bb.providers.allProviders(self.bbhandler.cooker.recipecache) + + # Ensure we list skipped recipes + # We are largely guessing about PN, PV and the preferred version here, + # but we have no choice since skipped recipes are not fully parsed + skiplist = self.bbhandler.cooker.skiplist.keys() + skiplist.sort( key=lambda fileitem: self.bbhandler.cooker.collection.calc_bbfile_priority(fileitem) ) + skiplist.reverse() + for fn in skiplist: + recipe_parts = os.path.splitext(os.path.basename(fn))[0].split('_') + p = recipe_parts[0] + if len(recipe_parts) > 1: + ver = (None, recipe_parts[1], None) + else: + ver = (None, 'unknown', None) + allproviders[p].append((ver, fn)) + if not p in pkg_pn: + pkg_pn[p] = 'dummy' + preferred_versions[p] = (ver, fn) + + def print_item(f, pn, ver, layer, ispref): + if f in skiplist: + skipped = ' (skipped)' + else: + skipped = '' + if show_filenames: + if ispref: + logger.plain("%s%s", f, skipped) + else: + logger.plain(" %s%s", f, skipped) + else: + if ispref: + logger.plain("%s:", pn) + logger.plain(" %s %s%s", layer.ljust(20), ver, skipped) + + preffiles = [] + items_listed = False + for p in sorted(pkg_pn): + if pnspec: + if not fnmatch.fnmatch(p, pnspec): + continue + + if len(allproviders[p]) > 1 or not show_multi_provider_only: + pref = preferred_versions[p] + preffile = bb.cache.Cache.virtualfn2realfn(pref[1])[0] + if preffile not in preffiles: + preflayer = self.get_file_layer(preffile) + multilayer = False + same_ver = True + provs = [] + for prov in allproviders[p]: + provfile = bb.cache.Cache.virtualfn2realfn(prov[1])[0] + provlayer = self.get_file_layer(provfile) + provs.append((provfile, provlayer, prov[0])) + if provlayer != preflayer: + multilayer = True + if prov[0] != pref[0]: + same_ver = False + + if (multilayer or not show_overlayed_only) and (same_ver or not show_same_ver_only): + if not items_listed: + logger.plain('=== %s ===' % title) + items_listed = True + print_item(preffile, p, self.version_str(pref[0][0], pref[0][1]), preflayer, True) + for (provfile, provlayer, provver) in provs: + if provfile != preffile: + print_item(provfile, p, self.version_str(provver[0], provver[1]), provlayer, False) + # Ensure we don't show two entries for BBCLASSEXTENDed recipes + preffiles.append(preffile) + + return items_listed + + + def do_flatten(self, args): + """flattens layer configuration into a separate output directory. 
+ +usage: flatten [layer1 layer2 [layer3]...] + +Takes the specified layers (or all layers in the current layer +configuration if none are specified) and builds a "flattened" directory +containing the contents of all layers, with any overlayed recipes removed +and bbappends appended to the corresponding recipes. Note that some manual +cleanup may still be necessary afterwards, in particular: + +* where non-recipe files (such as patches) are overwritten (the flatten + command will show a warning for these) +* where anything beyond the normal layer setup has been added to + layer.conf (only the lowest priority number layer's layer.conf is used) +* overridden/appended items from bbappends will need to be tidied up +* when the flattened layers do not have the same directory structure (the + flatten command should show a warning when this will cause a problem) + +Warning: if you flatten several layers where another layer is intended to +be used "inbetween" them (in layer priority order) such that recipes / +bbappends in the layers interact, and then attempt to use the new output +layer together with that other layer, you may no longer get the same +build results (as the layer priority order has effectively changed). +""" + arglist = args.split() + if len(arglist) < 1: + logger.error('Please specify an output directory') + self.do_help('flatten') + return + + if len(arglist) == 2: + logger.error('If you specify layers to flatten you must specify at least two') + self.do_help('flatten') + return + + outputdir = arglist[-1] + if os.path.exists(outputdir) and os.listdir(outputdir): + logger.error('Directory %s exists and is non-empty, please clear it out first' % outputdir) + return + + self.init_bbhandler() + layers = self.bblayers + if len(arglist) > 2: + layernames = arglist[:-1] + found_layernames = [] + found_layerdirs = [] + for layerdir in layers: + layername = self.get_layer_name(layerdir) + if layername in layernames: + found_layerdirs.append(layerdir) + found_layernames.append(layername) + + for layername in layernames: + if not layername in found_layernames: + logger.error('Unable to find layer %s in current configuration, please run "%s show-layers" to list configured layers' % (layername, os.path.basename(sys.argv[0]))) + return + layers = found_layerdirs + else: + layernames = [] + + # Ensure a specified path matches our list of layers + def layer_path_match(path): + for layerdir in layers: + if path.startswith(os.path.join(layerdir, '')): + return layerdir + return None + + appended_recipes = [] + for layer in layers: + overlayed = [] + for f in self.bbhandler.cooker.collection.overlayed.iterkeys(): + for of in self.bbhandler.cooker.collection.overlayed[f]: + if of.startswith(layer): + overlayed.append(of) + + logger.plain('Copying files from %s...' 
% layer ) + for root, dirs, files in os.walk(layer): + for f1 in files: + f1full = os.sep.join([root, f1]) + if f1full in overlayed: + logger.plain(' Skipping overlayed file %s' % f1full ) + else: + ext = os.path.splitext(f1)[1] + if ext != '.bbappend': + fdest = f1full[len(layer):] + fdest = os.path.normpath(os.sep.join([outputdir,fdest])) + bb.utils.mkdirhier(os.path.dirname(fdest)) + if os.path.exists(fdest): + if f1 == 'layer.conf' and root.endswith('/conf'): + logger.plain(' Skipping layer config file %s' % f1full ) + continue + else: + logger.warn('Overwriting file %s', fdest) + bb.utils.copyfile(f1full, fdest) + if ext == '.bb': + if f1 in self.bbhandler.cooker.collection.appendlist: + appends = self.bbhandler.cooker.collection.appendlist[f1] + if appends: + logger.plain(' Applying appends to %s' % fdest ) + for appendname in appends: + if layer_path_match(appendname): + self.apply_append(appendname, fdest) + appended_recipes.append(f1) + + # Take care of when some layers are excluded and yet we have included bbappends for those recipes + for recipename in self.bbhandler.cooker.collection.appendlist.iterkeys(): + if recipename not in appended_recipes: + appends = self.bbhandler.cooker.collection.appendlist[recipename] + first_append = None + for appendname in appends: + layer = layer_path_match(appendname) + if layer: + if first_append: + self.apply_append(appendname, first_append) + else: + fdest = appendname[len(layer):] + fdest = os.path.normpath(os.sep.join([outputdir,fdest])) + bb.utils.mkdirhier(os.path.dirname(fdest)) + bb.utils.copyfile(appendname, fdest) + first_append = fdest + + # Get the regex for the first layer in our list (which is where the conf/layer.conf file will + # have come from) + first_regex = None + layerdir = layers[0] + for layername, pattern, regex, _ in self.bbhandler.cooker.recipecache.bbfile_config_priorities: + if regex.match(os.path.join(layerdir, 'test')): + first_regex = regex + break + + if first_regex: + # Find the BBFILES entries that match (which will have come from this conf/layer.conf file) + bbfiles = str(self.bbhandler.config_data.getVar('BBFILES', True)).split() + bbfiles_layer = [] + for item in bbfiles: + if first_regex.match(item): + newpath = os.path.join(outputdir, item[len(layerdir)+1:]) + bbfiles_layer.append(newpath) + + if bbfiles_layer: + # Check that all important layer files match BBFILES + for root, dirs, files in os.walk(outputdir): + for f1 in files: + ext = os.path.splitext(f1)[1] + if ext in ['.bb', '.bbappend']: + f1full = os.sep.join([root, f1]) + entry_found = False + for item in bbfiles_layer: + if fnmatch.fnmatch(f1full, item): + entry_found = True + break + if not entry_found: + logger.warning("File %s does not match the flattened layer's BBFILES setting, you may need to edit conf/layer.conf or move the file elsewhere" % f1full) + + def get_file_layer(self, filename): + for layer, _, regex, _ in self.bbhandler.cooker.recipecache.bbfile_config_priorities: + if regex.match(filename): + for layerdir in self.bblayers: + if regex.match(os.path.join(layerdir, 'test')) and re.match(layerdir, filename): + return self.get_layer_name(layerdir) + return "?" + + def get_file_layerdir(self, filename): + for layer, _, regex, _ in self.bbhandler.cooker.recipecache.bbfile_config_priorities: + if regex.match(filename): + for layerdir in self.bblayers: + if regex.match(os.path.join(layerdir, 'test')) and re.match(layerdir, filename): + return layerdir + return "?" 
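    # Note on the two lookups above: each entry in bbfile_config_priorities is a
    # (collection, pattern, compiled regex, priority) tuple, one per configured
    # collection/layer.  A file is attributed to a layer by finding a regex that
    # matches it and then probing each BBLAYERS directory with
    # os.path.join(layerdir, 'test') to work out which configured directory that
    # regex belongs to; "?" is returned when no layer claims the file.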
+ + def remove_layer_prefix(self, f): + """Remove the layer_dir prefix, e.g., f = /path/to/layer_dir/foo/blah, the + return value will be: layer_dir/foo/blah""" + f_layerdir = self.get_file_layerdir(f) + prefix = os.path.join(os.path.dirname(f_layerdir), '') + return f[len(prefix):] if f.startswith(prefix) else f + + def get_layer_name(self, layerdir): + return os.path.basename(layerdir.rstrip(os.sep)) + + def apply_append(self, appendname, recipename): + appendfile = open(appendname, 'r') + recipefile = open(recipename, 'a') + recipefile.write('\n') + recipefile.write('##### bbappended from %s #####\n' % self.get_file_layer(appendname)) + recipefile.writelines(appendfile.readlines()) + recipefile.close() + appendfile.close() + + def do_show_appends(self, args): + """list bbappend files and recipe files they apply to + +usage: show-appends + +Recipes are listed with the bbappends that apply to them as subitems. +""" + self.init_bbhandler() + if not self.bbhandler.cooker.collection.appendlist: + logger.plain('No append files found') + return + + logger.plain('=== Appended recipes ===') + + pnlist = list(self.bbhandler.cooker_data.pkg_pn.keys()) + pnlist.sort() + for pn in pnlist: + self.show_appends_for_pn(pn) + + self.show_appends_for_skipped() + + def show_appends_for_pn(self, pn): + filenames = self.bbhandler.cooker_data.pkg_pn[pn] + + best = bb.providers.findBestProvider(pn, + self.bbhandler.config_data, + self.bbhandler.cooker_data, + self.bbhandler.cooker_data.pkg_pn) + best_filename = os.path.basename(best[3]) + + self.show_appends_output(filenames, best_filename) + + def show_appends_for_skipped(self): + filenames = [os.path.basename(f) + for f in self.bbhandler.cooker.skiplist.iterkeys()] + self.show_appends_output(filenames, None, " (skipped)") + + def show_appends_output(self, filenames, best_filename, name_suffix = ''): + appended, missing = self.get_appends_for_files(filenames) + if appended: + for basename, appends in appended: + logger.plain('%s%s:', basename, name_suffix) + for append in appends: + logger.plain(' %s', append) + + if best_filename: + if best_filename in missing: + logger.warn('%s: missing append for preferred version', + best_filename) + self.returncode |= 1 + + + def get_appends_for_files(self, filenames): + appended, notappended = [], [] + for filename in filenames: + _, cls = bb.cache.Cache.virtualfn2realfn(filename) + if cls: + continue + + basename = os.path.basename(filename) + appends = self.bbhandler.cooker.collection.get_file_appends(basename) + if appends: + appended.append((basename, list(appends))) + else: + notappended.append(basename) + return appended, notappended + + def do_show_cross_depends(self, args): + """figure out the dependency between recipes that crosses a layer boundary. + +usage: show-cross-depends [-f] [-i layer1[,layer2[,layer3...]]] + +Figure out the dependency between recipes that crosses a layer boundary. + +Options: + -f show full file path + -i ignore dependencies on items in the specified layer(s) + +NOTE: +The .bbappend file can impact the dependency. 
+""" + import optparse + + parser = optparse.OptionParser(usage="show-cross-depends [-f] [-i layer1[,layer2[,layer3...]]]") + parser.add_option("-f", "", + action="store_true", dest="show_filenames") + parser.add_option("-i", "", + action="store", dest="ignore_layers", default="") + + options, args = parser.parse_args(sys.argv) + ignore_layers = options.ignore_layers.split(',') + + self.init_bbhandler() + + pkg_fn = self.bbhandler.cooker_data.pkg_fn + bbpath = str(self.bbhandler.config_data.getVar('BBPATH', True)) + self.require_re = re.compile(r"require\s+(.+)") + self.include_re = re.compile(r"include\s+(.+)") + self.inherit_re = re.compile(r"inherit\s+(.+)") + + global_inherit = (self.bbhandler.config_data.getVar('INHERIT', True) or "").split() + + # The bb's DEPENDS and RDEPENDS + for f in pkg_fn: + f = bb.cache.Cache.virtualfn2realfn(f)[0] + # Get the layername that the file is in + layername = self.get_file_layer(f) + + # The DEPENDS + deps = self.bbhandler.cooker_data.deps[f] + for pn in deps: + if pn in self.bbhandler.cooker_data.pkg_pn: + best = bb.providers.findBestProvider(pn, + self.bbhandler.config_data, + self.bbhandler.cooker_data, + self.bbhandler.cooker_data.pkg_pn) + self.check_cross_depends("DEPENDS", layername, f, best[3], options.show_filenames, ignore_layers) + + # The RDPENDS + all_rdeps = self.bbhandler.cooker_data.rundeps[f].values() + # Remove the duplicated or null one. + sorted_rdeps = {} + # The all_rdeps is the list in list, so we need two for loops + for k1 in all_rdeps: + for k2 in k1: + sorted_rdeps[k2] = 1 + all_rdeps = sorted_rdeps.keys() + for rdep in all_rdeps: + all_p = bb.providers.getRuntimeProviders(self.bbhandler.cooker_data, rdep) + if all_p: + if f in all_p: + # The recipe provides this one itself, ignore + continue + best = bb.providers.filterProvidersRunTime(all_p, rdep, + self.bbhandler.config_data, + self.bbhandler.cooker_data)[0][0] + self.check_cross_depends("RDEPENDS", layername, f, best, options.show_filenames, ignore_layers) + + # The RRECOMMENDS + all_rrecs = self.bbhandler.cooker_data.runrecs[f].values() + # Remove the duplicated or null one. + sorted_rrecs = {} + # The all_rrecs is the list in list, so we need two for loops + for k1 in all_rrecs: + for k2 in k1: + sorted_rrecs[k2] = 1 + all_rrecs = sorted_rrecs.keys() + for rrec in all_rrecs: + all_p = bb.providers.getRuntimeProviders(self.bbhandler.cooker_data, rrec) + if all_p: + if f in all_p: + # The recipe provides this one itself, ignore + continue + best = bb.providers.filterProvidersRunTime(all_p, rrec, + self.bbhandler.config_data, + self.bbhandler.cooker_data)[0][0] + self.check_cross_depends("RRECOMMENDS", layername, f, best, options.show_filenames, ignore_layers) + + # The inherit class + cls_re = re.compile('classes/') + if f in self.bbhandler.cooker_data.inherits: + inherits = self.bbhandler.cooker_data.inherits[f] + for cls in inherits: + # The inherits' format is [classes/cls, /path/to/classes/cls] + # ignore the classes/cls. 
+ if not cls_re.match(cls): + classname = os.path.splitext(os.path.basename(cls))[0] + if classname in global_inherit: + continue + inherit_layername = self.get_file_layer(cls) + if inherit_layername != layername and not inherit_layername in ignore_layers: + if not options.show_filenames: + f_short = self.remove_layer_prefix(f) + cls = self.remove_layer_prefix(cls) + else: + f_short = f + logger.plain("%s inherits %s" % (f_short, cls)) + + # The 'require/include xxx' in the bb file + pv_re = re.compile(r"\${PV}") + fnfile = open(f, 'r') + line = fnfile.readline() + while line: + m, keyword = self.match_require_include(line) + # Found the 'require/include xxxx' + if m: + needed_file = m.group(1) + # Replace the ${PV} with the real PV + if pv_re.search(needed_file) and f in self.bbhandler.cooker_data.pkg_pepvpr: + pv = self.bbhandler.cooker_data.pkg_pepvpr[f][1] + needed_file = re.sub(r"\${PV}", pv, needed_file) + self.print_cross_files(bbpath, keyword, layername, f, needed_file, options.show_filenames, ignore_layers) + line = fnfile.readline() + fnfile.close() + + # The "require/include xxx" in conf/machine/*.conf, .inc and .bbclass + conf_re = re.compile(".*/conf/machine/[^\/]*\.conf$") + inc_re = re.compile(".*\.inc$") + # The "inherit xxx" in .bbclass + bbclass_re = re.compile(".*\.bbclass$") + for layerdir in self.bblayers: + layername = self.get_layer_name(layerdir) + for dirpath, dirnames, filenames in os.walk(layerdir): + for name in filenames: + f = os.path.join(dirpath, name) + s = conf_re.match(f) or inc_re.match(f) or bbclass_re.match(f) + if s: + ffile = open(f, 'r') + line = ffile.readline() + while line: + m, keyword = self.match_require_include(line) + # Only bbclass has the "inherit xxx" here. + bbclass="" + if not m and f.endswith(".bbclass"): + m, keyword = self.match_inherit(line) + bbclass=".bbclass" + # Find a 'require/include xxxx' + if m: + self.print_cross_files(bbpath, keyword, layername, f, m.group(1) + bbclass, options.show_filenames, ignore_layers) + line = ffile.readline() + ffile.close() + + def print_cross_files(self, bbpath, keyword, layername, f, needed_filename, show_filenames, ignore_layers): + """Print the depends that crosses a layer boundary""" + needed_file = bb.utils.which(bbpath, needed_filename) + if needed_file: + # Which layer is this file from + needed_layername = self.get_file_layer(needed_file) + if needed_layername != layername and not needed_layername in ignore_layers: + if not show_filenames: + f = self.remove_layer_prefix(f) + needed_file = self.remove_layer_prefix(needed_file) + logger.plain("%s %s %s" %(f, keyword, needed_file)) + + def match_inherit(self, line): + """Match the inherit xxx line""" + return (self.inherit_re.match(line), "inherits") + + def match_require_include(self, line): + """Match the require/include xxx line""" + m = self.require_re.match(line) + keyword = "requires" + if not m: + m = self.include_re.match(line) + keyword = "includes" + return (m, keyword) + + def check_cross_depends(self, keyword, layername, f, needed_file, show_filenames, ignore_layers): + """Print the DEPENDS/RDEPENDS file that crosses a layer boundary""" + best_realfn = bb.cache.Cache.virtualfn2realfn(needed_file)[0] + needed_layername = self.get_file_layer(best_realfn) + if needed_layername != layername and not needed_layername in ignore_layers: + if not show_filenames: + f = self.remove_layer_prefix(f) + best_realfn = self.remove_layer_prefix(best_realfn) + + logger.plain("%s %s %s" % (f, keyword, best_realfn)) + +if __name__ == '__main__': + 
sys.exit(main(sys.argv[1:]) or 0) diff --git a/bitbake/bin/bitbake-prserv b/bitbake/bin/bitbake-prserv new file mode 100755 index 0000000..a8d7acb --- /dev/null +++ b/bitbake/bin/bitbake-prserv @@ -0,0 +1,55 @@ +#!/usr/bin/env python +import os +import sys,logging +import optparse + +sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),'lib')) + +import prserv +import prserv.serv + +__version__="1.0.0" + +PRHOST_DEFAULT='0.0.0.0' +PRPORT_DEFAULT=8585 + +def main(): + parser = optparse.OptionParser( + version="Bitbake PR Service Core version %s, %%prog version %s" % (prserv.__version__, __version__), + usage = "%prog < --start | --stop > [options]") + + parser.add_option("-f", "--file", help="database filename(default: prserv.sqlite3)", action="store", + dest="dbfile", type="string", default="prserv.sqlite3") + parser.add_option("-l", "--log", help="log filename(default: prserv.log)", action="store", + dest="logfile", type="string", default="prserv.log") + parser.add_option("--loglevel", help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG", + action = "store", type="string", dest="loglevel", default = "INFO") + parser.add_option("--start", help="start daemon", + action="store_true", dest="start") + parser.add_option("--stop", help="stop daemon", + action="store_true", dest="stop") + parser.add_option("--host", help="ip address to bind", action="store", + dest="host", type="string", default=PRHOST_DEFAULT) + parser.add_option("--port", help="port number(default: 8585)", action="store", + dest="port", type="int", default=PRPORT_DEFAULT) + + options, args = parser.parse_args(sys.argv) + prserv.init_logger(os.path.abspath(options.logfile),options.loglevel) + + if options.start: + ret=prserv.serv.start_daemon(options.dbfile, options.host, options.port,os.path.abspath(options.logfile)) + elif options.stop: + ret=prserv.serv.stop_daemon(options.host, options.port) + else: + ret=parser.print_help() + return ret + +if __name__ == "__main__": + try: + ret = main() + except Exception: + ret = 1 + import traceback + traceback.print_exc(5) + sys.exit(ret) + diff --git a/bitbake/bin/bitbake-selftest b/bitbake/bin/bitbake-selftest new file mode 100755 index 0000000..81e4c3c --- /dev/null +++ b/bitbake/bin/bitbake-selftest @@ -0,0 +1,49 @@ +#!/usr/bin/env python +# +# Copyright (C) 2012 Richard Purdie +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
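For reference, the bitbake-prserv entry point above reduces to the two prserv.serv calls sketched here, using the defaults from its option parser (prserv.sqlite3, prserv.log, 0.0.0.0:8585). A minimal sketch, assuming BitBake's lib/ directory is already on sys.path so that the prserv module can be imported:

import os

import prserv
import prserv.serv

DBFILE = "prserv.sqlite3"      # --file default
LOGFILE = "prserv.log"         # --log default
HOST, PORT = "0.0.0.0", 8585   # --host/--port defaults

prserv.init_logger(os.path.abspath(LOGFILE), "INFO")

# start the PR server daemon ...
prserv.serv.start_daemon(DBFILE, HOST, PORT, os.path.abspath(LOGFILE))

# ... and stop it again when done
prserv.serv.stop_daemon(HOST, PORT)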
+ +import os +import sys, logging +sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib')) + +import unittest +try: + import bb +except RuntimeError as exc: + sys.exit(str(exc)) + +def usage(): + print('usage: %s [testname1 [testname2]...]' % os.path.basename(sys.argv[0])) + +if len(sys.argv) > 1: + if '--help' in sys.argv[1:]: + usage() + sys.exit(0) + + tests = sys.argv[1:] +else: + tests = ["bb.tests.codeparser", + "bb.tests.cow", + "bb.tests.data", + "bb.tests.fetch", + "bb.tests.utils"] + +for t in tests: + t = '.'.join(t.split('.')[:3]) + __import__(t) + +unittest.main(argv=["bitbake-selftest"] + tests) + diff --git a/bitbake/bin/bitbake-worker b/bitbake/bin/bitbake-worker new file mode 100755 index 0000000..8a24161 --- /dev/null +++ b/bitbake/bin/bitbake-worker @@ -0,0 +1,415 @@ +#!/usr/bin/env python + +import os +import sys +import warnings +sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib')) +from bb import fetch2 +import logging +import bb +import select +import errno +import signal + +# Users shouldn't be running this code directly +if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"): + print("bitbake-worker is meant for internal execution by bitbake itself, please don't use it standalone.") + sys.exit(1) + +profiling = False +if sys.argv[1] == "decafbadbad": + profiling = True + try: + import cProfile as profile + except: + import profile + +logger = logging.getLogger("BitBake") + +try: + import cPickle as pickle +except ImportError: + import pickle + bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.") + + +worker_pipe = sys.stdout.fileno() +bb.utils.nonblockingfd(worker_pipe) + +handler = bb.event.LogHandler() +logger.addHandler(handler) + +if 0: + # Code to write out a log file of all events passing through the worker + logfilename = "/tmp/workerlogfile" + format_str = "%(levelname)s: %(message)s" + conlogformat = bb.msg.BBLogFormatter(format_str) + consolelog = logging.FileHandler(logfilename) + bb.msg.addDefaultlogFilter(consolelog) + consolelog.setFormatter(conlogformat) + logger.addHandler(consolelog) + +worker_queue = "" + +def worker_fire(event, d): + data = "" + pickle.dumps(event) + "" + worker_fire_prepickled(data) + +def worker_fire_prepickled(event): + global worker_queue + + worker_queue = worker_queue + event + worker_flush() + +def worker_flush(): + global worker_queue, worker_pipe + + if not worker_queue: + return + + try: + written = os.write(worker_pipe, worker_queue) + worker_queue = worker_queue[written:] + except (IOError, OSError) as e: + if e.errno != errno.EAGAIN: + raise + +def worker_child_fire(event, d): + global worker_pipe + + data = "" + pickle.dumps(event) + "" + worker_pipe.write(data) + +bb.event.worker_fire = worker_fire + +lf = None +#lf = open("/tmp/workercommandlog", "w+") +def workerlog_write(msg): + if lf: + lf.write(msg) + lf.flush() + +def sigterm_handler(signum, frame): + signal.signal(signal.SIGTERM, signal.SIG_DFL) + os.killpg(0, signal.SIGTERM) + sys.exit() + +def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdata, quieterrors=False): + # We need to setup the environment BEFORE the fork, since + # a fork() or exec*() activates PSEUDO... + + envbackup = {} + fakeenv = {} + umask = None + + taskdep = workerdata["taskdeps"][fn] + if 'umask' in taskdep and taskname in taskdep['umask']: + # umask might come in as a number or text string.. 
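        # (an octal string such as "022" is converted with int(value, 8); if the
        #  metadata already supplied an integer, int() with an explicit base
        #  raises TypeError and the value is kept as-is)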
+ try: + umask = int(taskdep['umask'][taskname],8) + except TypeError: + umask = taskdep['umask'][taskname] + + # We can't use the fakeroot environment in a dry run as it possibly hasn't been built + if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not cfg.dry_run: + envvars = (workerdata["fakerootenv"][fn] or "").split() + for key, value in (var.split('=') for var in envvars): + envbackup[key] = os.environ.get(key) + os.environ[key] = value + fakeenv[key] = value + + fakedirs = (workerdata["fakerootdirs"][fn] or "").split() + for p in fakedirs: + bb.utils.mkdirhier(p) + logger.debug(2, 'Running %s:%s under fakeroot, fakedirs: %s' % + (fn, taskname, ', '.join(fakedirs))) + else: + envvars = (workerdata["fakerootnoenv"][fn] or "").split() + for key, value in (var.split('=') for var in envvars): + envbackup[key] = os.environ.get(key) + os.environ[key] = value + fakeenv[key] = value + + sys.stdout.flush() + sys.stderr.flush() + + try: + pipein, pipeout = os.pipe() + pipein = os.fdopen(pipein, 'rb', 4096) + pipeout = os.fdopen(pipeout, 'wb', 0) + pid = os.fork() + except OSError as e: + bb.msg.fatal("RunQueue", "fork failed: %d (%s)" % (e.errno, e.strerror)) + + if pid == 0: + def child(): + global worker_pipe + pipein.close() + + signal.signal(signal.SIGTERM, sigterm_handler) + # Let SIGHUP exit as SIGTERM + signal.signal(signal.SIGHUP, sigterm_handler) + + # Save out the PID so that the event can include it the + # events + bb.event.worker_pid = os.getpid() + bb.event.worker_fire = worker_child_fire + worker_pipe = pipeout + + # Make the child the process group leader and ensure no + # child process will be controlled by the current terminal + # This ensures signals sent to the controlling terminal like Ctrl+C + # don't stop the child processes. + os.setsid() + # No stdin + newsi = os.open(os.devnull, os.O_RDWR) + os.dup2(newsi, sys.stdin.fileno()) + + if umask: + os.umask(umask) + + data.setVar("BB_WORKERCONTEXT", "1") + data.setVar("BB_TASKDEPDATA", taskdepdata) + data.setVar("BUILDNAME", workerdata["buildname"]) + data.setVar("DATE", workerdata["date"]) + data.setVar("TIME", workerdata["time"]) + bb.parse.siggen.set_taskdata(workerdata["sigdata"]) + ret = 0 + try: + the_data = bb.cache.Cache.loadDataFull(fn, appends, data) + the_data.setVar('BB_TASKHASH', workerdata["runq_hash"][task]) + + # exported_vars() returns a generator which *cannot* be passed to os.environ.update() + # successfully. 
We also need to unset anything from the environment which shouldn't be there + exports = bb.data.exported_vars(the_data) + bb.utils.empty_environment() + for e, v in exports: + os.environ[e] = v + for e in fakeenv: + os.environ[e] = fakeenv[e] + the_data.setVar(e, fakeenv[e]) + the_data.setVarFlag(e, 'export', "1") + + if quieterrors: + the_data.setVarFlag(taskname, "quieterrors", "1") + + except Exception as exc: + if not quieterrors: + logger.critical(str(exc)) + os._exit(1) + try: + if cfg.dry_run: + return 0 + return bb.build.exec_task(fn, taskname, the_data, cfg.profile) + except: + os._exit(1) + if not profiling: + os._exit(child()) + else: + profname = "profile-%s.log" % (fn.replace("/", "-") + "-" + taskname) + prof = profile.Profile() + try: + ret = profile.Profile.runcall(prof, child) + finally: + prof.dump_stats(profname) + bb.utils.process_profilelog(profname) + os._exit(ret) + else: + for key, value in envbackup.iteritems(): + if value is None: + del os.environ[key] + else: + os.environ[key] = value + + return pid, pipein, pipeout + +class runQueueWorkerPipe(): + """ + Abstraction for a pipe between a worker thread and the worker server + """ + def __init__(self, pipein, pipeout): + self.input = pipein + if pipeout: + pipeout.close() + bb.utils.nonblockingfd(self.input) + self.queue = "" + + def read(self): + start = len(self.queue) + try: + self.queue = self.queue + self.input.read(102400) + except (OSError, IOError) as e: + if e.errno != errno.EAGAIN: + raise + + end = len(self.queue) + index = self.queue.find("") + while index != -1: + worker_fire_prepickled(self.queue[:index+8]) + self.queue = self.queue[index+8:] + index = self.queue.find("") + return (end > start) + + def close(self): + while self.read(): + continue + if len(self.queue) > 0: + print("Warning, worker child left partial message: %s" % self.queue) + self.input.close() + +normalexit = False + +class BitbakeWorker(object): + def __init__(self, din): + self.input = din + bb.utils.nonblockingfd(self.input) + self.queue = "" + self.cookercfg = None + self.databuilder = None + self.data = None + self.build_pids = {} + self.build_pipes = {} + + signal.signal(signal.SIGTERM, self.sigterm_exception) + # Let SIGHUP exit as SIGTERM + signal.signal(signal.SIGHUP, self.sigterm_exception) + + def sigterm_exception(self, signum, stackframe): + if signum == signal.SIGTERM: + bb.warn("Worker recieved SIGTERM, shutting down...") + elif signum == signal.SIGHUP: + bb.warn("Worker recieved SIGHUP, shutting down...") + self.handle_finishnow(None) + signal.signal(signal.SIGTERM, signal.SIG_DFL) + os.kill(os.getpid(), signal.SIGTERM) + + def serve(self): + while True: + (ready, _, _) = select.select([self.input] + [i.input for i in self.build_pipes.values()], [] , [], 1) + if self.input in ready or len(self.queue): + start = len(self.queue) + try: + self.queue = self.queue + self.input.read() + except (OSError, IOError): + pass + end = len(self.queue) + self.handle_item("cookerconfig", self.handle_cookercfg) + self.handle_item("workerdata", self.handle_workerdata) + self.handle_item("runtask", self.handle_runtask) + self.handle_item("finishnow", self.handle_finishnow) + self.handle_item("ping", self.handle_ping) + self.handle_item("quit", self.handle_quit) + + for pipe in self.build_pipes: + self.build_pipes[pipe].read() + if len(self.build_pids): + self.process_waitpid() + worker_flush() + + + def handle_item(self, item, func): + if self.queue.startswith("<" + item + ">"): + index = self.queue.find("") + while index != -1: + 
func(self.queue[(len(item) + 2):index]) + self.queue = self.queue[(index + len(item) + 3):] + index = self.queue.find("") + + def handle_cookercfg(self, data): + self.cookercfg = pickle.loads(data) + self.databuilder = bb.cookerdata.CookerDataBuilder(self.cookercfg, worker=True) + self.databuilder.parseBaseConfiguration() + self.data = self.databuilder.data + + def handle_workerdata(self, data): + self.workerdata = pickle.loads(data) + bb.msg.loggerDefaultDebugLevel = self.workerdata["logdefaultdebug"] + bb.msg.loggerDefaultVerbose = self.workerdata["logdefaultverbose"] + bb.msg.loggerVerboseLogs = self.workerdata["logdefaultverboselogs"] + bb.msg.loggerDefaultDomains = self.workerdata["logdefaultdomain"] + self.data.setVar("PRSERV_HOST", self.workerdata["prhost"]) + + def handle_ping(self, _): + workerlog_write("Handling ping\n") + + logger.warn("Pong from bitbake-worker!") + + def handle_quit(self, data): + workerlog_write("Handling quit\n") + + global normalexit + normalexit = True + sys.exit(0) + + def handle_runtask(self, data): + fn, task, taskname, quieterrors, appends, taskdepdata = pickle.loads(data) + workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname)) + + pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.workerdata, fn, task, taskname, appends, taskdepdata, quieterrors) + + self.build_pids[pid] = task + self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout) + + def process_waitpid(self): + """ + Return none is there are no processes awaiting result collection, otherwise + collect the process exit codes and close the information pipe. + """ + try: + pid, status = os.waitpid(-1, os.WNOHANG) + if pid == 0 or os.WIFSTOPPED(status): + return None + except OSError: + return None + + workerlog_write("Exit code of %s for pid %s\n" % (status, pid)) + + if os.WIFEXITED(status): + status = os.WEXITSTATUS(status) + elif os.WIFSIGNALED(status): + # Per shell conventions for $?, when a process exits due to + # a signal, we return an exit code of 128 + SIGNUM + status = 128 + os.WTERMSIG(status) + + task = self.build_pids[pid] + del self.build_pids[pid] + + self.build_pipes[pid].close() + del self.build_pipes[pid] + + worker_fire_prepickled("" + pickle.dumps((task, status)) + "") + + def handle_finishnow(self, _): + if self.build_pids: + logger.info("Sending SIGTERM to remaining %s tasks", len(self.build_pids)) + for k, v in self.build_pids.iteritems(): + try: + os.kill(-k, signal.SIGTERM) + os.waitpid(-1, 0) + except: + pass + for pipe in self.build_pipes: + self.build_pipes[pipe].read() + +try: + worker = BitbakeWorker(sys.stdin) + if not profiling: + worker.serve() + else: + profname = "profile-worker.log" + prof = profile.Profile() + try: + profile.Profile.runcall(prof, worker.serve) + finally: + prof.dump_stats(profname) + bb.utils.process_profilelog(profname) +except BaseException as e: + if not normalexit: + import traceback + sys.stderr.write(traceback.format_exc()) + sys.stderr.write(str(e)) +while len(worker_queue): + worker_flush() +workerlog_write("exitting") +sys.exit(0) + diff --git a/bitbake/bin/bitdoc b/bitbake/bin/bitdoc new file mode 100755 index 0000000..576d88b --- /dev/null +++ b/bitbake/bin/bitdoc @@ -0,0 +1,531 @@ +#!/usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# Copyright (C) 2005 Holger Hans Peter Freyther +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# 
published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import optparse, os, sys + +# bitbake +sys.path.append(os.path.join(os.path.dirname(os.path.dirname(__file__), 'lib')) +import bb +import bb.parse +from string import split, join + +__version__ = "0.0.2" + +class HTMLFormatter: + """ + Simple class to help to generate some sort of HTML files. It is + quite inferior solution compared to docbook, gtkdoc, doxygen but it + should work for now. + We've a global introduction site (index.html) and then one site for + the list of keys (alphabetical sorted) and one for the list of groups, + one site for each key with links to the relations and groups. + + index.html + all_keys.html + all_groups.html + groupNAME.html + keyNAME.html + """ + + def replace(self, text, *pairs): + """ + From pydoc... almost identical at least + """ + while pairs: + (a, b) = pairs[0] + text = join(split(text, a), b) + pairs = pairs[1:] + return text + def escape(self, text): + """ + Escape string to be conform HTML + """ + return self.replace(text, + ('&', '&'), + ('<', '<' ), + ('>', '>' ) ) + def createNavigator(self): + """ + Create the navgiator + """ + return """ + + + + + +""" + + def relatedKeys(self, item): + """ + Create HTML to link to foreign keys + """ + + if len(item.related()) == 0: + return "" + + txt = "

See also:
" + txts = [] + for it in item.related(): + txts.append("""%(it)s""" % vars() ) + + return txt + ",".join(txts) + + def groups(self, item): + """ + Create HTML to link to related groups + """ + + if len(item.groups()) == 0: + return "" + + + txt = "

See also:
" + txts = [] + for group in item.groups(): + txts.append( """%s """ % (group, group) ) + + return txt + ",".join(txts) + + + def createKeySite(self, item): + """ + Create a site for a key. It contains the header/navigator, a heading, + the description, links to related keys and to the groups. + """ + + return """ +Key %s + + +%s +

%s

+ +
+

Synopsis

+

+%s +

+
+ +
+

Related Keys

+

+%s +

+
+ +
+

Groups

+

+%s +

+
+ + + +""" % (item.name(), self.createNavigator(), item.name(), + self.escape(item.description()), self.relatedKeys(item), self.groups(item)) + + def createGroupsSite(self, doc): + """ + Create the Group Overview site + """ + + groups = "" + sorted_groups = sorted(doc.groups()) + for group in sorted_groups: + groups += """%s
""" % (group, group) + + return """ +Group overview + + +%s +

Available Groups

+%s + +""" % (self.createNavigator(), groups) + + def createIndex(self): + """ + Create the index file + """ + + return """ +Bitbake Documentation + + +%s +

Documentation Entrance

+All available groups
+All available keys
+ +""" % self.createNavigator() + + def createKeysSite(self, doc): + """ + Create Overview of all avilable keys + """ + keys = "" + sorted_keys = sorted(doc.doc_keys()) + for key in sorted_keys: + keys += """%s
""" % (key, key) + + return """ +Key overview + + +%s +

Available Keys

+%s + +""" % (self.createNavigator(), keys) + + def createGroupSite(self, gr, items, _description = None): + """ + Create a site for a group: + Group the name of the group, items contain the name of the keys + inside this group + """ + groups = "" + description = "" + + # create a section with the group descriptions + if _description: + description += "

" % gr + description += _description + + items.sort(lambda x, y:cmp(x.name(), y.name())) + for group in items: + groups += """%s
""" % (group.name(), group.name()) + + return """ +Group %s + + +%s +%s +
+

Keys in Group %s

+
+%s
+
+
+ +""" % (gr, self.createNavigator(), description, gr, groups) + + + + def createCSS(self): + """ + Create the CSS file + """ + return """.synopsis, .classsynopsis +{ + background: #eeeeee; + border: solid 1px #aaaaaa; + padding: 0.5em; +} +.programlisting +{ + background: #eeeeff; + border: solid 1px #aaaaff; + padding: 0.5em; +} +.variablelist +{ + padding: 4px; + margin-left: 3em; +} +.variablelist td:first-child +{ + vertical-align: top; +} +table.navigation +{ + background: #ffeeee; + border: solid 1px #ffaaaa; + margin-top: 0.5em; + margin-bottom: 0.5em; +} +.navigation a +{ + color: #770000; +} +.navigation a:visited +{ + color: #550000; +} +.navigation .title +{ + font-size: 200%; +} +div.refnamediv +{ + margin-top: 2em; +} +div.gallery-float +{ + float: left; + padding: 10px; +} +div.gallery-float img +{ + border-style: none; +} +div.gallery-spacer +{ + clear: both; +} +a +{ + text-decoration: none; +} +a:hover +{ + text-decoration: underline; + color: #FF0000; +} +""" + + + +class DocumentationItem: + """ + A class to hold information about a configuration + item. It contains the key name, description, a list of related names, + and the group this item is contained in. + """ + + def __init__(self): + self._groups = [] + self._related = [] + self._name = "" + self._desc = "" + + def groups(self): + return self._groups + + def name(self): + return self._name + + def description(self): + return self._desc + + def related(self): + return self._related + + def setName(self, name): + self._name = name + + def setDescription(self, desc): + self._desc = desc + + def addGroup(self, group): + self._groups.append(group) + + def addRelation(self, relation): + self._related.append(relation) + + def sort(self): + self._related.sort() + self._groups.sort() + + +class Documentation: + """ + Holds the documentation... with mappings from key to items... + """ + + def __init__(self): + self.__keys = {} + self.__groups = {} + + def insert_doc_item(self, item): + """ + Insert the Doc Item into the internal list + of representation + """ + item.sort() + self.__keys[item.name()] = item + + for group in item.groups(): + if not group in self.__groups: + self.__groups[group] = [] + self.__groups[group].append(item) + self.__groups[group].sort() + + + def doc_item(self, key): + """ + Return the DocumentationInstance describing the key + """ + try: + return self.__keys[key] + except KeyError: + return None + + def doc_keys(self): + """ + Return the documented KEYS (names) + """ + return self.__keys.keys() + + def groups(self): + """ + Return the names of available groups + """ + return self.__groups.keys() + + def group_content(self, group_name): + """ + Return a list of keys/names that are in a specefic + group or the empty list + """ + try: + return self.__groups[group_name] + except KeyError: + return [] + + +def parse_cmdline(args): + """ + Parse the CMD line and return the result as a n-tuple + """ + + parser = optparse.OptionParser( version = "Bitbake Documentation Tool Core version %s, %%prog version %s" % (bb.__version__, __version__)) + usage = """%prog [options] + +Create a set of html pages (documentation) for a bitbake.conf.... 
+""" + + # Add the needed options + parser.add_option( "-c", "--config", help = "Use the specified configuration file as source", + action = "store", dest = "config", default = os.path.join("conf", "documentation.conf") ) + + parser.add_option( "-o", "--output", help = "Output directory for html files", + action = "store", dest = "output", default = "html/" ) + + parser.add_option( "-D", "--debug", help = "Increase the debug level", + action = "count", dest = "debug", default = 0 ) + + parser.add_option( "-v", "--verbose", help = "output more chit-char to the terminal", + action = "store_true", dest = "verbose", default = False ) + + options, args = parser.parse_args( sys.argv ) + + bb.msg.init_msgconfig(options.verbose, options.debug) + + return options.config, options.output + +def main(): + """ + The main Method + """ + + (config_file, output_dir) = parse_cmdline( sys.argv ) + + # right to let us load the file now + try: + documentation = bb.parse.handle( config_file, bb.data.init() ) + except IOError: + bb.fatal( "Unable to open %s" % config_file ) + except bb.parse.ParseError: + bb.fatal( "Unable to parse %s" % config_file ) + + if isinstance(documentation, dict): + documentation = documentation[""] + + # Assuming we've the file loaded now, we will initialize the 'tree' + doc = Documentation() + + # defined states + state_begin = 0 + state_see = 1 + state_group = 2 + + for key in bb.data.keys(documentation): + data = documentation.getVarFlag(key, "doc") + if not data: + continue + + # The Documentation now starts + doc_ins = DocumentationItem() + doc_ins.setName(key) + + + tokens = data.split(' ') + state = state_begin + string= "" + for token in tokens: + token = token.strip(',') + + if not state == state_see and token == "@see": + state = state_see + continue + elif not state == state_group and token == "@group": + state = state_group + continue + + if state == state_begin: + string += " %s" % token + elif state == state_see: + doc_ins.addRelation(token) + elif state == state_group: + doc_ins.addGroup(token) + + # set the description + doc_ins.setDescription(string) + doc.insert_doc_item(doc_ins) + + # let us create the HTML now + bb.utils.mkdirhier(output_dir) + os.chdir(output_dir) + + # Let us create the sites now. We do it in the following order + # Start with the index.html. It will point to sites explaining all + # keys and groups + html_slave = HTMLFormatter() + + f = file('style.css', 'w') + print >> f, html_slave.createCSS() + + f = file('index.html', 'w') + print >> f, html_slave.createIndex() + + f = file('all_groups.html', 'w') + print >> f, html_slave.createGroupsSite(doc) + + f = file('all_keys.html', 'w') + print >> f, html_slave.createKeysSite(doc) + + # now for each group create the site + for group in doc.groups(): + f = file('group%s.html' % group, 'w') + print >> f, html_slave.createGroupSite(group, doc.group_content(group)) + + # now for the keys + for key in doc.doc_keys(): + f = file('key%s.html' % doc.doc_item(key).name(), 'w') + print >> f, html_slave.createKeySite(doc.doc_item(key)) + + +if __name__ == "__main__": + main() diff --git a/bitbake/bin/image-writer b/bitbake/bin/image-writer new file mode 100755 index 0000000..86c38b5 --- /dev/null +++ b/bitbake/bin/image-writer @@ -0,0 +1,122 @@ +#!/usr/bin/env python + +# Copyright (c) 2012 Wind River Systems, Inc. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +import os +import sys +sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname( \ + os.path.abspath(__file__))), 'lib')) +try: + import bb +except RuntimeError as exc: + sys.exit(str(exc)) + +import gtk +import optparse +import pygtk + +from bb.ui.crumbs.hobwidget import HobAltButton, HobButton +from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog +from bb.ui.crumbs.hig.deployimagedialog import DeployImageDialog +from bb.ui.crumbs.hig.imageselectiondialog import ImageSelectionDialog + +# I put all the fs bitbake supported here. Need more test. +DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "btrfs", "squashfs", "ubi", "vmdk"] +Title = "USB Image Writer" + +class DeployWindow(gtk.Window): + def __init__(self, image_path=''): + super(DeployWindow, self).__init__() + + if len(image_path) > 0: + valid = True + if not os.path.exists(image_path): + valid = False + lbl = "Invalid image file path: %s.\nPress Select Image to select an image." % image_path + else: + image_path = os.path.abspath(image_path) + extend_name = os.path.splitext(image_path)[1][1:] + if extend_name not in DEPLOYABLE_IMAGE_TYPES: + valid = False + lbl = "Undeployable imge type: %s\nPress Select Image to select an image." % extend_name + + if not valid: + image_path = '' + crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO) + button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK) + HobButton.style_button(button) + crumbs_dialog.run() + crumbs_dialog.destroy() + + self.deploy_dialog = DeployImageDialog(Title, image_path, self, + gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT + | gtk.DIALOG_NO_SEPARATOR, None, standalone=True) + close_button = self.deploy_dialog.add_button("Close", gtk.RESPONSE_NO) + HobAltButton.style_button(close_button) + close_button.connect('clicked', gtk.main_quit) + + write_button = self.deploy_dialog.add_button("Write USB image", gtk.RESPONSE_YES) + HobAltButton.style_button(write_button) + + self.deploy_dialog.connect('select_image_clicked', self.select_image_clicked_cb) + self.deploy_dialog.connect('destroy', gtk.main_quit) + response = self.deploy_dialog.show() + + def select_image_clicked_cb(self, dialog): + cwd = os.getcwd() + dialog = ImageSelectionDialog(cwd, DEPLOYABLE_IMAGE_TYPES, Title, self, gtk.FILE_CHOOSER_ACTION_SAVE ) + button = dialog.add_button("Cancel", gtk.RESPONSE_NO) + HobAltButton.style_button(button) + button = dialog.add_button("Open", gtk.RESPONSE_YES) + HobAltButton.style_button(button) + response = dialog.run() + + if response == gtk.RESPONSE_YES: + if not dialog.image_names: + lbl = "No selections made\nClicked the radio button to select a image." 
+ crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO) + button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK) + HobButton.style_button(button) + crumbs_dialog.run() + crumbs_dialog.destroy() + dialog.destroy() + return + + # get the full path of image + image_path = os.path.join(dialog.image_folder, dialog.image_names[0]) + self.deploy_dialog.set_image_text_buffer(image_path) + self.deploy_dialog.set_image_path(image_path) + + dialog.destroy() + +def main(): + parser = optparse.OptionParser( + usage = """%prog [-h] [image_file] + +%prog writes bootable images to USB devices. You can +provide the image file on the command line or select it using the GUI.""") + + options, args = parser.parse_args(sys.argv) + image_file = args[1] if len(args) > 1 else '' + dw = DeployWindow(image_file) + +if __name__ == '__main__': + try: + main() + gtk.main() + except Exception: + import traceback + traceback.print_exc(3) diff --git a/bitbake/bin/toaster b/bitbake/bin/toaster new file mode 100755 index 0000000..75c7a07 --- /dev/null +++ b/bitbake/bin/toaster @@ -0,0 +1,267 @@ +#!/bin/bash +# (c) 2013 Intel Corp. + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + + +# This script can be run in two modes. + +# When used with "source", from a build directory, +# it enables toaster event logging and starts the bitbake resident server. +# use as: source toaster [start|stop] [noweb] [noui] + +# When it is called as a stand-alone script, it starts just the +# web server, and the building shall be done through the web interface. +# As script, it will not return to the command prompt. Stop with Ctrl-C. + +# Helper function to kill a background toaster development server + +function webserverKillAll() +{ + local pidfile + for pidfile in ${BUILDDIR}/.toastermain.pid; do + if [ -f ${pidfile} ]; then + while kill -0 $(< ${pidfile}) 2>/dev/null; do + kill -SIGTERM -$(< ${pidfile}) 2>/dev/null + sleep 1; + # Kill processes if they are still running - may happen in interactive shells + ps fux | grep "python.*manage.py" | awk '{print $2}' | xargs kill + done; + rm ${pidfile} + fi + done +} + +function webserverStartAll() +{ + # do not start if toastermain points to a valid process + if ! cat "${BUILDDIR}/.toastermain.pid" 2>/dev/null | xargs -I{} kill -0 {} ; then + retval=1 + rm "${BUILDDIR}/.toastermain.pid" + fi + + retval=0 + python $BBBASEDIR/lib/toaster/manage.py syncdb || retval=1 + python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=2 + if [ $retval -eq 1 ]; then + echo "Failed db sync, stopping system start" 1>&2 + elif [ $retval -eq 2 ]; then + echo -e "\nError on migration, trying to recover... 
\n" + python $BBBASEDIR/lib/toaster/manage.py migrate orm 0001_initial --fake + retval=0 + python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=1 + fi + if [ "x$TOASTER_MANAGED" == "x1" ]; then + python $BBBASEDIR/lib/toaster/manage.py migrate bldcontrol || retval=1 + python $BBBASEDIR/lib/toaster/manage.py checksettings --traceback || retval=1 + fi + if [ $retval -eq 0 ]; then + echo "Starting webserver" + python $BBBASEDIR/lib/toaster/manage.py runserver 0.0.0.0:8000 ${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid + sleep 1 + if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then + retval=1 + rm "${BUILDDIR}/.toastermain.pid" + fi + fi + return $retval +} + +# Helper functions to add a special configuration file + +function addtoConfiguration() +{ + echo "#Created by toaster start script" > ${BUILDDIR}/conf/$2 + echo $1 >> ${BUILDDIR}/conf/$2 +} + +INSTOPSYSTEM=0 + +# define the stop command +function stop_system() +{ + # prevent reentry + if [ $INSTOPSYSTEM == 1 ]; then return; fi + INSTOPSYSTEM=1 + if [ -f ${BUILDDIR}/.toasterui.pid ]; then + kill $(< ${BUILDDIR}/.toasterui.pid ) 2>/dev/null + rm ${BUILDDIR}/.toasterui.pid + fi + BBSERVER=0.0.0.0:8200 bitbake -m + unset BBSERVER + webserverKillAll + # force stop any misbehaving bitbake server + lsof bitbake.lock | awk '{print $2}' | grep "[0-9]\+" | xargs -n1 -r kill + trap - SIGHUP + #trap - SIGCHLD + INSTOPSYSTEM=0 +} + +function check_pidbyfile() { + [ -e $1 ] && kill -0 $(< $1) 2>/dev/null +} + + +function notify_chldexit() { + if [ $NOTOASTERUI == 0 ]; then + check_pidbyfile ${BUILDDIR}/.toasterui.pid && return + stop_system + fi +} + + +BBBASEDIR=`dirname ${BASH_SOURCE}`/.. +RUNNING=0 + +if [ -z "$ZSH_NAME" ] && [ `basename \"$0\"` = `basename \"$BASH_SOURCE\"` ]; then + # We are called as standalone. We refuse to run in a build environment - we need the interactive mode for that. + # Start just the web server, point the web browser to the interface, and start any Django services. + + if [ -n "$BUILDDIR" ]; then + echo -e "Error: build/ directory detected. Toaster will not start in managed mode if a build environment is detected.\nUse a clean terminal to start Toaster." 1>&2; + exit 1; + fi + + # Define a fake builddir where only the pid files are actually created. No real builds will take place here. + BUILDDIR=/tmp + RUNNING=1 + function trap_ctrlc() { + echo "** Stopping system" + webserverKillAll + RUNNING=0 + } + TOASTER_MANAGED=1 + export TOASTER_MANAGED=1 + if ! webserverStartAll; then + echo "Failed to start the web server, stopping" 1>&2; + exit 1; + fi + xdg-open http://0.0.0.0:8000/ >/dev/null 2>&1 & + trap trap_ctrlc SIGINT + echo "Running. Stop with Ctrl-C" + while [ $RUNNING -gt 0 ]; do + python $BBBASEDIR/lib/toaster/manage.py runbuilds + sleep 1 + done + echo "**** Exit" + exit 0 +fi + +# We make sure we're running in the current shell and in a good environment +if [ -z "$BUILDDIR" ] || [ -z `which bitbake` ]; then + echo "Error: Build environment is not setup or bitbake is not in path." 1>&2; + return 2 +fi + + + +# Verify prerequisites + +if ! echo "import django; print (1,) == django.VERSION[0:1] and django.VERSION[1:2][0] in (5,6)" | python 2>/dev/null | grep True >/dev/null; then + echo -e "This program needs Django 1.5 or 1.6. Please install with\n\npip install django==1.6" + return 2 +fi + +if ! 
echo "import south; print [0,8,4] == map(int,south.__version__.split(\".\"))" | python 2>/dev/null | grep True >/dev/null; then + echo -e "This program needs South 0.8.4. Please install with\n\npip install south==0.8.4" + return 2 +fi + + + + + +# Determine the action. If specified by arguments, fine, if not, toggle it +if [ "x$1" == "xstart" ] || [ "x$1" == "xstop" ]; then + CMD="$1" +else + if [ -z "$BBSERVER" ]; then + CMD="start" + else + CMD="stop" + fi; +fi + +NOTOASTERUI=0 +WEBSERVER=1 +for param in $*; do + case $param in + noui ) + NOTOASTERUI=1 + ;; + noweb ) + WEBSERVER=0 + ;; + esac +done + +echo "The system will $CMD." + +# Make sure it's safe to run by checking bitbake lock + +lock=1 +if [ -e $BUILDDIR/bitbake.lock ]; then + (flock -n 200 ) 200<$BUILDDIR/bitbake.lock || lock=0 +fi + +if [ ${CMD} == "start" ] && ( [ $lock -eq 0 ] || [ -e $BUILDDIR/.toastermain.pid ] ); then + echo "Error: bitbake lock state error. File locks show that the system is on." 2>&1 + echo "If you see problems, stop and then start the system again." 2>&1 + return 3 +fi + + +# Execute the commands + +case $CMD in + start ) + start_success=1 + addtoConfiguration "INHERIT+=\"toaster buildhistory\"" toaster.conf + if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then + echo "Failed ${CMD}." + return 4 + fi + unset BBSERVER + bitbake --postread conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:8200 + if [ $? -ne 0 ]; then + start_success=0 + echo "Bitbake server start failed" + else + export BBSERVER=0.0.0.0:8200 + if [ $NOTOASTERUI == 0 ]; then # we start the TOASTERUI only if not inhibited + bitbake --observe-only -u toasterui >${BUILDDIR}/toaster_ui.log 2>&1 & echo $! >${BUILDDIR}/.toasterui.pid + fi + fi + if [ $start_success -eq 1 ]; then + # set fail safe stop system on terminal exit + trap stop_system SIGHUP + echo "Successful ${CMD}." + else + # failed start, do stop + stop_system + echo "Failed ${CMD}." + fi + # stop system on terminal exit + set -o monitor + trap stop_system SIGHUP + #trap notify_chldexit SIGCHLD + ;; + stop ) + stop_system + echo "Successful ${CMD}." + ;; +esac + + diff --git a/bitbake/bin/toaster-eventreplay b/bitbake/bin/toaster-eventreplay new file mode 100755 index 0000000..624829a --- /dev/null +++ b/bitbake/bin/toaster-eventreplay @@ -0,0 +1,179 @@ +#!/usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# Copyright (C) 2014 Alex Damian +# +# This file re-uses code spread throughout other Bitbake source files. +# As such, all other copyrights belong to their own right holders. +# +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + + +# This command takes a filename as a single parameter. 
The filename is read +# as a build eventlog, and the ToasterUI is used to process events in the file +# and log data in the database + +import os +import sys, logging + +# mangle syspath to allow easy import of modules +sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), + 'lib')) + + +import bb.cooker +from bb.ui import toasterui +import sys +import logging + +logger = logging.getLogger(__name__) +console = logging.StreamHandler(sys.stdout) +format_str = "%(levelname)s: %(message)s" +logging.basicConfig(format=format_str) + + +import json, pickle + + +class FileReadEventsServerConnection(): + """ Emulates a connection to a bitbake server that feeds + events coming actually read from a saved log file. + """ + + class MockConnection(): + """ fill-in for the proxy to the server. we just return generic data + """ + def __init__(self, sc): + self._sc = sc + + def runCommand(self, commandArray): + """ emulates running a command on the server; only read-only commands are accepted """ + command_name = commandArray[0] + + if command_name == "getVariable": + if commandArray[1] in self._sc._variables: + return (self._sc._variables[commandArray[1]]['v'], None) + return (None, "Missing variable") + + elif command_name == "getAllKeysWithFlags": + dump = {} + flaglist = commandArray[1] + for k in self._sc._variables.keys(): + try: + if not k.startswith("__"): + v = self._sc._variables[k]['v'] + dump[k] = { + 'v' : v , + 'history' : self._sc._variables[k]['history'], + } + for d in flaglist: + dump[k][d] = self._sc._variables[k][d] + except Exception as e: + print(e) + return (dump, None) + else: + raise Exception("Command %s not implemented" % commandArray[0]) + + def terminateServer(self): + """ do not do anything """ + pass + + + + class EventReader(): + def __init__(self, sc): + self._sc = sc + self.firstraise = 0 + + def _create_event(self, line): + def _import_class(name): + assert len(name) > 0 + assert "." in name, name + + components = name.strip().split(".") + modulename = ".".join(components[:-1]) + moduleklass = components[-1] + + module = __import__(modulename, fromlist=[str(moduleklass)]) + return getattr(module, moduleklass) + + # we build a toaster event out of current event log line + try: + event_data = json.loads(line.strip()) + event_class = _import_class(event_data['class']) + event_object = pickle.loads(json.loads(event_data['vars'])) + except ValueError as e: + print("Failed loading ", line) + raise e + + if not isinstance(event_object, event_class): + raise Exception("Error loading objects %s class %s ", event_object, event_class) + + return event_object + + def waitEvent(self, timeout): + + nextline = self._sc._eventfile.readline() + if len(nextline) == 0: + # the build data ended, while toasterui still waits for events. 
+ # this happens when the server was abruptly stopped, so we simulate this + self.firstraise += 1 + if self.firstraise == 1: + raise KeyboardInterrupt() + else: + return None + else: + self._sc.lineno += 1 + return self._create_event(nextline) + + + def _readVariables(self, variableline): + self._variables = json.loads(variableline.strip())['allvariables'] + + + def __init__(self, file_name): + self.connection = FileReadEventsServerConnection.MockConnection(self) + self._eventfile = open(file_name, "r") + + # we expect to have the variable dump at the start of the file + self.lineno = 1 + self._readVariables(self._eventfile.readline()) + + self.events = FileReadEventsServerConnection.EventReader(self) + + + + + +class MockConfigParameters(): + """ stand-in for cookerdata.ConfigParameters; as we don't really config a cooker, this + serves just to supply needed interfaces for the toaster ui to work """ + def __init__(self): + self.observe_only = True # we can only read files + + +# run toaster ui on our mock bitbake class +if __name__ == "__main__": + if len(sys.argv) < 2: + logger.error("Usage: %s event.log " % sys.argv[0]) + sys.exit(1) + + file_name = sys.argv[-1] + mock_connection = FileReadEventsServerConnection(file_name) + configParams = MockConfigParameters() + + # run the main program + toasterui.main(mock_connection.connection, mock_connection.events, configParams) diff --git a/bitbake/contrib/README b/bitbake/contrib/README new file mode 100644 index 0000000..25e5156 --- /dev/null +++ b/bitbake/contrib/README @@ -0,0 +1 @@ +This directory is for additional contributed files which may be useful. diff --git a/bitbake/contrib/bbdev.sh b/bitbake/contrib/bbdev.sh new file mode 100644 index 0000000..33a7853 --- /dev/null +++ b/bitbake/contrib/bbdev.sh @@ -0,0 +1,31 @@ +# This is a shell function to be sourced into your shell or placed in your .profile, +# which makes setting things up for BitBake a bit easier. +# +# The author disclaims copyright to the contents of this file and places it in the +# public domain. + +bbdev () { + local BBDIR PKGDIR BUILDDIR + if test x"$1" = "x--help"; then echo >&2 "syntax: bbdev [bbdir [pkgdir [builddir]]]"; return 1; fi + if test x"$1" = x; then BBDIR=`pwd`; else BBDIR=$1; fi + if test x"$2" = x; then PKGDIR=`pwd`; else PKGDIR=$2; fi + if test x"$3" = x; then BUILDDIR=`pwd`; else BUILDDIR=$3; fi + + BBDIR=`readlink -f $BBDIR` + PKGDIR=`readlink -f $PKGDIR` + BUILDDIR=`readlink -f $BUILDDIR` + if ! (test -d $BBDIR && test -d $PKGDIR && test -d $BUILDDIR); then + echo >&2 "syntax: bbdev [bbdir [pkgdir [builddir]]]" + return 1 + fi + + PATH=$BBDIR/bin:$PATH + BBPATH=$BBDIR + if test x"$BBDIR" != x"$PKGDIR"; then + BBPATH=$PKGDIR:$BBPATH + fi + if test x"$PKGDIR" != x"$BUILDDIR"; then + BBPATH=$BUILDDIR:$BBPATH + fi + export BBPATH +} diff --git a/bitbake/contrib/dump_cache.py b/bitbake/contrib/dump_cache.py new file mode 100755 index 0000000..e1f2309 --- /dev/null +++ b/bitbake/contrib/dump_cache.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# Copyright (C) 2012 Wind River Systems, Inc. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +# +# This is used for dumping the bb_cache.dat, the output format is: +# recipe_path PN PV PACKAGES +# +import os +import sys +import warnings + +# For importing bb.cache +sys.path.insert(0, os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../lib')) +from bb.cache import CoreRecipeInfo + +import cPickle as pickle + +def main(argv=None): + """ + Get the mapping for the target recipe. + """ + if len(argv) != 1: + print >>sys.stderr, "Error, need one argument!" + return 2 + + cachefile = argv[0] + + with open(cachefile, "rb") as cachefile: + pickled = pickle.Unpickler(cachefile) + while cachefile: + try: + key = pickled.load() + val = pickled.load() + except Exception: + break + if isinstance(val, CoreRecipeInfo) and (not val.skipped): + pn = val.pn + # Filter out the native recipes. + if key.startswith('virtual:native:') or pn.endswith("-native"): + continue + + # 1.0 is the default version for a no PV recipe. + if val.__dict__.has_key("pv"): + pv = val.pv + else: + pv = "1.0" + + print("%s %s %s %s" % (key, pn, pv, ' '.join(val.packages))) + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) + diff --git a/bitbake/contrib/vim/ftdetect/bitbake.vim b/bitbake/contrib/vim/ftdetect/bitbake.vim new file mode 100644 index 0000000..200f8ae --- /dev/null +++ b/bitbake/contrib/vim/ftdetect/bitbake.vim @@ -0,0 +1,24 @@ +" Vim filetype detection file +" Language: BitBake +" Author: Ricardo Salveti +" Copyright: Copyright (C) 2008 Ricardo Salveti +" Licence: You may redistribute this under the same terms as Vim itself +" +" This sets up the syntax highlighting for BitBake files, like .bb, .bbclass and .inc + +if &compatible || version < 600 + finish +endif + +" .bb, .bbappend and .bbclass +au BufNewFile,BufRead *.{bb,bbappend,bbclass} set filetype=bitbake + +" .inc +au BufNewFile,BufRead *.inc set filetype=bitbake + +" .conf +au BufNewFile,BufRead *.conf + \ if (match(expand("%:p:h"), "conf") > 0) | + \ set filetype=bitbake | + \ endif + diff --git a/bitbake/contrib/vim/ftplugin/bitbake.vim b/bitbake/contrib/vim/ftplugin/bitbake.vim new file mode 100644 index 0000000..db0d753 --- /dev/null +++ b/bitbake/contrib/vim/ftplugin/bitbake.vim @@ -0,0 +1,2 @@ +set sts=4 sw=4 et +set cms=#%s diff --git a/bitbake/contrib/vim/plugin/newbb.vim b/bitbake/contrib/vim/plugin/newbb.vim new file mode 100755 index 0000000..874e338 --- /dev/null +++ b/bitbake/contrib/vim/plugin/newbb.vim @@ -0,0 +1,84 @@ +" Vim plugin file +" Purpose: Create a template for new bb files +" Author: Ricardo Salveti +" Copyright: Copyright (C) 2008 Ricardo Salveti +" +" This file is licensed under the MIT license, see COPYING.MIT in +" this source distribution for the terms. +" +" Based on the gentoo-syntax package +" +" Will try to use git to find the user name and email + +if &compatible || v:version < 600 + finish +endif + +fun! GetUserName() + let l:user_name = system("git config --get user.name") + if v:shell_error + return "Unknown User" + else + return substitute(l:user_name, "\n", "", "") +endfun + +fun! GetUserEmail() + let l:user_email = system("git config --get user.email") + if v:shell_error + return "unknow@user.org" + else + return substitute(l:user_email, "\n", "", "") +endfun + +fun! 
BBHeader() + let l:current_year = strftime("%Y") + let l:user_name = GetUserName() + let l:user_email = GetUserEmail() + 0 put ='# Copyright (C) ' . l:current_year . + \ ' ' . l:user_name . ' <' . l:user_email . '>' + put ='# Released under the MIT license (see COPYING.MIT for the terms)' + $ +endfun + +fun! NewBBTemplate() + let l:paste = &paste + set nopaste + + " Get the header + call BBHeader() + + " New the bb template + put ='DESCRIPTION = \"\"' + put ='HOMEPAGE = \"\"' + put ='LICENSE = \"\"' + put ='SECTION = \"\"' + put ='DEPENDS = \"\"' + put ='' + put ='SRC_URI = \"\"' + + " Go to the first place to edit + 0 + /^DESCRIPTION =/ + exec "normal 2f\"" + + if paste == 1 + set paste + endif +endfun + +if !exists("g:bb_create_on_empty") + let g:bb_create_on_empty = 1 +endif + +" disable in case of vimdiff +if v:progname =~ "vimdiff" + let g:bb_create_on_empty = 0 +endif + +augroup NewBB + au BufNewFile *.bb + \ if g:bb_create_on_empty | + \ call NewBBTemplate() | + \ endif +augroup END + diff --git a/bitbake/contrib/vim/syntax/bitbake.vim b/bitbake/contrib/vim/syntax/bitbake.vim new file mode 100644 index 0000000..fb55f91 --- /dev/null +++ b/bitbake/contrib/vim/syntax/bitbake.vim @@ -0,0 +1,126 @@ +" Vim syntax file +" Language: BitBake bb/bbclasses/inc +" Author: Chris Larson +" Ricardo Salveti +" Copyright: Copyright (C) 2004 Chris Larson +" Copyright (C) 2008 Ricardo Salveti +" +" This file is licensed under the MIT license, see COPYING.MIT in +" this source distribution for the terms. +" +" Syntax highlighting for bb, bbclasses and inc files. +" +" It's an entirely new type, just has specific syntax in shell and python code + +if &compatible || v:version < 600 + finish +endif +if exists("b:current_syntax") + finish +endif + +syn include @python syntax/python.vim +if exists("b:current_syntax") + unlet b:current_syntax +endif + +" BitBake syntax + +" Matching case +syn case match + +" Indicates the error when nothing is matched +syn match bbUnmatched "." 
+ +" Comments +syn cluster bbCommentGroup contains=bbTodo,@Spell +syn keyword bbTodo COMBAK FIXME TODO XXX contained +syn match bbComment "#.*$" contains=@bbCommentGroup + +" String helpers +syn match bbQuote +['"]+ contained +syn match bbDelimiter "[(){}=]" contained +syn match bbArrayBrackets "[\[\]]" contained + +" BitBake strings +syn match bbContinue "\\$" +syn region bbString matchgroup=bbQuote start=+"+ skip=+\\$+ end=+"+ contained contains=bbTodo,bbContinue,bbVarDeref,bbVarPyValue,@Spell +syn region bbString matchgroup=bbQuote start=+'+ skip=+\\$+ end=+'+ contained contains=bbTodo,bbContinue,bbVarDeref,bbVarPyValue,@Spell + +" Vars definition +syn match bbExport "^export" nextgroup=bbIdentifier skipwhite +syn keyword bbExportFlag export contained nextgroup=bbIdentifier skipwhite +syn match bbIdentifier "[a-zA-Z0-9\-_\.\/\+]\+" display contained +syn match bbVarDeref "${[a-zA-Z0-9\-_\.\/\+]\+}" contained +syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)" contained nextgroup=bbVarValue +syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.\/\+]\+\(_[${}a-zA-Z0-9\-_\.\/\+]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq +syn match bbVarValue ".*$" contained contains=bbString,bbVarDeref,bbVarPyValue +syn region bbVarPyValue start=+${@+ skip=+\\$+ end=+}+ contained contains=@python + +" Vars metadata flags +syn match bbVarFlagDef "^\([a-zA-Z0-9\-_\.]\+\)\(\[[a-zA-Z0-9\-_\.]\+\]\)\@=" contains=bbIdentifier nextgroup=bbVarFlagFlag +syn region bbVarFlagFlag matchgroup=bbArrayBrackets start="\[" end="\]\s*\(=\|+=\|=+\|?=\)\@=" contained contains=bbIdentifier nextgroup=bbVarEq + +" Includes and requires +syn keyword bbInclude inherit include require contained +syn match bbIncludeRest ".*$" contained contains=bbString,bbVarDeref +syn match bbIncludeLine "^\(inherit\|include\|require\)\s\+" contains=bbInclude nextgroup=bbIncludeRest + +" Add taks and similar +syn keyword bbStatement addtask addhandler after before EXPORT_FUNCTIONS contained +syn match bbStatementRest ".*$" skipwhite contained contains=bbStatement +syn match bbStatementLine "^\(addtask\|addhandler\|after\|before\|EXPORT_FUNCTIONS\)\s\+" contains=bbStatement nextgroup=bbStatementRest + +" OE Important Functions +syn keyword bbOEFunctions do_fetch do_unpack do_patch do_configure do_compile do_stage do_install do_package contained + +" Generic Functions +syn match bbFunction "\h[0-9A-Za-z_-]*" display contained contains=bbOEFunctions + +" BitBake shell metadata +syn include @shell syntax/sh.vim +if exists("b:current_syntax") + unlet b:current_syntax +endif +syn keyword bbShFakeRootFlag fakeroot contained +syn match bbShFuncDef "^\(fakeroot\s*\)\?\([0-9A-Za-z_${}-]\+\)\(python\)\@ + Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + , 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. diff --git a/bitbake/doc/COPYING.MIT b/bitbake/doc/COPYING.MIT new file mode 100644 index 0000000..7e7d574 --- /dev/null +++ b/bitbake/doc/COPYING.MIT @@ -0,0 +1,17 @@ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/bitbake/doc/Makefile b/bitbake/doc/Makefile new file mode 100644 index 0000000..8e98c57 --- /dev/null +++ b/bitbake/doc/Makefile @@ -0,0 +1,91 @@ +# This is a single Makefile to handle all generated BitBake documents. +# The Makefile needs to live in the documentation directory and all figures used +# in any manuals must be .PNG files and live in the individual book's figures +# directory. +# +# The Makefile has these targets: +# +# pdf: generates a PDF version of a manual. +# html: generates an HTML version of a manual. +# tarball: creates a tarball for the doc files. +# validate: validates +# clean: removes files +# +# The Makefile generates an HTML and PDF version of every document. The +# variable DOC indicates the folder name for a given manual. +# +# To build a manual, you must invoke 'make' with the DOC argument. 
+# +# Examples: +# +# make DOC=bitbake-user-manual +# make pdf DOC=bitbake-user-manual +# +# The first example generates the HTML and PDF versions of the User Manual. +# The second example generates the HTML version only of the User Manual. +# + +ifeq ($(DOC),bitbake-user-manual) +XSLTOPTS = --stringparam html.stylesheet bitbake-user-manual-style.css \ + --stringparam chapter.autolabel 1 \ + --stringparam section.autolabel 1 \ + --stringparam section.label.includes.component.label 1 \ + --xinclude +ALLPREQ = html pdf tarball +TARFILES = bitbake-user-manual-style.css bitbake-user-manual.html bitbake-user-manual.pdf figures/bitbake-title.png +MANUALS = $(DOC)/$(DOC).html $(DOC)/$(DOC).pdf +FIGURES = figures +STYLESHEET = $(DOC)/*.css + +endif + +## +# These URI should be rewritten by your distribution's xml catalog to +# match your localy installed XSL stylesheets. +XSL_BASE_URI = http://docbook.sourceforge.net/release/xsl/current +XSL_XHTML_URI = $(XSL_BASE_URI)/xhtml/docbook.xsl + +all: $(ALLPREQ) + +pdf: +ifeq ($(DOC),bitbake-user-manual) + @echo " " + @echo "********** Building."$(DOC) + @echo " " + cd $(DOC); ../tools/docbook-to-pdf $(DOC).xml ../template; cd .. +endif + +html: +ifeq ($(DOC),bitbake-user-manual) +# See http://www.sagehill.net/docbookxsl/HtmlOutput.html + @echo " " + @echo "******** Building "$(DOC) + @echo " " + cd $(DOC); xsltproc $(XSLTOPTS) -o $(DOC).html $(DOC)-customization.xsl $(DOC).xml; cd .. +endif + +tarball: html + @echo " " + @echo "******** Creating Tarball of document files" + @echo " " + cd $(DOC); tar -cvzf $(DOC).tgz $(TARFILES); cd .. + +validate: + cd $(DOC); xmllint --postvalid --xinclude --noout $(DOC).xml; cd .. + +publish: + @if test -f $(DOC)/$(DOC).html; \ + then \ + echo " "; \ + echo "******** Publishing "$(DOC)".html"; \ + echo " "; \ + scp -r $(MANUALS) $(STYLESHEET) docs.yp:/var/www/www.yoctoproject.org-docs/$(VER)/$(DOC); \ + cd $(DOC); scp -r $(FIGURES) docs.yp:/var/www/www.yoctoproject.org-docs/$(VER)/$(DOC); \ + else \ + echo " "; \ + echo $(DOC)".html missing. Generate the file first then try again."; \ + echo " "; \ + fi + +clean: + rm -rf $(MANUALS); rm $(DOC)/$(DOC).tgz; diff --git a/bitbake/doc/README b/bitbake/doc/README new file mode 100644 index 0000000..303cf8e --- /dev/null +++ b/bitbake/doc/README @@ -0,0 +1,39 @@ +Documentation +============= + +This is the directory that contains the BitBake documentation. + +Manual Organization +=================== + +Folders exist for individual manuals as follows: + +* bitbake-user-manual - The BitBake User Manual + +Each folder is self-contained regarding content and figures. + +If you want to find HTML versions of the BitBake manuals on the web, +go to http://www.openembedded.org/wiki/Documentation. + +Makefile +======== + +The Makefile processes manual directories to create HTML, PDF, +tarballs, etc. Details on how the Makefile work are documented +inside the Makefile. See that file for more information. + +To build a manual, you run the make command and pass it the name +of the folder containing the manual's contents. +For example, the following command run from the documentation directory +creates an HTML and a PDF version of the BitBake User Manual. +The DOC variable specifies the manual you are making: + + $ make DOC=bitbake-user-manual + +template +======== +Contains various templates, fonts, and some old PNG files. + +tools +===== +Contains a tool to convert the DocBook files to PDF format. 
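+
+For convenience, the same make targets can be driven from a small wrapper
+script. The sketch below is purely illustrative (it is not part of the tree)
+and assumes GNU make, xsltproc and the DocBook toolchain are installed and
+that it is run from this documentation directory:
+
+    #!/usr/bin/env python
+    # Illustrative wrapper around the documented make targets.
+    import subprocess
+    import sys
+
+    def build_manual(doc="bitbake-user-manual", target="html"):
+        # Equivalent to running: make <target> DOC=<doc>
+        return subprocess.call(["make", target, "DOC=%s" % doc])
+
+    if __name__ == "__main__":
+        sys.exit(build_manual(*sys.argv[1:]))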
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-customization.xsl b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-customization.xsl new file mode 100644 index 0000000..ebfa540 --- /dev/null +++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-customization.xsl @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + A + + + + diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.xml b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.xml new file mode 100644 index 0000000..571424b --- /dev/null +++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.xml @@ -0,0 +1,910 @@ + + + + Execution + + + The primary purpose for running BitBake is to produce some kind + of output such as a single installable package, a kernel, a software + development kit, or even a full, board-specific bootable Linux image, + complete with bootloader, kernel, and root filesystem. + Of course, you can execute the bitbake + command with options that cause it to execute single tasks, + compile single recipe files, capture or clear data, or simply + return information about the execution environment. + + + + This chapter describes BitBake's execution process from start + to finish when you use it to create an image. + The execution process is launched using the following command + form: + + $ bitbake <target> + + For information on the BitBake command and its options, + see + "The BitBake Command" + section. + + + Prior to executing BitBake, you should take advantage of available + parallel thread execution on your build host by setting the + BB_NUMBER_THREADS + variable in your project's local.conf + configuration file. + + + + A common way to determine this value for your build host is to run: + + $ grep processor /proc/cpuinfo + + and count the number of processors displayed. Note that the number of + processors will take into account hyper-threading, so that a quad-core + build host with hyper-threading will most likely show eight processors, + which is the value you would then assign to that variable. + + + + A possibly simpler solution is that some Linux distributions + (e.g. Debian and Ubuntu) provide the ncpus command. + + + + +
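+
+ As a further illustration (this is not taken from the manual itself), the
+ same number can be obtained from Python, which is convenient when
+ generating a local.conf programmatically; like the grep shown above,
+ multiprocessing.cpu_count() counts hyper-threaded cores as processors:
+
+     import multiprocessing
+
+     # Logical processors, hyper-threading included - the same figure
+     # "grep processor /proc/cpuinfo" reports.
+     print('BB_NUMBER_THREADS ?= "%d"' % multiprocessing.cpu_count())
+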
+ Parsing the Base Configuration Metadata + + + The first thing BitBake does is parse base configuration + metadata. + Base configuration metadata consists of your project's + bblayers.conf file to determine what + layers BitBake needs to recognize, all necessary + layer.conf files (one from each layer), + and bitbake.conf. + The data itself is of various types: + + Recipes: + Details about particular pieces of software. + + Class Data: + An abstraction of common build information + (e.g. how to build a Linux kernel). + + Configuration Data: + Machine-specific settings, policy decisions, + and so forth. + Configuration data acts as the glue to bind everything + together. + + + + + The layer.conf files are used to + construct key variables such as + BBPATH + and + BBFILES. + BBPATH is used to search for + configuration and class files under the + conf and classes + directories, respectively. + BBFILES is used to locate both recipe + and recipe append files + (.bb and .bbappend). + If there is no bblayers.conf file, + it is assumed the user has set the BBPATH + and BBFILES directly in the environment. + + + + Next, the bitbake.conf file is located + using the BBPATH variable that was + just constructed. + The bitbake.conf file may also include other + configuration files using the + include or + require directives. + + + + Prior to parsing configuration files, Bitbake looks + at certain variables, including: + + BB_ENV_WHITELIST + BB_PRESERVE_ENV + BB_ENV_EXTRAWHITE + + BITBAKE_UI + + + You can find information on how to pass environment variables into the BitBake + execution environment in the + "Passing Information Into the Build Task Environment" section. + + + + The base configuration metadata is global + and therefore affects all recipes and tasks that are executed. + + + + BitBake first searches the current working directory for an + optional conf/bblayers.conf configuration file. + This file is expected to contain a + BBLAYERS + variable that is a space-delimited list of 'layer' directories. + Recall that if BitBake cannot find a bblayers.conf + file, then it is assumed the user has set the BBPATH + and BBFILES variables directly in the environment. + + + + For each directory (layer) in this list, a conf/layer.conf + file is located and parsed with the + LAYERDIR + variable being set to the directory where the layer was found. + The idea is these files automatically set up + BBPATH + and other variables correctly for a given build directory. + + + + BitBake then expects to find the conf/bitbake.conf + file somewhere in the user-specified BBPATH. + That configuration file generally has include directives to pull + in any other metadata such as files specific to the architecture, + the machine, the local environment, and so forth. + + + + Only variable definitions and include directives are allowed + in BitBake .conf files. + Some variables directly influence BitBake's behavior. + These variables might have been set from the environment + depending on the environment variables previously + mentioned or set in the configuration files. + The + "Variables Glossary" + chapter presents a full list of variables. + + + + After parsing configuration files, BitBake uses its rudimentary + inheritance mechanism, which is through class files, to inherit + some standard classes. + BitBake parses a class when the inherit directive responsible + for getting that class is encountered. + + + + The base.bbclass file is always included. 
+ Other classes that are specified in the configuration using the + INHERIT + variable are also included. + BitBake searches for class files in a + classes subdirectory under + the paths in BBPATH in the same way as + configuration files. + + + + A good way to get an idea of the configuration files and + the class files used in your execution environment is to + run the following BitBake command: + + $ bitbake -e > mybb.log + + Examining the top of the mybb.log + shows you the many configuration files and class files + used in your execution environment. + + + + + You need to be aware of how BitBake parses curly braces. + If a recipe uses a closing curly brace within the function and + the character has no leading spaces, BitBake produces a parsing + error. + If you use a pair of curly braces in a shell function, the + closing curly brace must not be located at the start of the line + without leading spaces. + + + + Here is an example that causes BitBake to produce a parsing + error: + + fakeroot create_shar() { + cat << "EOF" > ${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.sh + usage() + { + echo "test" + ###### The following "}" at the start of the line causes a parsing error ###### + } + EOF + } + + Writing the recipe this way avoids the error: + + fakeroot create_shar() { + cat << "EOF" > ${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.sh + usage() + { + echo "test" + ######The following "}" with a leading space at the start of the line avoids the error ###### + } + EOF + } + + + +
+ +
+ Locating and Parsing Recipes + + + During the configuration phase, BitBake will have set + BBFILES. + BitBake now uses it to construct a list of recipes to parse, + along with any append files (.bbappend) + to apply. + BBFILES is a space-separated list of + available files and supports wildcards. + An example would be: + + BBFILES = "/path/to/bbfiles/*.bb /path/to/appends/*.bbappend" + + BitBake parses each recipe and append file located + with BBFILES and stores the values of + various variables into the datastore. + + Append files are applied in the order they are encountered in + BBFILES. + + For each file, a fresh copy of the base configuration is + made, then the recipe is parsed line by line. + Any inherit statements cause BitBake to find and + then parse class files (.bbclass) + using + BBPATH + as the search path. + Finally, BitBake parses in order any append files found in + BBFILES. + + + + One common convention is to use the recipe filename to define + pieces of metadata. + For example, in bitbake.conf the recipe + name and version are used to set the variables + PN and + PV: + + PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}" + PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}" + + In this example, a recipe called "something_1.2.3.bb" would set + PN to "something" and + PV to "1.2.3". + + + + By the time parsing is complete for a recipe, BitBake + has a list of tasks that the recipe defines and a set of + data consisting of keys and values as well as + dependency information about the tasks. + + + + BitBake does not need all of this information. + It only needs a small subset of the information to make + decisions about the recipe. + Consequently, BitBake caches the values in which it is + interested and does not store the rest of the information. + Experience has shown it is faster to re-parse the metadata than to + try and write it out to the disk and then reload it. + + + + Where possible, subsequent BitBake commands reuse this cache of + recipe information. + The validity of this cache is determined by first computing a + checksum of the base configuration data (see + BB_HASHCONFIG_WHITELIST) + and then checking if the checksum matches. + If that checksum matches what is in the cache and the recipe + and class files have not changed, Bitbake is able to use + the cache. + BitBake then reloads the cached information about the recipe + instead of reparsing it from scratch. + + + + Recipe file collections exist to allow the user to + have multiple repositories of + .bb files that contain the same + exact package. + For example, one could easily use them to make one's + own local copy of an upstream repository, but with + custom modifications that one does not want upstream. + Here is an example: + + BBFILES = "/stuff/openembedded/*/*.bb /stuff/openembedded.modified/*/*.bb" + BBFILE_COLLECTIONS = "upstream local" + BBFILE_PATTERN_upstream = "^/stuff/openembedded/" + BBFILE_PATTERN_local = "^/stuff/openembedded.modified/" + BBFILE_PRIORITY_upstream = "5" + BBFILE_PRIORITY_local = "10" + + + The layers mechanism is now the preferred method of collecting + code. + While the collections code remains, its main use is to set layer + priorities and to deal with overlap (conflicts) between layers. + + +
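+    As a brief, hypothetical sketch of the append mechanism described
+    earlier in this section, an append file named
+    something_1.2.3.bbappend (matching the example recipe name used above)
+    might simply override one variable and, in OpenEmbedded-style metadata,
+    add a local patch; both lines are illustrative assumptions:
+        DESCRIPTION = "something, with local modifications"
+        SRC_URI += "file://local-tweak.patch"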
+ +
+ Providers + + + Assuming BitBake has been instructed to execute a target + and that all the recipe files have been parsed, BitBake + starts to figure out how to build the target. + BitBake looks through the PROVIDES list + for each of the recipes. + A PROVIDES list is the list of names by which + the recipe can be known. + Each recipe's PROVIDES list is created + implicitly through the recipe's + PN variable + and explicitly through the recipe's + PROVIDES + variable, which is optional. + + + + When a recipe uses PROVIDES, that recipe's + functionality can be found under an alternative name or names other + than the implicit PN name. + As an example, suppose a recipe named keyboard_1.0.bb + contained the following: + + PROVIDES += "fullkeyboard" + + The PROVIDES list for this recipe becomes + "keyboard", which is implicit, and "fullkeyboard", which is explicit. + Consequently, the functionality found in + keyboard_1.0.bb can be found under two + different names. + +
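+    Continuing the hypothetical keyboard example, another recipe could then
+    depend on the explicit name, and a configuration file could state which
+    recipe satisfies it; both lines below are illustrative assumptions
+    rather than real metadata:
+        DEPENDS += "fullkeyboard"
+        PREFERRED_PROVIDER_fullkeyboard = "keyboard"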
+ +
+ Preferences + + + The PROVIDES list is only part of the solution + for figuring out a target's recipes. + Because targets might have multiple providers, BitBake needs + to prioritize providers by determining provider preferences. + + + + A common example in which a target has multiple providers + is "virtual/kernel", which is on the + PROVIDES list for each kernel recipe. + Each machine often selects the best kernel provider by using a + line similar to the following in the machine configuration file: + + PREFERRED_PROVIDER_virtual/kernel = "linux-yocto" + + The default + PREFERRED_PROVIDER + is the provider with the same name as the target. + Bitbake iterates through each target it needs to build and + resolves them and their dependencies using this process. + + + + Understanding how providers are chosen is made complicated by the fact + that multiple versions might exist for a given provider. + BitBake defaults to the highest version of a provider. + Version comparisons are made using the same method as Debian. + You can use the + PREFERRED_VERSION + variable to specify a particular version. + You can influence the order by using the + DEFAULT_PREFERENCE + variable. + + + + By default, files have a preference of "0". + Setting DEFAULT_PREFERENCE to "-1" makes the + recipe unlikely to be used unless it is explicitly referenced. + Setting DEFAULT_PREFERENCE to "1" makes it + likely the recipe is used. + PREFERRED_VERSION overrides any + DEFAULT_PREFERENCE setting. + DEFAULT_PREFERENCE is often used to mark newer + and more experimental recipe versions until they have undergone + sufficient testing to be considered stable. + + + + When there are multiple “versions†of a given recipe, + BitBake defaults to selecting the most recent + version, unless otherwise specified. + If the recipe in question has a + DEFAULT_PREFERENCE + set lower than the other recipes (default is 0), then + it will not be selected. + This allows the person or persons maintaining + the repository of recipe files to specify + their preference for the default selected version. + Additionally, the user can specify their preferred version. + + + + If the first recipe is named a_1.1.bb, then the + PN variable + will be set to “aâ€, and the + PV + variable will be set to 1.1. + + + + Thus, if a recipe named a_1.2.bb exists, BitBake + will choose 1.2 by default. + However, if you define the following variable in a + .conf file that BitBake parses, you + can change that preference: + + PREFERRED_VERSION_a = "1.1" + + + + + + It is common for a recipe to provide two versions -- a stable, + numbered (and preferred) version, and a version that is + automatically checked out from a source code repository that + is considered more "bleeding edge" but can be selected only + explicitly. + + + + For example, in the OpenEmbedded codebase, there is a standard, + versioned recipe file for BusyBox, + busybox_1.22.1.bb, + but there is also a Git-based version, + busybox_git.bb, which explicitly contains the line + + DEFAULT_PREFERENCE = "-1" + + to ensure that the numbered, stable version is always preferred + unless the developer selects otherwise. + + +
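+    Pulling these preferences together, a machine or distribution
+    configuration file might pin both the provider and the version of the
+    kernel; the recipe name and version below are assumptions used purely
+    for illustration, and the trailing "%" acts as a version wildcard in
+    recent BitBake releases:
+        PREFERRED_PROVIDER_virtual/kernel = "linux-yocto"
+        PREFERRED_VERSION_linux-yocto = "3.14%"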
+ +
+ Dependencies + + + Each target BitBake builds consists of multiple tasks such as + fetch, unpack, + patch, configure, + and compile. + For best performance on multi-core systems, BitBake considers each + task as an independent + entity with its own set of dependencies. + + + + Dependencies are defined through several variables. + You can find information about variables BitBake uses in + the Variables Glossary + near the end of this manual. + At a basic level, it is sufficient to know that BitBake uses the + DEPENDS and + RDEPENDS variables when + calculating dependencies. + + + + For more information on how BitBake handles dependencies, see the + "Dependencies" section. + +
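+    As a minimal sketch in OpenEmbedded-style metadata (the package names
+    are assumptions), build-time and runtime dependencies are commonly
+    declared in a recipe as follows:
+        DEPENDS = "zlib openssl"
+        RDEPENDS_${PN} = "bash"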
+ +
+ The Task List + + + Based on the generated list of providers and the dependency information, + BitBake can now calculate exactly what tasks it needs to run and in what + order it needs to run them. + The + "Executing Tasks" section has more + information on how BitBake chooses which task to execute next. + + + + The build now starts with BitBake forking off threads up to the limit set in the + BB_NUMBER_THREADS + variable. + BitBake continues to fork threads as long as there are tasks ready to run, + those tasks have all their dependencies met, and the thread threshold has not been + exceeded. + + + + It is worth noting that you can greatly speed up the build time by properly setting + the BB_NUMBER_THREADS variable. + + + + As each task completes, a timestamp is written to the directory specified by the + STAMP variable. + On subsequent runs, BitBake looks in the build directory within + tmp/stamps and does not rerun + tasks that are already completed unless a timestamp is found to be invalid. + Currently, invalid timestamps are only considered on a per + recipe file basis. + So, for example, if the configure stamp has a timestamp greater than the + compile timestamp for a given target, then the compile task would rerun. + Running the compile task again, however, has no effect on other providers + that depend on that target. + + + + The exact format of the stamps is partly configurable. + In modern versions of BitBake, a hash is appended to the + stamp so that if the configuration changes, the stamp becomes + invalid and the task is automatically rerun. + This hash, or signature used, is governed by the signature policy + that is configured (see the + "Checksums (Signatures)" + section for information). + It is also possible to append extra metadata to the stamp using + the "stamp-extra-info" task flag. + For example, OpenEmbedded uses this flag to make some tasks machine-specific. + + + + Some tasks are marked as "nostamp" tasks. + No timestamp file is created when these tasks are run. + Consequently, "nostamp" tasks are always rerun. + + + + For more information on tasks, see the + "Tasks" section. + +
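+    For example, a task can be marked as "nostamp", or have machine-specific
+    information appended to its stamp, using task flags; the task names
+    below are hypothetical, and the two flags would normally be applied to
+    different tasks:
+        do_refresh_index[nostamp] = "1"
+        do_deploy_files[stamp-extra-info] = "${MACHINE}"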
+ +
+ Executing Tasks + + + Tasks can be either a shell task or a Python task. + For shell tasks, BitBake writes a shell script to + ${T}/run.do_taskname.pid + and then executes the script. + The generated shell script contains all the exported variables, + and the shell functions with all variables expanded. + Output from the shell script goes to the file + ${T}/log.do_taskname.pid. + Looking at the expanded shell functions in the run file and + the output in the log files is a useful debugging technique. + + + + For Python tasks, BitBake executes the task internally and logs + information to the controlling terminal. + Future versions of BitBake will write the functions to files + similar to the way shell tasks are handled. + Logging will be handled in a way similar to shell tasks as well. + + + + The order in which BitBake runs the tasks is controlled by its + task scheduler. + It is possible to configure the scheduler and define custom + implementations for specific use cases. + For more information, see these variables that control the + behavior: + + + BB_SCHEDULER + + + BB_SCHEDULERS + + + It is possible to have functions run before and after a task's main + function. + This is done using the "prefuncs" and "postfuncs" flags of the task + that lists the functions to run. + +
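+    As a brief sketch of the "prefuncs" flag (the function name is an
+    illustrative assumption, not an existing helper), a recipe or class
+    could arrange for a small Python function to run just before
+    do_compile:
+        python report_start() {
+            bb.note("do_compile is about to run")
+        }
+        do_compile[prefuncs] += "report_start"
+    The "postfuncs" flag is used in exactly the same way for functions that
+    should run after the task's main function.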
+ +
+ Checksums (Signatures) + + + A checksum is a unique signature of a task's inputs. + The signature of a task can be used to determine if a task + needs to be run. + Because it is a change in a task's inputs that triggers running + the task, BitBake needs to detect all the inputs to a given task. + For shell tasks, this turns out to be fairly easy because + BitBake generates a "run" shell script for each task and + it is possible to create a checksum that gives you a good idea of when + the task's data changes. + + + + To complicate the problem, some things should not be included in + the checksum. + First, there is the actual specific build path of a given task - + the working directory. + It does not matter if the working directory changes because it should not + affect the output for target packages. + The simplistic approach for excluding the working directory is to set + it to some fixed value and create the checksum for the "run" script. + BitBake goes one step better and uses the + BB_HASHBASE_WHITELIST + variable to define a list of variables that should never be included + when generating the signatures. + + + + Another problem results from the "run" scripts containing functions that + might or might not get called. + The incremental build solution contains code that figures out dependencies + between shell functions. + This code is used to prune the "run" scripts down to the minimum set, + thereby alleviating this problem and making the "run" scripts much more + readable as a bonus. + + + + So far we have solutions for shell scripts. + What about Python tasks? + The same approach applies even though these tasks are more difficult. + The process needs to figure out what variables a Python function accesses + and what functions it calls. + Again, the incremental build solution contains code that first figures out + the variable and function dependencies, and then creates a checksum for the data + used as the input to the task. + + + + Like the working directory case, situations exist where dependencies + should be ignored. + For these cases, you can instruct the build process to ignore a dependency + by using a line like the following: + + PACKAGE_ARCHS[vardepsexclude] = "MACHINE" + + This example ensures that the PACKAGE_ARCHS variable does not + depend on the value of MACHINE, even if it does reference it. + + + + Equally, there are cases where we need to add dependencies BitBake + is not able to find. + You can accomplish this by using a line like the following: + + PACKAGE_ARCHS[vardeps] = "MACHINE" + + This example explicitly adds the MACHINE variable as a + dependency for PACKAGE_ARCHS. + + + + Consider a case with in-line Python, for example, where BitBake is not + able to figure out dependencies. + When running in debug mode (i.e. using -DDD), BitBake + produces output when it discovers something for which it cannot figure out + dependencies. + + + + Thus far, this section has limited discussion to the direct inputs into a task. + Information based on direct inputs is referred to as the "basehash" in the + code. + However, there is still the question of a task's indirect inputs - the + things that were already built and present in the build directory. + The checksum (or signature) for a particular task needs to add the hashes + of all the tasks on which the particular task depends. + Choosing which dependencies to add is a policy decision. + However, the effect is to generate a master checksum that combines the basehash + and the hashes of the task's dependencies. 
+ + + + At the code level, there are a variety of ways both the basehash and the + dependent task hashes can be influenced. + Within the BitBake configuration file, we can give BitBake some extra information + to help it construct the basehash. + The following statement effectively results in a list of global variable + dependency excludes - variables never included in any checksum. + This example uses variables from OpenEmbedded to help illustrate + the concept: + + BB_HASHBASE_WHITELIST ?= "TMPDIR FILE PATH PWD BB_TASKHASH BBPATH DL_DIR \ + SSTATE_DIR THISDIR FILESEXTRAPATHS FILE_DIRNAME HOME LOGNAME SHELL TERM \ + USER FILESPATH STAGING_DIR_HOST STAGING_DIR_TARGET COREBASE PRSERV_HOST \ + PRSERV_DUMPDIR PRSERV_DUMPFILE PRSERV_LOCKDOWN PARALLEL_MAKE \ + CCACHE_DIR EXTERNAL_TOOLCHAIN CCACHE CCACHE_DISABLE LICENSE_PATH SDKPKGSUFFIX" + + The previous example excludes the work directory, which is part of + TMPDIR. + + + + The rules for deciding which hashes of dependent tasks to include through + dependency chains are more complex and are generally accomplished with a + Python function. + The code in meta/lib/oe/sstatesig.py shows two examples + of this and also illustrates how you can insert your own policy into the system + if so desired. + This file defines the two basic signature generators OpenEmbedded Core + uses: "OEBasic" and "OEBasicHash". + By default, there is a dummy "noop" signature handler enabled in BitBake. + This means that behavior is unchanged from previous versions. + OE-Core uses the "OEBasicHash" signature handler by default + through this setting in the bitbake.conf file: + + BB_SIGNATURE_HANDLER ?= "OEBasicHash" + + The "OEBasicHash" BB_SIGNATURE_HANDLER is the same as the + "OEBasic" version but adds the task hash to the stamp files. + This results in any metadata change that changes the task hash, automatically + causing the task to be run again. + This removes the need to bump + PR + values, and changes to metadata automatically ripple across the build. + + + + It is also worth noting that the end result of these signature generators is to + make some dependency and hash information available to the build. + This information includes: + + BB_BASEHASH_task-<taskname>: + The base hashes for each task in the recipe. + + BB_BASEHASH_<filename:taskname>: + The base hashes for each dependent task. + + BBHASHDEPS_<filename:taskname>: + The task dependencies for each task. + + BB_TASKHASH: + The hash of the currently running task. + + + + + + It is worth noting that BitBake's "-S" option lets you + debug Bitbake's processing of signatures. + The options passed to -S allow different debugging modes + to be used, either using BitBake's own debug functions + or possibly those defined in the metadata/signature handler + itself. + The simplest parameter to pass is "none", which causes a + set of signature information to be written out into + STAMP_DIR + corresponding to the targets specified. + The other currently available parameter is "printdiff", + which causes BitBake to try to establish the closest + signature match it can (e.g. in the sstate cache) and then + run bitbake-diffsigs over the matches + to determine the stamps and delta where these two + stamp trees diverge. + + It is likely that future versions of BitBake will + provide other signature handlers triggered through + additional "-S" parameters. + + + + + You can find more information on checksum metadata in the + "Task Checksums and Setscene" + section. + +
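+    As a usage sketch of the "-S" option described above (the target name
+    is an assumption), the two currently documented modes are invoked from
+    the command line as follows:
+        $ bitbake -S none mytarget
+        $ bitbake -S printdiff mytarget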
+ +
+ Setscene + + + The setscene process enables BitBake to handle "pre-built" artifacts. + The ability to handle and reuse these artifacts allows BitBake + the luxury of not having to build something from scratch every time. + Instead, BitBake can use, when possible, existing build artifacts. + + + + BitBake needs to have reliable data indicating whether or not an + artifact is compatible. + Signatures, described in the previous section, provide an ideal + way of representing whether an artifact is compatible. + If a signature is the same, an object can be reused. + + + + If an object can be reused, the problem then becomes how to + replace a given task or set of tasks with the pre-built artifact. + BitBake solves the problem with the "setscene" process. + + + + When BitBake is asked to build a given target, before building anything, + it first asks whether cached information is available for any of the + targets it's building, or any of the intermediate targets. + If cached information is available, BitBake uses this information instead of + running the main tasks. + + + + BitBake first calls the function defined by the + BB_HASHCHECK_FUNCTION + variable with a list of tasks and corresponding + hashes it wants to build. + This function is designed to be fast and returns a list + of the tasks for which it believes in can obtain artifacts. + + + + Next, for each of the tasks that were returned as possibilities, + BitBake executes a setscene version of the task that the possible + artifact covers. + Setscene versions of a task have the string "_setscene" appended to the + task name. + So, for example, the task with the name xxx has + a setscene task named xxx_setscene. + The setscene version of the task executes and provides the necessary + artifacts returning either success or failure. + + + + As previously mentioned, an artifact can cover more than one task. + For example, it is pointless to obtain a compiler if you + already have the compiled binary. + To handle this, BitBake calls the + BB_SETSCENE_DEPVALID + function for each successful setscene task to know whether or not it needs + to obtain the dependencies of that task. + + + + Finally, after all the setscene tasks have executed, BitBake calls the + function listed in + BB_SETSCENE_VERIFY_FUNCTION + with the list of tasks BitBake thinks has been "covered". + The metadata can then ensure that this list is correct and can + inform BitBake that it wants specific tasks to be run regardless + of the setscene result. + + + + You can find more information on setscene metadata in the + "Task Checksums and Setscene" + section. + +
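+    To make the flow above concrete, the hooks are ordinary variables that
+    name Python functions defined in the metadata; the function names below
+    are hypothetical placeholders rather than the ones OpenEmbedded
+    actually provides:
+        BB_HASHCHECK_FUNCTION = "my_hash_check"
+        BB_SETSCENE_DEPVALID = "my_setscene_depvalid"
+        BB_SETSCENE_VERIFY_FUNCTION = "my_setscene_verify"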
+
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml new file mode 100644 index 0000000..0dff736 --- /dev/null +++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml @@ -0,0 +1,738 @@ + + + +File Download Support + + + BitBake's fetch module is a standalone piece of library code + that deals with the intricacies of downloading source code + and files from remote systems. + Fetching source code is one of the cornerstones of building software. + As such, this module forms an important part of BitBake. + + + + The current fetch module is called "fetch2" and refers to the + fact that it is the second major version of the API. + The original version is obsolete and has been removed from the codebase. + Thus, in all cases, "fetch" refers to "fetch2" in this + manual. + + +
+ The Download (Fetch) + + + BitBake takes several steps when fetching source code or files. + The fetcher codebase deals with two distinct processes in order: + obtaining the files from somewhere (cached or otherwise) + and then unpacking those files into a specific location and + perhaps in a specific way. + Getting and unpacking the files is often optionally followed + by patching. + Patching, however, is not covered by this module. + + + + The code to execute the first part of this process, a fetch, + looks something like the following: + + src_uri = (d.getVar('SRC_URI', True) or "").split() + fetcher = bb.fetch2.Fetch(src_uri, d) + fetcher.download() + + This code sets up an instance of the fetch class. + The instance uses a space-separated list of URLs from the + SRC_URI + variable and then calls the download + method to download the files. + + + + The instantiation of the fetch class is usually followed by: + + rootdir = l.getVar('WORKDIR', True) + fetcher.unpack(rootdir) + + This code unpacks the downloaded files to the + specified by WORKDIR. + + For convenience, the naming in these examples matches + the variables used by OpenEmbedded. + If you want to see the above code in action, examine + the OpenEmbedded class file base.bbclass. + + The SRC_URI and WORKDIR + variables are not hardcoded into the fetcher, since those fetcher + methods can be (and are) called with different variable names. + In OpenEmbedded for example, the shared state (sstate) code uses + the fetch module to fetch the sstate files. + + + + When the download() method is called, + BitBake tries to resolve the URLs by looking for source files + in a specific search order: + + Pre-mirror Sites: + BitBake first uses pre-mirrors to try and find source files. + These locations are defined using the + PREMIRRORS + variable. + + Source URI: + If pre-mirrors fail, BitBake uses the original URL (e.g from + SRC_URI). + + Mirror Sites: + If fetch failures occur, BitBake next uses mirror locations as + defined by the + MIRRORS + variable. + + + + + + For each URL passed to the fetcher, the fetcher + calls the submodule that handles that particular URL type. + This behavior can be the source of some confusion when you + are providing URLs for the SRC_URI + variable. + Consider the following two URLs: + + http://git.yoctoproject.org/git/poky;protocol=git + git://git.yoctoproject.org/git/poky;protocol=http + + In the former case, the URL is passed to the + wget fetcher, which does not + understand "git". + Therefore, the latter case is the correct form since the + Git fetcher does know how to use HTTP as a transport. + + + + Here are some examples that show commonly used mirror + definitions: + + PREMIRRORS ?= "\ + bzr://.*/.* http://somemirror.org/sources/ \n \ + cvs://.*/.* http://somemirror.org/sources/ \n \ + git://.*/.* http://somemirror.org/sources/ \n \ + hg://.*/.* http://somemirror.org/sources/ \n \ + osc://.*/.* http://somemirror.org/sources/ \n \ + p4://.*/.* http://somemirror.org/sources/ \n \ + svn://.*/.* http://somemirror.org/sources/ \n" + + MIRRORS =+ "\ + ftp://.*/.* http://somemirror.org/sources/ \n \ + http://.*/.* http://somemirror.org/sources/ \n \ + https://.*/.* http://somemirror.org/sources/ \n" + + It is useful to note that BitBake supports + cross-URLs. + It is possible to mirror a Git repository on an HTTP + server as a tarball. + This is what the git:// mapping in + the previous example does. + + + + Since network accesses are slow, Bitbake maintains a + cache of files downloaded from the network. 
+ Any source files that are not local (i.e. + downloaded from the Internet) are placed into the download + directory, which is specified by the + DL_DIR + variable. + + + + File integrity is of key importance for reproducing builds. + For non-local archive downloads, the fetcher code can verify + SHA-256 and MD5 checksums to ensure the archives have been + downloaded correctly. + You can specify these checksums by using the + SRC_URI variable with the appropriate + varflags as follows: + + SRC_URI[md5sum] = "value" + SRC_URI[sha256sum] = "value" + + You can also specify the checksums as parameters on the + SRC_URI as shown below: + + SRC_URI = "http://example.com/foobar.tar.bz2;md5sum=4a8e0f237e961fd7785d19d07fdb994d" + + If multiple URIs exist, you can specify the checksums either + directly as in the previous example, or you can name the URLs. + The following syntax shows how you name the URIs: + + SRC_URI = "http://example.com/foobar.tar.bz2;name=foo" + SRC_URI[foo.md5sum] = 4a8e0f237e961fd7785d19d07fdb994d + + After a file has been downloaded and has had its checksum checked, + a ".done" stamp is placed in DL_DIR. + BitBake uses this stamp during subsequent builds to avoid + downloading or comparing a checksum for the file again. + + It is assumed that local storage is safe from data corruption. + If this were not the case, there would be bigger issues to worry about. + + + + + If + BB_STRICT_CHECKSUM + is set, any download without a checksum triggers an + error message. + The + BB_NO_NETWORK + variable can be used to make any attempted network access a fatal + error, which is useful for checking that mirrors are complete + as well as other things. + +
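+    For example (the values are shown purely for illustration), missing
+    checksums can be turned into hard errors and all network access can be
+    disabled by adding the following to a configuration file:
+        BB_STRICT_CHECKSUM = "1"
+        BB_NO_NETWORK = "1"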
+ +
+ The Unpack + + + The unpack process usually immediately follows the download. + For all URLs except Git URLs, BitBake uses the common + unpack method. + + + + A number of parameters exist that you can specify within the + URL to govern the behavior of the unpack stage: + + unpack: + Controls whether the URL components are unpacked. + If set to "1", which is the default, the components + are unpacked. + If set to "0", the unpack stage leaves the file alone. + This parameter is useful when you want an archive to be + copied in and not be unpacked. + + dos: + Applies to .zip and + .jar files and specifies whether to + use DOS line ending conversion on text files. + + basepath: + Instructs the unpack stage to strip the specified + directories from the source path when unpacking. + + subdir: + Unpacks the specific URL to the specified subdirectory + within the root directory. + + + The unpack call automatically decompresses and extracts files + with ".Z", ".z", ".gz", ".xz", ".zip", ".jar", ".ipk", ".rpm". + ".srpm", ".deb" and ".bz2" extensions as well as various combinations + of tarball extensions. + + + + As mentioned, the Git fetcher has its own unpack method that + is optimized to work with Git trees. + Basically, this method works by cloning the tree into the final + directory. + The process is completed using references so that there is + only one central copy of the Git metadata needed. + +
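+    A short, hypothetical SRC_URI showing two of the parameters above (the
+    host and file names are assumptions):
+        SRC_URI = "http://example.com/source-1.0.tar.gz;subdir=extra-sources \
+                   http://example.com/firmware.bin;unpack=0"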
+ +
+ Fetchers + + + As mentioned earlier, the URL prefix determines which + fetcher submodule BitBake uses. + Each submodule can support different URL parameters, + which are described in the following sections. + + +
+ Local file fetcher (<filename>file://</filename>) + + + This submodule handles URLs that begin with + file://. + The filename you specify within the URL can be + either an absolute or relative path to a file. + If the filename is relative, the contents of the + FILESPATH + variable is used in the same way + PATH is used to find executables. + Failing that, + FILESDIR + is used to find the appropriate relative file. + + FILESDIR is deprecated and can + be replaced with FILESPATH. + Because FILESDIR is likely to be + removed, you should not use this variable in any new code. + + If the file cannot be found, it is assumed that it is available in + DL_DIR + by the time the download() method is called. + + + + If you specify a directory, the entire directory is + unpacked. + + + + Here are a couple of example URLs, the first relative and + the second absolute: + + SRC_URI = "file://relativefile.patch" + SRC_URI = "file:///Users/ich/very_important_software" + + +
+ +
+ HTTP/FTP wget fetcher (<filename>http://</filename>, <filename>ftp://</filename>, <filename>https://</filename>) + + + This fetcher obtains files from web and FTP servers. + Internally, the fetcher uses the wget utility. + + + + The executable and parameters used are specified by the + FETCHCMD_wget variable, which defaults + to sensible values. + The fetcher supports a parameter "downloadfilename" that + allows the name of the downloaded file to be specified. + Specifying the name of the downloaded file is useful + for avoiding collisions in + DL_DIR + when dealing with multiple files that have the same name. + + + + Some example URLs are as follows: + + SRC_URI = "http://oe.handhelds.org/not_there.aac" + SRC_URI = "ftp://oe.handhelds.org/not_there_as_well.aac" + SRC_URI = "ftp://you@oe.handhelds.org/home/you/secret.plan" + + +
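+    For instance, the "downloadfilename" parameter mentioned above is
+    useful when the URL itself does not end in a meaningful file name; the
+    URL below is an assumed example:
+        SRC_URI = "http://example.com/download.php?file=1234;downloadfilename=myproject-1.0.tar.gz"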
+ +
+ CVS fetcher (<filename>(cvs://</filename>) + + + This submodule handles checking out files from the + CVS version control system. + You can configure it using a number of different variables: + + FETCHCMD_cvs: + The name of the executable to use when running + the cvs command. + This name is usually "cvs". + + SRCDATE: + The date to use when fetching the CVS source code. + A special value of "now" causes the checkout to + be updated on every build. + + CVSDIR: + Specifies where a temporary checkout is saved. + The location is often DL_DIR/cvs. + + CVS_PROXY_HOST: + The name to use as a "proxy=" parameter to the + cvs command. + + CVS_PROXY_PORT: + The port number to use as a "proxyport=" parameter to + the cvs command. + + + As well as the standard username and password URL syntax, + you can also configure the fetcher with various URL parameters: + + + + The supported parameters are as follows: + + "method": + The protocol over which to communicate with the CVS server. + By default, this protocol is "pserver". + If "method" is set to "ext", BitBake examines the + "rsh" parameter and sets CVS_RSH. + You can use "dir" for local directories. + + "module": + Specifies the module to check out. + You must supply this parameter. + + "tag": + Describes which CVS TAG should be used for + the checkout. + By default, the TAG is empty. + + "date": + Specifies a date. + If no "date" is specified, the + SRCDATE + of the configuration is used to checkout a specific date. + The special value of "now" causes the checkout to be + updated on every build. + + "localdir": + Used to rename the module. + Effectively, you are renaming the output directory + to which the module is unpacked. + You are forcing the module into a special + directory relative to CVSDIR. + + "rsh" + Used in conjunction with the "method" parameter. + + "scmdata": + Causes the CVS metadata to be maintained in the tarball + the fetcher creates when set to "keep". + The tarball is expanded into the work directory. + By default, the CVS metadata is removed. + + "fullpath": + Controls whether the resulting checkout is at the + module level, which is the default, or is at deeper + paths. + + "norecurse": + Causes the fetcher to only checkout the specified + directory with no recurse into any subdirectories. + + "port": + The port to which the CVS server connects. + + + Some example URLs are as follows: + + SRC_URI = "cvs://CVSROOT;module=mymodule;tag=some-version;method=ext" + SRC_URI = "cvs://CVSROOT;module=mymodule;date=20060126;localdir=usethat" + + +
+ +
+ Subversion (SVN) Fetcher (<filename>svn://</filename>) + + + This fetcher submodule fetches code from the + Subversion source control system. + The executable used is specified by + FETCHCMD_svn, which defaults + to "svn". + The fetcher's temporary working directory is set + by SVNDIR, which is usually + DL_DIR/svn. + + + + The supported parameters are as follows: + + "module": + The name of the svn module to checkout. + You must provide this parameter. + You can think of this parameter as the top-level + directory of the repository data you want. + + "protocol": + The protocol to use, which defaults to "svn". + Other options are "svn+ssh" and "rsh". + For "rsh", the "rsh" parameter is also used. + + "rev": + The revision of the source code to checkout. + + "date": + The date of the source code to checkout. + Specific revisions are generally much safer to checkout + rather than by date as they do not involve timezones + (e.g. they are much more deterministic). + + "scmdata": + Causes the “.svn†directories to be available during + compile-time when set to "keep". + By default, these directories are removed. + + "transportuser": + When required, sets the username for the transport. + By default, this parameter is empty. + The transport username is different than the username + used in the main URL, which is passed to the subversion + command. + + + Following are two examples using svn: + + SRC_URI = "svn://svn.oe.handhelds.org/svn;module=vip;proto=http;rev=667" + SRC_URI = "svn://svn.oe.handhelds.org/svn/;module=opie;proto=svn+ssh;date=20060126" + + +
+ +
+ Git Fetcher (<filename>git://</filename>) + + + This fetcher submodule fetches code from the Git + source control system. + The fetcher works by creating a bare clone of the + remote into GITDIR, which is + usually DL_DIR/git2. + This bare clone is then cloned into the work directory during the + unpack stage when a specific tree is checked out. + This is done using alternates and by reference to + minimize the amount of duplicate data on the disk and + make the unpack process fast. + The executable used can be set with + FETCHCMD_git. + + + + This fetcher supports the following parameters: + + "protocol": + The protocol used to fetch the files. + The default is "git" when a hostname is set. + If a hostname is not set, the Git protocol is "file". + You can also use "http", "https", "ssh" and "rsync". + + "nocheckout": + Tells the fetcher to not checkout source code when + unpacking when set to "1". + Set this option for the URL where there is a custom + routine to checkout code. + The default is "0". + + "rebaseable": + Indicates that the upstream Git repository can be rebased. + You should set this parameter to "1" if + revisions can become detached from branches. + In this case, the source mirror tarball is done per + revision, which has a loss of efficiency. + Rebasing the upstream Git repository could cause the + current revision to disappear from the upstream repository. + This option reminds the fetcher to preserve the local cache + carefully for future use. + The default value for this parameter is "0". + + "nobranch": + Tells the fetcher to not check the SHA validation + for the branch when set to "1". + The default is "0". + Set this option for the recipe that refers to + the commit that is valid for a tag instead of + the branch. + + "bareclone": + Tells the fetcher to clone a bare clone into the + destination directory without checking out a working tree. + Only the raw Git metadata is provided. + This parameter implies the "nocheckout" parameter as well. + + "branch": + The branch(es) of the Git tree to clone. + If unset, this is assumed to be "master". + The number of branch parameters much match the number of + name parameters. + + "rev": + The revision to use for the checkout. + The default is "master". + + "tag": + Specifies a tag to use for the checkout. + To correctly resolve tags, BitBake must access the + network. + For that reason, tags are often not used. + As far as Git is concerned, the "tag" parameter behaves + effectively the same as the "rev" parameter. + + "subpath": + Limits the checkout to a specific subpath of the tree. + By default, the whole tree is checked out. + + "destsuffix": + The name of the path in which to place the checkout. + By default, the path is git/. + + + Here are some example URLs: + + SRC_URI = "git://git.oe.handhelds.org/git/vip.git;tag=version-1" + SRC_URI = "git://git.oe.handhelds.org/git/vip.git;protocol=http" + + +
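+    A slightly fuller, hypothetical example that combines several of the
+    parameters above with a fixed revision (the repository, branch, and
+    revision are assumptions):
+        SRC_URI = "git://git.example.com/myproject.git;protocol=https;branch=stable;destsuffix=myproject"
+        SRCREV = "0123456789abcdef0123456789abcdef01234567"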
+ +
+ Git Submodule Fetcher (<filename>gitsm://</filename>) + + + This fetcher submodule inherits from the + Git fetcher and extends + that fetcher's behavior by fetching a repository's submodules. + SRC_URI + is passed to the Git fetcher as described in the + "Git Fetcher (git://)" + section. + + Notes and Warnings + + You must clean a recipe when switching between + 'git://' and + 'gitsm://' URLs. + + + + The Git Submodules fetcher is not a complete fetcher + implementation. + The fetcher has known issues where it does not use the + normal source mirroring infrastructure properly. + + + +
+ +
+ ClearCase Fetcher (<filename>ccrc://</filename>) + + + This fetcher submodule fetches code from a + ClearCase + repository. + + + + To use this fetcher, make sure your recipe has proper + SRC_URI, + SRCREV, and + PV settings. + Here is an example: + + SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module" + SRCREV = "EXAMPLE_CLEARCASE_TAG" + PV = "${@d.getVar("SRCREV").replace("/", "+")}" + + The fetcher uses the rcleartool or + cleartool remote client, depending on + which one is available. + + + + Following are options for the SRC_URI + statement: + + vob: + The name, which must include the + prepending "/" character, of the ClearCase VOB. + This option is required. + + module: + The module, which must include the + prepending "/" character, in the selected VOB + The module and vob + options are combined to create the following load rule in + the view config spec: + + load <vob><module> + + + proto: + The protocol, which can be either http or + https. + + + + + + By default, the fetcher creates a configuration specification. + If you want this specification written to an area other than the default, + use the CCASE_CUSTOM_CONFIG_SPEC variable + in your recipe to define where the specification is written. + + the SRCREV loses its functionality if you + specify this variable. + However, SRCREV is still used to label the + archive after a fetch even though it does not define what is + fetched. + + + + + Here are a couple of other behaviors worth mentioning: + + + When using cleartool, the login of + cleartool is handled by the system. + The login require no special steps. + + + In order to use rcleartool with authenticated + users, an "rcleartool login" is necessary before using the fetcher. + + + +
+ +
+ Other Fetchers + + + Fetch submodules also exist for the following: + + + Bazaar (bzr://) + + + Perforce (p4://) + + + Trees using Git Annex (gitannex://) + + + Secure FTP (sftp://) + + + Secure Shell (ssh://) + + + Repo (repo://) + + + OSC (osc://) + + + Mercurial (hg://) + + + No documentation currently exists for these lesser used + fetcher submodules. + However, you might find the code helpful and readable. + +
+
+ +
+ Auto Revisions + + + We need to document AUTOREV and + SRCREV_FORMAT here. + +
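+    Until that documentation is written, the following is a hedged sketch
+    of a pattern commonly seen in OpenEmbedded metadata, where a recipe is
+    floated to the latest upstream revision; the version string is an
+    assumption:
+        SRCREV = "${AUTOREV}"
+        PV = "1.0+git${SRCPV}"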
+
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml new file mode 100644 index 0000000..4ce7ed9 --- /dev/null +++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml @@ -0,0 +1,506 @@ + + + + Hello World Example + +
+ BitBake Hello World + + + The simplest example commonly used to demonstrate any new + programming language or tool is the + "Hello World" + example. + This appendix demonstrates, in tutorial form, Hello + World within the context of BitBake. + The tutorial describes how to create a new project + and the applicable metadata files necessary to allow + BitBake to build it. + +
+ +
+ Obtaining BitBake + + + See the + "Obtaining BitBake" + section for information on how to obtain BitBake. + Once you have the source code on your machine, the BitBake directory + appears as follows: + + $ ls -al + total 100 + drwxrwxr-x. 9 wmat wmat 4096 Jan 31 13:44 . + drwxrwxr-x. 3 wmat wmat 4096 Feb 4 10:45 .. + -rw-rw-r--. 1 wmat wmat 365 Nov 26 04:55 AUTHORS + drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 bin + drwxrwxr-x. 4 wmat wmat 4096 Jan 31 13:44 build + -rw-rw-r--. 1 wmat wmat 16501 Nov 26 04:55 ChangeLog + drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 classes + drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 conf + drwxrwxr-x. 3 wmat wmat 4096 Nov 26 04:55 contrib + -rw-rw-r--. 1 wmat wmat 17987 Nov 26 04:55 COPYING + drwxrwxr-x. 3 wmat wmat 4096 Nov 26 04:55 doc + -rw-rw-r--. 1 wmat wmat 69 Nov 26 04:55 .gitignore + -rw-rw-r--. 1 wmat wmat 849 Nov 26 04:55 HEADER + drwxrwxr-x. 5 wmat wmat 4096 Jan 31 13:44 lib + -rw-rw-r--. 1 wmat wmat 195 Nov 26 04:55 MANIFEST.in + -rwxrwxr-x. 1 wmat wmat 3195 Jan 31 11:57 setup.py + -rw-rw-r--. 1 wmat wmat 2887 Nov 26 04:55 TODO + + + + + At this point, you should have BitBake cloned to + a directory that matches the previous listing except for + dates and user names. + +
+ +
+ Setting Up the BitBake Environment + + + First, you need to be sure that you can run BitBake. + Set your working directory to where your local BitBake + files are and run the following command: + + $ ./bin/bitbake --version + BitBake Build Tool Core version 1.23.0, bitbake version 1.23.0 + + The console output tells you what version you are running. + + + + The recommended method to run BitBake is from a directory of your + choice. + To be able to run BitBake from any directory, you need to add the + executable binary to your binary to your shell's environment + PATH variable. + First, look at your current PATH variable + by entering the following: + + $ echo $PATH + + Next, add the directory location for the BitBake binary to the + PATH. + Here is an example that adds the + /home/scott-lenovo/bitbake/bin directory + to the front of the PATH variable: + + $ export PATH=/home/scott-lenovo/bitbake/bin:$PATH + + You should now be able to enter the bitbake + command from the command line while working from any directory. + +
+ +
+ The Hello World Example + + + The overall goal of this exercise is to build a + complete "Hello World" example utilizing task and layer + concepts. + Because this is how modern projects such as OpenEmbedded and + the Yocto Project utilize BitBake, the example + provides an excellent starting point for understanding + BitBake. + + + + To help you understand how to use BitBake to build targets, + the example starts with nothing but the bitbake + command, which causes BitBake to fail and report problems. + The example progresses by adding pieces to the build to + eventually conclude with a working, minimal "Hello World" + example. + + + + While every attempt is made to explain what is happening during + the example, the descriptions cannot cover everything. + You can find further information throughout this manual. + Also, you can actively participate in the + + discussion mailing list about the BitBake build tool. + + + + This example was inspired by and drew heavily from these sources: + + + Mailing List post - The BitBake equivalent of "Hello, World!" + + + Hambedded Linux blog post - From Bitbake Hello World to an Image + + + + + + As stated earlier, the goal of this example + is to eventually compile "Hello World". + However, it is unknown what BitBake needs and what you have + to provide in order to achieve that goal. + Recall that BitBake utilizes three types of metadata files: + Configuration Files, + Classes, and + Recipes. + But where do they go? + How does BitBake find them? + BitBake's error messaging helps you answer these types of questions + and helps you better understand exactly what is going on. + + + + Following is the complete "Hello World" example. + + + + Create a Project Directory: + First, set up a directory for the "Hello World" project. + Here is how you can do so in your home directory: + + $ mkdir ~/hello + $ cd ~/hello + + This is the directory that BitBake will use to do all of + its work. + You can use this directory to keep all the metafiles needed + by BitBake. + Having a project directory is a good way to isolate your + project. + + Run Bitbake: + At this point, you have nothing but a project directory. + Run the bitbake command and see what + it does: + + $ bitbake + The BBPATH variable is not set and bitbake did not + find a conf/bblayers.conf file in the expected location. + Maybe you accidentally invoked bitbake from the wrong directory? + DEBUG: Removed the following variables from the environment: + GNOME_DESKTOP_SESSION_ID, XDG_CURRENT_DESKTOP, + GNOME_KEYRING_CONTROL, DISPLAY, SSH_AGENT_PID, LANG, no_proxy, + XDG_SESSION_PATH, XAUTHORITY, SESSION_MANAGER, SHLVL, + MANDATORY_PATH, COMPIZ_CONFIG_PROFILE, WINDOWID, EDITOR, + GPG_AGENT_INFO, SSH_AUTH_SOCK, GDMSESSION, GNOME_KEYRING_PID, + XDG_SEAT_PATH, XDG_CONFIG_DIRS, LESSOPEN, DBUS_SESSION_BUS_ADDRESS, + _, XDG_SESSION_COOKIE, DESKTOP_SESSION, LESSCLOSE, DEFAULTS_PATH, + UBUNTU_MENUPROXY, OLDPWD, XDG_DATA_DIRS, COLORTERM, LS_COLORS + + The majority of this output is specific to environment variables + that are not directly relevant to BitBake. + However, the very first message regarding the + BBPATH variable and the + conf/bblayers.conf file + is relevant. + + When you run BitBake, it begins looking for metadata files. + The + BBPATH + variable is what tells BitBake where to look for those files. + BBPATH is not set and you need to set it. + Without BBPATH, Bitbake cannot + find any configuration files (.conf) + or recipe files (.bb) at all. + BitBake also cannot find the bitbake.conf + file. 
+ + Setting BBPATH: + For this example, you can set BBPATH + in the same manner that you set PATH + earlier in the appendix. + You should realize, though, that it is much more flexible to set the + BBPATH variable up in a configuration + file for each project. + From your shell, enter the following commands to set and + export the BBPATH variable: + + $ BBPATH="<projectdirectory>" + $ export BBPATH + + Use your actual project directory in the command. + BitBake uses that directory to find the metadata it needs for + your project. + + When specifying your project directory, do not use the + tilde ("~") character as BitBake does not expand that character + as the shell would. + + + Run Bitbake: + Now that you have BBPATH defined, run + the bitbake command again: + + $ bitbake + ERROR: Traceback (most recent call last): + File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 163, in wrapped + return func(fn, *args) + File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 173, in parse_config_file + return bb.parse.handle(fn, data, include) + File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 99, in handle + return h['handle'](fn, data, include) + File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/ConfHandler.py", line 120, in handle + abs_fn = resolve_file(fn, data) + File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 117, in resolve_file + raise IOError("file %s not found in %s" % (fn, bbpath)) + IOError: file conf/bitbake.conf not found in /home/scott-lenovo/hello + + ERROR: Unable to parse conf/bitbake.conf: file conf/bitbake.conf not found in /home/scott-lenovo/hello + + This sample output shows that BitBake could not find the + conf/bitbake.conf file in the project + directory. + This file is the first thing BitBake must find in order + to build a target. + And, since the project directory for this example is + empty, you need to provide a conf/bitbake.conf + file. + + Creating conf/bitbake.conf: + The conf/bitbake.conf includes a number of + configuration variables BitBake uses for metadata and recipe + files. + For this example, you need to create the file in your project directory + and define some key BitBake variables. + For more information on the bitbake.conf, + see + + + Use the following commands to create the conf + directory in the project directory: + + $ mkdir conf + + From within the conf directory, use + some editor to create the bitbake.conf + so that it contains the following: + + TMPDIR = "${TOPDIR}/tmp" + CACHE = "${TMPDIR}/cache" + STAMP = "${TMPDIR}/stamps" + T = "${TMPDIR}/work" + B = "${TMPDIR}" + + The TMPDIR variable establishes a directory + that BitBake uses for build output and intermediate files (other + than the cached information used by the + Setscene process. + Here, the TMPDIR directory is set to + hello/tmp. + Tip + You can always safely delete the tmp + directory in order to rebuild a BitBake target. + The build process creates the directory for you + when you run BitBake. + + For information about each of the other variables defined in this + example, click on the links to take you to the definitions in + the glossary. 
+ + Run Bitbake: + After making sure that the conf/bitbake.conf + file exists, you can run the bitbake + command again: + +$ bitbake +ERROR: Traceback (most recent call last): + File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 163, in wrapped + return func(fn, *args) + File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 177, in _inherit + bb.parse.BBHandler.inherit(bbclass, "configuration INHERITs", 0, data) + File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/BBHandler.py", line 92, in inherit + include(fn, file, lineno, d, "inherit") + File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/ConfHandler.py", line 100, in include + raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno) +ParseError: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass + +ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass + + In the sample output, BitBake could not find the + classes/base.bbclass file. + You need to create that file next. + + Creating classes/base.bbclass: + BitBake uses class files to provide common code and functionality. + The minimally required class for BitBake is the + classes/base.bbclass file. + The base class is implicitly inherited by + every recipe. + BitBake looks for the class in the classes + directory of the project (i.e hello/classes + in this example). + + Create the classes directory as follows: + + $ cd $HOME/hello + $ mkdir classes + + Move to the classes directory and then + create the base.bbclass file by inserting + this single line: + + addtask build + + The minimal task that BitBake runs is the + do_build task. + This is all the example needs in order to build the project. + Of course, the base.bbclass can have much + more depending on which build environments BitBake is + supporting. + For more information on the base.bbclass file, + you can look at + . + + Run Bitbake: + After making sure that the classes/base.bbclass + file exists, you can run the bitbake + command again: + + $ bitbake + Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information. + + BitBake is finally reporting no errors. + However, you can see that it really does not have anything + to do. + You need to create a recipe that gives BitBake something to do. + + Creating a Layer: + While it is not really necessary for such a small example, + it is good practice to create a layer in which to keep your + code separate from the general metadata used by BitBake. + Thus, this example creates and uses a layer called "mylayer". + + You can find additional information on adding a layer at + . + + + Minimally, you need a recipe file and a layer configuration + file in your layer. + The configuration file needs to be in the conf + directory inside the layer. + Use these commands to set up the layer and the conf + directory: + + $ cd $HOME + $ mkdir mylayer + $ cd mylayer + $ mkdir conf + + Move to the conf directory and create a + layer.conf file that has the following: + + BBPATH .= ":${LAYERDIR}" + + BBFILES += "${LAYERDIR}/*.bb" + + BBFILE_COLLECTIONS += "mylayer" + BBFILE_PATTERN_mylayer := "^${LAYERDIR}/" + + For information on these variables, click the links + to go to the definitions in the glossary. + You need to create the recipe file next. 
+ Inside your layer at the top-level, use an editor and create + a recipe file named printhello.bb that + has the following: + + DESCRIPTION = "Prints Hello World" + PN = 'printhello' + PV = '1' + + python do_build() { + bb.plain("********************"); + bb.plain("* *"); + bb.plain("* Hello, World! *"); + bb.plain("* *"); + bb.plain("********************"); + } + + The recipe file simply provides a description of the + recipe, the name, version, and the do_build + task, which prints out "Hello World" to the console. + For more information on these variables, follow the links + to the glossary. + + Run Bitbake With a Target: + Now that a BitBake target exists, run the command and provide + that target: + + $ cd $HOME/hello + $ bitbake printhello + ERROR: no recipe files to build, check your BBPATH and BBFILES? + + Summary: There was 1 ERROR message shown, returning a non-zero exit code. + + We have created the layer with the recipe and the layer + configuration file but it still seems that BitBake cannot + find the recipe. + BitBake needs a conf/bblayers.conf that + lists the layers for the project. + Without this file, BitBake cannot find the recipe. + + Creating conf/bblayers.conf: + BitBake uses the conf/bblayers.conf file + to locate layers needed for the project. + This file must reside in the conf directory + of the project (i.e. hello/conf for this + example). + Set your working directory to the hello/conf + directory and then create the bblayers.conf + file so that it contains the following: + + BBLAYERS ?= " \ + /home/<you>/mylayer \ + " + + You need to provide your own information for + you in the file. + + Run Bitbake With a Target: + Now that you have supplied the bblayers.conf + file, run the bitbake command and provide + the target: + + $ bitbake printhello + Parsing recipes: 100% |##################################################################################| + Time: 00:00:00 + Parsing of 1 .bb files complete (0 cached, 1 parsed). 1 targets, 0 skipped, 0 masked, 0 errors. + NOTE: Resolving any missing task queue dependencies + NOTE: Preparing runqueue + NOTE: Executing RunQueue Tasks + ******************** + * * + * Hello, World! * + * * + ******************** + NOTE: Tasks Summary: Attempted 1 tasks of which 0 didn't need to be rerun and all succeeded. + + BitBake finds the printhello recipe and + successfully runs the task. + + After the first execution, re-running + bitbake printhello again will not + result in a BitBake run that prints the same console + output. + The reason for this is that the first time the + printhello.bb recipe's + do_build task executes + successfully, BitBake writes a stamp file for the task. + Thus, the next time you attempt to run the task + using that same bitbake command, + BitBake notices the stamp and therefore determines + that the task does not need to be re-run. + If you delete the tmp directory + or run bitbake -c clean printhello + and then re-run the build, the "Hello, World!" message will + be printed again. + + + +
+
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml new file mode 100644 index 0000000..2188655 --- /dev/null +++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml @@ -0,0 +1,685 @@ + + + + Overview + + + Welcome to the BitBake User Manual. + This manual provides information on the BitBake tool. + The information attempts to be as independent as possible regarding + systems that use BitBake, such as OpenEmbedded and the + Yocto Project. + In some cases, scenarios or examples within the context of + a build system are used in the manual to help with understanding. + For these cases, the manual clearly states the context. + + +
+ Introduction + + + Fundamentally, BitBake is a generic task execution + engine that allows shell and Python tasks to be run + efficiently and in parallel while working within + complex inter-task dependency constraints. + One of BitBake's main users, OpenEmbedded, takes this core + and builds embedded Linux software stacks using + a task-oriented approach. + + + + Conceptually, BitBake is similar to GNU Make in + some regards but has significant differences: + + + BitBake executes tasks according to provided + metadata that builds up the tasks. + Metadata is stored in recipe (.bb) + and related recipe "append" (.bbappend) + files, configuration (.conf) and + underlying include (.inc) files, and + in class (.bbclass) files. + The metadata provides + BitBake with instructions on what tasks to run and + the dependencies between those tasks. + + + BitBake includes a fetcher library for obtaining source + code from various places such as local files, source control + systems, or websites. + + + The instructions for each unit to be built (e.g. a piece + of software) are known as "recipe" files and + contain all the information about the unit + (dependencies, source file locations, checksums, description + and so on). + + + BitBake includes a client/server abstraction and can + be used from a command line or used as a service over + XML-RPC and has several different user interfaces. + + + +
+ +
+ History and Goals + + + BitBake was originally a part of the OpenEmbedded project. + It was inspired by the Portage package management system + used by the Gentoo Linux distribution. + On December 7, 2004, OpenEmbedded project team member + Chris Larson split the project into two distinct pieces: + + BitBake, a generic task executor + OpenEmbedded, a metadata set utilized by + BitBake + + Today, BitBake is the primary basis of the + OpenEmbedded + project, which is being used to build and maintain Linux + distributions such as the + Angstrom Distribution, + and which is also being used as the build tool for Linux projects + such as the + Yocto Project. + + + + Prior to BitBake, no other build tool adequately met the needs of + an aspiring embedded Linux distribution. + All of the build systems used by traditional desktop Linux + distributions lacked important functionality, and none of the + ad hoc Buildroot-based systems, prevalent in the + embedded space, were scalable or maintainable. + + + + Some important original goals for BitBake were: + + + Handle cross-compilation. + + + Handle inter-package dependencies (build time on + target architecture, build time on native + architecture, and runtime). + + + Support running any number of tasks within a given + package, including, but not limited to, fetching + upstream sources, unpacking them, patching them, + configuring them, and so forth. + + + Be Linux distribution agnostic for both build and + target systems. + + + Be architecture agnostic. + + + Support multiple build and target operating systems + (e.g. Cygwin, the BSDs, and so forth). + + + Be self contained, rather than tightly + integrated into the build machine's root + filesystem. + + + Handle conditional metadata on the target architecture, + operating system, distribution, and machine. + + + Be easy to use the tools to supply local metadata and packages + against which to operate. + + + Be easy to use BitBake to collaborate between multiple + projects for their builds. + + + Provide an inheritance mechanism to share + common metadata between many packages. + + + Over time it became apparent that some further requirements + were necessary: + + + Handle variants of a base recipe (e.g. native, sdk, + and multilib). + + + Split metadata into layers and allow layers + to enhance or override other layers. + + + Allow representation of a given set of input variables + to a task as a checksum. + Based on that checksum, allow acceleration of builds + with prebuilt components. + + + BitBake satisfies all the original requirements and many more + with extensions being made to the basic functionality to + reflect the additional requirements. + Flexibility and power have always been the priorities. + BitBake is highly extensible and supports embedded Python code and + execution of any arbitrary tasks. + +
+ +
+ Concepts + + + BitBake is a program written in the Python language. + At the highest level, BitBake interprets metadata, decides + what tasks are required to run, and executes those tasks. + Similar to GNU Make, BitBake controls how software is + built. + GNU Make achieves its control through "makefiles", while + BitBake uses "recipes". + + + + BitBake extends the capabilities of a simple + tool like GNU Make by allowing for the definition of much more + complex tasks, such as assembling entire embedded Linux + distributions. + + + + The remainder of this section introduces several concepts + that should be understood in order to better leverage + the power of BitBake. + + +
+ Recipes + + + BitBake Recipes, which are denoted by the file extension + .bb, are the most basic metadata files. + These recipe files provide BitBake with the following: + + Descriptive information about the + package (author, homepage, license, and so on) + The version of the recipe + Existing dependencies (both build + and runtime dependencies) + Where the source code resides and + how to fetch it + Whether the source code requires + any patches, where to find them, and how to apply + them + How to configure and compile the + source code + Where on the target machine to install the + package or packages created + + + + + Within the context of BitBake, or any project utilizing BitBake + as its build system, files with the .bb + extension are referred to as recipes. + + The term "package" is also commonly used to describe recipes. + However, since the same word is used to describe packaged + output from a project, it is best to maintain a single + descriptive term - "recipes". + Put another way, a single "recipe" file is quite capable + of generating a number of related but separately installable + "packages". + In fact, that ability is fairly common. + + +
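 To tie the list above together, here is a minimal sketch of what such a
 recipe might look like.
 Every name in it (myapp, the URL, the dependencies) is purely
 illustrative, and variables such as D (the installation staging
 directory) are normally provided by the build system's configuration
 and classes rather than by BitBake itself:

     DESCRIPTION = "A hypothetical example application"
     HOMEPAGE = "http://example.com/myapp"
     LICENSE = "MIT"
     PV = "1.0"

     # Build-time and runtime dependencies
     DEPENDS = "zlib"
     RDEPENDS_${PN} = "bash"

     # Where the source lives and which local patch to apply
     SRC_URI = "http://example.com/downloads/myapp-${PV}.tar.gz \
                file://fix-build.patch"

     do_compile () {
         make
     }

     do_install () {
         install -d ${D}/usr/bin
         install -m 0755 myapp ${D}/usr/bin
     }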
+ +
+ Configuration Files + + + Configuration files, which are denoted by the + .conf extension, define + various configuration variables that govern the project's build + process. + These files fall into several areas that define + machine configuration options, distribution configuration + options, compiler tuning options, general common + configuration options, and user configuration options. + The main configuration file is the sample + bitbake.conf file, which is + located within the BitBake source tree + conf directory. + +
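 As a rough sketch, a user configuration file might contain entries like
 the following.
 BB_NUMBER_THREADS is a BitBake variable; MACHINE and DISTRO are shown
 only as typical examples of the kind of options a build system layers
 on top of BitBake, and the values are made up:

     # Hypothetical fragment of a user configuration (.conf) file
     BB_NUMBER_THREADS = "4"

     # Weak assignments so other configuration files or the user
     # can still override these choices
     MACHINE ?= "qemux86"
     DISTRO  ?= "mydistro"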
+ +
+
 Classes
 
 
 Class files, which are denoted by the
 .bbclass extension, contain
 information that is useful to share between metadata files.
 The BitBake source tree currently comes with one class metadata file
 called base.bbclass.
 You can find this file in the
 classes directory.
 The base.bbclass class file is special since it
 is always included automatically for all recipes
 and classes.
 This class contains definitions for standard basic tasks such
 as fetching, unpacking, configuring (empty by default),
 compiling (runs any Makefile present), installing (empty by
 default) and packaging (empty by default).
 These tasks are often overridden or extended by other classes
 added during the project development process.
 
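 As a small sketch of a custom class, the following hypothetical
 classes/mytask.bbclass shares a single task between any recipes that
 inherit it (the task and variable names are invented for illustration):

     # classes/mytask.bbclass
     do_report () {
         echo "Building ${PN} at version ${PV}"
     }
     addtask report after do_compile before do_build

 A recipe would pick this up with a single "inherit mytask" line, as
 described in the "inherit Directive" section later in this manual.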
+ +
+ Layers + + + Layers allow you to isolate different types of + customizations from each other. + While you might find it tempting to keep everything in one layer + when working on a single project, the more modular you organize + your metadata, the easier it is to cope with future changes. + + + + To illustrate how you can use layers to keep things modular, + consider customizations you might make to support a specific target machine. + These types of customizations typically reside in a special layer, + rather than a general layer, called a Board Support Package (BSP) + Layer. + Furthermore, the machine customizations should be isolated from + recipes and metadata that support a new GUI environment, for + example. + This situation gives you a couple of layers: one for the machine + configurations and one for the GUI environment. + It is important to understand, however, that the BSP layer can still + make machine-specific additions to recipes within + the GUI environment layer without polluting the GUI layer itself + with those machine-specific changes. + You can accomplish this through a recipe that is a BitBake append + (.bbappend) file. + +
+ +
+ Append Files + + + Append files, which are files that have the + .bbappend file extension, extend or + override information in an existing recipe file. + + + + BitBake expects every append file to have a corresponding recipe file. + Furthermore, the append file and corresponding recipe file + must use the same root filename. + The filenames can differ only in the file type suffix used + (e.g. formfactor_0.0.bb and + formfactor_0.0.bbappend). + + + + Information in append files extends or + overrides the information in the underlying, + similarly-named recipe files. + + + + When you name an append file, you can use the + wildcard character (%) to allow for matching recipe names. + For example, suppose you have an append file named + as follows: + + busybox_1.21.%.bbappend + + That append file would match any busybox_1.21.x.bb + version of the recipe. + So, the append file would match the following recipe names: + + busybox_1.21.1.bb + busybox_1.21.2.bb + busybox_1.21.3.bb + + If the busybox recipe was updated to + busybox_1.3.0.bb, the append name would not + match. + However, if you named the append file + busybox_1.%.bbappend, then you would have a match. + + + + In the most general case, you could name the append file something as + simple as busybox_%.bbappend to be entirely + version independent. + +
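 For illustration, the contents of such an append file might look like
 the lines below.
 The extra patch and the EXTRA_CONFIG variable are hypothetical;
 EXTRA_CONFIG simply stands in for whatever variable the underlying
 recipe actually defines:

     # busybox_1.21.%.bbappend
     # Apply an additional local patch on top of the base recipe
     SRC_URI += "file://local-tweak.patch"

     # Extend a variable that the base recipe already set
     EXTRA_CONFIG_append = " CONFIG_TELNETD=y"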
+
+ +
+ Obtaining BitBake + + + You can obtain BitBake several different ways: + + Cloning BitBake: + Using Git to clone the BitBake source code repository + is the recommended method for obtaining BitBake. + Cloning the repository makes it easy to get bug fixes + and have access to stable branches and the master + branch. + Once you have cloned BitBake, you should use + the latest stable + branch for development since the master branch is for + BitBake development and might contain less stable changes. + + You usually need a version of BitBake + that matches the metadata you are using. + The metadata is generally backwards compatible but + not forward compatible. + Here is an example that clones the BitBake repository: + + $ git clone git://git.openembedded.org/bitbake + + This command clones the BitBake Git repository into a + directory called bitbake. + Alternatively, you can + designate a directory after the + git clone command + if you want to call the new directory something + other than bitbake. + Here is an example that names the directory + bbdev: + + $ git clone git://git.openembedded.org/bitbake bbdev + + Installation using your Distribution + Package Management System: + This method is not + recommended because the BitBake version that is + provided by your distribution, in most cases, + is several + releases behind a snapshot of the BitBake repository. + + Taking a snapshot of BitBake: + Downloading a snapshot of BitBake from the + source code repository gives you access to a known + branch or release of BitBake. + + Cloning the Git repository, as described earlier, + is the preferred method for getting BitBake. + Cloning the repository makes it easier to update as + patches are added to the stable branches. + + The following example downloads a snapshot of + BitBake version 1.17.0: + + $ wget http://git.openembedded.org/bitbake/snapshot/bitbake-1.17.0.tar.gz + $ tar zxpvf bitbake-1.17.0.tar.gz + + After extraction of the tarball using the tar utility, + you have a directory entitled + bitbake-1.17.0. + + Using the BitBake that Comes With Your + Build Checkout: + A final possibility for getting a copy of BitBake is that it + already comes with your checkout of a larger Bitbake-based build + system, such as Poky or Yocto Project. + Rather than manually checking out individual layers and + gluing them together yourself, you can check + out an entire build system. + The checkout will already include a version of BitBake that + has been thoroughly tested for compatibility with the other + components. + For information on how to check out a particular BitBake-based + build system, consult that build system's supporting documentation. + + + +
+ +
+ The BitBake Command + + + The bitbake command is the primary interface + to the BitBake tool. + This section presents the BitBake command syntax and provides + several execution examples. + + +
+ Usage and syntax + + + Following is the usage and syntax for BitBake: + + $ bitbake -h + Usage: bitbake [options] [recipename/target ...] + + Executes the specified task (default is 'build') for a given set of target recipes (.bb files). + It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which + will provide the layer, BBFILES and other configuration information. + + Options: + --version show program's version number and exit + -h, --help show this help message and exit + -b BUILDFILE, --buildfile=BUILDFILE + Execute tasks from a specific .bb recipe directly. + WARNING: Does not handle any dependencies from other + recipes. + -k, --continue Continue as much as possible after an error. While the + target that failed and anything depending on it cannot + be built, as much as possible will be built before + stopping. + -a, --tryaltconfigs Continue with builds by trying to use alternative + providers where possible. + -f, --force Force the specified targets/task to run (invalidating + any existing stamp file). + -c CMD, --cmd=CMD Specify the task to execute. The exact options + available depend on the metadata. Some examples might + be 'compile' or 'populate_sysroot' or 'listtasks' may + give a list of the tasks available. + -C INVALIDATE_STAMP, --clear-stamp=INVALIDATE_STAMP + Invalidate the stamp for the specified task such as + 'compile' and then run the default task for the + specified target(s). + -r PREFILE, --read=PREFILE + Read the specified file before bitbake.conf. + -R POSTFILE, --postread=POSTFILE + Read the specified file after bitbake.conf. + -v, --verbose Output more log message data to the terminal. + -D, --debug Increase the debug level. You can specify this more + than once. + -n, --dry-run Don't execute, just go through the motions. + -S SIGNATURE_HANDLER, --dump-signatures=SIGNATURE_HANDLER + Dump out the signature construction information, with + no task execution. The SIGNATURE_HANDLER parameter is + passed to the handler. Two common values are none and + printdiff but the handler may define more/less. none + means only dump the signature, printdiff means compare + the dumped signature with the cached one. + -p, --parse-only Quit after parsing the BB recipes. + -s, --show-versions Show current and preferred versions of all recipes. + -e, --environment Show the global or per-recipe environment complete + with information about where variables were + set/changed. + -g, --graphviz Save dependency tree information for the specified + targets in the dot syntax. + -I EXTRA_ASSUME_PROVIDED, --ignore-deps=EXTRA_ASSUME_PROVIDED + Assume these dependencies don't exist and are already + provided (equivalent to ASSUME_PROVIDED). Useful to + make dependency graphs more appealing + -l DEBUG_DOMAINS, --log-domains=DEBUG_DOMAINS + Show debug logging for the specified logging domains + -P, --profile Profile the command and save reports. + -u UI, --ui=UI The user interface to use (e.g. knotty, hob, depexp). + -t SERVERTYPE, --servertype=SERVERTYPE + Choose which server to use, process or xmlrpc. + --token=XMLRPCTOKEN Specify the connection token to be used when + connecting to a remote server. + --revisions-changed Set the exit code depending on whether upstream + floating revisions have changed or not. + --server-only Run bitbake without a UI, only starting a server + (cooker) process. + -B BIND, --bind=BIND The name/address for the bitbake server to bind to. + --no-setscene Do not run any setscene tasks. sstate will be ignored + and everything needed, built. 
+ --remote-server=REMOTE_SERVER + Connect to the specified server. + -m, --kill-server Terminate the remote server. + --observe-only Connect to a server as an observing-only client. + --status-only Check the status of the remote bitbake server. + + +
+ +
+ Examples + + + This section presents some examples showing how to use BitBake. + + +
+
 Executing a Task Against a Single Recipe
 
 
 Executing tasks for a single recipe file is relatively simple.
 You specify the file in question, and BitBake parses
 it and executes the specified task.
 If you do not specify a task, BitBake executes the default
 task, which is "build".
 BitBake obeys inter-task dependencies when doing
 so.
 
 
 
 The following command runs the build task, which is
 the default task, on the foo_1.0.bb
 recipe file:
 
 $ bitbake -b foo_1.0.bb
 
 The following command runs the clean task on the
 foo.bb recipe file:
 
 $ bitbake -b foo.bb -c clean
 
 
 The "-b" option explicitly does not handle recipe
 dependencies.
 Other than for debugging purposes, it is instead
 recommended that you use the syntax presented in the
 next section.
 
 
+ +
+ Executing Tasks Against a Set of Recipe Files + + + There are a number of additional complexities introduced + when one wants to manage multiple .bb + files. + Clearly there needs to be a way to tell BitBake what + files are available and, of those, which you + want to execute. + There also needs to be a way for each recipe + to express its dependencies, both for build-time and + runtime. + There must be a way for you to express recipe preferences + when multiple recipes provide the same functionality, or when + there are multiple versions of a recipe. + + + + The bitbake command, when not using + "--buildfile" or "-b" only accepts a "PROVIDES". + You cannot provide anything else. + By default, a recipe file generally "PROVIDES" its + "packagename" as shown in the following example: + + $ bitbake foo + + This next example "PROVIDES" the package name and also uses + the "-c" option to tell BitBake to just execute the + do_clean task: + + $ bitbake -c clean foo + + +
+ +
+ Generating Dependency Graphs + + + BitBake is able to generate dependency graphs using + the dot syntax. + You can convert these graphs into images using the + dot tool from + Graphviz. + + + + When you generate a dependency graph, BitBake writes four files + to the current working directory: + + package-depends.dot: + Shows BitBake's knowledge of dependencies between + runtime targets. + + pn-depends.dot: + Shows dependencies between build-time targets + (i.e. recipes). + + task-depends.dot: + Shows dependencies between tasks. + + pn-buildlist: + Shows a simple list of targets that are to be built. + + + + + + To stop depending on common depends, use the "-I" depend + option and BitBake omits them from the graph. + Leaving this information out can produce more readable graphs. + This way, you can remove from the graph + DEPENDS from inherited classes + such as base.bbclass. + + + + Here are two examples that create dependency graphs. + The second example omits depends common in OpenEmbedded from + the graph: + + $ bitbake -g foo + + $ bitbake -g -I virtual/kernel -I eglibc foo + + +
+
+
+
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml new file mode 100644 index 0000000..0dd543b --- /dev/null +++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml @@ -0,0 +1,1790 @@ + + + + Syntax and Operators + + + Bitbake files have their own syntax. + The syntax has similarities to several + other languages but also has some unique features. + This section describes the available syntax and operators + as well as provides examples. + + +
+ Basic Syntax + + + This section provides some basic syntax examples. + + +
+ Basic Variable Setting + + + The following example sets VARIABLE to + "value". + This assignment occurs immediately as the statement is parsed. + It is a "hard" assignment. + + VARIABLE = "value" + + As expected, if you include leading or trailing spaces as part of + an assignment, the spaces are retained: + + VARIABLE = " value" + VARIABLE = "value " + + Setting VARIABLE to "" sets it to an empty string, + while setting the variable to " " sets it to a blank space + (i.e. these are not the same values). + + VARIABLE = "" + VARIABLE = " " + + +
+ +
+ Variable Expansion + + + BitBake supports variables referencing one another's + contents using a syntax that is similar to shell scripting. + Following is an example that results in A + containing "aval" and B evaluating to + "preavalpost" based on that current value of + A. + + A = "aval" + B = "pre${A}post" + + You should realize that whenever B is + referenced, its evaluation will depend on the state of + A at that time. + Thus, later evaluations of B in the + previous example could result in different values + depending on the value of A. + +
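 The following sketch makes that point explicit:

     A = "aval"
     B = "pre${A}post"
     A = "changed"

 Because ${A} is looked up when B is referenced, B now expands to
 "prechangedpost" rather than "preavalpost".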
+ +
+ Setting a default value (?=) + + + You can use the "?=" operator to achieve a "softer" assignment + for a variable. + This type of assignment allows you to define a variable if it + is undefined when the statement is parsed, but to leave the + value alone if the variable has a value. + Here is an example: + + A ?= "aval" + + If A is set at the time this statement is parsed, + the variable retains its value. + However, if A is not set, + the variable is set to "aval". + + This assignment is immediate. + Consequently, if multiple "?=" assignments + to a single variable exist, the first of those ends up getting + used. + + +
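 A short sketch of that behavior:

     A ?= "first"
     A ?= "second"

 Here A ends up as "first": the first "?=" assigns the variable
 immediately, so the second "?=" finds it already set and leaves it
 alone.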
+ +
+ Setting a weak default value (??=) + + + It is possible to use a "weaker" assignment than in the + previous section by using the "??=" operator. + This assignment behaves identical to "?=" except that the + assignment is made at the end of the parsing process rather + than immediately. + Consequently, when multiple "??=" assignments exist, the last + one is used. + Also, any "=" or "?=" assignment will override the value set with + "??=". + Here is an example: + + A ??= "somevalue" + A ??= "someothervalue" + + If A is set before the above statements are parsed, + the variable retains its value. + If A is not set, + the variable is set to "someothervalue". + + + + Again, this assignment is a "lazy" or "weak" assignment + because it does not occur until the end + of the parsing process. + +
+ +
+ Immediate variable expansion (:=) + + + The ":=" operator results in a variable's + contents being expanded immediately, + rather than when the variable is actually used: + + T = "123" + A := "${B} ${A} test ${T}" + T = "456" + B = "${T} bval" + C = "cval" + C := "${C}append" + + In this example, A contains + "test 123" because ${B} and + ${A} at the time of parsing are undefined, + which leaves "test 123". + And, the variable C + contains "cvalappend" since ${C} immediately + expands to "cval". + +
+ +
+ Appending (+=) and prepending (=+) With Spaces + + + Appending and prepending values is common and can be accomplished + using the "+=" and "=+" operators. + These operators insert a space between the current + value and prepended or appended value. + + + + These operators take immediate effect during parsing. + Here are some examples: + + B = "bval" + B += "additionaldata" + C = "cval" + C =+ "test" + + The variable B contains + "bval additionaldata" and C + contains "test cval". + +
+ +
+ Appending (.=) and Prepending (=.) Without Spaces + + + If you want to append or prepend values without an + inserted space, use the ".=" and "=." operators. + + + + These operators take immediate effect during parsing. + Here are some examples: + + B = "bval" + B .= "additionaldata" + C = "cval" + C =. "test" + + The variable B contains + "bvaladditionaldata" and + C contains "testcval". + +
+ +
+ Appending and Prepending (Override Style Syntax) + + + You can also append and prepend a variable's value + using an override style syntax. + When you use this syntax, no spaces are inserted. + + + + These operators differ from the ":=", ".=", "=.", "+=", and "=+" + operators in that their effects are deferred + until after parsing completes rather than being immediately + applied. + Here are some examples: + + B = "bval" + B_append = " additional data" + C = "cval" + C_prepend = "additional data " + D = "dval" + D_append = "additional data" + + The variable B becomes + "bval additional data" and C becomes + "additional data cval". + The variable D becomes + "dvaladditional data". + + You must control all spacing when you use the + override syntax. + + +
+ +
+ Removal (Override Style Syntax) + + + You can remove values from lists using the removal + override style syntax. + Specifying a value for removal causes all occurrences of that + value to be removed from the variable. + + + + When you use this syntax, BitBake expects one or more strings. + Surrounding spaces are removed as well. + Here is an example: + + FOO = "123 456 789 123456 123 456 123 456" + FOO_remove = "123" + FOO_remove = "456" + FOO2 = "abc def ghi abcdef abc def abc def" + FOO2_remove = "abc def" + + The variable FOO becomes + "789 123456" and FOO2 becomes + "ghi abcdef". + +
+ +
+ Variable Flag Syntax + + + Variable flags are BitBake's implementation of variable properties + or attributes. + It is a way of tagging extra information onto a variable. + You can find more out about variable flags in general in the + "Variable Flags" + section. + + + + You can define, append, and prepend values to variable flags. + All the standard syntax operations previously mentioned work + for variable flags except for override style syntax + (i.e. _prepend, _append, + and _remove). + + + + Here are some examples showing how to set variable flags: + + FOO[a] = "abc" + FOO[b] = "123" + FOO[a] += "456" + + The variable FOO has two flags: + a and b. + The flags are immediately set to "abc" and "123", respectively. + The a flag becomes "abc 456". + + + + No need exists to pre-define variable flags. + You can simply start using them. + One extremely common application + is to attach some brief documentation to a BitBake variable as + follows: + + CACHE[doc] = "The directory holding the cache of the metadata." + + +
+ +
+ Inline Python Variable Expansion + + + You can use inline Python variable expansion to + set variables. + Here is an example: + + DATE = "${@time.strftime('%Y%m%d',time.gmtime())}" + + This example results in the DATE + variable being set to the current date. + + + + Probably the most common use of this feature is to extract + the value of variables from BitBake's internal data dictionary, + d. + The following lines select the values of a package name + and its version number, respectively: + + PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}" + PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}" + + +
+ +
+ Providing Pathnames + + + When specifying pathnames for use with BitBake, + do not use the tilde ("~") character as a shortcut + for your home directory. + Doing so might cause BitBake to not recognize the + path since BitBake does not expand this character in + the same way a shell would. + + + + Instead, provide a fuller path as the following + example illustrates: + + BBLAYERS ?= " \ + /home/scott-lenovo/LayerA \ + " + + +
+
+ +
+ Conditional Syntax (Overrides) + + + BitBake uses + OVERRIDES + to control what variables are overridden after BitBake + parses recipes and configuration files. + This section describes how you can use + OVERRIDES as conditional metadata, + talks about key expansion in relationship to + OVERRIDES, and provides some examples + to help with understanding. + + +
+
 Conditional Metadata
 
 
 You can use OVERRIDES to conditionally select
 a specific version of a variable and to conditionally
 append or prepend the value of a variable.
 
 Selecting a Variable:
 The OVERRIDES variable is
 a colon-character-separated list that contains items
 for which you want to satisfy conditions.
 Thus, if you have a variable that is conditional on "arm", and "arm"
 is in OVERRIDES, then the "arm"-specific
 version of the variable is used rather than the non-conditional
 version.
 Here is an example:
 
 OVERRIDES = "architecture:os:machine"
 TEST = "default"
 TEST_os = "osspecific"
 TEST_nooverride = "othercondvalue"
 
 In this example, the OVERRIDES
 variable lists three overrides:
 "architecture", "os", and "machine".
 The variable TEST by itself has a default
 value of "default".
 You select the os-specific version of the TEST
 variable by appending the "os" override to the variable
 (i.e. TEST_os).
 
 
 
 To better understand this, consider a practical example
 that assumes an OpenEmbedded metadata-based Linux
 kernel recipe file.
 The following lines from the recipe file first set
 the kernel branch variable KBRANCH
 to a default value, then conditionally override that
 value based on the architecture of the build:
 
 KBRANCH = "standard/base"
 KBRANCH_qemuarm = "standard/arm-versatile-926ejs"
 KBRANCH_qemumips = "standard/mti-malta32"
 KBRANCH_qemuppc = "standard/qemuppc"
 KBRANCH_qemux86 = "standard/common-pc/base"
 KBRANCH_qemux86-64 = "standard/common-pc-64/base"
 KBRANCH_qemumips64 = "standard/mti-malta64"
 
 
 Appending and Prepending:
 BitBake also supports append and prepend operations to
 variable values based on whether a specific item is
 listed in OVERRIDES.
 Here is an example:
 
 DEPENDS = "glibc ncurses"
 OVERRIDES = "machine:local"
 DEPENDS_append_machine = " libmad"
 
 In this example, DEPENDS becomes
 "glibc ncurses libmad".
 
 
 
 Again, using an OpenEmbedded metadata-based
 kernel recipe file as an example, the
 following lines will conditionally append to the
 KERNEL_FEATURES variable based
 on the architecture:
 
 KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}"
 KERNEL_FEATURES_append_qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
 KERNEL_FEATURES_append_qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
 
 
 
+ +
+ Key Expansion + + + Key expansion happens when the BitBake datastore is finalized + just before BitBake expands overrides. + To better understand this, consider the following example: + + A${B} = "X" + B = "2" + A2 = "Y" + + In this case, after all the parsing is complete, and + before any overrides are handled, BitBake expands + ${B} into "2". + This expansion causes A2, which was + set to "Y" before the expansion, to become "X". + +
+ +
+ Examples + + + Despite the previous explanations that show the different forms of + variable definitions, it can be hard to work + out exactly what happens when variable operators, conditional + overrides, and unconditional overrides are combined. + This section presents some common scenarios along + with explanations for variable interactions that + typically confuse users. + + + + There is often confusion concerning the order in which + overrides and various "append" operators take effect. + Recall that an append or prepend operation using "_append" + and "_prepend" does not result in an immediate assignment + as would "+=", ".=", "=+", or "=.". + Consider the following example: + + OVERRIDES = "foo" + A = "Z" + A_foo_append = "X" + + For this case, A is + unconditionally set to "Z" and "X" is + unconditionally and immediately appended to the variable + A_foo. + Because overrides have not been applied yet, + A_foo is set to "X" due to the append + and A simply equals "Z". + + + + Applying overrides, however, changes things. + Since "foo" is listed in OVERRIDES, + the conditional variable A is replaced + with the "foo" version, which is equal to "X". + So effectively, A_foo replaces A. + + + + This next example changes the order of the override and + the append: + + OVERRIDES = "foo" + A = "Z" + A_append_foo = "X" + + For this case, before overrides are handled, + A is set to "Z" and A_append_foo + is set to "X". + Once the override for "foo" is applied, however, + A gets appended with "X". + Consequently, A becomes "ZX". + Notice that spaces are not appended. + + + + This next example has the order of the appends and overrides reversed + back as in the first example: + + OVERRIDES = "foo" + A = "Y" + A_foo_append = "Z" + A_foo_append += "X" + + For this case, before any overrides are resolved, + A is set to "Y" using an immediate assignment. + After this immediate assignment, A_foo is set + to "Z", and then further appended with + "X" leaving the variable set to "Z X". + Finally, applying the override for "foo" results in the conditional + variable A becoming "Z X" (i.e. + A is replaced with A_foo). + + + + This final example mixes in some varying operators: + + A = "1" + A_append = "2" + A_append = "3" + A += "4" + A .= "5" + + For this case, the type of append operators are affecting the + order of assignments as BitBake passes through the code + multiple times. + Initially, A is set to "1 45" because + of the three statements that use immediate operators. + After these assignments are made, BitBake applies the + _append operations. + Those operations result in A becoming "1 4523". + +
+
+ +
+ Sharing Functionality + + + BitBake allows for metadata sharing through include files + (.inc) and class files + (.bbclass). + For example, suppose you have a piece of common functionality + such as a task definition that you want to share between + more than one recipe. + In this case, creating a .bbclass + file that contains the common functionality and then using + the inherit directive in your recipes to + inherit the class would be a common way to share the task. + + + + This section presents the mechanisms BitBake provides to + allow you to share functionality between recipes. + Specifically, the mechanisms include include, + inherit, INHERIT, and + require directives. + + +
+ Locating Include and Class Files + + + BitBake uses the + BBPATH + variable to locate needed include and class files. + The BBPATH variable is analogous to + the environment variable PATH. + + + + In order for include and class files to be found by BitBake, + they need to be located in a "classes" subdirectory that can + be found in BBPATH. + +
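 As a sketch, a layer that ships its own class typically extends
 BBPATH from its layer configuration so that the classes directory can
 be found (the layer and class names are hypothetical):

     mylayer/
         classes/mytask.bbclass
         conf/layer.conf

     # conf/layer.conf
     BBPATH .= ":${LAYERDIR}"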
+ +
+ <filename>inherit</filename> Directive + + + When writing a recipe or class file, you can use the + inherit directive to inherit the + functionality of a class (.bbclass). + BitBake only supports this directive when used within recipe + and class files (i.e. .bb and + .bbclass). + + + + The inherit directive is a rudimentary + means of specifying what classes of functionality your + recipes require. + For example, you can easily abstract out the tasks involved in + building a package that uses Autoconf and Automake and put + those tasks into a class file that can be used by your recipe. + + + + As an example, your recipes could use the following directive + to inherit an autotools.bbclass file. + The class file would contain common functionality for using + Autotools that could be shared across recipes: + + inherit autotools + + In this case, BitBake would search for the directory + classes/autotools.bbclass + in BBPATH. + + You can override any values and functions of the + inherited class within your recipe by doing so + after the "inherit" statement. + + +
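 For example, a recipe could first inherit the hypothetical mytask
 class sketched earlier and then replace one of the functions that the
 class provides; the definition in the recipe wins because it appears
 after the inherit line:

     inherit mytask

     # Redefine the class-provided function for this recipe only
     do_report () {
         echo "Custom report for ${PN}"
     }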
+ +
+ <filename>include</filename> Directive + + + BitBake understands the include + directive. + This directive causes BitBake to parse whatever file you specify, + and to insert that file at that location. + The directive is much like its equivalent in Make except + that if the path specified on the include line is a relative + path, BitBake locates the first file it can find + within BBPATH. + + + + As an example, suppose you needed a recipe to include some + self-test definitions: + + include test_defs.inc + + + The include directive does not + produce an error when the file cannot be found. + Consequently, it is recommended that if the file you + are including is expected to exist, you should use + require + instead of include. + Doing so makes sure that an error is produced if the + file cannot be found. + + +
+ +
+ <filename>require</filename> Directive + + + BitBake understands the require + directive. + This directive behaves just like the + include directive with the exception that + BitBake raises a parsing error if the file to be included cannot + be found. + Thus, any file you require is inserted into the file that is + being parsed at the location of the directive. + + + + Similar to how BitBake handles + include, + if the path specified + on the require line is a relative path, BitBake locates + the first file it can find within BBPATH. + + + + As an example, suppose you have two versions of a recipe + (e.g. foo_1.2.2.bb and + foo_2.0.0.bb) where + each version contains some identical functionality that could be + shared. + You could create an include file named foo.inc + that contains the common definitions needed to build "foo". + You need to be sure foo.inc is located in the + same directory as your two recipe files as well. + Once these conditions are set up, you can share the functionality + using a require directive from within each + recipe: + + require foo.inc + + +
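 A sketch of that arrangement, with invented content for foo.inc:

     # foo.inc - definitions shared by both versions of the recipe
     DESCRIPTION = "The foo utility"
     DEPENDS = "zlib"
     SRC_URI = "http://example.com/foo-${PV}.tar.gz"

 Both foo_1.2.2.bb and foo_2.0.0.bb would then contain the single line:

     require foo.inc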
+ +
+ <filename>INHERIT</filename> Configuration Directive + + + When creating a configuration file (.conf), + you can use the INHERIT directive to + inherit a class. + BitBake only supports this directive when used within + a configuration file. + + + + As an example, suppose you needed to inherit a class + file called abc.bbclass from a + configuration file as follows: + + INHERIT += "abc" + + This configuration directive causes the named + class to be inherited at the point of the directive + during parsing. + As with the inherit directive, the + .bbclass file must be located in a + "classes" subdirectory in one of the directories specified + in BBPATH. + + Because .conf files are parsed + first during BitBake's execution, using + INHERIT to inherit a class effectively + inherits the class globally (i.e. for all recipes). + + +
+
+ +
+ Functions + + + As with most languages, functions are the building blocks that + are used to build up operations into tasks. + BitBake supports these types of functions: + + Shell Functions: + Functions written in shell script and executed either + directly as functions, tasks, or both. + They can also be called by other shell functions. + + BitBake Style Python Functions: + Functions written in Python and executed by BitBake or other + Python functions using bb.build.exec_func(). + + Python Functions: + Functions written in Python and executed by Python. + + Anonymous Python Functions: + Python functions executed automatically during + parsing. + + + Regardless of the type of function, you can only + define them in class (.bbclass) + and recipe (.bb or .inc) + files. + + +
+ Shell Functions + + + Functions written in shell script and executed either + directly as functions, tasks, or both. + They can also be called by other shell functions. + Here is an example shell function definition: + + some_function () { + echo "Hello World" + } + + When you create these types of functions in your recipe + or class files, you need to follow the shell programming + rules. + The scripts are executed by /bin/sh, + which may not be a bash shell but might be something + such as dash. + You should not use Bash-specific script (bashisms). + +
+ +
+ BitBake Style Python Functions + + + These functions are written in Python and executed by + BitBake or other Python functions using + bb.build.exec_func(). + + + + An example BitBake function is: + + python some_python_function () { + d.setVar("TEXT", "Hello World") + print d.getVar("TEXT", True) + } + + Because the Python "bb" and "os" modules are already + imported, you do not need to import these modules. + Also in these types of functions, the datastore ("d") + is a global variable and is always automatically + available. + +
+ +
+ Python Functions + + + These functions are written in Python and are executed by + other Python code. + Examples of Python functions are utility functions + that you intend to call from in-line Python or + from within other Python functions. + Here is an example: + + def get_depends(d): + if d.getVar('SOMECONDITION', True): + return "dependencywithcond" + else: + return "dependency" + SOMECONDITION = "1" + DEPENDS = "${@get_depends(d)}" + + This would result in DEPENDS + containing dependencywithcond. + + + + Here are some things to know about Python functions: + + Python functions can take parameters. + + The BitBake datastore is not + automatically available. + Consequently, you must pass it in as a + parameter to the function. + + The "bb" and "os" Python modules are + automatically available. + You do not need to import them. + + + +
+ +
+ Anonymous Python Functions + + + Sometimes it is useful to run some code during + parsing to set variables or to perform other operations + programmatically. + To do this, you can define an anonymous Python function. + Here is an example that conditionally sets a + variable based on the value of another variable: + + python __anonymous () { + if d.getVar('SOMEVAR', True) == 'value': + d.setVar('ANOTHERVAR', 'value2') + } + + The "__anonymous" function name is optional, so the + following example is functionally equivalent to the above: + + python () { + if d.getVar('SOMEVAR', True) == 'value': + d.setVar('ANOTHERVAR', 'value2') + } + + Because unlike other Python functions anonymous + Python functions are executed during parsing, the + "d" variable within an anonymous Python function represents + the datastore for the entire recipe. + Consequently, you can set variable values here and + those values can be picked up by other functions. + +
+ +
+ Flexible Inheritance for Class Functions + + + Through coding techniques and the use of + EXPORT_FUNCTIONS, BitBake supports + exporting a function from a class such that the + class function appears as the default implementation + of the function, but can still be called if a recipe + inheriting the class needs to define its own version of + the function. + + + + To understand the benefits of this feature, consider + the basic scenario where a class defines a task function + and your recipe inherits the class. + In this basic scenario, your recipe inherits the task + function as defined in the class. + If desired, your recipe can add to the start and end of the + function by using the "_prepend" or "_append" operations + respectively, or it can redefine the function completely. + However, if it redefines the function, there is + no means for it to call the class version of the function. + EXPORT_FUNCTIONS provides a mechanism + that enables the recipe's version of the function to call + the original version of the function. + + + + To make use of this technique, you need the following + things in place: + + + The class needs to define the function as follows: + + <classname>_<functionname> + + For example, if you have a class file + bar.bbclass and a function named + do_foo, the class must define the function + as follows: + + bar_do_foo + + + + The class needs to contain the EXPORT_FUNCTIONS + statement as follows: + + EXPORT_FUNCTIONS <functionname> + + For example, continuing with the same example, the + statement in the bar.bbclass would be + as follows: + + EXPORT_FUNCTIONS do_foo + + + + You need to call the function appropriately from within your + recipe. + Continuing with the same example, if your recipe + needs to call the class version of the function, + it should call bar_do_foo. + Assuming do_foo was a shell function + and EXPORT_FUNCTIONS was used as above, + the recipe's function could conditionally call the + class version of the function as follows: + + do_foo() { + if [ somecondition ] ; then + bar_do_foo + else + # Do something else + fi + } + + To call your modified version of the function as defined + in your recipe, call it as do_foo. + + + With these conditions met, your single recipe + can freely choose between the original function + as defined in the class file and the modified function in your recipe. + If you do not set up these conditions, you are limited to using one function + or the other. + +
+
+ +
+
 Tasks
 
 
 Tasks are BitBake execution units that originate as
 functions and make up the steps that BitBake needs to run
 for a given recipe.
 Tasks are only supported in recipe (.bb
 or .inc) and class
 (.bbclass) files.
 By convention, task names begin with the string "do_".
 
 
 
 Here is an example of a task that prints out the date:
 
 python do_printdate () {
     import time
     print time.strftime('%Y%m%d', time.gmtime())
 }
 addtask printdate after do_fetch before do_build
 
 
 
+
 Promoting a Function to a Task
 
 
 Any function can be promoted to a task by applying the
 addtask command.
 The addtask command also describes
 inter-task dependencies.
 Here is the function from the previous section but with the
 addtask command promoting it to a task
 and defining some dependencies:
 
 python do_printdate () {
     import time
     print time.strftime('%Y%m%d', time.gmtime())
 }
 addtask printdate after do_fetch before do_build
 
 In the example, the function is defined and then promoted
 to a task.
 The do_printdate task becomes a dependency of
 the do_build task, which is the default
 task.
 And, the do_printdate task is dependent upon
 the do_fetch task.
 Execution of the do_build task results
 in the do_printdate task running first.
 
+ +
+
 Deleting a Task
 
 
 As well as being able to add tasks, you can also delete them.
 This is done simply with the deltask command.
 For example, to delete the example task used in the previous
 sections, you would use:
 
 deltask printdate
 
 
+ +
+
 Passing Information Into the Build Task Environment
 
 
 When running a task, BitBake tightly controls the execution
 environment of the build tasks to make
 sure unwanted contamination from the build machine cannot
 influence the build.
 Consequently, if you do want something to get passed into the
 build task environment, you must take these two steps:
 
 
 Tell BitBake to load what you want from the environment
 into the datastore.
 You can do so through the
 BB_ENV_EXTRAWHITE
 variable.
 For example, assume you want to prevent the build system from
 accessing your $HOME/.ccache
 directory.
 The following command tells BitBake to load
 CCACHE_DIR from the environment into
 the datastore:
 
 export BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE CCACHE_DIR"
 
 
 Tell BitBake to export what you have loaded into the
 datastore to the task environment of every running task.
 Loading something from the environment into the datastore
 (previous step) only makes it available in the datastore.
 To export it to the task environment of every running task,
 use a command similar to the following in your local configuration
 file local.conf or your
 distribution configuration file:
 
 export CCACHE_DIR
 
 
 A side effect of the previous steps is that BitBake
 records the variable as a dependency of the build process
 in things like the setscene checksums.
 If doing so results in unnecessary rebuilds of tasks, you can
 whitelist the variable so that the setscene code
 ignores the dependency when it creates checksums.
 
 
 
 
 
 Sometimes, it is useful to be able to obtain information
 from the original execution environment.
 Bitbake saves a copy of the original environment into
 a special variable named
 BB_ORIGENV.
 
 
 
 The BB_ORIGENV variable returns a datastore
 object that can be queried using the standard datastore operators
 such as getVar().
 The datastore object is useful, for example, to find the original
 DISPLAY variable.
 Here is an example:
 
 origenv = d.getVar("BB_ORIGENV", False)
 bar = origenv.getVar("BAR", False)
 
 The previous example returns BAR from the original
 execution environment.
 
 
 
 By default, BitBake cleans the environment to include only those
 things exported or listed in its whitelist to ensure that the build
 environment is reproducible and consistent.
 
+
+ +
+ Variable Flags + + + Variable flags (varflags) help control a task's functionality + and dependencies. + BitBake reads and writes varflags to the datastore using the following + command forms: + + <variable> = d.getVarFlags("<variable>") + self.d.setVarFlags("FOO", {"func": True}) + + + + + When working with varflags, the same syntax, with the exception of + overrides, applies. + In other words, you can set, append, and prepend varflags just like + variables. + See the + "Variable Flag Syntax" + section for details. + + + + BitBake has a defined set of varflags available for recipes and + classes. + Tasks support a number of these flags which control various + functionality of the task: + + dirs: + Directories that should be created before the task runs. + + cleandirs: + Empty directories that should created before the task runs. + + noexec: + Marks the tasks as being empty and no execution required. + The noexec flag can be used to set up + tasks as dependency placeholders, or to disable tasks defined + elsewhere that are not needed in a particular recipe. + + nostamp: + Tells BitBake to not generate a stamp file for a task, + which implies the task should always be executed. + + umask: + The umask to run the task under. + + deptask: + Controls task build-time dependencies. + See the + DEPENDS + variable and the + "Build Dependencies" + section for more information. + + rdeptask: + Controls task runtime dependencies. + See the + RDEPENDS + variable, the + RRECOMMENDS + variable, and the + "Runtime Dependencies" + section for more information. + + recrdeptask: + Controls task recursive runtime dependencies. + See the + RDEPENDS + variable, the + RRECOMMENDS + variable, and the + "Recursive Dependencies" + section for more information. + + depends: + Controls inter-task dependencies. + See the + DEPENDS + variable and the + "Inter-Task Dependencies" + section for more information. + + rdepends: + Controls inter-task runtime dependencies. + See the + RDEPENDS + variable, the + RRECOMMENDS + variable, and the + "Inter-Task Dependencies" + section for more information. + + postfuncs: + List of functions to call after the completion of the task. + + prefuncs: + List of functions to call before the task executes. + + stamp-extra-info: + Extra stamp information to append to the task's stamp. + As an example, OpenEmbedded uses this flag to allow + machine-specific tasks. + + + + + + Several varflags are useful for controlling how signatures are + calculated for variables. + For more information on this process, see the + "Checksums (Signatures)" + section. + + vardeps: + Specifies a space-separated list of additional + variables to add to a variable's dependencies + for the purposes of calculating its signature. + Adding variables to this list is useful, for example, when + a function refers to a variable in a manner that + does not allow BitBake to automatically determine + that the variable is referred to. + + vardepvalue: + If set, instructs BitBake to ignore the actual + value of the variable and instead use the specified + value when calculating the variable's signature. + + vardepsexclude: + Specifies a space-separated list of variables + that should be excluded from a variable's dependencies + for the purposes of calculating its signature. + + vardepvalueexclude: + Specifies a pipe-separated list of strings to exclude + from the variable's value when calculating the + variable's signature. + + + +
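 To illustrate a few of these flags together, here is a sketch for a
 hypothetical do_deploy task; the task name and the DEPLOYDIR and
 MYOPTION variables are invented for the example:

     # Create the output directory before the task runs
     do_deploy[dirs] = "${DEPLOYDIR}"

     # Always wipe and recreate the scratch area
     do_deploy[cleandirs] = "${DEPLOYDIR}/tmp"

     # Run the task with a fixed umask
     do_deploy[umask] = "022"

     # Never write a stamp, so the task runs on every invocation
     do_deploy[nostamp] = "1"

     # Make the task's signature also depend on the value of MYOPTION
     do_deploy[vardeps] = "MYOPTION"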
+ +
+ Events + + + BitBake allows installation of event handlers within + recipe and class files. + Events are triggered at certain points during operation, + such as the beginning of an operation against a given recipe + (*.bb file), the start of a given task, + task failure, task success, and so forth. + The intent is to make it easy to do things like email + notification on build failure. + + + + Following is an example event handler that + prints the name of the event and the content of + the FILE variable: + + addhandler myclass_eventhandler + python myclass_eventhandler() { + from bb.event import getName + from bb import data + print("The name of the Event is %s" % getName(e)) + print("The file we run for is %s" % data.getVar('FILE', e.data, True)) + } + + This event handler gets called every time an event is + triggered. + A global variable "e" is defined and + "e.data" contains an instance of + "bb.data". + With the getName(e) method, one can get + the name of the triggered event. + + + + Because you probably are only interested in a subset of events, + you would likely use the [eventmask] flag + for your event handler to be sure that only certain events + trigger the handler. + Given the previous example, suppose you only wanted the + bb.build.TaskFailed event to trigger that + event handler. + Use the flag as follows: + + addhandler myclass_eventhandler + myclass_eventhandler[eventmask] = "bb.build.TaskFailed" + python myclass_eventhandler() { + from bb.event import getName + from bb import data + print("The name of the Event is %s" % getName(e)) + print("The file we run for is %s" % data.getVar('FILE', e.data, True)) + } + + + + + During a standard build, the following common events might occur: + + + bb.event.ConfigParsed() + + + bb.event.ParseStarted() + + + bb.event.ParseProgress() + + + bb.event.ParseCompleted() + + + bb.event.BuildStarted() + + + bb.build.TaskStarted() + + + bb.build.TaskInvalid() + + + bb.build.TaskFailedSilent() + + + bb.build.TaskFailed() + + + bb.build.TaskSucceeded() + + + bb.event.BuildCompleted() + + + bb.cooker.CookerExit() + + + Here is a list of other events that occur based on specific requests + to the server: + + + bb.event.TreeDataPreparationStarted() + + + bb.event.TreeDataPreparationProgress + + + bb.event.TreeDataPreparationCompleted + + + bb.event.DepTreeGenerated + + + bb.event.CoreBaseFilesFound + + + bb.event.ConfigFilePathFound + + + bb.event.FilesMatchingFound + + + bb.event.ConfigFilesFound + + + bb.event.TargetsTreeGenerated + + + +
+ +
+ Variants - Class Extension Mechanism + + + BitBake supports two features that facilitate creating + from a single recipe file multiple incarnations of that + recipe file where all incarnations are buildable. + These features are enabled through the + BBCLASSEXTEND + and + BBVERSIONS + variables. + + The mechanism for this class extension is extremely + specific to the implementation. + Usually, the recipe's + PROVIDES, + PN, and + DEPENDS + variables would need to be modified by the extension class. + For specific examples, see the OE-Core + native, nativesdk, + and multilib classes. + + + BBCLASSEXTEND: + This variable is a space separated list of classes used to "extend" the + recipe for each variant. + Here is an example that results in a second incarnation of the current + recipe being available. + This second incarnation will have the "native" class inherited. + + BBCLASSEXTEND = "native" + + BBVERSIONS: + This variable allows a single recipe to build multiple versions of a + project from a single recipe file. + You can also specify conditional metadata + (using the + OVERRIDES + mechanism) for a single version, or an optionally named range of versions. + Here is an example: + + BBVERSIONS = "1.0 2.0 git" + SRC_URI_git = "git://someurl/somepath.git" + + BBVERSIONS = "1.0.[0-6]:1.0.0+ \ 1.0.[7-9]:1.0.7+" + SRC_URI_append_1.0.7+ = "file://some_patch_which_the_new_versions_need.patch;patch=1" + + The name of the range defaults to the original version of the + recipe. + For example, in OpenEmbedded, the recipe file + foo_1.0.0+.bb creates a default name range + of 1.0.0+. + This is useful because the range name is not only placed + into overrides, but it is also made available for the metadata to use + in the variable that defines the base recipe versions for use in + file:// search paths + (FILESPATH). + + + +
+ +
+ Dependencies
+ 
+ To allow for efficient operation when multiple processes
+ execute in parallel, BitBake handles dependencies at
+ the task level rather than at the recipe level.
+ 
+ 
+ This section describes the several types of dependency
+ mechanisms that BitBake supports.
+ 
+ 
+ Dependencies Internal to the <filename>.bb</filename> File
+ 
+ BitBake uses the addtask directive
+ to manage dependencies that are internal to a given recipe
+ file.
+ You can use the addtask directive to
+ indicate when a task is dependent on other tasks or when
+ other tasks depend on that task.
+ Here is an example:
+ 
+ addtask printdate after do_fetch before do_build
+ 
+ In this example, the printdate task
+ depends on the completion of the do_fetch
+ task, and the do_build task depends on the
+ completion of the printdate task.
+ 
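+ 
+ The printdate task referenced above could be defined as a small
+ Python task.
+ The following sketch is illustrative only and is not part of the
+ standard metadata; the "nostamp" varflag simply forces the task to
+ re-run on every build:
+ 
+     python do_printdate () {
+         import time
+         # Record the time at which the fetch completed for this recipe.
+         bb.note("Fetch finished at %s" % time.strftime('%Y-%m-%d %H:%M:%S'))
+     }
+     do_printdate[nostamp] = "1"
+     addtask printdate after do_fetch before do_build
+ 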
+ +
+ Build Dependencies
+ 
+ BitBake uses the
+ DEPENDS
+ variable to manage build-time dependencies.
+ The "deptask" varflag on a task signifies which task of each
+ item listed in DEPENDS must
+ complete before that task can be executed.
+ Here is an example:
+ 
+ do_configure[deptask] = "do_populate_sysroot"
+ 
+ In this example, the do_populate_sysroot
+ task of each item in DEPENDS must complete before
+ do_configure can execute.
+ 
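+ 
+ In practice, varflags such as "deptask" are usually set by a common
+ class rather than by individual recipes.
+ A minimal, hypothetical recipe fragment that relies on the flag
+ above would simply declare its build-time dependency:
+ 
+     # libfoo must be staged into the sysroot before this recipe's
+     # do_configure task runs.
+     DEPENDS = "libfoo"
+     do_configure[deptask] = "do_populate_sysroot"
+ 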
+ +
+ Runtime Dependencies
+ 
+ BitBake uses the
+ PACKAGES,
+ RDEPENDS, and
+ RRECOMMENDS
+ variables to manage runtime dependencies.
+ 
+ 
+ The PACKAGES variable lists runtime
+ packages.
+ Each of those packages can have RDEPENDS and
+ RRECOMMENDS runtime dependencies.
+ The "rdeptask" flag on a task signifies which task of each
+ item's runtime dependencies must have completed before that
+ task can be executed.
+ 
+ do_package_qa[rdeptask] = "do_packagedata"
+ 
+ In the previous example, the do_packagedata
+ task of each item in RDEPENDS must have
+ completed before do_package_qa can execute.
+ 
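+ 
+ A minimal, hypothetical fragment combining these variables with
+ the flag above could look like this:
+ 
+     # The main package requires "bar" at runtime, so do_package_qa
+     # waits for the do_packagedata task of "bar".
+     PACKAGES = "${PN} ${PN}-doc"
+     RDEPENDS_${PN} = "bar"
+     do_package_qa[rdeptask] = "do_packagedata"
+ 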
+ +
+ Recursive Dependencies
+ 
+ BitBake uses the "recrdeptask" flag to manage
+ recursive task dependencies.
+ BitBake looks through the build-time and runtime
+ dependencies of the current recipe, looks through
+ the task's inter-task
+ dependencies, and then adds dependencies on the
+ listed task.
+ Once BitBake has accomplished this, it recursively works through
+ the dependencies of those tasks.
+ Iterative passes continue until all dependencies are discovered
+ and added.
+ 
+ 
+ You might want BitBake to not only look for
+ dependencies of those tasks, but also look
+ for build-time and runtime dependencies of the dependent
+ tasks.
+ If that is the case, you need to reference the task name
+ itself in the task list:
+ 
+ do_a[recrdeptask] = "do_a do_b"
+ 
+ 
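+ 
+ As a sketch using purely hypothetical task names, a task that
+ gathers sources for an entire dependency tree could combine both
+ forms:
+ 
+     # do_collect_sources waits for the do_fetch task of every recursive
+     # build-time and runtime dependency. Listing the task's own name
+     # extends the recursion through the dependencies' own
+     # do_collect_sources tasks, as described above.
+     do_collect_sources[recrdeptask] = "do_collect_sources do_fetch"
+     addtask collect_sources after do_fetch
+     python do_collect_sources () {
+         bb.note("Sources for all dependencies of %s have been fetched" % d.getVar('PN', True))
+     }
+ 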
+ +
+ Inter-Task Dependencies + + + BitBake uses the "depends" flag in a more generic form + to manage inter-task dependencies. + This more generic form allows for inter-dependency + checks for specific tasks rather than checks for + the data in DEPENDS. + Here is an example: + + do_patch[depends] = "quilt-native:do_populate_sysroot" + + In this example, the do_populate_sysroot + task of the target quilt-native + must have completed before the + do_patch task can execute. + + + + The "rdepends" flag works in a similar way but takes targets + in the runtime namespace instead of the build-time dependency + namespace. + +
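+ 
+ As a hedged example using purely hypothetical target and task
+ names, the runtime-namespace form might look like this:
+ 
+     # The do_build task of this recipe waits for the do_package task of
+     # the runtime (package namespace) target "foo-client".
+     do_build[rdepends] = "foo-client:do_package"
+ 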
+
+ +
+ Accessing Datastore Variables Using Python
+ 
+ It is often necessary to access variables in the
+ BitBake datastore using Python functions.
+ The BitBake datastore has an API that provides this
+ access.
+ Here is a list of available operations:
+ 
+ 
+ Operation
+ Description
+ 
+ 
+ d.getVar("X", expand=False)
+ Returns the value of variable "X".
+ Using "expand=True" expands the value.
+ 
+ d.setVar("X", "value")
+ Sets the variable "X" to "value".
+ 
+ d.appendVar("X", "value")
+ Adds "value" to the end of the variable "X".
+ 
+ d.prependVar("X", "value")
+ Adds "value" to the start of the variable "X".
+ 
+ d.delVar("X")
+ Deletes the variable "X" from the datastore.
+ 
+ d.renameVar("X", "Y")
+ Renames the variable "X" to "Y".
+ 
+ d.getVarFlag("X", flag, expand=False)
+ Gets the named flag from the variable "X".
+ Using "expand=True" expands the named flag.
+ 
+ d.setVarFlag("X", flag, "value")
+ Sets the named flag for variable "X" to "value".
+ 
+ d.appendVarFlag("X", flag, "value")
+ Appends "value" to the named flag on the
+ variable "X".
+ 
+ d.prependVarFlag("X", flag, "value")
+ Prepends "value" to the named flag on
+ the variable "X".
+ 
+ d.delVarFlag("X", flag)
+ Deletes the named flag on the variable
+ "X" from the datastore.
+ 
+ d.setVarFlags("X", flagsdict)
+ Sets the flags specified in
+ the flagsdict() parameter.
+ setVarFlags does not clear previous flags.
+ Think of this operation as addVarFlags.
+ 
+ d.getVarFlags("X")
+ Returns a flagsdict of the flags for
+ the variable "X".
+ 
+ d.delVarFlags("X")
+ Deletes all the flags for the variable "X".
+ 
+ 
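+ 
+ The following task is a minimal sketch showing several of the
+ operations above used together from Python.
+ The task name and the MY_NOTE variable are purely illustrative:
+ 
+     python do_display_settings () {
+         # Read variables with and without expansion.
+         pn = d.getVar("PN", True)
+         raw_uri = d.getVar("SRC_URI", False)
+         bb.note("Recipe %s fetches from: %s" % (pn, raw_uri))
+ 
+         # Create, extend, and annotate a variable in the datastore.
+         d.setVar("MY_NOTE", "created-from-python")
+         d.appendVar("MY_NOTE", " and-appended")
+         d.setVarFlag("MY_NOTE", "doc", "Example variable set from a task")
+ 
+         bb.note("MY_NOTE = %s (doc: %s)" % (d.getVar("MY_NOTE", True),
+                                             d.getVarFlag("MY_NOTE", "doc", True)))
+     }
+     addtask display_settings
+ 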
+ +
+ Task Checksums and Setscene
+ 
+ BitBake uses checksums (or signatures) along with the setscene
+ mechanism to determine if a task needs to be run.
+ This section describes the process.
+ To help understand how BitBake does this, the section assumes an
+ OpenEmbedded metadata-based example.
+ 
+ 
+ This list is a placeholder for content that existed from previous work
+ on the manual.
+ Some or all of it probably needs to be integrated into the subsections
+ that make up this section.
+ For now, I have just provided a short glossary-like description
+ for each variable.
+ Ultimately, this list goes away.
+ 
+ STAMP:
+ The base path used to create stamp files.
+ 
+ STAMPCLEAN:
+ Again, the base path used to create stamp files, except that it
+ can contain wildcards for matching a range of files for clean
+ operations.
+ 
+ BB_STAMP_WHITELIST:
+ Lists stamp files that are looked at when the stamp policy
+ is "whitelist".
+ 
+ BB_STAMP_POLICY:
+ Defines the mode for comparing timestamps of stamp files.
+ 
+ BB_HASHCHECK_FUNCTION:
+ Specifies the name of the function to call during
+ the "setscene" part of the task's execution in order
+ to validate the list of task hashes.
+ 
+ BB_SETSCENE_VERIFY_FUNCTION:
+ Specifies a function to call that verifies the list of
+ planned task execution before the main task execution
+ happens.
+ 
+ BB_SETSCENE_DEPVALID:
+ Specifies a function BitBake calls that determines
+ whether BitBake requires a setscene dependency to
+ be met.
+ 
+ BB_TASKHASH:
+ Within an executing task, this variable holds the hash
+ of the task as returned by the currently enabled
+ signature generator.
+ 
+ 
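+ 
+ As a small, illustrative sketch (the task name is hypothetical),
+ the values of some of these variables can be inspected from within
+ a running task:
+ 
+     python do_show_signature_info () {
+         # BB_TASKHASH and STAMP are only meaningful while a task executes.
+         bb.note("Task hash:  %s" % d.getVar("BB_TASKHASH", True))
+         bb.note("Stamp base: %s" % d.getVar("STAMP", True))
+     }
+     addtask show_signature_info
+ 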
+
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml new file mode 100644 index 0000000..988719d --- /dev/null +++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml @@ -0,0 +1,2152 @@ + %poky; ] > + + + + +Variables Glossary + + + This chapter lists common variables used by BitBake and gives an overview + of their function and contents. + + + + Following are some points regarding the variables listed in this glossary: + + The variables listed in this glossary + are specific to BitBake. + Consequently, the descriptions are limited to that context. + + Also, variables exist in other systems that use BitBake + (e.g. The Yocto Project and OpenEmbedded) that have names identical + to those found in this glossary. + For such cases, the variables in those systems extend the + functionality of the variable as it is described here in + this glossary. + + Finally, there are variables mentioned in this + glossary that do not appear in the BitBake glossary. + These other variables are variables used in systems that use + BitBake. + + + + + + + + A + B + C + D + E + F + + H + + + + L + M + + O + P + + R + S + T + + + + + + + + + A + + ASSUME_PROVIDED + + + Lists recipe names + (PN + values) BitBake does not attempt to build. + Instead, BitBake assumes these recipes have already been + built. + + + + In OpenEmbedded Core, ASSUME_PROVIDED + mostly specifies native tools that should not be built. + An example is git-native, which + when specified allows for the Git binary from the host to + be used rather than building + git-native. + + + + + + + + B + + B + + + The directory in which BitBake executes functions + during a recipe's build process. + + + + + BB_CONSOLELOG + + + Specifies the path to a log file into which BitBake's user + interface writes output during the build. + + + + + BB_CURRENTTASK + + + Contains the name of the currently running task. + The name does not include the + do_ prefix. + + + + + BB_DANGLINGAPPENDS_WARNONLY + + + Defines how BitBake handles situations where an append + file (.bbappend) has no + corresponding recipe file (.bb). + This condition often occurs when layers get out of sync + (e.g. oe-core bumps a + recipe version and the old recipe no longer exists and the + other layer has not been updated to the new version + of the recipe yet). + + + + The default fatal behavior is safest because it is + the sane reaction given something is out of sync. + It is important to realize when your changes are no longer + being applied. + + + + + BB_DEFAULT_TASK + + + The default task to use when none is specified (e.g. + with the -c command line option). + The task name specified should not include the + do_ prefix. + + + + + BB_DISKMON_DIRS + + + Monitors disk space and available inodes during the build + and allows you to control the build based on these + parameters. + + + + Disk space monitoring is disabled by default. + When setting this variable, use the following form: + + BB_DISKMON_DIRS = "<action>,<dir>,<threshold> [...]" + + where: + + <action> is: + ABORT: Immediately abort the build when + a threshold is broken. + STOPTASKS: Stop the build after the currently + executing tasks have finished when + a threshold is broken. + WARN: Issue a warning but continue the + build when a threshold is broken. + Subsequent warnings are issued as + defined by the + BB_DISKMON_WARNINTERVAL variable, + which must be defined. + + <dir> is: + Any directory you choose. 
You can specify one or + more directories to monitor by separating the + groupings with a space. If two directories are + on the same device, only the first directory + is monitored. + + <threshold> is: + Either the minimum available disk space, + the minimum number of free inodes, or + both. You must specify at least one. To + omit one or the other, simply omit the value. + Specify the threshold using G, M, K for Gbytes, + Mbytes, and Kbytes, respectively. If you do + not specify G, M, or K, Kbytes is assumed by + default. Do not use GB, MB, or KB. + + + + + Here are some examples: + + BB_DISKMON_DIRS = "ABORT,${TMPDIR},1G,100K WARN,${SSTATE_DIR},1G,100K" + BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},1G" + BB_DISKMON_DIRS = "ABORT,${TMPDIR},,100K" + + The first example works only if you also set + the BB_DISKMON_WARNINTERVAL variable. + This example causes the build system to immediately + abort when either the disk space in ${TMPDIR} drops + below 1 Gbyte or the available free inodes drops below + 100 Kbytes. + Because two directories are provided with the variable, the + build system also issues a + warning when the disk space in the + ${SSTATE_DIR} directory drops + below 1 Gbyte or the number of free inodes drops + below 100 Kbytes. + Subsequent warnings are issued during intervals as + defined by the BB_DISKMON_WARNINTERVAL + variable. + + + + The second example stops the build after all currently + executing tasks complete when the minimum disk space + in the ${TMPDIR} + directory drops below 1 Gbyte. + No disk monitoring occurs for the free inodes in this case. + + + + The final example immediately aborts the build when the + number of free inodes in the ${TMPDIR} directory + drops below 100 Kbytes. + No disk space monitoring for the directory itself occurs + in this case. + + + + + BB_DISKMON_WARNINTERVAL + + + Defines the disk space and free inode warning intervals. + + + + If you are going to use the + BB_DISKMON_WARNINTERVAL variable, you must + also use the + BB_DISKMON_DIRS variable + and define its action as "WARN". + During the build, subsequent warnings are issued each time + disk space or number of free inodes further reduces by + the respective interval. + + + + If you do not provide a BB_DISKMON_WARNINTERVAL + variable and you do use BB_DISKMON_DIRS with + the "WARN" action, the disk monitoring interval defaults to + the following: + + BB_DISKMON_WARNINTERVAL = "50M,5K" + + + + + When specifying the variable in your configuration file, + use the following form: + + BB_DISKMON_WARNINTERVAL = "<disk_space_interval>,<disk_inode_interval>" + + where: + + <disk_space_interval> is: + An interval of memory expressed in either + G, M, or K for Gbytes, Mbytes, or Kbytes, + respectively. You cannot use GB, MB, or KB. + + <disk_inode_interval> is: + An interval of free inodes expressed in either + G, M, or K for Gbytes, Mbytes, or Kbytes, + respectively. You cannot use GB, MB, or KB. + + + + + Here is an example: + + BB_DISKMON_DIRS = "WARN,${SSTATE_DIR},1G,100K" + BB_DISKMON_WARNINTERVAL = "50M,5K" + + These variables cause BitBake to + issue subsequent warnings each time the available + disk space further reduces by 50 Mbytes or the number + of free inodes further reduces by 5 Kbytes in the + ${SSTATE_DIR} directory. + Subsequent warnings based on the interval occur each time + a respective interval is reached beyond the initial warning + (i.e. 1 Gbytes and 100 Kbytes). 
+ + + + + BB_ENV_WHITELIST + + + Specifies the internal whitelist of variables to allow + through from the external environment into BitBake's + datastore. + If the value of this variable is not specified + (which is the default), the following list is used: + BBPATH, + BB_PRESERVE_ENV, + BB_ENV_WHITELIST, + and + BB_ENV_EXTRAWHITE. + + You must set this variable in the external environment + in order for it to work. + + + + + + BB_ENV_EXTRAWHITE + + + Specifies an additional set of variables to allow through + (whitelist) from the external environment into BitBake's + datastore. + This list of variables are on top of the internal list + set in + BB_ENV_WHITELIST. + + You must set this variable in the external + environment in order for it to work. + + + + + + BB_FETCH_PREMIRRORONLY + + + When set to "1", causes BitBake's fetcher module to only + search + PREMIRRORS + for files. + BitBake will not search the main + SRC_URI + or + MIRRORS. + + + + + BB_FILENAME + + + Contains the filename of the recipe that owns the currently + running task. + For example, if the do_fetch task that + resides in the my-recipe.bb is + executing, the BB_FILENAME variable + contains "/foo/path/my-recipe.bb". + + + + + BB_GENERATE_MIRROR_TARBALLS + + + Causes tarballs of the Git repositories, including the + Git metadata, to be placed in the + DL_DIR + directory. + Anyone wishing to create a source mirror would want to + enable this variable. + + + + For performance reasons, creating and placing tarballs of + the Git repositories is not the default action by BitBake. + + BB_GENERATE_MIRROR_TARBALLS = "1" + + + + + + BB_HASHCONFIG_WHITELIST + + + Lists variables that are excluded from base configuration + checksum, which is used to determine if the cache can + be reused. + + + + One of the ways BitBake determines whether to re-parse the + main metadata is through checksums of the variables in the + datastore of the base configuration data. + There are variables that you typically want to exclude when + checking whether or not to re-parse and thus rebuild the + cache. + As an example, you would usually exclude + TIME and DATE + because these variables are always changing. + If you did not exclude them, BitBake would never reuse the + cache. + + + + + BB_HASHBASE_WHITELIST + + + Lists variables that are excluded from checksum and + dependency data. + Variables that are excluded can therefore change without + affecting the checksum mechanism. + A common example would be the variable for the path of + the build. + BitBake's output should not (and usually does not) depend + on the directory in which it was built. + + + + + BB_HASHCHECK_FUNCTION + + + Specifies the name of the function to call during the + "setscene" part of the task's execution in order to + validate the list of task hashes. + The function returns the list of setscene tasks that should + be executed. + + + + At this point in the execution of the code, the objective + is to quickly verify if a given setscene function is likely + to work or not. + It's easier to check the list of setscene functions in + one pass than to call many individual tasks. + The returned list need not be completely accurate. + A given setscene task can still later fail. + However, the more accurate the data returned, the more + efficient the build will be. + + + + + BB_INVALIDCONF + + + Used in combination with the + ConfigParsed event to trigger + re-parsing the base metadata (i.e. all the + recipes). + The ConfigParsed event can set the + variable to trigger the re-parse. 
+ You must be careful to avoid recursive loops with this + functionality. + + + + + BB_LOGFMT + + + Specifies the name of the log files saved into + ${T}. + By default, the BB_LOGFMT variable + is undefined and the log file names get created using the + following form: + + log.{task}.{pid} + + If you want to force log files to take a specific name, + you can set this variable in a configuration file. + + + + + BB_NICE_LEVEL + + + Allows BitBake to run at a specific priority + (i.e. nice level). + System permissions usually mean that BitBake can reduce its + priority but not raise it again. + See + BB_TASK_NICE_LEVEL + for additional information. + + + + + BB_NO_NETWORK + + + Disables network access in the BitBake fetcher modules. + With this access disabled, any command that attempts to + access the network becomes an error. + + + + Disabling network access is useful for testing source + mirrors, running builds when not connected to the Internet, + and when operating in certain kinds of firewall + environments. + + + + + BB_NUMBER_THREADS + + + The maximum number of tasks BitBake should run in parallel + at any one time. + If your host development system supports multiple cores, + a good rule of thumb is to set this variable to twice the + number of cores. + + + + + BB_NUMBER_PARSE_THREADS + + + Sets the number of threads BitBake uses when parsing. + By default, the number of threads is equal to the number + of cores on the system. + + + + + BB_ORIGENV + + + Contains a copy of the original external environment in + which BitBake was run. + The copy is taken before any whitelisted variable values + are filtered into BitBake's datastore. + + The contents of this variable is a datastore object + that can be queried using the normal datastore + operations. + + + + + + BB_PRESERVE_ENV + + + Disables whitelisting and instead allows all variables + through from the external environment into BitBake's + datastore. + + You must set this variable in the external + environment in order for it to work. + + + + + + BB_RUNFMT + + + Specifies the name of the executable script files + (i.e. run files) saved into + ${T}. + By default, the BB_RUNFMT variable + is undefined and the run file names get created using the + following form: + + run.{task}.{pid} + + If you want to force run files to take a specific name, + you can set this variable in a configuration file. + + + + + BB_RUNTASK + + + Contains the name of the currently executing task. + The value does not include the "do_" prefix. + For example, if the currently executing task is + do_config, the value is + "config". + + + + + BB_SCHEDULER + + + Selects the name of the scheduler to use for the + scheduling of BitBake tasks. + Three options exist: + + basic - + The basic framework from which everything derives. + Using this option causes tasks to be ordered + numerically as they are parsed. + + speed - + Executes tasks first that have more tasks + depending on them. + The "speed" option is the default. + + completion - + Causes the scheduler to try to complete a given + recipe once its build has started. + + + + + + + BB_SCHEDULERS + + + Defines custom schedulers to import. + Custom schedulers need to be derived from the + RunQueueScheduler class. + + + + For information how to select a scheduler, see the + BB_SCHEDULER + variable. + + + + + BB_SETSCENE_DEPVALID + + + Specifies a function BitBake calls that determines + whether BitBake requires a setscene dependency to be met. 
+ + + + When running a setscene task, BitBake needs to + know which dependencies of that setscene task also need + to be run. + Whether dependencies also need to be run is highly + dependent on the metadata. + The function specified by this variable returns a + "True" or "False" depending on whether the dependency needs + to be met. + + + + + BB_SETSCENE_VERIFY_FUNCTION + + + Specifies a function to call that verifies the list of + planned task execution before the main task execution + happens. + The function is called once BitBake has a list of setscene + tasks that have run and either succeeded or failed. + + + + The function allows for a task list check to see if they + make sense. + Even if BitBake was planning to skip a task, the + returned value of the function can force BitBake to run + the task, which is necessary under certain metadata + defined circumstances. + + + + + BB_SIGNATURE_EXCLUDE_FLAGS + + + Lists variable flags (varflags) + that can be safely excluded from checksum + and dependency data for keys in the datastore. + When generating checksum or dependency data for keys in the + datastore, the flags set against that key are normally + included in the checksum. + + + + For more information on varflags, see the + "Variable Flags" + section. + + + + + BB_SIGNATURE_HANDLER + + + Defines the name of the signature handler BitBake uses. + The signature handler defines the way stamp files are + created and handled, if and how the signature is + incorporated into the stamps, and how the signature + itself is generated. + + + + A new signature handler can be added by injecting a class + derived from the + SignatureGenerator class into the + global namespace. + + + + + BB_SRCREV_POLICY + + + Defines the behavior of the fetcher when it interacts with + source control systems and dynamic source revisions. + The BB_SRCREV_POLICY variable is + useful when working without a network. + + + + The variable can be set using one of two policies: + + cache - + Retains the value the system obtained previously + rather than querying the source control system + each time. + + clear - + Queries the source controls system every time. + With this policy, there is no cache. + The "clear" policy is the default. + + + + + + + BB_STAMP_POLICY + + + Defines the mode used for how timestamps of stamp files + are compared. + You can set the variable to one of the following modes: + + perfile - + Timestamp comparisons are only made + between timestamps of a specific recipe. + This is the default mode. + + full - + Timestamp comparisons are made for all + dependencies. + + whitelist - + Identical to "full" mode except timestamp + comparisons are made for recipes listed in the + BB_STAMP_WHITELIST + variable. + + + + Stamp policies are largely obsolete with the + introduction of setscene tasks. + + + + + + BB_STAMP_WHITELIST + + + Lists files whose stamp file timestamps are compared when + the stamp policy mode is set to "whitelist". + For information on stamp policies, see the + BB_STAMP_POLICY + variable. + + + + + BB_STRICT_CHECKSUM + + + Sets a more strict checksum mechanism for non-local URLs. + Setting this variable to a value causes BitBake + to report an error if it encounters a non-local URL + that does not have at least one checksum specified. + + + + + BB_TASK_NICE_LEVEL + + + Allows specific tasks to change their priority + (i.e. nice level). + + + + You can use this variable in combination with task + overrides to raise or lower priorities of specific tasks. 
+ For example, on the + Yocto Project + autobuilder, QEMU emulation in images is given a higher + priority as compared to build tasks to ensure that images + do not suffer timeouts on loaded systems. + + + + + BB_TASKHASH + + + Within an executing task, this variable holds the hash + of the task as returned by the currently enabled + signature generator. + + + + + BB_VERBOSE_LOGS + + + Controls how verbose BitBake is during builds. + If set, shell scripts echo commands and shell script output + appears on standard out (stdout). + + + + + BB_WORKERCONTEXT + + + Specifies if the current context is executing a task. + BitBake sets this variable to "1" when a task is + being executed. + The value is not set when the task is in server context + during parsing or event handling. + + + + + + BBCLASSEXTEND + + + Allows you to extend a recipe so that it builds variants + of the software. + Some examples of these variants for recipes from the + OpenEmbedded Core metadata are "natives" such as + quilt-native, which is a copy of + Quilt built to run on the build system; "crosses" such + as gcc-cross, which is a compiler + built to run on the build machine but produces binaries + that run on the target MACHINE; + "nativesdk", which targets the SDK machine instead of + MACHINE; and "mulitlibs" in the form + "multilib:<multilib_name>". + + + + To build a different variant of the recipe with a minimal + amount of code, it usually is as simple as adding the + variable to your recipe. + Here are two examples. + The "native" variants are from the OpenEmbedded Core + metadata: + + BBCLASSEXTEND =+ "native nativesdk" + BBCLASSEXTEND =+ "multilib:<multilib_name>" + + + + + + BBDEBUG + + + Sets the BitBake debug output level to a specific value + as incremented by the -d command line + option. + + You must set this variable in the external environment + in order for it to work. + + + + + + BBFILE_COLLECTIONS + + Lists the names of configured layers. + These names are used to find the other BBFILE_* + variables. + Typically, each layer appends its name to this variable in its + conf/layer.conf file. + + + + + BBFILE_PATTERN + + Variable that expands to match files from + BBFILES + in a particular layer. + This variable is used in the conf/layer.conf file and must + be suffixed with the name of the specific layer (e.g. + BBFILE_PATTERN_emenlow). + + + + BBFILE_PRIORITY + + Assigns the priority for recipe files in each layer. + This variable is useful in situations where the same recipe appears in + more than one layer. + Setting this variable allows you to prioritize a + layer against other layers that contain the same recipe - effectively + letting you control the precedence for the multiple layers. + The precedence established through this variable stands regardless of a + recipe's version + (PV variable). + For example, a layer that has a recipe with a higher PV value but for + which the BBFILE_PRIORITY is set to have a lower precedence still has a + lower precedence. + A larger value for the BBFILE_PRIORITY variable results in a higher + precedence. + For example, the value 6 has a higher precedence than the value 5. + If not specified, the BBFILE_PRIORITY variable is set based on layer + dependencies (see the + LAYERDEPENDS variable for + more information. + The default priority, if unspecified + for a layer with no dependencies, is the lowest defined priority + 1 + (or 1 if no priorities are defined). 
+ + You can use the command bitbake-layers show-layers to list + all configured layers along with their priorities. + + + + + BBFILES + + List of recipe files BitBake uses to build software. + + + + BBINCLUDED + + + Contains a space-separated list of all of all files that + BitBake's parser included during parsing of the current + file. + + + + + BBINCLUDELOGS + + + If set to a value, enables printing the task log when + reporting a failed task. + + + + + BBINCLUDELOGS_LINES + + + If + BBINCLUDELOGS + is set, specifies the maximum number of lines from the + task log file to print when reporting a failed task. + If you do not set BBINCLUDELOGS_LINES, + the entire log is printed. + + + + + BBLAYERS + + Lists the layers to enable during the build. + This variable is defined in the bblayers.conf configuration + file in the build directory. + Here is an example: + + BBLAYERS = " \ + /home/scottrif/poky/meta \ + /home/scottrif/poky/meta-yocto \ + /home/scottrif/poky/meta-yocto-bsp \ + /home/scottrif/poky/meta-mykernel \ + " + + + This example enables four layers, one of which is a custom, user-defined layer + named meta-mykernel. + + + + + BBMASK + + + Prevents BitBake from processing recipes and recipe + append files. + + + + You can use the BBMASK variable + to "hide" these .bb and + .bbappend files. + BitBake ignores any recipe or recipe append files that + match the expression. + It is as if BitBake does not see them at all. + Consequently, matching files are not parsed or otherwise + used by BitBake. + + The value you provide is passed to Python's regular + expression compiler. + The expression is compared against the full paths to + the files. + For complete syntax information, see Python's + documentation at + . + + + + The following example uses a complete regular expression + to tell BitBake to ignore all recipe and recipe append + files in the meta-ti/recipes-misc/ + directory: + + BBMASK = "meta-ti/recipes-misc/" + + If you want to mask out multiple directories or recipes, + use the vertical bar to separate the regular expression + fragments. + This next example masks out multiple directories and + individual recipes: + + BBMASK = "meta-ti/recipes-misc/|meta-ti/recipes-ti/packagegroup/" + BBMASK .= "|.*meta-oe/recipes-support/" + BBMASK .= "|.*openldap" + BBMASK .= "|.*opencv" + BBMASK .= "|.*lzma" + + Notice how the vertical bar is used to append the fragments. + + When specifying a directory name, use the trailing + slash character to ensure you match just that directory + name. + + + + + + BBPATH + + + Used by BitBake to locate class + (.bbclass) and configuration + (.conf) files. + This variable is analogous to the + PATH variable. + + + + If you run BitBake from a directory outside of the + build directory, + you must be sure to set + BBPATH to point to the + build directory. + Set the variable as you would any environment variable + and then run BitBake: + + $ BBPATH="<build_directory>" + $ export BBPATH + $ bitbake <target> + + + + + + BBSERVER + + + Points to the server that runs memory-resident BitBake. + The variable is only used when you employ memory-resident + BitBake. + + + + + BBVERSIONS + + + Allows a single recipe to build multiple versions of a + project from a single recipe file. + You also able to specify conditional metadata + using the + OVERRIDES + mechanism for a single version or for an optionally named + range of versions. + + + + For more information on BBVERSIONS, + see the + "Variants - Class Extension Mechanism" + section. 
+ + + + + BITBAKE_UI + + + Used to specify the UI module to use when running BitBake. + Using this variable is equivalent to using the + -u command-line option. + + You must set this variable in the external environment + in order for it to work. + + + + + + BUILDNAME + + + A name assigned to the build. + The name defaults to a datetime stamp of when the build was + started but can be defined by the metadata. + + + + + + + C + + CACHE + + + Specifies the directory BitBake uses to store a cache + of the metadata so it does not need to be parsed every + time BitBake is started. + + + + + + + D + + DEFAULT_PREFERENCE + + + Specifies a weak bias for recipe selection priority. + + + The most common usage of this is variable is to set + it to "-1" within a recipe for a development version of a + piece of software. + Using the variable in this way causes the stable version + of the recipe to build by default in the absence of + PREFERRED_VERSION + being used to build the development version. + + + The bias provided by DEFAULT_PREFERENCE + is weak and is overridden by + BBFILE_PRIORITY + if that variable is different between two layers + that contain different versions of the same recipe. + + + + + DEPENDS + + + Lists a recipe's build-time dependencies + (i.e. other recipe files). + + + + Consider this simple example for two recipes named "a" and + "b" that produce similarly named packages. + In this example, the DEPENDS + statement appears in the "a" recipe: + + DEPENDS = "b" + + Here, the dependency is such that the + do_configure task for recipe "a" + depends on the do_populate_sysroot + task of recipe "b". + This means anything that recipe "b" puts into sysroot + is available when recipe "a" is configuring itself. + + + + For information on runtime dependencies, see the + RDEPENDS + variable. + + + + + DESCRIPTION + + + A long description for the recipe. + + + + + DL_DIR + + + The central download directory used by the build process to + store downloads. + By default, DL_DIR gets files + suitable for mirroring for everything except Git + repositories. + If you want tarballs of Git repositories, use the + BB_GENERATE_MIRROR_TARBALLS + variable. + + + + + + + E + + EXCLUDE_FROM_WORLD + + + Directs BitBake to exclude a recipe from world builds (i.e. + bitbake world). + During world builds, BitBake locates, parses and builds all + recipes found in every layer exposed in the + bblayers.conf configuration file. + + + + To exclude a recipe from a world build using this variable, + set the variable to "1" in the recipe. + + + + Recipes added to EXCLUDE_FROM_WORLD + may still be built during a world build in order to satisfy + dependencies of other recipes. + Adding a recipe to EXCLUDE_FROM_WORLD + only ensures that the recipe is not explicitly added + to the list of build targets in a world build. + + + + + + + F + + FAKEROOT + + + Contains the command to use when running a shell script + in a fakeroot environment. + The FAKEROOT variable is obsolete + and has been replaced by the other + FAKEROOT* variables. + See these entries in the glossary for more information. + + + + + FAKEROOTBASEENV + + + Lists environment variables to set when executing + the command defined by + FAKEROOTCMD + that starts the bitbake-worker process + in the fakeroot environment. + + + + + FAKEROOTCMD + + + Contains the command that starts the bitbake-worker + process in the fakeroot environment. + + + + + FAKEROOTDIRS + + + Lists directories to create before running a task in + the fakeroot environment. 
+ + + + + FAKEROOTENV + + + Lists environment variables to set when running a task + in the fakeroot environment. + For additional information on environment variables and + the fakeroot environment, see the + FAKEROOTBASEENV + variable. + + + + + FAKEROOTNOENV + + + Lists environment variables to set when running a task + that is not in the fakeroot environment. + For additional information on environment variables and + the fakeroot environment, see the + FAKEROOTENV + variable. + + + + + FETCHCMD + + + Defines the command the BitBake fetcher module + executes when running fetch operations. + You need to use an override suffix when you use the + variable (e.g. FETCHCMD_git + or FETCHCMD_svn). + + + + + FILE + + + Points at the current file. + BitBake sets this variable during the parsing process + to identify the file being parsed. + BitBake also sets this variable when a recipe is being + executed to identify the recipe file. + + + + + FILESDIR + + + Specifies directories BitBake uses when searching for + patches and files. + The "local" fetcher module uses these directories when + handling file:// URLs if the file + was not found using + FILESPATH. + + The FILESDIR variable is + deprecated and you should use + FILESPATH in all new code. + + + + + + FILESPATH + + + Specifies directories BitBake uses when searching for + patches and files. + The "local" fetcher module uses these directories when + handling file:// URLs. + The variable behaves like a shell PATH + environment variable. + The value is a colon-separated list of directories that + are searched left-to-right in order. + + + + + + + + + H + + HOMEPAGE + + Website where more information about the software the recipe is building + can be found. + + + + + + I + + INHERIT + + + Causes the named class to be inherited at + this point during parsing. + The variable is only valid in configuration files. + + + + + + + + + L + + LAYERDEPENDS + + Lists the layers, separated by spaces, upon which this recipe depends. + Optionally, you can specify a specific layer version for a dependency + by adding it to the end of the layer name with a colon, (e.g. "anotherlayer:3" + to be compared against + LAYERVERSION_anotherlayer + in this case). + BitBake produces an error if any dependency is missing or + the version numbers do not match exactly (if specified). + + You use this variable in the conf/layer.conf file. + You must also use the specific layer name as a suffix + to the variable (e.g. LAYERDEPENDS_mylayer). + + + + LAYERDIR + + When used inside the layer.conf configuration + file, this variable provides the path of the current layer. + This variable is not available outside of layer.conf + and references are expanded immediately when parsing of the file completes. + + + + LAYERVERSION + + Optionally specifies the version of a layer as a single number. + You can use this variable within + LAYERDEPENDS + for another layer in order to depend on a specific version + of the layer. + + You use this variable in the conf/layer.conf file. + You must also use the specific layer name as a suffix + to the variable (e.g. LAYERDEPENDS_mylayer). + + + + LICENSE + + + The list of source licenses for the recipe. + + + + + + + M + + MIRRORS + + + Specifies additional paths from which BitBake gets source code. + When the build system searches for source code, it first + tries the local download directory. 
+ If that location fails, the build system tries locations + defined by + PREMIRRORS, + the upstream source, and then locations specified by + MIRRORS in that order. + + + + + MULTI_PROVIDER_WHITELIST + + + Allows you to suppress BitBake warnings caused when + building two separate recipes that provide the same + output. + + + + Bitbake normally issues a warning when building two + different recipes where each provides the same output. + This scenario is usually something the user does not + want. + However, cases do exist where it makes sense, particularly + in the virtual/* namespace. + You can use this variable to suppress BitBake's warnings. + + + + To use the variable, list provider names (e.g. + recipe names, virtual/kernel, + and so forth). + + + + + + + + + O + + OVERRIDES + + + BitBake uses OVERRIDES to control + what variables are overridden after BitBake parses + recipes and configuration files. + + + + Following is a simple example that uses an overrides + list based on machine architectures: + + OVERRIDES = "arm:x86:mips:powerpc" + + You can find information on how to use + OVERRIDES in the + "Conditional Syntax (Overrides)" + section. + + + + + + P + + PACKAGES + + The list of packages the recipe creates. + + + + + PACKAGES_DYNAMIC + + + A promise that your recipe satisfies runtime dependencies + for optional modules that are found in other recipes. + PACKAGES_DYNAMIC + does not actually satisfy the dependencies, it only states that + they should be satisfied. + For example, if a hard, runtime dependency + (RDEPENDS) + of another package is satisfied during the build + through the PACKAGES_DYNAMIC + variable, but a package with the module name is never actually + produced, then the other package will be broken. + + + + + PE + + + The epoch of the recipe. + By default, this variable is unset. + The variable is used to make upgrades possible when the + versioning scheme changes in some backwards incompatible + way. + + + + + PERSISTENT_DIR + + + Specifies the directory BitBake uses to store data that + should be preserved between builds. + In particular, the data stored is the data that uses + BitBake's persistent data API and the data used by the + PR Server and PR Service. + + + + + PF + + + Specifies the recipe or package name and includes all version and revision + numbers (i.e. eglibc-2.13-r20+svnr15508/ and + bash-4.2-r1/). + + + + + PN + + The recipe name. + + + + PR + + The revision of the recipe. + + + + + PREFERRED_PROVIDER + + + Determines which recipe should be given preference when + multiple recipes provide the same item. + You should always suffix the variable with the name of the + provided item, and you should set it to the + PN + of the recipe to which you want to give precedence. + Some examples: + + PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto" + PREFERRED_PROVIDER_virtual/xserver = "xserver-xf86" + PREFERRED_PROVIDER_virtual/libgl ?= "mesa" + + + + + + PREFERRED_PROVIDERS + + + Determines which recipe should be given preference for + cases where multiple recipes provide the same item. + Functionally, + PREFERRED_PROVIDERS is identical to + PREFERRED_PROVIDER. + However, the PREFERRED_PROVIDERS + variable lets you define preferences for multiple + situations using the following form: + + PREFERRED_PROVIDERS = "xxx:yyy aaa:bbb ..." 
+ + This form is a convenient replacement for the following: + + PREFERRED_PROVIDER_xxx = "yyy" + PREFERRED_PROVIDER_aaa = "bbb" + + + + + + PREFERRED_VERSION + + + If there are multiple versions of recipes available, this + variable determines which recipe should be given preference. + You must always suffix the variable with the + PN + you want to select, and you should set + PV + accordingly for precedence. + You can use the "%" character as a + wildcard to match any number of characters, which can be + useful when specifying versions that contain long revision + numbers that could potentially change. + Here are two examples: + + PREFERRED_VERSION_python = "2.7.3" + PREFERRED_VERSION_linux-yocto = "3.10%" + + + + + + PREMIRRORS + + + Specifies additional paths from which BitBake gets source code. + When the build system searches for source code, it first + tries the local download directory. + If that location fails, the build system tries locations + defined by PREMIRRORS, the upstream + source, and then locations specified by + MIRRORS + in that order. + + + + Typically, you would add a specific server for the + build system to attempt before any others by adding + something like the following to your configuration: + + PREMIRRORS_prepend = "\ + git://.*/.* http://www.yoctoproject.org/sources/ \n \ + ftp://.*/.* http://www.yoctoproject.org/sources/ \n \ + http://.*/.* http://www.yoctoproject.org/sources/ \n \ + https://.*/.* http://www.yoctoproject.org/sources/ \n" + + These changes cause the build system to intercept + Git, FTP, HTTP, and HTTPS requests and direct them to + the http:// sources mirror. + You can use file:// URLs to point + to local directories or network shares as well. + + + + + PROVIDES + + + A list of aliases by which a particular recipe can be + known. + By default, a recipe's own + PN + is implicitly already in its PROVIDES + list. + If a recipe uses PROVIDES, the + additional aliases are synonyms for the recipe and can + be useful satisfying dependencies of other recipes during + the build as specified by + DEPENDS. + + + + Consider the following example + PROVIDES statement from a recipe + file libav_0.8.11.bb: + + PROVIDES += "libpostproc" + + The PROVIDES statement results in + the "libav" recipe also being known as "libpostproc". + + + + + PRSERV_HOST + + + The network based + PR + service host and port. + + + + Following is an example of how the PRSERV_HOST variable is + set: + + PRSERV_HOST = "localhost:0" + + You must set the variable if you want to automatically + start a local PR service. + You can set PRSERV_HOST to other + values to use a remote PR service. + + + + + PV + + The version of the recipe. + + + + + + + + + R + + RDEPENDS + + + Lists a package's runtime dependencies (i.e. other packages) + that must be installed in order for the built package to run + correctly. + If a package in this list cannot be found during the build, + you will get a build error. + + + + Because the RDEPENDS variable applies + to packages being built, you should always use the variable + in a form with an attached package name. + For example, suppose you are building a development package + that depends on the perl package. + In this case, you would use the following + RDEPENDS statement: + + RDEPENDS_${PN}-dev += "perl" + + In the example, the development package depends on + the perl package. + Thus, the RDEPENDS variable has the + ${PN}-dev package name as part of the + variable. + + + + BitBake supports specifying versioned dependencies. 
+ Although the syntax varies depending on the packaging + format, BitBake hides these differences from you. + Here is the general syntax to specify versions with + the RDEPENDS variable: + + RDEPENDS_${PN} = "<package> (<operator> <version>)" + + For operator, you can specify the + following: + + = + < + > + <= + >= + + For example, the following sets up a dependency on version + 1.2 or greater of the package foo: + + RDEPENDS_${PN} = "foo (>= 1.2)" + + + + + For information on build-time dependencies, see the + DEPENDS + variable. + + + + + RPROVIDES + + + A list of package name aliases that a package also provides. + These aliases are useful for satisfying runtime dependencies + of other packages both during the build and on the target + (as specified by + RDEPENDS). + + + As with all package-controlling variables, you must always + use the variable in conjunction with a package name override. + Here is an example: + + RPROVIDES_${PN} = "widget-abi-2" + + + + + + RRECOMMENDS + + + A list of packages that extends the usability of a package + being built. + The package being built does not depend on this list of + packages in order to successfully build, but needs them for + the extended usability. + To specify runtime dependencies for packages, see the + RDEPENDS + variable. + + + + BitBake supports specifying versioned recommends. + Although the syntax varies depending on the packaging + format, BitBake hides these differences from you. + Here is the general syntax to specify versions with + the RRECOMMENDS variable: + + RRECOMMENDS_${PN} = "<package> (<operator> <version>)" + + For operator, you can specify the + following: + + = + < + > + <= + >= + + For example, the following sets up a recommend on version + 1.2 or greater of the package foo: + + RRECOMMENDS_${PN} = "foo (>= 1.2)" + + + + + + + + S + + SECTION + + The section in which packages should be categorized. + + + + SRC_URI + + + The list of source files - local or remote. + This variable tells BitBake which bits + to pull for the build and how to pull them. + For example, if the recipe or append file needs to + fetch a single tarball from the Internet, the recipe or + append file uses a SRC_URI + entry that specifies that tarball. + On the other hand, if the recipe or append file needs to + fetch a tarball and include a custom file, the recipe or + append file needs an SRC_URI variable + that specifies all those sources. + The following list explains the available URI protocols: + + file:// - + Fetches files, which are usually files shipped with + the metadata, + from the local machine. + The path is relative to the + FILESPATH + variable. + bzr:// - Fetches files from a + Bazaar revision control repository. + git:// - Fetches files from a + Git revision control repository. + osc:// - Fetches files from + an OSC (OpenSUSE Build service) revision control repository. + repo:// - Fetches files from + a repo (Git) repository. + http:// - Fetches files from + the Internet using HTTP. + https:// - Fetches files + from the Internet using HTTPS. + ftp:// - Fetches files + from the Internet using FTP. + cvs:// - Fetches files from + a CVS revision control repository. + hg:// - Fetches files from + a Mercurial (hg) revision control repository. + p4:// - Fetches files from + a Perforce (p4) revision control repository. + ssh:// - Fetches files from + a secure shell. + svn:// - Fetches files from + a Subversion (svn) revision control repository. 
+ + + Here are some additional options worth mentioning: + + unpack - Controls + whether or not to unpack the file if it is an archive. + The default action is to unpack the file. + subdir - Places the file + (or extracts its contents) into the specified + subdirectory. + This option is useful for unusual tarballs or other archives that + do not have their files already in a subdirectory within the archive. + + name - Specifies a + name to be used for association with SRC_URI checksums + when you have more than one file specified in SRC_URI. + + downloadfilename - Specifies + the filename used when storing the downloaded file. + + + + + + SRCDATE + + + The date of the source code used to build the package. + This variable applies only if the source was fetched from a Source Code Manager (SCM). + + + + + SRCREV + + + The revision of the source code used to build the package. + This variable applies only when using Subversion, Git, Mercurial and Bazaar. + If you want to build a fixed revision and you want + to avoid performing a query on the remote repository every time + BitBake parses your recipe, you should specify a SRCREV that is a + full revision identifier and not just a tag. + + + + + SRCREV_FORMAT + + + Helps construct valid + SRCREV + values when multiple source controlled URLs are used in + SRC_URI. + + + + The system needs help constructing these values under these + circumstances. + Each component in the SRC_URI + is assigned a name and these are referenced + in the SRCREV_FORMAT variable. + Consider an example with URLs named "machine" and "meta". + In this case, SRCREV_FORMAT could look + like "machine_meta" and those names would have the SCM + versions substituted into each position. + Only one AUTOINC placeholder is added + and if needed. + And, this placeholder is placed at the start of the + returned string. + + + + + STAMP + + + Specifies the base path used to create recipe stamp files. + The path to an actual stamp file is constructed by evaluating this + string and then appending additional information. + + + + + STAMPCLEAN + + + Specifies the base path used to create recipe stamp files. + Unlike the + STAMP + variable, STAMPCLEAN can contain + wildcards to match the range of files a clean operation + should remove. + BitBake uses a clean operation to remove any other stamps + it should be removing when creating a new stamp. + + + + + SUMMARY + + + A short summary for the recipe, which is 72 characters or less. + + + + + + + T + + T + + Points to a directory were BitBake places + temporary files, which consist mostly of task logs and + scripts, when building a particular recipe. + + + + + TOPDIR + + + Points to the build directory. + BitBake automatically sets this variable. + + + + + + + + + + + + diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-style.css b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-style.css new file mode 100644 index 0000000..65da2a4 --- /dev/null +++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-style.css @@ -0,0 +1,984 @@ +/* + Generic XHTML / DocBook XHTML CSS Stylesheet. + + Browser wrangling and typographic design by + Oyvind Kolas / pippin@gimp.org + + Customised for Poky by + Matthew Allum / mallum@o-hand.com + + Thanks to: + Liam R. E. Quin + William Skaggs + Jakub Steiner + + Structure + --------- + + The stylesheet is divided into the following sections: + + Positioning + Margins, paddings, width, font-size, clearing. 
+ Decorations + Borders, style + Colors + Colors + Graphics + Graphical backgrounds + Nasty IE tweaks + Workarounds needed to make it work in internet explorer, + currently makes the stylesheet non validating, but up until + this point it is validating. + Mozilla extensions + Transparency for footer + Rounded corners on boxes + +*/ + + + /*************** / + / Positioning / +/ ***************/ + +body { + font-family: Verdana, Sans, sans-serif; + + min-width: 640px; + width: 80%; + margin: 0em auto; + padding: 2em 5em 5em 5em; + color: #333; +} + +h1,h2,h3,h4,h5,h6,h7 { + font-family: Arial, Sans; + color: #00557D; + clear: both; +} + +h1 { + font-size: 2em; + text-align: left; + padding: 0em 0em 0em 0em; + margin: 2em 0em 0em 0em; +} + +h2.subtitle { + margin: 0.10em 0em 3.0em 0em; + padding: 0em 0em 0em 0em; + font-size: 1.8em; + padding-left: 20%; + font-weight: normal; + font-style: italic; +} + +h2 { + margin: 2em 0em 0.66em 0em; + padding: 0.5em 0em 0em 0em; + font-size: 1.5em; + font-weight: bold; +} + +h3.subtitle { + margin: 0em 0em 1em 0em; + padding: 0em 0em 0em 0em; + font-size: 142.14%; + text-align: right; +} + +h3 { + margin: 1em 0em 0.5em 0em; + padding: 1em 0em 0em 0em; + font-size: 140%; + font-weight: bold; +} + +h4 { + margin: 1em 0em 0.5em 0em; + padding: 1em 0em 0em 0em; + font-size: 120%; + font-weight: bold; +} + +h5 { + margin: 1em 0em 0.5em 0em; + padding: 1em 0em 0em 0em; + font-size: 110%; + font-weight: bold; +} + +h6 { + margin: 1em 0em 0em 0em; + padding: 1em 0em 0em 0em; + font-size: 110%; + font-weight: bold; +} + +.authorgroup { + background-color: transparent; + background-repeat: no-repeat; + padding-top: 256px; + background-image: url("figures/bitbake-title.png"); + background-position: left top; + margin-top: -256px; + padding-right: 50px; + margin-left: 0px; + text-align: right; + width: 740px; +} + +h3.author { + margin: 0em 0me 0em 0em; + padding: 0em 0em 0em 0em; + font-weight: normal; + font-size: 100%; + color: #333; + clear: both; +} + +.author tt.email { + font-size: 66%; +} + +.titlepage hr { + width: 0em; + clear: both; +} + +.revhistory { + padding-top: 2em; + clear: both; +} + +.toc, +.list-of-tables, +.list-of-examples, +.list-of-figures { + padding: 1.33em 0em 2.5em 0em; + color: #00557D; +} + +.toc p, +.list-of-tables p, +.list-of-figures p, +.list-of-examples p { + padding: 0em 0em 0em 0em; + padding: 0em 0em 0.3em; + margin: 1.5em 0em 0em 0em; +} + +.toc p b, +.list-of-tables p b, +.list-of-figures p b, +.list-of-examples p b{ + font-size: 100.0%; + font-weight: bold; +} + +.toc dl, +.list-of-tables dl, +.list-of-figures dl, +.list-of-examples dl { + margin: 0em 0em 0.5em 0em; + padding: 0em 0em 0em 0em; +} + +.toc dt { + margin: 0em 0em 0em 0em; + padding: 0em 0em 0em 0em; +} + +.toc dd { + margin: 0em 0em 0em 2.6em; + padding: 0em 0em 0em 0em; +} + +div.glossary dl, +div.variablelist dl { +} + +.glossary dl dt, +.variablelist dl dt, +.variablelist dl dt span.term { + font-weight: normal; + width: 20em; + text-align: right; +} + +.variablelist dl dt { + margin-top: 0.5em; +} + +.glossary dl dd, +.variablelist dl dd { + margin-top: -1em; + margin-left: 25.5em; +} + +.glossary dd p, +.variablelist dd p { + margin-top: 0em; + margin-bottom: 1em; +} + + +div.calloutlist table td { + padding: 0em 0em 0em 0em; + margin: 0em 0em 0em 0em; +} + +div.calloutlist table td p { + margin-top: 0em; + margin-bottom: 1em; +} + +div p.copyright { + text-align: left; +} + +div.legalnotice p.legalnotice-title { + margin-bottom: 0em; +} + +p { + 
line-height: 1.5em; + margin-top: 0em; + +} + +dl { + padding-top: 0em; +} + +hr { + border: solid 1px; +} + + +.mediaobject, +.mediaobjectco { + text-align: center; +} + +img { + border: none; +} + +ul { + padding: 0em 0em 0em 1.5em; +} + +ul li { + padding: 0em 0em 0em 0em; +} + +ul li p { + text-align: left; +} + +table { + width :100%; +} + +th { + padding: 0.25em; + text-align: left; + font-weight: normal; + vertical-align: top; +} + +td { + padding: 0.25em; + vertical-align: top; +} + +p a[id] { + margin: 0px; + padding: 0px; + display: inline; + background-image: none; +} + +a { + text-decoration: underline; + color: #444; +} + +pre { + overflow: auto; +} + +a:hover { + text-decoration: underline; + /*font-weight: bold;*/ +} + +/* This style defines how the permalink character + appears by itself and when hovered over with + the mouse. */ + +[alt='Permalink'] { color: #eee; } +[alt='Permalink']:hover { color: black; } + + +div.informalfigure, +div.informalexample, +div.informaltable, +div.figure, +div.table, +div.example { + margin: 1em 0em; + padding: 1em; + page-break-inside: avoid; +} + + +div.informalfigure p.title b, +div.informalexample p.title b, +div.informaltable p.title b, +div.figure p.title b, +div.example p.title b, +div.table p.title b{ + padding-top: 0em; + margin-top: 0em; + font-size: 100%; + font-weight: normal; +} + +.mediaobject .caption, +.mediaobject .caption p { + text-align: center; + font-size: 80%; + padding-top: 0.5em; + padding-bottom: 0.5em; +} + +.epigraph { + padding-left: 55%; + margin-bottom: 1em; +} + +.epigraph p { + text-align: left; +} + +.epigraph .quote { + font-style: italic; +} +.epigraph .attribution { + font-style: normal; + text-align: right; +} + +span.application { + font-style: italic; +} + +.programlisting { + font-family: monospace; + font-size: 80%; + white-space: pre; + margin: 1.33em 0em; + padding: 1.33em; +} + +.tip, +.warning, +.caution, +.note { + margin-top: 1em; + margin-bottom: 1em; + +} + +/* force full width of table within div */ +.tip table, +.warning table, +.caution table, +.note table { + border: none; + width: 100%; +} + + +.tip table th, +.warning table th, +.caution table th, +.note table th { + padding: 0.8em 0.0em 0.0em 0.0em; + margin : 0em 0em 0em 0em; +} + +.tip p, +.warning p, +.caution p, +.note p { + margin-top: 0.5em; + margin-bottom: 0.5em; + padding-right: 1em; + text-align: left; +} + +.acronym { + text-transform: uppercase; +} + +b.keycap, +.keycap { + padding: 0.09em 0.3em; + margin: 0em; +} + +.itemizedlist li { + clear: none; +} + +.filename { + font-size: medium; + font-family: Courier, monospace; +} + + +div.navheader, div.heading{ + position: absolute; + left: 0em; + top: 0em; + width: 100%; + background-color: #cdf; + width: 100%; +} + +div.navfooter, div.footing{ + position: fixed; + left: 0em; + bottom: 0em; + background-color: #eee; + width: 100%; +} + + +div.navheader td, +div.navfooter td { + font-size: 66%; +} + +div.navheader table th { + /*font-family: Georgia, Times, serif;*/ + /*font-size: x-large;*/ + font-size: 80%; +} + +div.navheader table { + border-left: 0em; + border-right: 0em; + border-top: 0em; + width: 100%; +} + +div.navfooter table { + border-left: 0em; + border-right: 0em; + border-bottom: 0em; + width: 100%; +} + +div.navheader table td a, +div.navfooter table td a { + color: #777; + text-decoration: none; +} + +/* normal text in the footer */ +div.navfooter table td { + color: black; +} + +div.navheader table td a:visited, +div.navfooter table td a:visited { + color: 
#444; +} + + +/* links in header and footer */ +div.navheader table td a:hover, +div.navfooter table td a:hover { + text-decoration: underline; + background-color: transparent; + color: #33a; +} + +div.navheader hr, +div.navfooter hr { + display: none; +} + + +.qandaset tr.question td p { + margin: 0em 0em 1em 0em; + padding: 0em 0em 0em 0em; +} + +.qandaset tr.answer td p { + margin: 0em 0em 1em 0em; + padding: 0em 0em 0em 0em; +} +.answer td { + padding-bottom: 1.5em; +} + +.emphasis { + font-weight: bold; +} + + + /************* / + / decorations / +/ *************/ + +.titlepage { +} + +.part .title { +} + +.subtitle { + border: none; +} + +/* +h1 { + border: none; +} + +h2 { + border-top: solid 0.2em; + border-bottom: solid 0.06em; +} + +h3 { + border-top: 0em; + border-bottom: solid 0.06em; +} + +h4 { + border: 0em; + border-bottom: solid 0.06em; +} + +h5 { + border: 0em; +} +*/ + +.programlisting { + border: solid 1px; +} + +div.figure, +div.table, +div.informalfigure, +div.informaltable, +div.informalexample, +div.example { + border: 1px solid; +} + + + +.tip, +.warning, +.caution, +.note { + border: 1px solid; +} + +.tip table th, +.warning table th, +.caution table th, +.note table th { + border-bottom: 1px solid; +} + +.question td { + border-top: 1px solid black; +} + +.answer { +} + + +b.keycap, +.keycap { + border: 1px solid; +} + + +div.navheader, div.heading{ + border-bottom: 1px solid; +} + + +div.navfooter, div.footing{ + border-top: 1px solid; +} + + /********* / + / colors / +/ *********/ + +body { + color: #333; + background: white; +} + +a { + background: transparent; +} + +a:hover { + background-color: #dedede; +} + + +h1, +h2, +h3, +h4, +h5, +h6, +h7, +h8 { + background-color: transparent; +} + +hr { + border-color: #aaa; +} + + +.tip, .warning, .caution, .note { + border-color: #fff; +} + + +.tip table th, +.warning table th, +.caution table th, +.note table th { + border-bottom-color: #fff; +} + + +.warning { + background-color: #f0f0f2; +} + +.caution { + background-color: #f0f0f2; +} + +.tip { + background-color: #f0f0f2; +} + +.note { + background-color: #f0f0f2; +} + +.glossary dl dt, +.variablelist dl dt, +.variablelist dl dt span.term { + color: #044; +} + +div.figure, +div.table, +div.example, +div.informalfigure, +div.informaltable, +div.informalexample { + border-color: #aaa; +} + +pre.programlisting { + color: black; + background-color: #fff; + border-color: #aaa; + border-width: 2px; +} + +.guimenu, +.guilabel, +.guimenuitem { + background-color: #eee; +} + + +b.keycap, +.keycap { + background-color: #eee; + border-color: #999; +} + + +div.navheader { + border-color: black; +} + + +div.navfooter { + border-color: black; +} + + + /*********** / + / graphics / +/ ***********/ + +/* +body { + background-image: url("images/body_bg.jpg"); + background-attachment: fixed; +} + +.navheader, +.note, +.tip { + background-image: url("images/note_bg.jpg"); + background-attachment: fixed; +} + +.warning, +.caution { + background-image: url("images/warning_bg.jpg"); + background-attachment: fixed; +} + +.figure, +.informalfigure, +.example, +.informalexample, +.table, +.informaltable { + background-image: url("images/figure_bg.jpg"); + background-attachment: fixed; +} + +*/ +h1, +h2, +h3, +h4, +h5, +h6, +h7{ +} + +/* +Example of how to stick an image as part of the title. 
+ +div.article .titlepage .title +{ + background-image: url("figures/white-on-black.png"); + background-position: center; + background-repeat: repeat-x; +} +*/ + +div.preface .titlepage .title, +div.colophon .title, +div.chapter .titlepage .title, +div.article .titlepage .title +{ +} + +div.section div.section .titlepage .title, +div.sect2 .titlepage .title { + background: none; +} + + +h1.title { + background-color: transparent; + background-repeat: no-repeat; + height: 256px; + text-indent: -9000px; + overflow:hidden; +} + +h2.subtitle { + background-color: transparent; + text-indent: -9000px; + overflow:hidden; + width: 0px; + display: none; +} + + /*************************************** / + / pippin.gimp.org specific alterations / +/ ***************************************/ + +/* +div.heading, div.navheader { + color: #777; + font-size: 80%; + padding: 0; + margin: 0; + text-align: left; + position: absolute; + top: 0px; + left: 0px; + width: 100%; + height: 50px; + background: url('/gfx/heading_bg.png') transparent; + background-repeat: repeat-x; + background-attachment: fixed; + border: none; +} + +div.heading a { + color: #444; +} + +div.footing, div.navfooter { + border: none; + color: #ddd; + font-size: 80%; + text-align:right; + + width: 100%; + padding-top: 10px; + position: absolute; + bottom: 0px; + left: 0px; + + background: url('/gfx/footing_bg.png') transparent; +} +*/ + + + + /****************** / + / nasty ie tweaks / +/ ******************/ + +/* +div.heading, div.navheader { + width:expression(document.body.clientWidth + "px"); +} + +div.footing, div.navfooter { + width:expression(document.body.clientWidth + "px"); + margin-left:expression("-5em"); +} +body { + padding:expression("4em 5em 0em 5em"); +} +*/ + + /**************************************** / + / mozilla vendor specific css extensions / +/ ****************************************/ +/* +div.navfooter, div.footing{ + -moz-opacity: 0.8em; +} + +div.figure, +div.table, +div.informalfigure, +div.informaltable, +div.informalexample, +div.example, +.tip, +.warning, +.caution, +.note { + -moz-border-radius: 0.5em; +} + +b.keycap, +.keycap { + -moz-border-radius: 0.3em; +} +*/ + +table tr td table tr td { + display: none; +} + + +hr { + display: none; +} + +table { + border: 0em; +} + + .photo { + float: right; + margin-left: 1.5em; + margin-bottom: 1.5em; + margin-top: 0em; + max-width: 17em; + border: 1px solid gray; + padding: 3px; + background: white; +} + .seperator { + padding-top: 2em; + clear: both; + } + + #validators { + margin-top: 5em; + text-align: right; + color: #777; + } + @media print { + body { + font-size: 8pt; + } + .noprint { + display: none; + } + } + + +.tip, +.note { + background: #f0f0f2; + color: #333; + padding: 20px; + margin: 20px; +} + +.tip h3, +.note h3 { + padding: 0em; + margin: 0em; + font-size: 2em; + font-weight: bold; + color: #333; +} + +.tip a, +.note a { + color: #333; + text-decoration: underline; +} + +.footnote { + font-size: small; + color: #333; +} + +/* Changes the announcement text */ +.tip h3, +.warning h3, +.caution h3, +.note h3 { + font-size:large; + color: #00557D; +} diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual.xml b/bitbake/doc/bitbake-user-manual/bitbake-user-manual.xml new file mode 100644 index 0000000..7fff933 --- /dev/null +++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual.xml @@ -0,0 +1,88 @@ + + + + + + + + + + + + + BitBake User Manual + + + + + Richard Purdie, Chris Larson, and Phil Blundell + + BitBake Community + + 
bitbake-devel@lists.openembedded.org + + + + + + + 2004-2015 + Richard Purdie + Chris Larson + and Phil Blundell + + + + + This work is licensed under the Creative Commons Attribution License. + To view a copy of this license, visit + http://creativecommons.org/licenses/by/2.5/ + or send a letter to Creative Commons, 444 Castro Street, + Suite 900, Mountain View, California 94041, USA. + + + + + + + + + + + + + + + + + diff --git a/bitbake/doc/bitbake-user-manual/figures/bitbake-title.png b/bitbake/doc/bitbake-user-manual/figures/bitbake-title.png new file mode 100644 index 0000000..cb29015 Binary files /dev/null and b/bitbake/doc/bitbake-user-manual/figures/bitbake-title.png differ diff --git a/bitbake/doc/bitbake-user-manual/html.css b/bitbake/doc/bitbake-user-manual/html.css new file mode 100644 index 0000000..6eedfd3 --- /dev/null +++ b/bitbake/doc/bitbake-user-manual/html.css @@ -0,0 +1,281 @@ +/* Feuille de style DocBook du projet Traduc.org */ +/* DocBook CSS stylesheet of the Traduc.org project */ + +/* (c) Jean-Philippe Guérard - 14 août 2004 */ +/* (c) Jean-Philippe Guérard - 14 August 2004 */ + +/* Cette feuille de style est libre, vous pouvez la */ +/* redistribuer et la modifier selon les termes de la Licence */ +/* Art Libre. Vous trouverez un exemplaire de cette Licence sur */ +/* http://tigreraye.org/Petit-guide-du-traducteur.html#licence-art-libre */ + +/* This work of art is free, you can redistribute it and/or */ +/* modify it according to terms of the Free Art license. You */ +/* will find a specimen of this license on the Copyleft */ +/* Attitude web site: http://artlibre.org as well as on other */ +/* sites. */ +/* Please note that the French version of this licence as shown */ +/* on http://tigreraye.org/Petit-guide-du-traducteur.html#licence-art-libre */ +/* is only official licence of this document. The English */ +/* is only provided to help you understand this licence. 
*/ + +/* La dernière version de cette feuille de style est toujours */ +/* disponible sur : http://tigreraye.org/style.css */ +/* Elle est également disponible sur : */ +/* http://www.traduc.org/docs/HOWTO/lecture/style.css */ + +/* The latest version of this stylesheet is available from: */ +/* http://tigreraye.org/style.css */ +/* It is also available on: */ +/* http://www.traduc.org/docs/HOWTO/lecture/style.css */ + +/* N'hésitez pas à envoyer vos commentaires et corrections à */ +/* Jean-Philippe Guérard */ + +/* Please send feedback and bug reports to */ +/* Jean-Philippe Guérard */ + +/* $Id: style.css,v 1.14 2004/09/10 20:12:09 fevrier Exp fevrier $ */ + +/* Présentation générale du document */ +/* Overall document presentation */ + +body { + /* + font-family: Apolline, "URW Palladio L", Garamond, jGaramond, + "Bitstream Cyberbit", "Palatino Linotype", serif; + */ + margin: 7%; + background-color: white; +} + +/* Taille du texte */ +/* Text size */ + +* { font-size: 100%; } + +/* Gestion des textes mis en relief imbriqués */ +/* Embedded emphasis */ + +em { font-style: italic; } +em em { font-style: normal; } +em em em { font-style: italic; } + +/* Titres */ +/* Titles */ + +h1 { font-size: 200%; font-weight: 900; } +h2 { font-size: 160%; font-weight: 900; } +h3 { font-size: 130%; font-weight: bold; } +h4 { font-size: 115%; font-weight: bold; } +h5 { font-size: 108%; font-weight: bold; } +h6 { font-weight: bold; } + +/* Nom de famille en petites majuscules (uniquement en français) */ +/* Last names in small caps (for French only) */ + +*[class~="surname"]:lang(fr) { font-variant: small-caps; } + +/* Blocs de citation */ +/* Quotation blocs */ + +div[class~="blockquote"] { + border: solid 2px #AAA; + padding: 5px; + margin: 5px; +} + +div[class~="blockquote"] > table { + border: none; +} + +/* Blocs litéraux : fond gris clair */ +/* Literal blocs: light gray background */ + +*[class~="literallayout"] { + background: #f0f0f0; + padding: 5px; + margin: 5px; +} + +/* Programmes et captures texte : fond bleu clair */ +/* Listing and text screen snapshots: light blue background */ + +*[class~="programlisting"], *[class~="screen"] { + background: #f0f0ff; + padding: 5px; + margin: 5px; +} + +/* Les textes à remplacer sont surlignés en vert pâle */ +/* Replaceable text in highlighted in pale green */ + +*[class~="replaceable"] { + background-color: #98fb98; + font-style: normal; } + +/* Tables : fonds gris clair & bords simples */ +/* Tables: light gray background and solid borders */ + +*[class~="table"] *[class~="title"] { width:100%; border: 0px; } + +table { + border: 1px solid #aaa; + border-collapse: collapse; + padding: 2px; + margin: 5px; +} + +/* Listes simples en style table */ +/* Simples lists in table presentation */ + +table[class~="simplelist"] { + background-color: #F0F0F0; + margin: 5px; + border: solid 1px #AAA; +} + +table[class~="simplelist"] td { + border: solid 1px #AAA; +} + +/* Les tables */ +/* Tables */ + +*[class~="table"] table { + background-color: #F0F0F0; + border: solid 1px #AAA; +} +*[class~="informaltable"] table { background-color: #F0F0F0; } + +th,td { + vertical-align: baseline; + text-align: left; + padding: 0.1em 0.3em; + empty-cells: show; +} + +/* Alignement des colonnes */ +/* Colunms alignment */ + +td[align=center] , th[align=center] { text-align: center; } +td[align=right] , th[align=right] { text-align: right; } +td[align=left] , th[align=left] { text-align: left; } +td[align=justify] , th[align=justify] { text-align: justify; } + +/* Pas de 
marge autour des images */ +/* No inside margins for images */ + +img { border: 0; } + +/* Les liens ne sont pas soulignés */ +/* No underlines for links */ + +:link , :visited , :active { text-decoration: none; } + +/* Prudence : cadre jaune et fond jaune clair */ +/* Caution: yellow border and light yellow background */ + +*[class~="caution"] { + border: solid 2px yellow; + background-color: #ffffe0; + padding: 1em 6px 1em ; + margin: 5px; +} + +*[class~="caution"] th { + vertical-align: middle +} + +*[class~="caution"] table { + background-color: #ffffe0; + border: none; +} + +/* Note importante : cadre jaune et fond jaune clair */ +/* Important: yellow border and light yellow background */ + +*[class~="important"] { + border: solid 2px yellow; + background-color: #ffffe0; + padding: 1em 6px 1em; + margin: 5px; +} + +*[class~="important"] th { + vertical-align: middle +} + +*[class~="important"] table { + background-color: #ffffe0; + border: none; +} + +/* Mise en évidence : texte légèrement plus grand */ +/* Highlights: slightly larger texts */ + +*[class~="highlights"] { + font-size: 110%; +} + +/* Note : cadre bleu et fond bleu clair */ +/* Notes: blue border and light blue background */ + +*[class~="note"] { + border: solid 2px #7099C5; + background-color: #f0f0ff; + padding: 1em 6px 1em ; + margin: 5px; +} + +*[class~="note"] th { + vertical-align: middle +} + +*[class~="note"] table { + background-color: #f0f0ff; + border: none; +} + +/* Astuce : cadre vert et fond vert clair */ +/* Tip: green border and light green background */ + +*[class~="tip"] { + border: solid 2px #00ff00; + background-color: #f0ffff; + padding: 1em 6px 1em ; + margin: 5px; +} + +*[class~="tip"] th { + vertical-align: middle; +} + +*[class~="tip"] table { + background-color: #f0ffff; + border: none; +} + +/* Avertissement : cadre rouge et fond rouge clair */ +/* Warning: red border and light red background */ + +*[class~="warning"] { + border: solid 2px #ff0000; + background-color: #fff0f0; + padding: 1em 6px 1em ; + margin: 5px; +} + +*[class~="warning"] th { + vertical-align: middle; +} + + +*[class~="warning"] table { + background-color: #fff0f0; + border: none; +} + +/* Fin */ +/* The End */ + diff --git a/bitbake/doc/bitbake.1 b/bitbake/doc/bitbake.1 new file mode 100644 index 0000000..a6c8d97 --- /dev/null +++ b/bitbake/doc/bitbake.1 @@ -0,0 +1,142 @@ +.\" Hey, EMACS: -*- nroff -*- +.\" First parameter, NAME, should be all caps +.\" Second parameter, SECTION, should be 1-8, maybe w/ subsection +.\" other parameters are allowed: see man(7), man(1) +.TH BITBAKE 1 "November 19, 2006" +.\" Please adjust this date whenever revising the manpage. +.\" +.\" Some roff macros, for reference: +.\" .nh disable hyphenation +.\" .hy enable hyphenation +.\" .ad l left justify +.\" .ad b justify to both left and right margins +.\" .nf disable filling +.\" .fi enable filling +.\" .br insert line break +.\" .sp insert n+1 empty lines +.\" for manpage-specific macros, see man(7) +.SH NAME +BitBake \- simple tool for the execution of tasks +.SH SYNOPSIS +.B bitbake +.RI [ options ] " packagenames" +.br +.SH DESCRIPTION +This manual page documents briefly the +.B bitbake +command. +.PP +.\" TeX users may be more comfortable with the \fB\fP and +.\" \fI\fP escape sequences to invode bold face and italics, +.\" respectively. +\fBbitbake\fP is a program that executes the specified task (default is 'build') +for a given set of BitBake files. 
+.br +It expects that BBFILES is defined, which is a space separated list of files to +be executed. BBFILES does support wildcards. +.br +Default BBFILES are the .bb files in the current directory. +.SH OPTIONS +This program follow the usual GNU command line syntax, with long +options starting with two dashes (`-'). +.TP +.B \-h, \-\-help +Show summary of options. +.TP +.B \-\-version +Show version of program. +.TP +.B \-bBUILDFILE, \-\-buildfile=BUILDFILE +execute the task against this .bb file, rather than a package from BBFILES. +.TP +.B \-k, \-\-continue +continue as much as possible after an error. While the target that failed, and +those that depend on it, cannot be remade, the other dependencies of these +targets can be processed all the same. +.TP +.B \-a, \-\-tryaltconfigs +continue with builds by trying to use alternative providers where possible. +.TP +.B \-f, \-\-force +force run of specified cmd, regardless of stamp status +.TP +.B \-i, \-\-interactive +drop into the interactive mode also called the BitBake shell. +.TP +.B \-cCMD, \-\-cmd=CMD +Specify task to execute. Note that this only executes the specified task for +the providee and the packages it depends on, i.e. 'compile' does not implicitly +call stage for the dependencies (IOW: use only if you know what you are doing). +Depending on the base.bbclass a listtasks task is defined and will show +available tasks. +.TP +.B \-rFILE, \-\-read=FILE +read the specified file before bitbake.conf +.TP +.B \-v, \-\-verbose +output more chit-chat to the terminal +.TP +.B \-D, \-\-debug +Increase the debug level. You can specify this more than once. +.TP +.B \-n, \-\-dry-run +don't execute, just go through the motions +.TP +.B \-p, \-\-parse-only +quit after parsing the BB files (developers only) +.TP +.B \-s, \-\-show-versions +show current and preferred versions of all packages +.TP +.B \-e, \-\-environment +show the global or per-recipe environment (this is what used to be bbread) +.TP +.B \-g, \-\-graphviz +emit the dependency trees of the specified packages in the dot syntax +.TP +.B \-IIGNORED\_DOT\_DEPS, \-\-ignore-deps=IGNORED_DOT_DEPS +Stop processing at the given list of dependencies when generating dependency +graphs. This can help to make the graph more appealing +.TP +.B \-lDEBUG_DOMAINS, \-\-log-domains=DEBUG_DOMAINS +Show debug logging for the specified logging domains +.TP +.B \-P, \-\-profile +profile the command and print a report +.TP +.B \-uUI, \-\-ui=UI +User interface to use. Currently, hob, depexp, goggle or ncurses can be specified as UI. +.TP +.B \-tSERVERTYPE, \-\-servertype=SERVERTYPE +Choose which server to use, none, process or xmlrpc. +.TP +.B \-\-revisions-changed +Set the exit code depending on whether upstream floating revisions have changed or not. +.TP +.B \-\-server-only +Run bitbake without UI, the frontend can connect with bitbake server itself. +.TP +.B \-BBIND, \-\-bind=BIND +The name/address for the bitbake server to bind to. +.TP +.B \-\-no\-setscene +Do not run any setscene tasks, forces builds. + +.SH ENVIRONMENT VARIABLES +bitbake uses the following environment variables to control its +operation: +.TP +.B BITBAKE_UI +The bitbake user interface; overridden by the \fB-u\fP commandline option. + +.SH AUTHORS +BitBake was written by +Phil Blundell, +Holger Freyther, +Chris Larson, +Mickey Lauer, +Richard Purdie, +Holger Schurig +.PP +This manual page was written by Marcin Juszkiewicz +for the Debian project (but may be used by others). 
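As a quick illustration of the options documented in the bitbake.1 page above, a few typical invocations might look like the following. The recipe name "foo" and the file name "foo_1.0.bb" are placeholders for this sketch, not names taken from the manual:

    # Build the default task ('build') for a recipe found via BBFILES
    bitbake foo

    # Build one specific .bb file directly, continuing past errors where possible
    bitbake -k -b foo_1.0.bb

    # Force only the 'compile' task to re-run for a recipe
    bitbake -f -c compile foo

    # Inspect state without building: versions, environment, dependency graph, dry run
    bitbake -s
    bitbake -e foo
    bitbake -g foo
    bitbake -n -v foo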
diff --git a/bitbake/doc/poky.ent b/bitbake/doc/poky.ent new file mode 100644 index 0000000..c032e14 --- /dev/null +++ b/bitbake/doc/poky.ent @@ -0,0 +1,59 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/bitbake/doc/template/Vera.ttf b/bitbake/doc/template/Vera.ttf new file mode 100644 index 0000000..58cd6b5 Binary files /dev/null and b/bitbake/doc/template/Vera.ttf differ diff --git a/bitbake/doc/template/Vera.xml b/bitbake/doc/template/Vera.xml new file mode 100644 index 0000000..3c82043 --- /dev/null +++ b/bitbake/doc/template/Vera.xml @@ -0,0 +1 @@ +BitstreamVeraSans729546928-235-183-23512879283200TYPE0CIDFontType20 \ No newline at end of file diff --git a/bitbake/doc/template/VeraMoBd.ttf b/bitbake/doc/template/VeraMoBd.ttf new file mode 100644 index 0000000..9be6547 Binary files /dev/null and b/bitbake/doc/template/VeraMoBd.ttf differ diff --git a/bitbake/doc/template/VeraMoBd.xml b/bitbake/doc/template/VeraMoBd.xml new file mode 100644 index 0000000..9b33107 --- /dev/null +++ b/bitbake/doc/template/VeraMoBd.xml @@ -0,0 +1 @@ +BitstreamVeraSansMono-BoldBitstream Vera Sans Mono BoldBitstream Vera Sans Mono729546759-240-19-2356059283400TYPE0CIDFontType20 \ No newline at end of file diff --git a/bitbake/doc/template/VeraMono.ttf b/bitbake/doc/template/VeraMono.ttf new file mode 100644 index 0000000..139f0b4 Binary files /dev/null and b/bitbake/doc/template/VeraMono.ttf differ diff --git a/bitbake/doc/template/VeraMono.xml b/bitbake/doc/template/VeraMono.xml new file mode 100644 index 0000000..3a0a866 --- /dev/null +++ b/bitbake/doc/template/VeraMono.xml @@ -0,0 +1 @@ +BitstreamVeraSansMono-RomanBitstream Vera Sans MonoBitstream Vera Sans Mono729546759-240-4-2356059283400TYPE0CIDFontType20 \ No newline at end of file diff --git a/bitbake/doc/template/component.title.xsl b/bitbake/doc/template/component.title.xsl new file mode 100644 index 0000000..faef043 --- /dev/null +++ b/bitbake/doc/template/component.title.xsl @@ -0,0 +1,39 @@ + + + + + + + + + + + 6 + 5 + 4 + 3 + 2 + 1 + + + + title + + + + + + + + + + + + + + + diff --git a/bitbake/doc/template/db-pdf.xsl b/bitbake/doc/template/db-pdf.xsl new file mode 100644 index 0000000..3dd065a --- /dev/null +++ b/bitbake/doc/template/db-pdf.xsl @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + 1 10 1 + + + + + + 0.5pt + solid + #cccccc + + + + + + 0.5pt + solid + #cccccc + + + + + #cccccc + + + + #cccccc + + + + + + + + + + + + + diff --git a/bitbake/doc/template/division.title.xsl b/bitbake/doc/template/division.title.xsl new file mode 100644 index 0000000..9c843bc --- /dev/null +++ b/bitbake/doc/template/division.title.xsl @@ -0,0 +1,25 @@ + + + + + +

+ title + + + + + + + + + + +

+
+
+ diff --git a/bitbake/doc/template/draft.png b/bitbake/doc/template/draft.png new file mode 100644 index 0000000..53051a9 Binary files /dev/null and b/bitbake/doc/template/draft.png differ diff --git a/bitbake/doc/template/fop-config.xml b/bitbake/doc/template/fop-config.xml new file mode 100644 index 0000000..09cc5ca --- /dev/null +++ b/bitbake/doc/template/fop-config.xml @@ -0,0 +1,58 @@ + + + + true + + + true + + + ../template + ../template + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/bitbake/doc/template/formal.object.heading.xsl b/bitbake/doc/template/formal.object.heading.xsl new file mode 100644 index 0000000..4f3900d --- /dev/null +++ b/bitbake/doc/template/formal.object.heading.xsl @@ -0,0 +1,21 @@ + + + + + + + + + +

+ + + + +

+
+
\ No newline at end of file diff --git a/bitbake/doc/template/gloss-permalinks.xsl b/bitbake/doc/template/gloss-permalinks.xsl new file mode 100644 index 0000000..6bf5811 --- /dev/null +++ b/bitbake/doc/template/gloss-permalinks.xsl @@ -0,0 +1,14 @@ + + + + + + + + + + + diff --git a/bitbake/doc/template/permalinks.xsl b/bitbake/doc/template/permalinks.xsl new file mode 100644 index 0000000..d2a1c14 --- /dev/null +++ b/bitbake/doc/template/permalinks.xsl @@ -0,0 +1,25 @@ + + + + + ¶ + + + + + + + + + + + + + + + + + + diff --git a/bitbake/doc/template/section.title.xsl b/bitbake/doc/template/section.title.xsl new file mode 100644 index 0000000..5c6ff9a --- /dev/null +++ b/bitbake/doc/template/section.title.xsl @@ -0,0 +1,55 @@ + + + + + + + + 1 + 2 + 3 + 4 + 5 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/bitbake/doc/template/titlepage.templates.xml b/bitbake/doc/template/titlepage.templates.xml new file mode 100644 index 0000000..38ec11a --- /dev/null +++ b/bitbake/doc/template/titlepage.templates.xml @@ -0,0 +1,1259 @@ + + + + + + + + + + + + +]> + + + + + + + + + + + + + + + <subtitle param:node="ancestor-or-self::article[1]" + keep-with-next="always" + font-size="&hsize3;" + font-weight="bold" + space-after="0.8em"/> + + <corpauthor space-before="0.5em" + font-size="&hsize3;"/> + <authorgroup space-before="0.5em" + font-size="&hsize2;"/> + <author space-before="0.5em" + font-size="&hsize2;" + space-after="0.8em"/> + + <email font-size="&hsize2;"/> + + <othercredit space-before="0.5em"/> + <releaseinfo space-before="0.5em"/> + <copyright space-before="0.5em"/> + <legalnotice text-align="start" + margin-left="0.5in" + margin-right="0.5in" + font-family="{$body.fontset}"/> + <pubdate space-before="0.5em"/> + <para></para> + <revision space-before="0.5em"/> + <revhistory space-before="0.5em"/> + <abstract space-before="0.5em" + text-align="start" + margin-left="0.5in" + margin-right="0.5in" + font-family="{$body.fontset}"/> + + <para></para> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + +<t:titlepage t:element="set" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:named-template="division.title" + param:node="ancestor-or-self::set[1]" + text-align="center" + font-size="&hsize5;" + space-before="&hsize5space;" + font-weight="bold" + font-family="{$title.fontset}"/> + <subtitle + font-family="{$title.fontset}" + text-align="center"/> + <corpauthor/> + <authorgroup/> + <author/> + <othercredit/> + <releaseinfo/> + <copyright/> + <legalnotice/> + <pubdate/> + <revision/> + <revhistory/> + <abstract/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + + <t:titlepage t:element="book" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + + <mediaobject/> + +<!-- + +# If you leave this block of code in then the text title in the +# <title>BitBake User Manual statement of the +# 
bitbake-user-manual.xml file is rendered on the title page below the +# image. Commenting it out gets it out of there yet allows it +# to be retained in the tab text for the HTML version of the +# manual. + + +--> + <subtitle + text-align="center" + font-size="&hsize4;" + space-before="&hsize4space;" + font-family="{$title.fontset}"/> + <corpauthor font-size="&hsize3;" + keep-with-next="always" + space-before="2in"/> + <authorgroup space-before="2in"/> + <author font-size="&hsize3;" + space-before="&hsize2space;" + keep-with-next="always"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> +<!-- +# If you leave this block of code in then the text title in the +# <title>BitBake User Manual statement of the +# bitbake-user-manual.xml file is rendered on the title page below the +# image. Commenting it out gets it out of there yet allows it +# to be retained in the tab text for the HTML version of the +# manual. + + +--> + <corpauthor/> + <authorgroup t:named-template="verso.authorgroup"/> + <author/> + <othercredit/> + <pubdate space-before="1em"/> + <copyright/> + <abstract/> + <legalnotice font-size="8pt"/> + </t:titlepage-content> + + <t:titlepage-separator> + <fo:block break-after="page"/> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + <fo:block break-after="page"/> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + +<t:titlepage t:element="part" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:named-template="division.title" + param:node="ancestor-or-self::part[1]" + text-align="center" + font-size="&hsize5;" + space-before="&hsize5space;" + font-weight="bold" + font-family="{$title.fontset}"/> + <subtitle + text-align="center" + font-size="&hsize4;" + space-before="&hsize4space;" + font-weight='bold' + font-style='italic' + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<t:titlepage t:element="partintro" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + text-align="center" + font-size="&hsize5;" + font-weight="bold" + space-before="1em" + font-family="{$title.fontset}"/> + <subtitle + text-align="center" + font-size="&hsize2;" + font-weight="bold" + font-style="italic" + font-family="{$title.fontset}"/> + <corpauthor/> + <authorgroup/> + <author/> + <othercredit/> + <releaseinfo/> + <copyright/> + <legalnotice/> + <pubdate/> + <revision/> + <revhistory/> + <abstract/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + +<t:titlepage t:element="reference" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:named-template="division.title" + param:node="ancestor-or-self::reference[1]" + text-align="center" + font-size="&hsize5;" + space-before="&hsize5space;" + font-weight="bold" + font-family="{$title.fontset}"/> + <subtitle + font-family="{$title.fontset}" + 
text-align="center"/> + <corpauthor/> + <authorgroup/> + <author/> + <othercredit/> + <releaseinfo/> + <copyright/> + <legalnotice/> + <pubdate/> + <revision/> + <revhistory/> + <abstract/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + +<t:titlepage t:element="refsynopsisdiv" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + +<t:titlepage t:element="refsection" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + +<t:titlepage t:element="refsect1" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + +<t:titlepage t:element="refsect2" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + +<t:titlepage t:element="refsect3" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + + <t:titlepage t:element="dedication" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:force="1" + t:named-template="component.title" + param:node="ancestor-or-self::dedication[1]" + margin-left="{$title.margin.left}" + font-size="&hsize5;" + font-family="{$title.fontset}" + font-weight="bold"/> + <subtitle + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content 
t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + + <t:titlepage t:element="preface" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:force="1" + t:named-template="component.title" + param:node="ancestor-or-self::preface[1]" + margin-left="{$title.margin.left}" + font-size="&hsize5;" + font-family="{$title.fontset}" + font-weight="bold"/> + <subtitle + font-family="{$title.fontset}"/> + <corpauthor/> + <authorgroup/> + <author/> + <othercredit/> + <releaseinfo/> + <copyright/> + <legalnotice/> + <pubdate/> + <revision/> + <revhistory/> + <abstract/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + + <t:titlepage t:element="chapter" t:wrapper="fo:block" + font-family="{$title.fontset}"> + <t:titlepage-content t:side="recto" margin-left="{$title.margin.left}"> + <title t:named-template="component.title" + param:node="ancestor-or-self::chapter[1]" + font-size="&hsize5;" + font-weight="bold"/> + + <subtitle space-before="0.5em" + font-style="italic" + font-size="&hsize2;" + font-weight="bold"/> + + <corpauthor space-before="0.5em" + space-after="0.5em" + font-size="&hsize2;"/> + + <authorgroup space-before="0.5em" + space-after="0.5em" + font-size="&hsize2;"/> + + <author space-before="0.5em" + space-after="0.5em" + font-size="&hsize2;"/> + + <othercredit/> + <releaseinfo/> + <copyright/> + <legalnotice/> + <pubdate/> + <revision/> + <revhistory/> + <abstract/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + + <t:titlepage t:element="appendix" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:named-template="component.title" + param:node="ancestor-or-self::appendix[1]" + margin-left="{$title.margin.left}" + font-size="&hsize5;" + font-weight="bold" + font-family="{$title.fontset}"/> + <subtitle + font-family="{$title.fontset}"/> + <corpauthor/> + <authorgroup/> + <author/> + <othercredit/> + <releaseinfo/> + <copyright/> + <legalnotice/> + <pubdate/> + <revision/> + <revhistory/> + <abstract/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + +<t:titlepage t:element="section" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + margin-left="{$title.margin.left}" + font-family="{$title.fontset}"/> + <subtitle + font-family="{$title.fontset}"/> + <corpauthor/> + <authorgroup/> + <author/> + <othercredit/> + 
<releaseinfo/> + <copyright/> + <legalnotice/> + <pubdate/> + <revision/> + <revhistory/> + <abstract/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<t:titlepage t:element="sect1" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + margin-left="{$title.margin.left}" + font-family="{$title.fontset}"/> + <subtitle + font-family="{$title.fontset}"/> + <corpauthor/> + <authorgroup/> + <author/> + <othercredit/> + <releaseinfo/> + <copyright/> + <legalnotice/> + <pubdate/> + <revision/> + <revhistory/> + <abstract/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<t:titlepage t:element="sect2" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + margin-left="{$title.margin.left}" + font-family="{$title.fontset}"/> + <subtitle + font-family="{$title.fontset}"/> + <corpauthor/> + <authorgroup/> + <author/> + <othercredit/> + <releaseinfo/> + <copyright/> + <legalnotice/> + <pubdate/> + <revision/> + <revhistory/> + <abstract/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<t:titlepage t:element="sect3" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + margin-left="{$title.margin.left}" + font-family="{$title.fontset}"/> + <subtitle + font-family="{$title.fontset}"/> + <corpauthor/> + <authorgroup/> + <author/> + <othercredit/> + <releaseinfo/> + <copyright/> + <legalnotice/> + <pubdate/> + <revision/> + <revhistory/> + <abstract/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<t:titlepage t:element="sect4" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + margin-left="{$title.margin.left}" + font-family="{$title.fontset}"/> + <subtitle + font-family="{$title.fontset}"/> + <corpauthor/> + <authorgroup/> + <author/> + <othercredit/> + <releaseinfo/> + <copyright/> + <legalnotice/> + <pubdate/> + <revision/> + <revhistory/> + <abstract/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<t:titlepage t:element="sect5" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + margin-left="{$title.margin.left}" + font-family="{$title.fontset}"/> + <subtitle + font-family="{$title.fontset}"/> + <corpauthor/> + <authorgroup/> + <author/> + <othercredit/> + <releaseinfo/> + <copyright/> + <legalnotice/> + <pubdate/> + <revision/> + <revhistory/> + <abstract/> + </t:titlepage-content> + + 
<t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<t:titlepage t:element="simplesect" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + margin-left="{$title.margin.left}" + font-family="{$title.fontset}"/> + <subtitle + font-family="{$title.fontset}"/> + <corpauthor/> + <authorgroup/> + <author/> + <othercredit/> + <releaseinfo/> + <copyright/> + <legalnotice/> + <pubdate/> + <revision/> + <revhistory/> + <abstract/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + + <t:titlepage t:element="bibliography" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:force="1" + t:named-template="component.title" + param:node="ancestor-or-self::bibliography[1]" + margin-left="{$title.margin.left}" + font-size="&hsize5;" + font-family="{$title.fontset}" + font-weight="bold"/> + <subtitle + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> + </t:titlepage> + +<!-- ==================================================================== --> + + <t:titlepage t:element="bibliodiv" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title t:named-template="component.title" + param:node="ancestor-or-self::bibliodiv[1]" + margin-left="{$title.margin.left}" + font-size="&hsize4;" + font-family="{$title.fontset}" + font-weight="bold"/> + <subtitle + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> + </t:titlepage> + +<!-- ==================================================================== --> + + <t:titlepage t:element="glossary" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:force="1" + t:named-template="component.title" + param:node="ancestor-or-self::glossary[1]" + margin-left="{$title.margin.left}" + font-size="&hsize5;" + font-family="{$title.fontset}" + font-weight="bold"/> + <subtitle + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> + </t:titlepage> + +<!-- ==================================================================== --> + + <t:titlepage t:element="glossdiv" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title t:named-template="component.title" + param:node="ancestor-or-self::glossdiv[1]" + margin-left="{$title.margin.left}" + font-size="&hsize4;" + font-family="{$title.fontset}" + font-weight="bold"/> + <subtitle 
+ font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> + </t:titlepage> + +<!-- ==================================================================== --> + + <t:titlepage t:element="index" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:force="1" + t:named-template="component.title" + param:node="ancestor-or-self::index[1]" + param:pagewide="1" + margin-left="0pt" + font-size="&hsize5;" + font-family="{$title.fontset}" + font-weight="bold"/> + <subtitle + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> + </t:titlepage> + +<!-- ==================================================================== --> + + <!-- The indexdiv.title template is used so that manual and --> + <!-- automatically generated indexdiv titles get the same --> + <!-- formatting. --> + + <t:titlepage t:element="indexdiv" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title t:force="1" + t:named-template="indexdiv.title" + param:title="title"/> + <subtitle + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> + </t:titlepage> + +<!-- ==================================================================== --> + + <t:titlepage t:element="setindex" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:force="1" + t:named-template="component.title" + param:node="ancestor-or-self::setindex[1]" + param:pagewide="1" + margin-left="0pt" + font-size="&hsize5;" + font-family="{$title.fontset}" + font-weight="bold"/> + <subtitle + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> + </t:titlepage> + +<!-- ==================================================================== --> + + <t:titlepage t:element="colophon" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:force="1" + t:named-template="component.title" + param:node="ancestor-or-self::colophon[1]" + margin-left="{$title.margin.left}" + font-size="&hsize5;" + font-family="{$title.fontset}" + font-weight="bold"/> + <subtitle + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> +</t:titlepage> + +<!-- ==================================================================== --> + + <t:titlepage t:element="table.of.contents" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:force="1" + t:named-template="gentext" + 
param:key="'TableofContents'" + space-before.minimum="1em" + space-before.optimum="1.5em" + space-before.maximum="2em" + space-after="0.5em" + margin-left="{$title.margin.left}" + font-size="&hsize3;" + font-weight="bold" + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> + </t:titlepage> + + <t:titlepage t:element="list.of.tables" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:force="1" + t:named-template="gentext" + param:key="'ListofTables'" + space-before.minimum="1em" + space-before.optimum="1.5em" + space-before.maximum="2em" + space-after="0.5em" + margin-left="{$title.margin.left}" + font-size="&hsize3;" + font-weight="bold" + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> + </t:titlepage> + + <t:titlepage t:element="list.of.figures" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:force="1" + t:named-template="gentext" + param:key="'ListofFigures'" + space-before.minimum="1em" + space-before.optimum="1.5em" + space-before.maximum="2em" + space-after="0.5em" + margin-left="{$title.margin.left}" + font-size="&hsize3;" + font-weight="bold" + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> + </t:titlepage> + + <t:titlepage t:element="list.of.examples" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:force="1" + t:named-template="gentext" + param:key="'ListofExamples'" + space-before.minimum="1em" + space-before.optimum="1.5em" + space-before.maximum="2em" + space-after="0.5em" + margin-left="{$title.margin.left}" + font-size="&hsize3;" + font-weight="bold" + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> + </t:titlepage> + + <t:titlepage t:element="list.of.equations" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:force="1" + t:named-template="gentext" + param:key="'ListofEquations'" + space-before.minimum="1em" + space-before.optimum="1.5em" + space-before.maximum="2em" + space-after="0.5em" + margin-left="{$title.margin.left}" + font-size="&hsize3;" + font-weight="bold" + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> + </t:titlepage> + + <t:titlepage t:element="list.of.procedures" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:force="1" + t:named-template="gentext" + 
param:key="'ListofProcedures'" + space-before.minimum="1em" + space-before.optimum="1.5em" + space-before.maximum="2em" + space-after="0.5em" + margin-left="{$title.margin.left}" + font-size="&hsize3;" + font-weight="bold" + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> + </t:titlepage> + + <t:titlepage t:element="list.of.unknowns" t:wrapper="fo:block"> + <t:titlepage-content t:side="recto"> + <title + t:force="1" + t:named-template="gentext" + param:key="'ListofUnknown'" + space-before.minimum="1em" + space-before.optimum="1.5em" + space-before.maximum="2em" + space-after="0.5em" + margin-left="{$title.margin.left}" + font-size="&hsize3;" + font-weight="bold" + font-family="{$title.fontset}"/> + </t:titlepage-content> + + <t:titlepage-content t:side="verso"> + </t:titlepage-content> + + <t:titlepage-separator> + </t:titlepage-separator> + + <t:titlepage-before t:side="recto"> + </t:titlepage-before> + + <t:titlepage-before t:side="verso"> + </t:titlepage-before> + </t:titlepage> + +<!-- ==================================================================== --> + +</t:templates> diff --git a/bitbake/doc/tools/docbook-to-pdf b/bitbake/doc/tools/docbook-to-pdf new file mode 100755 index 0000000..558ded9 --- /dev/null +++ b/bitbake/doc/tools/docbook-to-pdf @@ -0,0 +1,51 @@ +#!/bin/sh + +if [ -z "$1" -o -z "$2" ]; then + echo "usage: [-v] $0 <docbook file> <templatedir>" + echo + echo "*NOTE* you need xsltproc, fop and nwalsh docbook stylesheets" + echo " installed for this to work!" + echo + exit 0 +fi + +FO=`echo $1 | sed s/.xml/.fo/` || exit 1 +PDF=`echo $1 | sed s/.xml/.pdf/` || exit 1 +TEMPLATEDIR=$2 + +## +# These URI should be rewritten by your distribution's xml catalog to +# match your localy installed XSL stylesheets. +XSL_BASE_URI="http://docbook.sourceforge.net/release/xsl/current" + +# Creates a temporary XSL stylesheet based on titlepage.xsl +xsltproc -o /tmp/titlepage.xsl \ + --xinclude \ + $XSL_BASE_URI/template/titlepage.xsl \ + $TEMPLATEDIR/titlepage.templates.xml || exit 1 + +# Creates the file needed for FOP +xsltproc --xinclude \ + --stringparam hyphenate false \ + --stringparam formal.title.placement "figure after" \ + --stringparam ulink.show 1 \ + --stringparam body.font.master 9 \ + --stringparam title.font.master 11 \ + --stringparam draft.watermark.image "$TEMPLATEDIR/draft.png" \ + --stringparam chapter.autolabel 1 \ + --stringparam appendix.autolabel A \ + --stringparam section.autolabel 1 \ + --stringparam section.label.includes.component.label 1 \ + --output $FO \ + $TEMPLATEDIR/db-pdf.xsl \ + $1 || exit 1 + +# Invokes the Java version of FOP. Uses the additional configuration file common/fop-config.xml +fop -c $TEMPLATEDIR/fop-config.xml -fo $FO -pdf $PDF || exit 1 + +rm -f $FO +rm -f /tmp/titlepage.xsl + +echo +echo " #### Success! $PDF ready. ####" +echo diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py new file mode 100644 index 0000000..6917ec3 --- /dev/null +++ b/bitbake/lib/bb/COW.py @@ -0,0 +1,323 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# This is a copy on write dictionary and set which abuses classes to try and be nice and fast. 
+# +# Copyright (C) 2006 Tim Amsell +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +#Please Note: +# Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW. +# Assign a file to __warn__ to get warnings about slow operations. +# + +from __future__ import print_function +import copy +import types +ImmutableTypes = ( + types.NoneType, + bool, + complex, + float, + int, + long, + tuple, + frozenset, + basestring +) + +MUTABLE = "__mutable__" + +class COWMeta(type): + pass + +class COWDictMeta(COWMeta): + __warn__ = False + __hasmutable__ = False + __marker__ = tuple() + + def __str__(cls): + # FIXME: I have magic numbers! + return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3) + __repr__ = __str__ + + def cow(cls): + class C(cls): + __count__ = cls.__count__ + 1 + return C + copy = cow + __call__ = cow + + def __setitem__(cls, key, value): + if not isinstance(value, ImmutableTypes): + if not isinstance(value, COWMeta): + cls.__hasmutable__ = True + key += MUTABLE + setattr(cls, key, value) + + def __getmutable__(cls, key, readonly=False): + nkey = key + MUTABLE + try: + return cls.__dict__[nkey] + except KeyError: + pass + + value = getattr(cls, nkey) + if readonly: + return value + + if not cls.__warn__ is False and not isinstance(value, COWMeta): + print("Warning: Doing a copy because %s is a mutable type." % key, file=cls.__warn__) + try: + value = value.copy() + except AttributeError as e: + value = copy.copy(value) + setattr(cls, nkey, value) + return value + + __getmarker__ = [] + def __getreadonly__(cls, key, default=__getmarker__): + """\ + Get a value (even if mutable) which you promise not to change. + """ + return cls.__getitem__(key, default, True) + + def __getitem__(cls, key, default=__getmarker__, readonly=False): + try: + try: + value = getattr(cls, key) + except AttributeError: + value = cls.__getmutable__(key, readonly) + + # This is for values which have been deleted + if value is cls.__marker__: + raise AttributeError("key %s does not exist." 
% key) + + return value + except AttributeError as e: + if not default is cls.__getmarker__: + return default + + raise KeyError(str(e)) + + def __delitem__(cls, key): + cls.__setitem__(key, cls.__marker__) + + def __revertitem__(cls, key): + if not cls.__dict__.has_key(key): + key += MUTABLE + delattr(cls, key) + + def __contains__(cls, key): + return cls.has_key(key) + + def has_key(cls, key): + value = cls.__getreadonly__(key, cls.__marker__) + if value is cls.__marker__: + return False + return True + + def iter(cls, type, readonly=False): + for key in dir(cls): + if key.startswith("__"): + continue + + if key.endswith(MUTABLE): + key = key[:-len(MUTABLE)] + + if type == "keys": + yield key + + try: + if readonly: + value = cls.__getreadonly__(key) + else: + value = cls[key] + except KeyError: + continue + + if type == "values": + yield value + if type == "items": + yield (key, value) + raise StopIteration() + + def iterkeys(cls): + return cls.iter("keys") + def itervalues(cls, readonly=False): + if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False: + print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__) + return cls.iter("values", readonly) + def iteritems(cls, readonly=False): + if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False: + print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__) + return cls.iter("items", readonly) + +class COWSetMeta(COWDictMeta): + def __str__(cls): + # FIXME: I have magic numbers! + return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) -3) + __repr__ = __str__ + + def cow(cls): + class C(cls): + __count__ = cls.__count__ + 1 + return C + + def add(cls, value): + COWDictMeta.__setitem__(cls, repr(hash(value)), value) + + def remove(cls, value): + COWDictMeta.__delitem__(cls, repr(hash(value))) + + def __in__(cls, value): + return COWDictMeta.has_key(repr(hash(value))) + + def iterkeys(cls): + raise TypeError("sets don't have keys") + + def iteritems(cls): + raise TypeError("sets don't have 'items'") + +# These are the actual classes you use! 
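+#
+# Illustrative usage (mirrors the self-test under __main__ below):
+#
+#   base = COWDictBase()          # calling the class yields a new COW level
+#   base['greeting'] = 'hello'
+#   child = base.copy()           # cheap copy: a new subclass, no data copied
+#   child['greeting'] = 'bye'     # the write shadows the parent level only
+#   assert base['greeting'] == 'hello'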
+class COWDictBase(object): + __metaclass__ = COWDictMeta + __count__ = 0 + +class COWSetBase(object): + __metaclass__ = COWSetMeta + __count__ = 0 + +if __name__ == "__main__": + import sys + COWDictBase.__warn__ = sys.stderr + a = COWDictBase() + print("a", a) + + a['a'] = 'a' + a['b'] = 'b' + a['dict'] = {} + + b = a.copy() + print("b", b) + b['c'] = 'b' + + print() + + print("a", a) + for x in a.iteritems(): + print(x) + print("--") + print("b", b) + for x in b.iteritems(): + print(x) + print() + + b['dict']['a'] = 'b' + b['a'] = 'c' + + print("a", a) + for x in a.iteritems(): + print(x) + print("--") + print("b", b) + for x in b.iteritems(): + print(x) + print() + + try: + b['dict2'] + except KeyError as e: + print("Okay!") + + a['set'] = COWSetBase() + a['set'].add("o1") + a['set'].add("o1") + a['set'].add("o2") + + print("a", a) + for x in a['set'].itervalues(): + print(x) + print("--") + print("b", b) + for x in b['set'].itervalues(): + print(x) + print() + + b['set'].add('o3') + + print("a", a) + for x in a['set'].itervalues(): + print(x) + print("--") + print("b", b) + for x in b['set'].itervalues(): + print(x) + print() + + a['set2'] = set() + a['set2'].add("o1") + a['set2'].add("o1") + a['set2'].add("o2") + + print("a", a) + for x in a.iteritems(): + print(x) + print("--") + print("b", b) + for x in b.iteritems(readonly=True): + print(x) + print() + + del b['b'] + try: + print(b['b']) + except KeyError: + print("Yay! deleted key raises error") + + if b.has_key('b'): + print("Boo!") + else: + print("Yay - has_key with delete works!") + + print("a", a) + for x in a.iteritems(): + print(x) + print("--") + print("b", b) + for x in b.iteritems(readonly=True): + print(x) + print() + + b.__revertitem__('b') + + print("a", a) + for x in a.iteritems(): + print(x) + print("--") + print("b", b) + for x in b.iteritems(readonly=True): + print(x) + print() + + b.__revertitem__('dict') + print("a", a) + for x in a.iteritems(): + print(x) + print("--") + print("b", b) + for x in b.iteritems(readonly=True): + print(x) + print() diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py new file mode 100644 index 0000000..4d69552 --- /dev/null +++ b/bitbake/lib/bb/__init__.py @@ -0,0 +1,142 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# BitBake Build System Python Library +# +# Copyright (C) 2003 Holger Schurig +# Copyright (C) 2003, 2004 Chris Larson +# +# Based on Gentoo's portage.py. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +__version__ = "1.24.0" + +import sys +if sys.version_info < (2, 7, 3): + raise RuntimeError("Sorry, python 2.7.3 or later is required for this version of bitbake") + + +class BBHandledException(Exception): + """ + The big dilemma for generic bitbake code is what information to give the user + when an exception occurs. 
Any exception inheriting this base exception class + has already provided information to the user via some 'fired' message type such as + an explicitly fired event using bb.fire, or a bb.error message. If bitbake + encounters an exception derived from this class, no backtrace or other information + will be given to the user, its assumed the earlier event provided the relevant information. + """ + pass + +import os +import logging + + +class NullHandler(logging.Handler): + def emit(self, record): + pass + +Logger = logging.getLoggerClass() +class BBLogger(Logger): + def __init__(self, name): + if name.split(".")[0] == "BitBake": + self.debug = self.bbdebug + Logger.__init__(self, name) + + def bbdebug(self, level, msg, *args, **kwargs): + return self.log(logging.DEBUG - level + 1, msg, *args, **kwargs) + + def plain(self, msg, *args, **kwargs): + return self.log(logging.INFO + 1, msg, *args, **kwargs) + + def verbose(self, msg, *args, **kwargs): + return self.log(logging.INFO - 1, msg, *args, **kwargs) + +logging.raiseExceptions = False +logging.setLoggerClass(BBLogger) + +logger = logging.getLogger("BitBake") +logger.addHandler(NullHandler()) +logger.setLevel(logging.DEBUG - 2) + +# This has to be imported after the setLoggerClass, as the import of bb.msg +# can result in construction of the various loggers. +import bb.msg + +from bb import fetch2 as fetch +sys.modules['bb.fetch'] = sys.modules['bb.fetch2'] + +# Messaging convenience functions +def plain(*args): + logger.plain(''.join(args)) + +def debug(lvl, *args): + if isinstance(lvl, basestring): + logger.warn("Passed invalid debug level '%s' to bb.debug", lvl) + args = (lvl,) + args + lvl = 1 + logger.debug(lvl, ''.join(args)) + +def note(*args): + logger.info(''.join(args)) + +def warn(*args): + logger.warn(''.join(args)) + +def error(*args): + logger.error(''.join(args)) + +def fatal(*args): + logger.critical(''.join(args)) + raise BBHandledException() + +def deprecated(func, name=None, advice=""): + """This is a decorator which can be used to mark functions + as deprecated. It will result in a warning being emitted + when the function is used.""" + import warnings + + if advice: + advice = ": %s" % advice + if name is None: + name = func.__name__ + + def newFunc(*args, **kwargs): + warnings.warn("Call to deprecated function %s%s." % (name, + advice), + category=DeprecationWarning, + stacklevel=2) + return func(*args, **kwargs) + newFunc.__name__ = func.__name__ + newFunc.__doc__ = func.__doc__ + newFunc.__dict__.update(func.__dict__) + return newFunc + +# For compatibility +def deprecate_import(current, modulename, fromlist, renames = None): + """Import objects from one module into another, wrapping them with a DeprecationWarning""" + import sys + + module = __import__(modulename, fromlist = fromlist) + for position, objname in enumerate(fromlist): + obj = getattr(module, objname) + newobj = deprecated(obj, "{0}.{1}".format(current, objname), + "Please use {0}.{1} instead".format(modulename, objname)) + if renames: + newname = renames[position] + else: + newname = objname + + setattr(sys.modules[current], newname, newobj) + diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py new file mode 100644 index 0000000..65cc851 --- /dev/null +++ b/bitbake/lib/bb/build.py @@ -0,0 +1,711 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# BitBake 'Build' implementation +# +# Core code for function execution and task handling in the +# BitBake build tools. 
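+#
+# Rough call flow for a single task:
+#
+#   exec_task(fn, task, d)       - public entry point, optional profiling
+#    -> _exec_task(...)          - per-task datastore, log files and events;
+#                                  runs prefuncs, the task and postfuncs
+#        -> exec_func(func, d)   - dispatches to exec_func_python() or
+#                                  exec_func_shell()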
+# +# Copyright (C) 2003, 2004 Chris Larson +# +# Based on Gentoo's portage.py. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +import os +import sys +import logging +import shlex +import glob +import time +import bb +import bb.msg +import bb.process +from contextlib import nested +from bb import event, utils + +bblogger = logging.getLogger('BitBake') +logger = logging.getLogger('BitBake.Build') + +NULL = open(os.devnull, 'r+') + +# When we execute a Python function, we'd like certain things +# in all namespaces, hence we add them to __builtins__. +# If we do not do this and use the exec globals, they will +# not be available to subfunctions. +__builtins__['bb'] = bb +__builtins__['os'] = os + +class FuncFailed(Exception): + def __init__(self, name = None, logfile = None): + self.logfile = logfile + self.name = name + if name: + self.msg = 'Function failed: %s' % name + else: + self.msg = "Function failed" + + def __str__(self): + if self.logfile and os.path.exists(self.logfile): + msg = ("%s (log file is located at %s)" % + (self.msg, self.logfile)) + else: + msg = self.msg + return msg + +class TaskBase(event.Event): + """Base class for task events""" + + def __init__(self, t, logfile, d): + self._task = t + self._package = d.getVar("PF", True) + self.taskfile = d.getVar("FILE", True) + self.taskname = self._task + self.logfile = logfile + self.time = time.time() + event.Event.__init__(self) + self._message = "recipe %s: task %s: %s" % (d.getVar("PF", True), t, self.getDisplayName()) + + def getTask(self): + return self._task + + def setTask(self, task): + self._task = task + + def getDisplayName(self): + return bb.event.getName(self)[4:] + + task = property(getTask, setTask, None, "task property") + +class TaskStarted(TaskBase): + """Task execution started""" + def __init__(self, t, logfile, taskflags, d): + super(TaskStarted, self).__init__(t, logfile, d) + self.taskflags = taskflags + +class TaskSucceeded(TaskBase): + """Task execution completed""" + +class TaskFailed(TaskBase): + """Task execution failed""" + + def __init__(self, task, logfile, metadata, errprinted = False): + self.errprinted = errprinted + super(TaskFailed, self).__init__(task, logfile, metadata) + +class TaskFailedSilent(TaskBase): + """Task execution failed (silently)""" + def getDisplayName(self): + # Don't need to tell the user it was silent + return "Failed" + +class TaskInvalid(TaskBase): + + def __init__(self, task, metadata): + super(TaskInvalid, self).__init__(task, None, metadata) + self._message = "No such task '%s'" % task + + +class LogTee(object): + def __init__(self, logger, outfile): + self.outfile = outfile + self.logger = logger + self.name = self.outfile.name + + def write(self, string): + self.logger.plain(string) + self.outfile.write(string) + + def __enter__(self): + self.outfile.__enter__() + return self + + def 
__exit__(self, *excinfo): + self.outfile.__exit__(*excinfo) + + def __repr__(self): + return '<LogTee {0}>'.format(self.name) + def flush(self): + self.outfile.flush() + +def exec_func(func, d, dirs = None): + """Execute a BB 'function'""" + + body = d.getVar(func) + if not body: + if body is None: + logger.warn("Function %s doesn't exist", func) + return + + flags = d.getVarFlags(func) + cleandirs = flags.get('cleandirs') + if cleandirs: + for cdir in d.expand(cleandirs).split(): + bb.utils.remove(cdir, True) + bb.utils.mkdirhier(cdir) + + if dirs is None: + dirs = flags.get('dirs') + if dirs: + dirs = d.expand(dirs).split() + + if dirs: + for adir in dirs: + bb.utils.mkdirhier(adir) + adir = dirs[-1] + else: + adir = d.getVar('B', True) + bb.utils.mkdirhier(adir) + + ispython = flags.get('python') + + lockflag = flags.get('lockfiles') + if lockflag: + lockfiles = [f for f in d.expand(lockflag).split()] + else: + lockfiles = None + + tempdir = d.getVar('T', True) + + # or func allows items to be executed outside of the normal + # task set, such as buildhistory + task = d.getVar('BB_RUNTASK', True) or func + if task == func: + taskfunc = task + else: + taskfunc = "%s.%s" % (task, func) + + runfmt = d.getVar('BB_RUNFMT', True) or "run.{func}.{pid}" + runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid()) + runfile = os.path.join(tempdir, runfn) + bb.utils.mkdirhier(os.path.dirname(runfile)) + + # Setup the courtesy link to the runfn, only for tasks + # we create the link 'just' before the run script is created + # if we create it after, and if the run script fails, then the + # link won't be created as an exception would be fired. + if task == func: + runlink = os.path.join(tempdir, 'run.{0}'.format(task)) + if runlink: + bb.utils.remove(runlink) + + try: + os.symlink(runfn, runlink) + except OSError: + pass + + with bb.utils.fileslocked(lockfiles): + if ispython: + exec_func_python(func, d, runfile, cwd=adir) + else: + exec_func_shell(func, d, runfile, cwd=adir) + +_functionfmt = """ +def {function}(d): +{body} + +{function}(d) +""" +logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s") +def exec_func_python(func, d, runfile, cwd=None): + """Execute a python BB 'function'""" + + bbfile = d.getVar('FILE', True) + code = _functionfmt.format(function=func, body=d.getVar(func, True)) + bb.utils.mkdirhier(os.path.dirname(runfile)) + with open(runfile, 'w') as script: + bb.data.emit_func_python(func, script, d) + + if cwd: + try: + olddir = os.getcwd() + except OSError: + olddir = None + os.chdir(cwd) + + bb.debug(2, "Executing python function %s" % func) + + try: + comp = utils.better_compile(code, func, bbfile) + utils.better_exec(comp, {"d": d}, code, bbfile) + except (bb.parse.SkipRecipe, bb.build.FuncFailed): + raise + except: + raise FuncFailed(func, None) + finally: + bb.debug(2, "Python function %s finished" % func) + + if cwd and olddir: + try: + os.chdir(olddir) + except OSError: + pass + +def shell_trap_code(): + return '''#!/bin/sh\n +# Emit a useful diagnostic if something fails: +bb_exit_handler() { + ret=$? + case $ret in + 0) ;; + *) case $BASH_VERSION in + "") echo "WARNING: exit code $ret from a shell command.";; + *) echo "WARNING: ${BASH_SOURCE[0]}:${BASH_LINENO[0]} exit $ret from + \"$BASH_COMMAND\"";; + esac + exit $ret + esac +} +trap 'bb_exit_handler' 0 +set -e +''' + +def exec_func_shell(func, d, runfile, cwd=None): + """Execute a shell function from the metadata + + Note on directory behavior. 
The 'dirs' varflag should contain a list + of the directories you need created prior to execution. The last + item in the list is where we will chdir/cd to. + """ + + # Don't let the emitted shell script override PWD + d.delVarFlag('PWD', 'export') + + with open(runfile, 'w') as script: + script.write(shell_trap_code()) + + bb.data.emit_func(func, script, d) + + if bb.msg.loggerVerboseLogs: + script.write("set -x\n") + if cwd: + script.write("cd '%s'\n" % cwd) + script.write("%s\n" % func) + script.write(''' +# cleanup +ret=$? +trap '' 0 +exit $? +''') + + os.chmod(runfile, 0775) + + cmd = runfile + if d.getVarFlag(func, 'fakeroot'): + fakerootcmd = d.getVar('FAKEROOT', True) + if fakerootcmd: + cmd = [fakerootcmd, runfile] + + if bb.msg.loggerDefaultVerbose: + logfile = LogTee(logger, sys.stdout) + else: + logfile = sys.stdout + + bb.debug(2, "Executing shell function %s" % func) + + try: + with open(os.devnull, 'r+') as stdin: + bb.process.run(cmd, shell=False, stdin=stdin, log=logfile) + except bb.process.CmdError: + logfn = d.getVar('BB_LOGFILE', True) + raise FuncFailed(func, logfn) + + bb.debug(2, "Shell function %s finished" % func) + +def _task_data(fn, task, d): + localdata = bb.data.createCopy(d) + localdata.setVar('BB_FILENAME', fn) + localdata.setVar('BB_CURRENTTASK', task[3:]) + localdata.setVar('OVERRIDES', 'task-%s:%s' % + (task[3:].replace('_', '-'), d.getVar('OVERRIDES', False))) + localdata.finalize() + bb.data.expandKeys(localdata) + return localdata + +def _exec_task(fn, task, d, quieterr): + """Execute a BB 'task' + + Execution of a task involves a bit more setup than executing a function, + running it with its own local metadata, and with some useful variables set. + """ + if not d.getVarFlag(task, 'task'): + event.fire(TaskInvalid(task, d), d) + logger.error("No such task: %s" % task) + return 1 + + logger.debug(1, "Executing task %s", task) + + localdata = _task_data(fn, task, d) + tempdir = localdata.getVar('T', True) + if not tempdir: + bb.fatal("T variable not set, unable to build") + + # Change nice level if we're asked to + nice = localdata.getVar("BB_TASK_NICE_LEVEL", True) + if nice: + curnice = os.nice(0) + nice = int(nice) - curnice + newnice = os.nice(nice) + logger.debug(1, "Renice to %s " % newnice) + + bb.utils.mkdirhier(tempdir) + + # Determine the logfile to generate + logfmt = localdata.getVar('BB_LOGFMT', True) or 'log.{task}.{pid}' + logbase = logfmt.format(task=task, pid=os.getpid()) + + # Document the order of the tasks... 
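+    # Each run appends a line of the form
+    #     do_compile (12345): log.do_compile.12345
+    # to T/log.task_order so the execution order of a recipe's tasks can be
+    # reconstructed afterwards.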
+ logorder = os.path.join(tempdir, 'log.task_order') + try: + with open(logorder, 'a') as logorderfile: + logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase)) + except OSError: + logger.exception("Opening log file '%s'", logorder) + pass + + # Setup the courtesy link to the logfn + loglink = os.path.join(tempdir, 'log.{0}'.format(task)) + logfn = os.path.join(tempdir, logbase) + if loglink: + bb.utils.remove(loglink) + + try: + os.symlink(logbase, loglink) + except OSError: + pass + + prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True) + postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True) + + class ErrorCheckHandler(logging.Handler): + def __init__(self): + self.triggered = False + logging.Handler.__init__(self, logging.ERROR) + def emit(self, record): + self.triggered = True + + # Handle logfiles + si = open('/dev/null', 'r') + try: + bb.utils.mkdirhier(os.path.dirname(logfn)) + logfile = open(logfn, 'w') + except OSError: + logger.exception("Opening log file '%s'", logfn) + pass + + # Dup the existing fds so we dont lose them + osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()] + oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()] + ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()] + + # Replace those fds with our own + os.dup2(si.fileno(), osi[1]) + os.dup2(logfile.fileno(), oso[1]) + os.dup2(logfile.fileno(), ose[1]) + + # Ensure Python logging goes to the logfile + handler = logging.StreamHandler(logfile) + handler.setFormatter(logformatter) + # Always enable full debug output into task logfiles + handler.setLevel(logging.DEBUG - 2) + bblogger.addHandler(handler) + + errchk = ErrorCheckHandler() + bblogger.addHandler(errchk) + + localdata.setVar('BB_LOGFILE', logfn) + localdata.setVar('BB_RUNTASK', task) + + flags = localdata.getVarFlags(task) + + event.fire(TaskStarted(task, logfn, flags, localdata), localdata) + try: + for func in (prefuncs or '').split(): + exec_func(func, localdata) + exec_func(task, localdata) + for func in (postfuncs or '').split(): + exec_func(func, localdata) + except FuncFailed as exc: + if quieterr: + event.fire(TaskFailedSilent(task, logfn, localdata), localdata) + else: + errprinted = errchk.triggered + logger.error(str(exc)) + event.fire(TaskFailed(task, logfn, localdata, errprinted), localdata) + return 1 + finally: + sys.stdout.flush() + sys.stderr.flush() + + bblogger.removeHandler(handler) + + # Restore the backup fds + os.dup2(osi[0], osi[1]) + os.dup2(oso[0], oso[1]) + os.dup2(ose[0], ose[1]) + + # Close the backup fds + os.close(osi[0]) + os.close(oso[0]) + os.close(ose[0]) + si.close() + + logfile.close() + if os.path.exists(logfn) and os.path.getsize(logfn) == 0: + logger.debug(2, "Zero size logfn %s, removing", logfn) + bb.utils.remove(logfn) + bb.utils.remove(loglink) + event.fire(TaskSucceeded(task, logfn, localdata), localdata) + + if not localdata.getVarFlag(task, 'nostamp') and not localdata.getVarFlag(task, 'selfstamp'): + make_stamp(task, localdata) + + return 0 + +def exec_task(fn, task, d, profile = False): + try: + quieterr = False + if d.getVarFlag(task, "quieterrors") is not None: + quieterr = True + + if profile: + profname = "profile-%s.log" % (d.getVar("PN", True) + "-" + task) + try: + import cProfile as profile + except: + import profile + prof = profile.Profile() + ret = profile.Profile.runcall(prof, _exec_task, fn, task, d, quieterr) + prof.dump_stats(profname) + bb.utils.process_profilelog(profname) + + return ret + else: + return _exec_task(fn, task, d, quieterr) + 
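+    # Catch-all: report the failure (a traceback and a TaskFailed event,
+    # unless 'quieterrors' was set) and return non-zero rather than letting
+    # the exception propagate.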
+ except Exception: + from traceback import format_exc + if not quieterr: + logger.error("Build of %s failed" % (task)) + logger.error(format_exc()) + failedevent = TaskFailed(task, None, d, True) + event.fire(failedevent, d) + return 1 + +def stamp_internal(taskname, d, file_name, baseonly=False): + """ + Internal stamp helper function + Makes sure the stamp directory exists + Returns the stamp path+filename + + In the bitbake core, d can be a CacheData and file_name will be set. + When called in task context, d will be a data store, file_name will not be set + """ + taskflagname = taskname + if taskname.endswith("_setscene") and taskname != "do_setscene": + taskflagname = taskname.replace("_setscene", "") + + if file_name: + stamp = d.stamp_base[file_name].get(taskflagname) or d.stamp[file_name] + extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or "" + else: + stamp = d.getVarFlag(taskflagname, 'stamp-base', True) or d.getVar('STAMP', True) + file_name = d.getVar('BB_FILENAME', True) + extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or "" + + if baseonly: + return stamp + + if not stamp: + return + + stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo) + + stampdir = os.path.dirname(stamp) + if bb.parse.cached_mtime_noerror(stampdir) == 0: + bb.utils.mkdirhier(stampdir) + + return stamp + +def stamp_cleanmask_internal(taskname, d, file_name): + """ + Internal stamp helper function to generate stamp cleaning mask + Returns the stamp path+filename + + In the bitbake core, d can be a CacheData and file_name will be set. + When called in task context, d will be a data store, file_name will not be set + """ + taskflagname = taskname + if taskname.endswith("_setscene") and taskname != "do_setscene": + taskflagname = taskname.replace("_setscene", "") + + if file_name: + stamp = d.stamp_base_clean[file_name].get(taskflagname) or d.stampclean[file_name] + extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or "" + else: + stamp = d.getVarFlag(taskflagname, 'stamp-base-clean', True) or d.getVar('STAMPCLEAN', True) + file_name = d.getVar('BB_FILENAME', True) + extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or "" + + if not stamp: + return [] + + cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo) + + return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")] + +def make_stamp(task, d, file_name = None): + """ + Creates/updates a stamp for a given task + (d can be a data dict or dataCache) + """ + cleanmask = stamp_cleanmask_internal(task, d, file_name) + for mask in cleanmask: + for name in glob.glob(mask): + # Preserve sigdata files in the stamps directory + if "sigdata" in name: + continue + # Preserve taint files in the stamps directory + if name.endswith('.taint'): + continue + os.unlink(name) + + stamp = stamp_internal(task, d, file_name) + # Remove the file and recreate to force timestamp + # change on broken NFS filesystems + if stamp: + bb.utils.remove(stamp) + open(stamp, "w").close() + + # If we're in task context, write out a signature file for each task + # as it completes + if not task.endswith("_setscene") and task != "do_setscene" and not file_name: + stampbase = stamp_internal(task, d, None, True) + file_name = d.getVar('BB_FILENAME', True) + bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True) + +def del_stamp(task, d, file_name = None): + """ + Removes a stamp for a given task + (d can be a data dict or dataCache) + """ + stamp = 
stamp_internal(task, d, file_name) + bb.utils.remove(stamp) + +def write_taint(task, d, file_name = None): + """ + Creates a "taint" file which will force the specified task and its + dependents to be re-run the next time by influencing the value of its + taskhash. + (d can be a data dict or dataCache) + """ + import uuid + if file_name: + taintfn = d.stamp[file_name] + '.' + task + '.taint' + else: + taintfn = d.getVar('STAMP', True) + '.' + task + '.taint' + bb.utils.mkdirhier(os.path.dirname(taintfn)) + # The specific content of the taint file is not really important, + # we just need it to be random, so a random UUID is used + with open(taintfn, 'w') as taintf: + taintf.write(str(uuid.uuid4())) + +def stampfile(taskname, d, file_name = None): + """ + Return the stamp for a given task + (d can be a data dict or dataCache) + """ + return stamp_internal(taskname, d, file_name) + +def add_tasks(tasklist, deltasklist, d): + task_deps = d.getVar('_task_deps') + if not task_deps: + task_deps = {} + if not 'tasks' in task_deps: + task_deps['tasks'] = [] + if not 'parents' in task_deps: + task_deps['parents'] = {} + + for task in tasklist: + task = d.expand(task) + + if task in deltasklist: + continue + + d.setVarFlag(task, 'task', 1) + + if not task in task_deps['tasks']: + task_deps['tasks'].append(task) + + flags = d.getVarFlags(task) + def getTask(name): + if not name in task_deps: + task_deps[name] = {} + if name in flags: + deptask = d.expand(flags[name]) + task_deps[name][task] = deptask + getTask('depends') + getTask('rdepends') + getTask('deptask') + getTask('rdeptask') + getTask('recrdeptask') + getTask('recideptask') + getTask('nostamp') + getTask('fakeroot') + getTask('noexec') + getTask('umask') + task_deps['parents'][task] = [] + if 'deps' in flags: + for dep in flags['deps']: + dep = d.expand(dep) + task_deps['parents'][task].append(dep) + + # don't assume holding a reference + d.setVar('_task_deps', task_deps) + +def addtask(task, before, after, d): + if task[:3] != "do_": + task = "do_" + task + + d.setVarFlag(task, "task", 1) + bbtasks = d.getVar('__BBTASKS') or [] + if not task in bbtasks: + bbtasks.append(task) + d.setVar('__BBTASKS', bbtasks) + + existing = d.getVarFlag(task, "deps") or [] + if after is not None: + # set up deps for function + for entry in after.split(): + if entry not in existing: + existing.append(entry) + d.setVarFlag(task, "deps", existing) + if before is not None: + # set up things that depend on this func + for entry in before.split(): + existing = d.getVarFlag(entry, "deps") or [] + if task not in existing: + d.setVarFlag(entry, "deps", [task] + existing) + +def deltask(task, d): + if task[:3] != "do_": + task = "do_" + task + + bbtasks = d.getVar('__BBDELTASKS') or [] + if not task in bbtasks: + bbtasks.append(task) + d.setVar('__BBDELTASKS', bbtasks) + diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py new file mode 100644 index 0000000..a1dde96 --- /dev/null +++ b/bitbake/lib/bb/cache.py @@ -0,0 +1,837 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# BitBake Cache implementation +# +# Caching of bitbake variables before task execution + +# Copyright (C) 2006 Richard Purdie +# Copyright (C) 2012 Intel Corporation + +# but small sections based on code from bin/bitbake: +# Copyright (C) 2003, 2004 Chris Larson +# Copyright (C) 2003, 2004 Phil Blundell +# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer +# Copyright (C) 2005 Holger Hans Peter Freyther +# Copyright (C) 2005 ROAD GmbH +# +# 
This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + + +import os +import logging +from collections import defaultdict +import bb.utils + +logger = logging.getLogger("BitBake.Cache") + +try: + import cPickle as pickle +except ImportError: + import pickle + logger.info("Importing cPickle failed. " + "Falling back to a very slow implementation.") + +__cache_version__ = "148" + +def getCacheFile(path, filename, data_hash): + return os.path.join(path, filename + "." + data_hash) + +# RecipeInfoCommon defines common data retrieving methods +# from meta data for caches. CoreRecipeInfo as well as other +# Extra RecipeInfo needs to inherit this class +class RecipeInfoCommon(object): + + @classmethod + def listvar(cls, var, metadata): + return cls.getvar(var, metadata).split() + + @classmethod + def intvar(cls, var, metadata): + return int(cls.getvar(var, metadata) or 0) + + @classmethod + def depvar(cls, var, metadata): + return bb.utils.explode_deps(cls.getvar(var, metadata)) + + @classmethod + def pkgvar(cls, var, packages, metadata): + return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata)) + for pkg in packages) + + @classmethod + def taskvar(cls, var, tasks, metadata): + return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata)) + for task in tasks) + + @classmethod + def flaglist(cls, flag, varlist, metadata, squash=False): + out_dict = dict((var, metadata.getVarFlag(var, flag, True)) + for var in varlist) + if squash: + return dict((k,v) for (k,v) in out_dict.iteritems() if v) + else: + return out_dict + + @classmethod + def getvar(cls, var, metadata): + return metadata.getVar(var, True) or '' + + +class CoreRecipeInfo(RecipeInfoCommon): + __slots__ = () + + cachefile = "bb_cache.dat" + + def __init__(self, filename, metadata): + self.file_depends = metadata.getVar('__depends', False) + self.timestamp = bb.parse.cached_mtime(filename) + self.variants = self.listvar('__VARIANTS', metadata) + [''] + self.appends = self.listvar('__BBAPPEND', metadata) + self.nocache = self.getvar('__BB_DONT_CACHE', metadata) + + self.skipreason = self.getvar('__SKIPPED', metadata) + if self.skipreason: + self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename,metadata)[0] + self.skipped = True + self.provides = self.depvar('PROVIDES', metadata) + self.rprovides = self.depvar('RPROVIDES', metadata) + return + + self.tasks = metadata.getVar('__BBTASKS', False) + + self.pn = self.getvar('PN', metadata) + self.packages = self.listvar('PACKAGES', metadata) + if not self.pn in self.packages: + self.packages.append(self.pn) + + self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata) + self.hashfilename = self.getvar('BB_HASHFILENAME', metadata) + + self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}} + + self.skipped = False + self.pe = self.getvar('PE', metadata) + self.pv = self.getvar('PV', metadata) + self.pr = self.getvar('PR', 
metadata) + self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata) + self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata) + self.stamp = self.getvar('STAMP', metadata) + self.stampclean = self.getvar('STAMPCLEAN', metadata) + self.stamp_base = self.flaglist('stamp-base', self.tasks, metadata) + self.stamp_base_clean = self.flaglist('stamp-base-clean', self.tasks, metadata) + self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata) + self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True) + self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata) + self.depends = self.depvar('DEPENDS', metadata) + self.provides = self.depvar('PROVIDES', metadata) + self.rdepends = self.depvar('RDEPENDS', metadata) + self.rprovides = self.depvar('RPROVIDES', metadata) + self.rrecommends = self.depvar('RRECOMMENDS', metadata) + self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata) + self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata) + self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata) + self.inherits = self.getvar('__inherit_cache', metadata) + self.fakerootenv = self.getvar('FAKEROOTENV', metadata) + self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata) + self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata) + + @classmethod + def init_cacheData(cls, cachedata): + # CacheData in Core RecipeInfo Class + cachedata.task_deps = {} + cachedata.pkg_fn = {} + cachedata.pkg_pn = defaultdict(list) + cachedata.pkg_pepvpr = {} + cachedata.pkg_dp = {} + + cachedata.stamp = {} + cachedata.stampclean = {} + cachedata.stamp_base = {} + cachedata.stamp_base_clean = {} + cachedata.stamp_extrainfo = {} + cachedata.file_checksums = {} + cachedata.fn_provides = {} + cachedata.pn_provides = defaultdict(list) + cachedata.all_depends = [] + + cachedata.deps = defaultdict(list) + cachedata.packages = defaultdict(list) + cachedata.providers = defaultdict(list) + cachedata.rproviders = defaultdict(list) + cachedata.packages_dynamic = defaultdict(list) + + cachedata.rundeps = defaultdict(lambda: defaultdict(list)) + cachedata.runrecs = defaultdict(lambda: defaultdict(list)) + cachedata.possible_world = [] + cachedata.universe_target = [] + cachedata.hashfn = {} + + cachedata.basetaskhash = {} + cachedata.inherits = {} + cachedata.fakerootenv = {} + cachedata.fakerootnoenv = {} + cachedata.fakerootdirs = {} + + def add_cacheData(self, cachedata, fn): + cachedata.task_deps[fn] = self.task_deps + cachedata.pkg_fn[fn] = self.pn + cachedata.pkg_pn[self.pn].append(fn) + cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr) + cachedata.pkg_dp[fn] = self.defaultpref + cachedata.stamp[fn] = self.stamp + cachedata.stampclean[fn] = self.stampclean + cachedata.stamp_base[fn] = self.stamp_base + cachedata.stamp_base_clean[fn] = self.stamp_base_clean + cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo + cachedata.file_checksums[fn] = self.file_checksums + + provides = [self.pn] + for provide in self.provides: + if provide not in provides: + provides.append(provide) + cachedata.fn_provides[fn] = provides + + for provide in provides: + cachedata.providers[provide].append(fn) + if provide not in cachedata.pn_provides[self.pn]: + cachedata.pn_provides[self.pn].append(provide) + + for dep in self.depends: + if dep not in cachedata.deps[fn]: + cachedata.deps[fn].append(dep) + if dep not in cachedata.all_depends: + cachedata.all_depends.append(dep) + + rprovides = self.rprovides + for package in self.packages: 
+ cachedata.packages[package].append(fn) + rprovides += self.rprovides_pkg[package] + + for rprovide in rprovides: + cachedata.rproviders[rprovide].append(fn) + + for package in self.packages_dynamic: + cachedata.packages_dynamic[package].append(fn) + + # Build hash of runtime depends and recommends + for package in self.packages + [self.pn]: + cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package] + cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package] + + # Collect files we may need for possible world-dep + # calculations + if self.not_world: + logger.debug(1, "EXCLUDE FROM WORLD: %s", fn) + else: + cachedata.possible_world.append(fn) + + # create a collection of all targets for sanity checking + # tasks, such as upstream versions, license, and tools for + # task and image creation. + cachedata.universe_target.append(self.pn) + + cachedata.hashfn[fn] = self.hashfilename + for task, taskhash in self.basetaskhashes.iteritems(): + identifier = '%s.%s' % (fn, task) + cachedata.basetaskhash[identifier] = taskhash + + cachedata.inherits[fn] = self.inherits + cachedata.fakerootenv[fn] = self.fakerootenv + cachedata.fakerootnoenv[fn] = self.fakerootnoenv + cachedata.fakerootdirs[fn] = self.fakerootdirs + + + +class Cache(object): + """ + BitBake Cache implementation + """ + + def __init__(self, data, data_hash, caches_array): + # Pass caches_array information into Cache Constructor + # It will be used later for deciding whether we + # need extra cache file dump/load support + self.caches_array = caches_array + self.cachedir = data.getVar("CACHE", True) + self.clean = set() + self.checked = set() + self.depends_cache = {} + self.data = None + self.data_fn = None + self.cacheclean = True + self.data_hash = data_hash + + if self.cachedir in [None, '']: + self.has_cache = False + logger.info("Not using a cache. 
" + "Set CACHE = <directory> to enable.") + return + + self.has_cache = True + self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash) + + logger.debug(1, "Using cache in '%s'", self.cachedir) + bb.utils.mkdirhier(self.cachedir) + + cache_ok = True + if self.caches_array: + for cache_class in self.caches_array: + if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): + cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) + cache_ok = cache_ok and os.path.exists(cachefile) + cache_class.init_cacheData(self) + if cache_ok: + self.load_cachefile() + elif os.path.isfile(self.cachefile): + logger.info("Out of date cache found, rebuilding...") + + def load_cachefile(self): + # Firstly, using core cache file information for + # valid checking + with open(self.cachefile, "rb") as cachefile: + pickled = pickle.Unpickler(cachefile) + try: + cache_ver = pickled.load() + bitbake_ver = pickled.load() + except Exception: + logger.info('Invalid cache, rebuilding...') + return + + if cache_ver != __cache_version__: + logger.info('Cache version mismatch, rebuilding...') + return + elif bitbake_ver != bb.__version__: + logger.info('Bitbake version mismatch, rebuilding...') + return + + + cachesize = 0 + previous_progress = 0 + previous_percent = 0 + + # Calculate the correct cachesize of all those cache files + for cache_class in self.caches_array: + if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): + cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) + with open(cachefile, "rb") as cachefile: + cachesize += os.fstat(cachefile.fileno()).st_size + + bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data) + + for cache_class in self.caches_array: + if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): + cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) + with open(cachefile, "rb") as cachefile: + pickled = pickle.Unpickler(cachefile) + while cachefile: + try: + key = pickled.load() + value = pickled.load() + except Exception: + break + if self.depends_cache.has_key(key): + self.depends_cache[key].append(value) + else: + self.depends_cache[key] = [value] + # only fire events on even percentage boundaries + current_progress = cachefile.tell() + previous_progress + current_percent = 100 * current_progress / cachesize + if current_percent > previous_percent: + previous_percent = current_percent + bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize), + self.data) + + previous_progress += current_progress + + # Note: depends cache number is corresponding to the parsing file numbers. + # The same file has several caches, still regarded as one item in the cache + bb.event.fire(bb.event.CacheLoadCompleted(cachesize, + len(self.depends_cache)), + self.data) + + + @staticmethod + def virtualfn2realfn(virtualfn): + """ + Convert a virtual file name to a real one + the associated subclass keyword + """ + + fn = virtualfn + cls = "" + if virtualfn.startswith('virtual:'): + elems = virtualfn.split(':') + cls = ":".join(elems[1:-1]) + fn = elems[-1] + return (fn, cls) + + @staticmethod + def realfn2virtual(realfn, cls): + """ + Convert a real filename + the associated subclass keyword to a virtual filename + """ + if cls == "": + return realfn + return "virtual:" + cls + ":" + realfn + + @classmethod + def loadDataFull(cls, virtualfn, appends, cfgData): + """ + Return a complete set of data for fn. 
+ To do this, we need to parse the file. + """ + + (fn, virtual) = cls.virtualfn2realfn(virtualfn) + + logger.debug(1, "Parsing %s (full)", fn) + + cfgData.setVar("__ONLYFINALISE", virtual or "default") + bb_data = cls.load_bbfile(fn, appends, cfgData) + return bb_data[virtual] + + @classmethod + def parse(cls, filename, appends, configdata, caches_array): + """Parse the specified filename, returning the recipe information""" + infos = [] + datastores = cls.load_bbfile(filename, appends, configdata) + depends = [] + for variant, data in sorted(datastores.iteritems(), + key=lambda i: i[0], + reverse=True): + virtualfn = cls.realfn2virtual(filename, variant) + depends = depends + (data.getVar("__depends", False) or []) + if depends and not variant: + data.setVar("__depends", depends) + + info_array = [] + for cache_class in caches_array: + if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): + info = cache_class(filename, data) + info_array.append(info) + infos.append((virtualfn, info_array)) + + return infos + + def load(self, filename, appends, configdata): + """Obtain the recipe information for the specified filename, + using cached values if available, otherwise parsing. + + Note that if it does parse to obtain the info, it will not + automatically add the information to the cache or to your + CacheData. Use the add or add_info method to do so after + running this, or use loadData instead.""" + cached = self.cacheValid(filename, appends) + if cached: + infos = [] + # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo] + info_array = self.depends_cache[filename] + for variant in info_array[0].variants: + virtualfn = self.realfn2virtual(filename, variant) + infos.append((virtualfn, self.depends_cache[virtualfn])) + else: + logger.debug(1, "Parsing %s", filename) + return self.parse(filename, appends, configdata, self.caches_array) + + return cached, infos + + def loadData(self, fn, appends, cfgData, cacheData): + """Load the recipe info for the specified filename, + parsing and adding to the cache if necessary, and adding + the recipe information to the supplied CacheData instance.""" + skipped, virtuals = 0, 0 + + cached, infos = self.load(fn, appends, cfgData) + for virtualfn, info_array in infos: + if info_array[0].skipped: + logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason) + skipped += 1 + else: + self.add_info(virtualfn, info_array, cacheData, not cached) + virtuals += 1 + + return cached, skipped, virtuals + + def cacheValid(self, fn, appends): + """ + Is the cache valid for fn? + Fast version, no timestamps checked. + """ + if fn not in self.checked: + self.cacheValidUpdate(fn, appends) + + # Is cache enabled? + if not self.has_cache: + return False + if fn in self.clean: + return True + return False + + def cacheValidUpdate(self, fn, appends): + """ + Is the cache valid for fn? + Make thorough (slower) checks including timestamps. + """ + # Is cache enabled? 
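+        # (With no cache configured there is nothing to validate, so the
+        # file can never be reported as clean.)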
+ if not self.has_cache: + return False + + self.checked.add(fn) + + # File isn't in depends_cache + if not fn in self.depends_cache: + logger.debug(2, "Cache: %s is not cached", fn) + return False + + mtime = bb.parse.cached_mtime_noerror(fn) + + # Check file still exists + if mtime == 0: + logger.debug(2, "Cache: %s no longer exists", fn) + self.remove(fn) + return False + + info_array = self.depends_cache[fn] + # Check the file's timestamp + if mtime != info_array[0].timestamp: + logger.debug(2, "Cache: %s changed", fn) + self.remove(fn) + return False + + # Check dependencies are still valid + depends = info_array[0].file_depends + if depends: + for f, old_mtime in depends: + fmtime = bb.parse.cached_mtime_noerror(f) + # Check if file still exists + if old_mtime != 0 and fmtime == 0: + logger.debug(2, "Cache: %s's dependency %s was removed", + fn, f) + self.remove(fn) + return False + + if (fmtime != old_mtime): + logger.debug(2, "Cache: %s's dependency %s changed", + fn, f) + self.remove(fn) + return False + + if hasattr(info_array[0], 'file_checksums'): + for _, fl in info_array[0].file_checksums.items(): + for f in fl.split(): + if "*" in f: + continue + f, exist = f.split(":") + if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)): + logger.debug(2, "Cache: %s's file checksum list file %s changed", + fn, f) + self.remove(fn) + return False + + if appends != info_array[0].appends: + logger.debug(2, "Cache: appends for %s changed", fn) + logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends))) + self.remove(fn) + return False + + invalid = False + for cls in info_array[0].variants: + virtualfn = self.realfn2virtual(fn, cls) + self.clean.add(virtualfn) + if virtualfn not in self.depends_cache: + logger.debug(2, "Cache: %s is not cached", virtualfn) + invalid = True + + # If any one of the variants is not present, mark as invalid for all + if invalid: + for cls in info_array[0].variants: + virtualfn = self.realfn2virtual(fn, cls) + if virtualfn in self.clean: + logger.debug(2, "Cache: Removing %s from cache", virtualfn) + self.clean.remove(virtualfn) + if fn in self.clean: + logger.debug(2, "Cache: Marking %s as not clean", fn) + self.clean.remove(fn) + return False + + self.clean.add(fn) + return True + + def remove(self, fn): + """ + Remove a fn from the cache + Called from the parser in error cases + """ + if fn in self.depends_cache: + logger.debug(1, "Removing %s from cache", fn) + del self.depends_cache[fn] + if fn in self.clean: + logger.debug(1, "Marking %s as unclean", fn) + self.clean.remove(fn) + + def sync(self): + """ + Save the cache + Called from the parser when complete (or exiting) + """ + + if not self.has_cache: + return + + if self.cacheclean: + logger.debug(2, "Cache is clean, not saving.") + return + + file_dict = {} + pickler_dict = {} + for cache_class in self.caches_array: + if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): + cache_class_name = cache_class.__name__ + cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) + file_dict[cache_class_name] = open(cachefile, "wb") + pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL) + + pickler_dict['CoreRecipeInfo'].dump(__cache_version__) + pickler_dict['CoreRecipeInfo'].dump(bb.__version__) + + try: + for key, info_array in self.depends_cache.iteritems(): + for info in info_array: + if isinstance(info, RecipeInfoCommon): + cache_class_name = 
info.__class__.__name__ + pickler_dict[cache_class_name].dump(key) + pickler_dict[cache_class_name].dump(info) + finally: + for cache_class in self.caches_array: + if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): + cache_class_name = cache_class.__name__ + file_dict[cache_class_name].close() + + del self.depends_cache + + @staticmethod + def mtime(cachefile): + return bb.parse.cached_mtime_noerror(cachefile) + + def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None): + if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped): + cacheData.add_from_recipeinfo(filename, info_array) + + if watcher: + watcher(info_array[0].file_depends) + + if not self.has_cache: + return + + if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache: + if parsed: + self.cacheclean = False + self.depends_cache[filename] = info_array + + def add(self, file_name, data, cacheData, parsed=None): + """ + Save data we need into the cache + """ + + realfn = self.virtualfn2realfn(file_name)[0] + + info_array = [] + for cache_class in self.caches_array: + if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): + info_array.append(cache_class(realfn, data)) + self.add_info(file_name, info_array, cacheData, parsed) + + @staticmethod + def load_bbfile(bbfile, appends, config): + """ + Load and parse one .bb build file + Return the data and whether parsing resulted in the file being skipped + """ + chdir_back = False + + from bb import data, parse + + # expand tmpdir to include this topdir + data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config) + bbfile_loc = os.path.abspath(os.path.dirname(bbfile)) + oldpath = os.path.abspath(os.getcwd()) + parse.cached_mtime_noerror(bbfile_loc) + bb_data = data.init_db(config) + # The ConfHandler first looks if there is a TOPDIR and if not + # then it would call getcwd(). + # Previously, we chdir()ed to bbfile_loc, called the handler + # and finally chdir()ed back, a couple of thousand times. We now + # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet. + if not data.getVar('TOPDIR', bb_data): + chdir_back = True + data.setVar('TOPDIR', bbfile_loc, bb_data) + try: + if appends: + data.setVar('__BBAPPEND', " ".join(appends), bb_data) + bb_data = parse.handle(bbfile, bb_data) + if chdir_back: + os.chdir(oldpath) + return bb_data + except: + if chdir_back: + os.chdir(oldpath) + raise + + +def init(cooker): + """ + The Objective: Cache the minimum amount of data possible yet get to the + stage of building packages (i.e. tryBuild) without reparsing any .bb files. + + To do this, we intercept getVar calls and only cache the variables we see + being accessed. We rely on the cache getVar calls being made for all + variables bitbake might need to use to reach this stage. For each cached + file we need to track: + + * Its mtime + * The mtimes of all its dependencies + * Whether it caused a parse.SkipRecipe exception + + Files causing parsing errors are evicted from the cache. 
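+
+    When the variables captured here are not sufficient, Cache.loadDataFull()
+    can be used to reparse a single recipe and return its complete data.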
+ + """ + return Cache(cooker.configuration.data, cooker.configuration.data_hash) + + +class CacheData(object): + """ + The data structures we compile from the cached data + """ + + def __init__(self, caches_array): + self.caches_array = caches_array + for cache_class in self.caches_array: + if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): + cache_class.init_cacheData(self) + + # Direct cache variables + self.task_queues = {} + self.preferred = {} + self.tasks = {} + # Indirect Cache variables (set elsewhere) + self.ignored_dependencies = [] + self.world_target = set() + self.bbfile_priority = {} + + def add_from_recipeinfo(self, fn, info_array): + for info in info_array: + info.add_cacheData(self, fn) + +class MultiProcessCache(object): + """ + BitBake multi-process cache implementation + + Used by the codeparser & file checksum caches + """ + + def __init__(self): + self.cachefile = None + self.cachedata = self.create_cachedata() + self.cachedata_extras = self.create_cachedata() + + def init_cache(self, d): + cachedir = (d.getVar("PERSISTENT_DIR", True) or + d.getVar("CACHE", True)) + if cachedir in [None, '']: + return + bb.utils.mkdirhier(cachedir) + self.cachefile = os.path.join(cachedir, self.__class__.cache_file_name) + logger.debug(1, "Using cache in '%s'", self.cachefile) + + glf = bb.utils.lockfile(self.cachefile + ".lock") + + try: + with open(self.cachefile, "rb") as f: + p = pickle.Unpickler(f) + data, version = p.load() + except: + bb.utils.unlockfile(glf) + return + + bb.utils.unlockfile(glf) + + if version != self.__class__.CACHE_VERSION: + return + + self.cachedata = data + + def create_cachedata(self): + data = [{}] + return data + + def save_extras(self, d): + if not self.cachefile: + return + + glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True) + + i = os.getpid() + lf = None + while not lf: + lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False) + if not lf or os.path.exists(self.cachefile + "-" + str(i)): + if lf: + bb.utils.unlockfile(lf) + lf = None + i = i + 1 + continue + + with open(self.cachefile + "-" + str(i), "wb") as f: + p = pickle.Pickler(f, -1) + p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION]) + + bb.utils.unlockfile(lf) + bb.utils.unlockfile(glf) + + def merge_data(self, source, dest): + for j in range(0,len(dest)): + for h in source[j]: + if h not in dest[j]: + dest[j][h] = source[j][h] + + def save_merge(self, d): + if not self.cachefile: + return + + glf = bb.utils.lockfile(self.cachefile + ".lock") + + data = self.cachedata + + for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]: + f = os.path.join(os.path.dirname(self.cachefile), f) + try: + with open(f, "rb") as fd: + p = pickle.Unpickler(fd) + extradata, version = p.load() + except (IOError, EOFError): + os.unlink(f) + continue + + if version != self.__class__.CACHE_VERSION: + os.unlink(f) + continue + + self.merge_data(extradata, data) + os.unlink(f) + + with open(self.cachefile, "wb") as f: + p = pickle.Pickler(f, -1) + p.dump([data, self.__class__.CACHE_VERSION]) + + bb.utils.unlockfile(glf) + diff --git a/bitbake/lib/bb/cache_extra.py b/bitbake/lib/bb/cache_extra.py new file mode 100644 index 0000000..83f4959 --- /dev/null +++ b/bitbake/lib/bb/cache_extra.py @@ -0,0 +1,75 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# Extra RecipeInfo will be all defined in this file. 
Currently, +# Only Hob (Image Creator) Requests some extra fields. So +# HobRecipeInfo is defined. It's named HobRecipeInfo because it +# is introduced by 'hob'. Users could also introduce other +# RecipeInfo or simply use those already defined RecipeInfo. +# In the following patch, this newly defined new extra RecipeInfo +# will be dynamically loaded and used for loading/saving the extra +# cache fields + +# Copyright (C) 2011, Intel Corporation. All rights reserved. + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +from bb.cache import RecipeInfoCommon + +class HobRecipeInfo(RecipeInfoCommon): + __slots__ = () + + classname = "HobRecipeInfo" + # please override this member with the correct data cache file + # such as (bb_cache.dat, bb_extracache_hob.dat) + cachefile = "bb_extracache_" + classname +".dat" + + # override this member with the list of extra cache fields + # that this class will provide + cachefields = ['summary', 'license', 'section', + 'description', 'homepage', 'bugtracker', + 'prevision', 'files_info'] + + def __init__(self, filename, metadata): + + self.summary = self.getvar('SUMMARY', metadata) + self.license = self.getvar('LICENSE', metadata) + self.section = self.getvar('SECTION', metadata) + self.description = self.getvar('DESCRIPTION', metadata) + self.homepage = self.getvar('HOMEPAGE', metadata) + self.bugtracker = self.getvar('BUGTRACKER', metadata) + self.prevision = self.getvar('PR', metadata) + self.files_info = self.getvar('FILES_INFO', metadata) + + @classmethod + def init_cacheData(cls, cachedata): + # CacheData in Hob RecipeInfo Class + cachedata.summary = {} + cachedata.license = {} + cachedata.section = {} + cachedata.description = {} + cachedata.homepage = {} + cachedata.bugtracker = {} + cachedata.prevision = {} + cachedata.files_info = {} + + def add_cacheData(self, cachedata, fn): + cachedata.summary[fn] = self.summary + cachedata.license[fn] = self.license + cachedata.section[fn] = self.section + cachedata.description[fn] = self.description + cachedata.homepage[fn] = self.homepage + cachedata.bugtracker[fn] = self.bugtracker + cachedata.prevision[fn] = self.prevision + cachedata.files_info[fn] = self.files_info diff --git a/bitbake/lib/bb/checksum.py b/bitbake/lib/bb/checksum.py new file mode 100644 index 0000000..514ff0b --- /dev/null +++ b/bitbake/lib/bb/checksum.py @@ -0,0 +1,90 @@ +# Local file checksum cache implementation +# +# Copyright (C) 2012 Intel Corporation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import os +import stat +import bb.utils +import logging +from bb.cache import MultiProcessCache + +logger = logging.getLogger("BitBake.Cache") + +try: + import cPickle as pickle +except ImportError: + import pickle + logger.info("Importing cPickle failed. " + "Falling back to a very slow implementation.") + + +# mtime cache (non-persistent) +# based upon the assumption that files do not change during bitbake run +class FileMtimeCache(object): + cache = {} + + def cached_mtime(self, f): + if f not in self.cache: + self.cache[f] = os.stat(f)[stat.ST_MTIME] + return self.cache[f] + + def cached_mtime_noerror(self, f): + if f not in self.cache: + try: + self.cache[f] = os.stat(f)[stat.ST_MTIME] + except OSError: + return 0 + return self.cache[f] + + def update_mtime(self, f): + self.cache[f] = os.stat(f)[stat.ST_MTIME] + return self.cache[f] + + def clear(self): + self.cache.clear() + +# Checksum + mtime cache (persistent) +class FileChecksumCache(MultiProcessCache): + cache_file_name = "local_file_checksum_cache.dat" + CACHE_VERSION = 1 + + def __init__(self): + self.mtime_cache = FileMtimeCache() + MultiProcessCache.__init__(self) + + def get_checksum(self, f): + entry = self.cachedata[0].get(f) + cmtime = self.mtime_cache.cached_mtime(f) + if entry: + (mtime, hashval) = entry + if cmtime == mtime: + return hashval + else: + bb.debug(2, "file %s changed mtime, recompute checksum" % f) + + hashval = bb.utils.md5_file(f) + self.cachedata_extras[0][f] = (cmtime, hashval) + return hashval + + def merge_data(self, source, dest): + for h in source[0]: + if h in dest: + (smtime, _) = source[0][h] + (dmtime, _) = dest[0][h] + if smtime > dmtime: + dest[0][h] = source[0][h] + else: + dest[0][h] = source[0][h] diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py new file mode 100644 index 0000000..8b8f91a --- /dev/null +++ b/bitbake/lib/bb/codeparser.py @@ -0,0 +1,406 @@ +import ast +import codegen +import logging +import os.path +import bb.utils, bb.data +from itertools import chain +from pysh import pyshyacc, pyshlex, sherrors +from bb.cache import MultiProcessCache + + +logger = logging.getLogger('BitBake.CodeParser') + +try: + import cPickle as pickle +except ImportError: + import pickle + logger.info('Importing cPickle failed. Falling back to a very slow implementation.') + + +def check_indent(codestr): + """If the code is indented, add a top level piece of code to 'remove' the indentation""" + + i = 0 + while codestr[i] in ["\n", "\t", " "]: + i = i + 1 + + if i == 0: + return codestr + + if codestr[i-1] == "\t" or codestr[i-1] == " ": + return "if 1:\n" + codestr + + return codestr + + +# Basically pickle, in python 2.7.3 at least, does badly with data duplication +# upon pickling and unpickling. Combine this with duplicate objects and things +# are a mess. +# +# When the sets are originally created, python calls intern() on the set keys +# which significantly improves memory usage. Sadly the pickle/unpickle process +# doesn't call intern() on the keys and results in the same strings being duplicated +# in memory. This also means pickle will save the same string multiple times in +# the cache file. 
+# +# By having shell and python cacheline objects with setstate/getstate, we force +# the object creation through our own routine where we can call intern (via internSet). +# +# We also use hashable frozensets and ensure we use references to these so that +# duplicates can be removed, both in memory and in the resulting pickled data. +# +# By playing these games, the size of the cache file shrinks dramatically +# meaning faster load times and the reloaded cache files also consume much less +# memory. Smaller cache files, faster load times and lower memory usage is good. +# +# A custom getstate/setstate using tuples is actually worth 15% cachesize by +# avoiding duplication of the attribute names! + +class SetCache(object): + def __init__(self): + self.setcache = {} + + def internSet(self, items): + + new = [] + for i in items: + new.append(intern(i)) + s = frozenset(new) + if hash(s) in self.setcache: + return self.setcache[hash(s)] + self.setcache[hash(s)] = s + return s + +codecache = SetCache() + +class pythonCacheLine(object): + def __init__(self, refs, execs, contains): + self.refs = codecache.internSet(refs) + self.execs = codecache.internSet(execs) + self.contains = {} + for c in contains: + self.contains[c] = codecache.internSet(contains[c]) + + def __getstate__(self): + return (self.refs, self.execs, self.contains) + + def __setstate__(self, state): + (refs, execs, contains) = state + self.__init__(refs, execs, contains) + def __hash__(self): + l = (hash(self.refs), hash(self.execs)) + for c in sorted(self.contains.keys()): + l = l + (c, hash(self.contains[c])) + return hash(l) + +class shellCacheLine(object): + def __init__(self, execs): + self.execs = codecache.internSet(execs) + + def __getstate__(self): + return (self.execs) + + def __setstate__(self, state): + (execs) = state + self.__init__(execs) + def __hash__(self): + return hash(self.execs) + +class CodeParserCache(MultiProcessCache): + cache_file_name = "bb_codeparser.dat" + CACHE_VERSION = 7 + + def __init__(self): + MultiProcessCache.__init__(self) + self.pythoncache = self.cachedata[0] + self.shellcache = self.cachedata[1] + self.pythoncacheextras = self.cachedata_extras[0] + self.shellcacheextras = self.cachedata_extras[1] + + # To avoid duplication in the codeparser cache, keep + # a lookup of hashes of objects we already have + self.pythoncachelines = {} + self.shellcachelines = {} + + def newPythonCacheLine(self, refs, execs, contains): + cacheline = pythonCacheLine(refs, execs, contains) + h = hash(cacheline) + if h in self.pythoncachelines: + return self.pythoncachelines[h] + self.pythoncachelines[h] = cacheline + return cacheline + + def newShellCacheLine(self, execs): + cacheline = shellCacheLine(execs) + h = hash(cacheline) + if h in self.shellcachelines: + return self.shellcachelines[h] + self.shellcachelines[h] = cacheline + return cacheline + + def init_cache(self, d): + MultiProcessCache.init_cache(self, d) + + # cachedata gets re-assigned in the parent + self.pythoncache = self.cachedata[0] + self.shellcache = self.cachedata[1] + + def create_cachedata(self): + data = [{}, {}] + return data + +codeparsercache = CodeParserCache() + +def parser_cache_init(d): + codeparsercache.init_cache(d) + +def parser_cache_save(d): + codeparsercache.save_extras(d) + +def parser_cache_savemerge(d): + codeparsercache.save_merge(d) + +Logger = logging.getLoggerClass() +class BufferedLogger(Logger): + def __init__(self, name, level=0, target=None): + Logger.__init__(self, name) + self.setLevel(level) + self.buffer = [] + 
self.target = target + + def handle(self, record): + self.buffer.append(record) + + def flush(self): + for record in self.buffer: + self.target.handle(record) + self.buffer = [] + +class PythonParser(): + getvars = (".getVar", ".appendVar", ".prependVar") + containsfuncs = ("bb.utils.contains", "base_contains", "oe.utils.contains", "bb.utils.contains_any") + execfuncs = ("bb.build.exec_func", "bb.build.exec_task") + + def warn(self, func, arg): + """Warn about calls of bitbake APIs which pass a non-literal + argument for the variable name, as we're not able to track such + a reference. + """ + + try: + funcstr = codegen.to_source(func) + argstr = codegen.to_source(arg) + except TypeError: + self.log.debug(2, 'Failed to convert function and argument to source form') + else: + self.log.debug(1, self.unhandled_message % (funcstr, argstr)) + + def visit_Call(self, node): + name = self.called_node_name(node.func) + if name and name.endswith(self.getvars) or name in self.containsfuncs: + if isinstance(node.args[0], ast.Str): + varname = node.args[0].s + if name in self.containsfuncs and isinstance(node.args[1], ast.Str): + if varname not in self.contains: + self.contains[varname] = set() + self.contains[varname].add(node.args[1].s) + else: + self.references.add(node.args[0].s) + else: + self.warn(node.func, node.args[0]) + elif name in self.execfuncs: + if isinstance(node.args[0], ast.Str): + self.var_execs.add(node.args[0].s) + else: + self.warn(node.func, node.args[0]) + elif name and isinstance(node.func, (ast.Name, ast.Attribute)): + self.execs.add(name) + + def called_node_name(self, node): + """Given a called node, return its original string form""" + components = [] + while node: + if isinstance(node, ast.Attribute): + components.append(node.attr) + node = node.value + elif isinstance(node, ast.Name): + components.append(node.id) + return '.'.join(reversed(components)) + else: + break + + def __init__(self, name, log): + self.var_execs = set() + self.contains = {} + self.execs = set() + self.references = set() + self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, log) + + self.unhandled_message = "in call of %s, argument '%s' is not a string literal" + self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message) + + def parse_python(self, node): + h = hash(str(node)) + + if h in codeparsercache.pythoncache: + self.references = set(codeparsercache.pythoncache[h].refs) + self.execs = set(codeparsercache.pythoncache[h].execs) + self.contains = {} + for i in codeparsercache.pythoncache[h].contains: + self.contains[i] = set(codeparsercache.pythoncache[h].contains[i]) + return + + if h in codeparsercache.pythoncacheextras: + self.references = set(codeparsercache.pythoncacheextras[h].refs) + self.execs = set(codeparsercache.pythoncacheextras[h].execs) + self.contains = {} + for i in codeparsercache.pythoncacheextras[h].contains: + self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i]) + return + + code = compile(check_indent(str(node)), "<string>", "exec", + ast.PyCF_ONLY_AST) + + for n in ast.walk(code): + if n.__class__.__name__ == "Call": + self.visit_Call(n) + + self.execs.update(self.var_execs) + + codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains) + +class ShellParser(): + def __init__(self, name, log): + self.funcdefs = set() + self.allexecs = set() + self.execs = set() + self.log = BufferedLogger('BitBake.Data.%s' % name, logging.DEBUG, log) + self.unhandled_template 
= "unable to handle non-literal command '%s'" + self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template) + + def parse_shell(self, value): + """Parse the supplied shell code in a string, returning the external + commands it executes. + """ + + h = hash(str(value)) + + if h in codeparsercache.shellcache: + self.execs = set(codeparsercache.shellcache[h].execs) + return self.execs + + if h in codeparsercache.shellcacheextras: + self.execs = set(codeparsercache.shellcacheextras[h].execs) + return self.execs + + self._parse_shell(value) + self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs) + + codeparsercache.shellcacheextras[h] = codeparsercache.newShellCacheLine(self.execs) + + return self.execs + + def _parse_shell(self, value): + try: + tokens, _ = pyshyacc.parse(value, eof=True, debug=False) + except pyshlex.NeedMore: + raise sherrors.ShellSyntaxError("Unexpected EOF") + + for token in tokens: + self.process_tokens(token) + + def process_tokens(self, tokens): + """Process a supplied portion of the syntax tree as returned by + pyshyacc.parse. + """ + + def function_definition(value): + self.funcdefs.add(value.name) + return [value.body], None + + def case_clause(value): + # Element 0 of each item in the case is the list of patterns, and + # Element 1 of each item in the case is the list of commands to be + # executed when that pattern matches. + words = chain(*[item[0] for item in value.items]) + cmds = chain(*[item[1] for item in value.items]) + return cmds, words + + def if_clause(value): + main = chain(value.cond, value.if_cmds) + rest = value.else_cmds + if isinstance(rest, tuple) and rest[0] == "elif": + return chain(main, if_clause(rest[1])) + else: + return chain(main, rest) + + def simple_command(value): + return None, chain(value.words, (assign[1] for assign in value.assigns)) + + token_handlers = { + "and_or": lambda x: ((x.left, x.right), None), + "async": lambda x: ([x], None), + "brace_group": lambda x: (x.cmds, None), + "for_clause": lambda x: (x.cmds, x.items), + "function_definition": function_definition, + "if_clause": lambda x: (if_clause(x), None), + "pipeline": lambda x: (x.commands, None), + "redirect_list": lambda x: ([x.cmd], None), + "subshell": lambda x: (x.cmds, None), + "while_clause": lambda x: (chain(x.condition, x.cmds), None), + "until_clause": lambda x: (chain(x.condition, x.cmds), None), + "simple_command": simple_command, + "case_clause": case_clause, + } + + for token in tokens: + name, value = token + try: + more_tokens, words = token_handlers[name](value) + except KeyError: + raise NotImplementedError("Unsupported token type " + name) + + if more_tokens: + self.process_tokens(more_tokens) + + if words: + self.process_words(words) + + def process_words(self, words): + """Process a set of 'words' in pyshyacc parlance, which includes + extraction of executed commands from $() blocks, as well as grabbing + the command name argument. 
+ """ + + words = list(words) + for word in list(words): + wtree = pyshlex.make_wordtree(word[1]) + for part in wtree: + if not isinstance(part, list): + continue + + if part[0] in ('`', '$('): + command = pyshlex.wordtree_as_string(part[1:-1]) + self._parse_shell(command) + + if word[0] in ("cmd_name", "cmd_word"): + if word in words: + words.remove(word) + + usetoken = False + for word in words: + if word[0] in ("cmd_name", "cmd_word") or \ + (usetoken and word[0] == "TOKEN"): + if "=" in word[1]: + usetoken = True + continue + + cmd = word[1] + if cmd.startswith("$"): + self.log.debug(1, self.unhandled_template % cmd) + elif cmd == "eval": + command = " ".join(word for _, word in words[1:]) + self._parse_shell(command) + else: + self.allexecs.add(cmd) + break diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py new file mode 100644 index 0000000..60f9ac0 --- /dev/null +++ b/bitbake/lib/bb/command.py @@ -0,0 +1,451 @@ +""" +BitBake 'Command' module + +Provide an interface to interact with the bitbake server through 'commands' +""" + +# Copyright (C) 2006-2007 Richard Purdie +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +The bitbake server takes 'commands' from its UI/commandline. +Commands are either synchronous or asynchronous. +Async commands return data to the client in the form of events. +Sync commands must only return data through the function return value +and must not trigger events, directly or indirectly. 
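+
+A rough usage sketch (illustrative only; the target name and the calling code
+are examples, the real callers are the UI frontends):
+
+    cmd = Command(cooker)
+    # synchronous: the value comes back directly in the return tuple
+    value, error = cmd.runCommand(["getVariable", "MACHINE"])
+    # asynchronous: runCommand() only queues the request; the outcome is
+    # delivered later as CommandCompleted/CommandFailed/CommandExit events
+    queued, error = cmd.runCommand(["buildTargets", ["core-image-minimal"], "build"])
+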
+Commands are queued in a CommandQueue +""" + +import bb.event +import bb.cooker + +class CommandCompleted(bb.event.Event): + pass + +class CommandExit(bb.event.Event): + def __init__(self, exitcode): + bb.event.Event.__init__(self) + self.exitcode = int(exitcode) + +class CommandFailed(CommandExit): + def __init__(self, message): + self.error = message + CommandExit.__init__(self, 1) + +class CommandError(Exception): + pass + +class Command: + """ + A queue of asynchronous commands for bitbake + """ + def __init__(self, cooker): + self.cooker = cooker + self.cmds_sync = CommandsSync() + self.cmds_async = CommandsAsync() + + # FIXME Add lock for this + self.currentAsyncCommand = None + + def runCommand(self, commandline, ro_only = False): + command = commandline.pop(0) + if hasattr(CommandsSync, command): + # Can run synchronous commands straight away + command_method = getattr(self.cmds_sync, command) + if ro_only: + if not hasattr(command_method, 'readonly') or False == getattr(command_method, 'readonly'): + return None, "Not able to execute not readonly commands in readonly mode" + try: + result = command_method(self, commandline) + except CommandError as exc: + return None, exc.args[0] + except Exception: + import traceback + return None, traceback.format_exc() + else: + return result, None + if self.currentAsyncCommand is not None: + return None, "Busy (%s in progress)" % self.currentAsyncCommand[0] + if command not in CommandsAsync.__dict__: + return None, "No such command" + self.currentAsyncCommand = (command, commandline) + self.cooker.configuration.server_register_idlecallback(self.cooker.runCommands, self.cooker) + return True, None + + def runAsyncCommand(self): + try: + if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown): + # updateCache will trigger a shutdown of the parser + # and then raise BBHandledException triggering an exit + self.cooker.updateCache() + return False + if self.currentAsyncCommand is not None: + (command, options) = self.currentAsyncCommand + commandmethod = getattr(CommandsAsync, command) + needcache = getattr( commandmethod, "needcache" ) + if needcache and self.cooker.state != bb.cooker.state.running: + self.cooker.updateCache() + return True + else: + commandmethod(self.cmds_async, self, options) + return False + else: + return False + except KeyboardInterrupt as exc: + self.finishAsyncCommand("Interrupted") + return False + except SystemExit as exc: + arg = exc.args[0] + if isinstance(arg, basestring): + self.finishAsyncCommand(arg) + else: + self.finishAsyncCommand("Exited with %s" % arg) + return False + except Exception as exc: + import traceback + if isinstance(exc, bb.BBHandledException): + self.finishAsyncCommand("") + else: + self.finishAsyncCommand(traceback.format_exc()) + return False + + def finishAsyncCommand(self, msg=None, code=None): + if msg or msg == "": + bb.event.fire(CommandFailed(msg), self.cooker.event_data) + elif code: + bb.event.fire(CommandExit(code), self.cooker.event_data) + else: + bb.event.fire(CommandCompleted(), self.cooker.event_data) + self.currentAsyncCommand = None + self.cooker.finishcommand() + +class CommandsSync: + """ + A class of synchronous commands + These should run quickly so as not to hurt interactive performance. + These must not influence any running synchronous command. 
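+
+    A sketch of the shape these methods take (a hypothetical command, not
+    one provided by this class):
+
+        def getTopdir(self, command, params):
+            # return TOPDIR from the server's datastore
+            return command.cooker.data.getVar("TOPDIR", True)
+        getTopdir.readonly = True    # safe to execute on a read-only server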
+ """ + + def stateShutdown(self, command, params): + """ + Trigger cooker 'shutdown' mode + """ + command.cooker.shutdown(False) + + def stateForceShutdown(self, command, params): + """ + Stop the cooker + """ + command.cooker.shutdown(True) + + def getAllKeysWithFlags(self, command, params): + """ + Returns a dump of the global state. Call with + variable flags to be retrieved as params. + """ + flaglist = params[0] + return command.cooker.getAllKeysWithFlags(flaglist) + getAllKeysWithFlags.readonly = True + + def getVariable(self, command, params): + """ + Read the value of a variable from data + """ + varname = params[0] + expand = True + if len(params) > 1: + expand = (params[1] == "True") + + return command.cooker.data.getVar(varname, expand) + getVariable.readonly = True + + def setVariable(self, command, params): + """ + Set the value of variable in data + """ + varname = params[0] + value = str(params[1]) + command.cooker.data.setVar(varname, value) + + def setConfig(self, command, params): + """ + Set the value of variable in configuration + """ + varname = params[0] + value = str(params[1]) + setattr(command.cooker.configuration, varname, value) + + def enableDataTracking(self, command, params): + """ + Enable history tracking for variables + """ + command.cooker.enableDataTracking() + + def disableDataTracking(self, command, params): + """ + Disable history tracking for variables + """ + command.cooker.disableDataTracking() + + def setPrePostConfFiles(self, command, params): + prefiles = params[0].split() + postfiles = params[1].split() + command.cooker.configuration.prefile = prefiles + command.cooker.configuration.postfile = postfiles + + def getCpuCount(self, command, params): + """ + Get the CPU count on the bitbake server + """ + return bb.utils.cpu_count() + getCpuCount.readonly = True + + def matchFile(self, command, params): + fMatch = params[0] + return command.cooker.matchFile(fMatch) + + def generateNewImage(self, command, params): + image = params[0] + base_image = params[1] + package_queue = params[2] + timestamp = params[3] + description = params[4] + return command.cooker.generateNewImage(image, base_image, + package_queue, timestamp, description) + + def ensureDir(self, command, params): + directory = params[0] + bb.utils.mkdirhier(directory) + + def setVarFile(self, command, params): + """ + Save a variable in a file; used for saving in a configuration file + """ + var = params[0] + val = params[1] + default_file = params[2] + op = params[3] + command.cooker.modifyConfigurationVar(var, val, default_file, op) + + def removeVarFile(self, command, params): + """ + Remove a variable declaration from a file + """ + var = params[0] + command.cooker.removeConfigurationVar(var) + + def createConfigFile(self, command, params): + """ + Create an extra configuration file + """ + name = params[0] + command.cooker.createConfigFile(name) + + def setEventMask(self, command, params): + handlerNum = params[0] + llevel = params[1] + debug_domains = params[2] + mask = params[3] + return bb.event.set_UIHmask(handlerNum, llevel, debug_domains, mask) + + def setFeatures(self, command, params): + """ + Set the cooker features to include the passed list of features + """ + features = params[0] + command.cooker.setFeatures(features) + + # although we change the internal state of the cooker, this is transparent since + # we always take and leave the cooker in state.initial + setFeatures.readonly = True + + def updateConfig(self, command, params): + options = params[0] + 
command.cooker.updateConfigOpts(options) + +class CommandsAsync: + """ + A class of asynchronous commands + These functions communicate via generated events. + Any function that requires metadata parsing should be here. + """ + + def buildFile(self, command, params): + """ + Build a single specified .bb file + """ + bfile = params[0] + task = params[1] + + command.cooker.buildFile(bfile, task) + buildFile.needcache = False + + def buildTargets(self, command, params): + """ + Build a set of targets + """ + pkgs_to_build = params[0] + task = params[1] + + command.cooker.buildTargets(pkgs_to_build, task) + buildTargets.needcache = True + + def generateDepTreeEvent(self, command, params): + """ + Generate an event containing the dependency information + """ + pkgs_to_build = params[0] + task = params[1] + + command.cooker.generateDepTreeEvent(pkgs_to_build, task) + command.finishAsyncCommand() + generateDepTreeEvent.needcache = True + + def generateDotGraph(self, command, params): + """ + Dump dependency information to disk as .dot files + """ + pkgs_to_build = params[0] + task = params[1] + + command.cooker.generateDotGraphFiles(pkgs_to_build, task) + command.finishAsyncCommand() + generateDotGraph.needcache = True + + def generateTargetsTree(self, command, params): + """ + Generate a tree of buildable targets. + If klass is provided ensure all recipes that inherit the class are + included in the package list. + If pkg_list provided use that list (plus any extras brought in by + klass) rather than generating a tree for all packages. + """ + klass = params[0] + pkg_list = params[1] + + command.cooker.generateTargetsTree(klass, pkg_list) + command.finishAsyncCommand() + generateTargetsTree.needcache = True + + def findCoreBaseFiles(self, command, params): + """ + Find certain files in COREBASE directory. i.e. Layers + """ + subdir = params[0] + filename = params[1] + + command.cooker.findCoreBaseFiles(subdir, filename) + command.finishAsyncCommand() + findCoreBaseFiles.needcache = False + + def findConfigFiles(self, command, params): + """ + Find config files which provide appropriate values + for the passed configuration variable. i.e. 
MACHINE + """ + varname = params[0] + + command.cooker.findConfigFiles(varname) + command.finishAsyncCommand() + findConfigFiles.needcache = False + + def findFilesMatchingInDir(self, command, params): + """ + Find implementation files matching the specified pattern + in the requested subdirectory of a BBPATH + """ + pattern = params[0] + directory = params[1] + + command.cooker.findFilesMatchingInDir(pattern, directory) + command.finishAsyncCommand() + findFilesMatchingInDir.needcache = False + + def findConfigFilePath(self, command, params): + """ + Find the path of the requested configuration file + """ + configfile = params[0] + + command.cooker.findConfigFilePath(configfile) + command.finishAsyncCommand() + findConfigFilePath.needcache = False + + def showVersions(self, command, params): + """ + Show the currently selected versions + """ + command.cooker.showVersions() + command.finishAsyncCommand() + showVersions.needcache = True + + def showEnvironmentTarget(self, command, params): + """ + Print the environment of a target recipe + (needs the cache to work out which recipe to use) + """ + pkg = params[0] + + command.cooker.showEnvironment(None, pkg) + command.finishAsyncCommand() + showEnvironmentTarget.needcache = True + + def showEnvironment(self, command, params): + """ + Print the standard environment + or if specified the environment for a specified recipe + """ + bfile = params[0] + + command.cooker.showEnvironment(bfile) + command.finishAsyncCommand() + showEnvironment.needcache = False + + def parseFiles(self, command, params): + """ + Parse the .bb files + """ + command.cooker.updateCache() + command.finishAsyncCommand() + parseFiles.needcache = True + + def compareRevisions(self, command, params): + """ + Parse the .bb files + """ + if bb.fetch.fetcher_compare_revisions(command.cooker.data): + command.finishAsyncCommand(code=1) + else: + command.finishAsyncCommand() + compareRevisions.needcache = True + + def triggerEvent(self, command, params): + """ + Trigger a certain event + """ + event = params[0] + bb.event.fire(eval(event), command.cooker.data) + command.currentAsyncCommand = None + triggerEvent.needcache = False + + def resetCooker(self, command, params): + """ + Reset the cooker to its initial state, thus forcing a reparse for + any async command that has the needcache property set to True + """ + command.cooker.reset() + command.finishAsyncCommand() + resetCooker.needcache = False + diff --git a/bitbake/lib/bb/compat.py b/bitbake/lib/bb/compat.py new file mode 100644 index 0000000..de1923d --- /dev/null +++ b/bitbake/lib/bb/compat.py @@ -0,0 +1,6 @@ +"""Code pulled from future python versions, here for compatibility""" + +from collections import MutableMapping, KeysView, ValuesView, ItemsView, OrderedDict +from functools import total_ordering + + diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py new file mode 100644 index 0000000..879d2ba --- /dev/null +++ b/bitbake/lib/bb/cooker.py @@ -0,0 +1,2025 @@ +#!/usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# Copyright (C) 2003, 2004 Chris Larson +# Copyright (C) 2003, 2004 Phil Blundell +# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer +# Copyright (C) 2005 Holger Hans Peter Freyther +# Copyright (C) 2005 ROAD GmbH +# Copyright (C) 2006 - 2007 Richard Purdie +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software 
Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +from __future__ import print_function +import sys, os, glob, os.path, re, time +import atexit +import itertools +import logging +import multiprocessing +import sre_constants +import threading +from cStringIO import StringIO +from contextlib import closing +from functools import wraps +from collections import defaultdict +import bb, bb.exceptions, bb.command +from bb import utils, data, parse, event, cache, providers, taskdata, runqueue +import Queue +import signal +import prserv.serv +import pyinotify + +logger = logging.getLogger("BitBake") +collectlog = logging.getLogger("BitBake.Collection") +buildlog = logging.getLogger("BitBake.Build") +parselog = logging.getLogger("BitBake.Parsing") +providerlog = logging.getLogger("BitBake.Provider") + +class NoSpecificMatch(bb.BBHandledException): + """ + Exception raised when no or multiple file matches are found + """ + +class NothingToBuild(Exception): + """ + Exception raised when there is nothing to build + """ + +class CollectionError(bb.BBHandledException): + """ + Exception raised when layer configuration is incorrect + """ + +class state: + initial, parsing, running, shutdown, forceshutdown, stopped, error = range(7) + + +class SkippedPackage: + def __init__(self, info = None, reason = None): + self.pn = None + self.skipreason = None + self.provides = None + self.rprovides = None + + if info: + self.pn = info.pn + self.skipreason = info.skipreason + self.provides = info.provides + self.rprovides = info.rprovides + elif reason: + self.skipreason = reason + + +class CookerFeatures(object): + _feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(4) + + def __init__(self): + self._features=set() + + def setFeature(self, f): + # validate we got a request for a feature we support + if f not in CookerFeatures._feature_list: + return + self._features.add(f) + + def __contains__(self, f): + return f in self._features + + def __iter__(self): + return self._features.__iter__() + + def next(self): + return self._features.next() + + +#============================================================================# +# BBCooker +#============================================================================# +class BBCooker: + """ + Manages one bitbake build run + """ + + def __init__(self, configuration, featureSet = []): + self.recipecache = None + self.skiplist = {} + self.featureset = CookerFeatures() + for f in featureSet: + self.featureset.setFeature(f) + + self.configuration = configuration + + self.configwatcher = pyinotify.WatchManager() + self.configwatcher.bbseen = [] + self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications) + self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \ + pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \ + pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO + self.watcher = pyinotify.WatchManager() + self.watcher.bbseen = [] + self.notifier = pyinotify.Notifier(self.watcher, self.notifications) + + + 
self.initConfigurationData() + + self.inotify_modified_files = [] + + def _process_inotify_updates(server, notifier_list, abort): + for n in notifier_list: + if n.check_events(timeout=0): + # read notified events and enqeue them + n.read_events() + n.process_events() + return 1.0 + + self.configuration.server_register_idlecallback(_process_inotify_updates, [self.confignotifier, self.notifier]) + + self.baseconfig_valid = True + self.parsecache_valid = False + + # Take a lock so only one copy of bitbake can run against a given build + # directory at a time + lockfile = self.data.expand("${TOPDIR}/bitbake.lock") + self.lock = bb.utils.lockfile(lockfile, False, False) + if not self.lock: + bb.fatal("Only one copy of bitbake should be run against a build directory") + try: + self.lock.seek(0) + self.lock.truncate() + if len(configuration.interface) >= 2: + self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1])); + self.lock.flush() + except: + pass + + # TOSTOP must not be set or our children will hang when they output + fd = sys.stdout.fileno() + if os.isatty(fd): + import termios + tcattr = termios.tcgetattr(fd) + if tcattr[3] & termios.TOSTOP: + buildlog.info("The terminal had the TOSTOP bit set, clearing...") + tcattr[3] = tcattr[3] & ~termios.TOSTOP + termios.tcsetattr(fd, termios.TCSANOW, tcattr) + + self.command = bb.command.Command(self) + self.state = state.initial + + self.parser = None + + signal.signal(signal.SIGTERM, self.sigterm_exception) + # Let SIGHUP exit as SIGTERM + signal.signal(signal.SIGHUP, self.sigterm_exception) + + def config_notifications(self, event): + if not event.path in self.inotify_modified_files: + self.inotify_modified_files.append(event.path) + self.baseconfig_valid = False + + def notifications(self, event): + if not event.path in self.inotify_modified_files: + self.inotify_modified_files.append(event.path) + self.parsecache_valid = False + + def add_filewatch(self, deps, watcher=None): + if not watcher: + watcher = self.watcher + for i in deps: + f = i[0] + if f in watcher.bbseen: + continue + watcher.bbseen.append(f) + while True: + # We try and add watches for files that don't exist but if they did, would influence + # the parser. The parent directory of these files may not exist, in which case we need + # to watch any parent that does exist for changes. 
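+                # For example, a watch requested for a conf file that does not
+                # exist yet ends up on its nearest existing parent directory,
+                # so the eventual creation of the file is still noticed.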
+ try: + watcher.add_watch(f, self.watchmask, quiet=False) + break + except pyinotify.WatchManagerError as e: + if 'ENOENT' in str(e): + f = os.path.dirname(f) + watcher.bbseen.append(f) + continue + raise + + def sigterm_exception(self, signum, stackframe): + if signum == signal.SIGTERM: + bb.warn("Cooker recieved SIGTERM, shutting down...") + elif signum == signal.SIGHUP: + bb.warn("Cooker recieved SIGHUP, shutting down...") + self.state = state.forceshutdown + + def setFeatures(self, features): + # we only accept a new feature set if we're in state initial, so we can reset without problems + if self.state != state.initial: + raise Exception("Illegal state for feature set change") + original_featureset = list(self.featureset) + for feature in features: + self.featureset.setFeature(feature) + bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset))) + if (original_featureset != list(self.featureset)): + self.reset() + + def initConfigurationData(self): + + self.state = state.initial + self.caches_array = [] + + if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset: + self.enableDataTracking() + + all_extra_cache_names = [] + # We hardcode all known cache types in a single place, here. + if CookerFeatures.HOB_EXTRA_CACHES in self.featureset: + all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo") + + caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names + + # At least CoreRecipeInfo will be loaded, so caches_array will never be empty! + # This is the entry point, no further check needed! + for var in caches_name_array: + try: + module_name, cache_name = var.split(':') + module = __import__(module_name, fromlist=(cache_name,)) + self.caches_array.append(getattr(module, cache_name)) + except ImportError as exc: + logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc)) + sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name) + + self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False) + self.databuilder.parseBaseConfiguration() + self.data = self.databuilder.data + self.data_hash = self.databuilder.data_hash + + + # we log all events to a file if so directed + if self.configuration.writeeventlog: + import json, pickle + DEFAULT_EVENTFILE = self.configuration.writeeventlog + class EventLogWriteHandler(): + + class EventWriter(): + def __init__(self, cooker): + self.file_inited = None + self.cooker = cooker + self.event_queue = [] + + def init_file(self): + try: + # delete the old log + os.remove(DEFAULT_EVENTFILE) + except: + pass + + # write current configuration data + with open(DEFAULT_EVENTFILE, "w") as f: + f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])})) + + def write_event(self, event): + with open(DEFAULT_EVENTFILE, "a") as f: + try: + f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) })) + except Exception as e: + import traceback + print(e, traceback.format_exc(e)) + + + def send(self, event): + event_class = event.__module__ + "." 
+ event.__class__.__name__ + + # init on bb.event.BuildStarted + if self.file_inited is None: + if event_class == "bb.event.BuildStarted": + self.init_file() + self.file_inited = True + + # write pending events + for e in self.event_queue: + self.write_event(e) + + # also write the current event + self.write_event(event) + + else: + # queue all events until the file is inited + self.event_queue.append(event) + + else: + # we have the file, just write the event + self.write_event(event) + + # set our handler's event processor + event = EventWriter(self) # self is the cooker here + + + # set up cooker features for this mock UI handler + + # we need to write the dependency tree in the log + self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE) + # register the log file writer as UI Handler + bb.event.register_UIHhandler(EventLogWriteHandler()) + + + # + # Special updated configuration we use for firing events + # + self.event_data = bb.data.createCopy(self.data) + bb.data.update_data(self.event_data) + bb.parse.init_parser(self.event_data) + + if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset: + self.disableDataTracking() + + def enableDataTracking(self): + self.configuration.tracking = True + if hasattr(self, "data"): + self.data.enableTracking() + + def disableDataTracking(self): + self.configuration.tracking = False + if hasattr(self, "data"): + self.data.disableTracking() + + def modifyConfigurationVar(self, var, val, default_file, op): + if op == "append": + self.appendConfigurationVar(var, val, default_file) + elif op == "set": + self.saveConfigurationVar(var, val, default_file, "=") + elif op == "earlyAssign": + self.saveConfigurationVar(var, val, default_file, "?=") + + + def appendConfigurationVar(self, var, val, default_file): + #add append var operation to the end of default_file + default_file = bb.cookerdata.findConfigFile(default_file, self.data) + + total = "#added by hob" + total += "\n%s += \"%s\"\n" % (var, val) + + with open(default_file, 'a') as f: + f.write(total) + + #add to history + loginfo = {"op":append, "file":default_file, "line":total.count("\n")} + self.data.appendVar(var, val, **loginfo) + + def saveConfigurationVar(self, var, val, default_file, op): + + replaced = False + #do not save if nothing changed + if str(val) == self.data.getVar(var): + return + + conf_files = self.data.varhistory.get_variable_files(var) + + #format the value when it is a list + if isinstance(val, list): + listval = "" + for value in val: + listval += "%s " % value + val = listval + + topdir = self.data.getVar("TOPDIR") + + #comment or replace operations made on var + for conf_file in conf_files: + if topdir in conf_file: + with open(conf_file, 'r') as f: + contents = f.readlines() + + lines = self.data.varhistory.get_variable_lines(var, conf_file) + for line in lines: + total = "" + i = 0 + for c in contents: + total += c + i = i + 1 + if i==int(line): + end_index = len(total) + index = total.rfind(var, 0, end_index) + + begin_line = total.count("\n",0,index) + end_line = int(line) + + #check if the variable was saved before in the same way + #if true it replace the place where the variable was declared + #else it comments it + if contents[begin_line-1]== "#added by hob\n": + contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val) + replaced = True + else: + for ii in range(begin_line, end_line): + contents[ii] = "#" + contents[ii] + + with open(conf_file, 'w') as f: + f.writelines(contents) + + if replaced == False: + #remove var from history + 
self.data.varhistory.del_var_history(var) + + #add var to the end of default_file + default_file = bb.cookerdata.findConfigFile(default_file, self.data) + + #add the variable on a single line, to be easy to replace the second time + total = "\n#added by hob" + total += "\n%s %s \"%s\"\n" % (var, op, val) + + with open(default_file, 'a') as f: + f.write(total) + + #add to history + loginfo = {"op":set, "file":default_file, "line":total.count("\n")} + self.data.setVar(var, val, **loginfo) + + def removeConfigurationVar(self, var): + conf_files = self.data.varhistory.get_variable_files(var) + topdir = self.data.getVar("TOPDIR") + + for conf_file in conf_files: + if topdir in conf_file: + with open(conf_file, 'r') as f: + contents = f.readlines() + + lines = self.data.varhistory.get_variable_lines(var, conf_file) + for line in lines: + total = "" + i = 0 + for c in contents: + total += c + i = i + 1 + if i==int(line): + end_index = len(total) + index = total.rfind(var, 0, end_index) + + begin_line = total.count("\n",0,index) + + #check if the variable was saved before in the same way + if contents[begin_line-1]== "#added by hob\n": + contents[begin_line-1] = contents[begin_line] = "\n" + else: + contents[begin_line] = "\n" + #remove var from history + self.data.varhistory.del_var_history(var, conf_file, line) + #remove variable + self.data.delVar(var) + + with open(conf_file, 'w') as f: + f.writelines(contents) + + def createConfigFile(self, name): + path = os.getcwd() + confpath = os.path.join(path, "conf", name) + open(confpath, 'w').close() + + def parseConfiguration(self): + # Set log file verbosity + verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", "0")) + if verboselogs: + bb.msg.loggerVerboseLogs = True + + # Change nice level if we're asked to + nice = self.data.getVar("BB_NICE_LEVEL", True) + if nice: + curnice = os.nice(0) + nice = int(nice) - curnice + buildlog.verbose("Renice to %s " % os.nice(nice)) + + if self.recipecache: + del self.recipecache + self.recipecache = bb.cache.CacheData(self.caches_array) + + self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) ) + + def updateConfigOpts(self,options): + for o in options: + setattr(self.configuration, o, options[o]) + + def runCommands(self, server, data, abort): + """ + Run any queued asynchronous command + This is done by the idle handler so it runs in true context rather than + tied to any UI. + """ + + return self.command.runAsyncCommand() + + def showVersions(self): + + pkg_pn = self.recipecache.pkg_pn + (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecache, pkg_pn) + + logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version") + logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================") + + for p in sorted(pkg_pn): + pref = preferred_versions[p] + latest = latest_versions[p] + + prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2] + lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2] + + if pref == latest: + prefstr = "" + + logger.plain("%-35s %25s %25s", p, lateststr, prefstr) + + def showEnvironment(self, buildfile = None, pkgs_to_build = []): + """ + Show the outer or per-recipe environment + """ + fn = None + envdata = None + + if buildfile: + # Parse the configuration here. 
We need to do it explicitly here since + # this showEnvironment() code path doesn't use the cache + self.parseConfiguration() + + fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile) + fn = self.matchFile(fn) + fn = bb.cache.Cache.realfn2virtual(fn, cls) + elif len(pkgs_to_build) == 1: + ignore = self.data.getVar("ASSUME_PROVIDED", True) or "" + if pkgs_to_build[0] in set(ignore.split()): + bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0]) + + taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort) + + targetid = taskdata.getbuild_id(pkgs_to_build[0]) + fnid = taskdata.build_targets[targetid][0] + fn = taskdata.fn_index[fnid] + else: + envdata = self.data + + if fn: + try: + envdata = bb.cache.Cache.loadDataFull(fn, self.collection.get_file_appends(fn), self.data) + except Exception as e: + parselog.exception("Unable to read %s", fn) + raise + + # Display history + with closing(StringIO()) as env: + self.data.inchistory.emit(env) + logger.plain(env.getvalue()) + + # emit variables and shell functions + data.update_data(envdata) + with closing(StringIO()) as env: + data.emit_env(env, envdata, True) + logger.plain(env.getvalue()) + + # emit the metadata which isnt valid shell + data.expandKeys(envdata) + for e in envdata.keys(): + if data.getVarFlag( e, 'python', envdata ): + logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1)) + + + def buildTaskData(self, pkgs_to_build, task, abort): + """ + Prepare a runqueue and taskdata object for iteration over pkgs_to_build + """ + bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data) + + # A task of None means use the default task + if task is None: + task = self.configuration.cmd + + fulltargetlist = self.checkPackages(pkgs_to_build) + + localdata = data.createCopy(self.data) + bb.data.update_data(localdata) + bb.data.expandKeys(localdata) + taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist) + + current = 0 + runlist = [] + for k in fulltargetlist: + ktask = task + if ":do_" in k: + k2 = k.split(":do_") + k = k2[0] + ktask = k2[1] + taskdata.add_provider(localdata, self.recipecache, k) + current += 1 + runlist.append([k, "do_%s" % ktask]) + bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data) + taskdata.add_unresolved(localdata, self.recipecache) + bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data) + return taskdata, runlist, fulltargetlist + + def prepareTreeData(self, pkgs_to_build, task): + """ + Prepare a runqueue and taskdata object for iteration over pkgs_to_build + """ + + # We set abort to False here to prevent unbuildable targets raising + # an exception when we're just generating data + taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False) + + return runlist, taskdata + + ######## WARNING : this function requires cache_extra to be enabled ######## + + def generateTaskDepTreeData(self, pkgs_to_build, task): + """ + Create a dependency graph of pkgs_to_build including reverse dependency + information. 
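+        The returned dictionary (built by buildDependTree() below) is keyed
+        by "depends", "tdepends", "pn", "rdepends-pn", "packages",
+        "rdepends-pkg", "rrecs-pkg" and "layer-priorities"; for example,
+        depgraph["tdepends"]["<pn>.do_compile"] lists the "<pn>.<taskname>"
+        entries that task depends on.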
+ """ + runlist, taskdata = self.prepareTreeData(pkgs_to_build, task) + rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist) + rq.rqdata.prepare() + return self.buildDependTree(rq, taskdata) + + + def buildDependTree(self, rq, taskdata): + seen_fnids = [] + depend_tree = {} + depend_tree["depends"] = {} + depend_tree["tdepends"] = {} + depend_tree["pn"] = {} + depend_tree["rdepends-pn"] = {} + depend_tree["packages"] = {} + depend_tree["rdepends-pkg"] = {} + depend_tree["rrecs-pkg"] = {} + depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities + + for task in xrange(len(rq.rqdata.runq_fnid)): + taskname = rq.rqdata.runq_task[task] + fnid = rq.rqdata.runq_fnid[task] + fn = taskdata.fn_index[fnid] + pn = self.recipecache.pkg_fn[fn] + version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn] + if pn not in depend_tree["pn"]: + depend_tree["pn"][pn] = {} + depend_tree["pn"][pn]["filename"] = fn + depend_tree["pn"][pn]["version"] = version + depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None) + + # if we have extra caches, list all attributes they bring in + extra_info = [] + for cache_class in self.caches_array: + if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'): + cachefields = getattr(cache_class, 'cachefields', []) + extra_info = extra_info + cachefields + + # for all attributes stored, add them to the dependency tree + for ei in extra_info: + depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn] + + + for dep in rq.rqdata.runq_depends[task]: + depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]] + deppn = self.recipecache.pkg_fn[depfn] + dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task]) + if not dotname in depend_tree["tdepends"]: + depend_tree["tdepends"][dotname] = [] + depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep])) + if fnid not in seen_fnids: + seen_fnids.append(fnid) + packages = [] + + depend_tree["depends"][pn] = [] + for dep in taskdata.depids[fnid]: + depend_tree["depends"][pn].append(taskdata.build_names_index[dep]) + + depend_tree["rdepends-pn"][pn] = [] + for rdep in taskdata.rdepids[fnid]: + depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep]) + + rdepends = self.recipecache.rundeps[fn] + for package in rdepends: + depend_tree["rdepends-pkg"][package] = [] + for rdepend in rdepends[package]: + depend_tree["rdepends-pkg"][package].append(rdepend) + packages.append(package) + + rrecs = self.recipecache.runrecs[fn] + for package in rrecs: + depend_tree["rrecs-pkg"][package] = [] + for rdepend in rrecs[package]: + depend_tree["rrecs-pkg"][package].append(rdepend) + if not package in packages: + packages.append(package) + + for package in packages: + if package not in depend_tree["packages"]: + depend_tree["packages"][package] = {} + depend_tree["packages"][package]["pn"] = pn + depend_tree["packages"][package]["filename"] = fn + depend_tree["packages"][package]["version"] = version + + return depend_tree + + ######## WARNING : this function requires cache_extra to be enabled ######## + def generatePkgDepTreeData(self, pkgs_to_build, task): + """ + Create a dependency tree of pkgs_to_build, returning the data. 
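+        Unlike generateTaskDepTreeData() this works from the taskdata alone
+        (no runqueue is prepared), so the result carries the recipe-level
+        "depends", "rdepends-pn", "rdepends-pkg", "rrecs-pkg" and "pn"
+        sections but no per-task "tdepends" or "packages" information.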
+ """ + _, taskdata = self.prepareTreeData(pkgs_to_build, task) + tasks_fnid = [] + if len(taskdata.tasks_name) != 0: + for task in xrange(len(taskdata.tasks_name)): + tasks_fnid.append(taskdata.tasks_fnid[task]) + + seen_fnids = [] + depend_tree = {} + depend_tree["depends"] = {} + depend_tree["pn"] = {} + depend_tree["rdepends-pn"] = {} + depend_tree["rdepends-pkg"] = {} + depend_tree["rrecs-pkg"] = {} + + # if we have extra caches, list all attributes they bring in + extra_info = [] + for cache_class in self.caches_array: + if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'): + cachefields = getattr(cache_class, 'cachefields', []) + extra_info = extra_info + cachefields + + for task in xrange(len(tasks_fnid)): + fnid = tasks_fnid[task] + fn = taskdata.fn_index[fnid] + pn = self.recipecache.pkg_fn[fn] + + if pn not in depend_tree["pn"]: + depend_tree["pn"][pn] = {} + depend_tree["pn"][pn]["filename"] = fn + version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn] + depend_tree["pn"][pn]["version"] = version + rdepends = self.recipecache.rundeps[fn] + rrecs = self.recipecache.runrecs[fn] + depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None) + + # for all extra attributes stored, add them to the dependency tree + for ei in extra_info: + depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn] + + if fnid not in seen_fnids: + seen_fnids.append(fnid) + + depend_tree["depends"][pn] = [] + for dep in taskdata.depids[fnid]: + item = taskdata.build_names_index[dep] + pn_provider = "" + targetid = taskdata.getbuild_id(item) + if targetid in taskdata.build_targets and taskdata.build_targets[targetid]: + id = taskdata.build_targets[targetid][0] + fn_provider = taskdata.fn_index[id] + pn_provider = self.recipecache.pkg_fn[fn_provider] + else: + pn_provider = item + depend_tree["depends"][pn].append(pn_provider) + + depend_tree["rdepends-pn"][pn] = [] + for rdep in taskdata.rdepids[fnid]: + item = taskdata.run_names_index[rdep] + pn_rprovider = "" + targetid = taskdata.getrun_id(item) + if targetid in taskdata.run_targets and taskdata.run_targets[targetid]: + id = taskdata.run_targets[targetid][0] + fn_rprovider = taskdata.fn_index[id] + pn_rprovider = self.recipecache.pkg_fn[fn_rprovider] + else: + pn_rprovider = item + depend_tree["rdepends-pn"][pn].append(pn_rprovider) + + depend_tree["rdepends-pkg"].update(rdepends) + depend_tree["rrecs-pkg"].update(rrecs) + + return depend_tree + + def generateDepTreeEvent(self, pkgs_to_build, task): + """ + Create a task dependency graph of pkgs_to_build. + Generate an event with the result + """ + depgraph = self.generateTaskDepTreeData(pkgs_to_build, task) + bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data) + + def generateDotGraphFiles(self, pkgs_to_build, task): + """ + Create a task dependency graph of pkgs_to_build. + Save the result to a set of .dot files. 
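+        The files written to the current directory are pn-buildlist,
+        pn-depends.dot, package-depends.dot and task-depends.dot; the .dot
+        files can be rendered with graphviz, for example:
+
+            dot -Tsvg task-depends.dot -o task-depends.svg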
+ """ + + depgraph = self.generateTaskDepTreeData(pkgs_to_build, task) + + # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn + depends_file = file('pn-depends.dot', 'w' ) + buildlist_file = file('pn-buildlist', 'w' ) + print("digraph depends {", file=depends_file) + for pn in depgraph["pn"]: + fn = depgraph["pn"][pn]["filename"] + version = depgraph["pn"][pn]["version"] + print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file) + print("%s" % pn, file=buildlist_file) + buildlist_file.close() + logger.info("PN build list saved to 'pn-buildlist'") + for pn in depgraph["depends"]: + for depend in depgraph["depends"][pn]: + print('"%s" -> "%s"' % (pn, depend), file=depends_file) + for pn in depgraph["rdepends-pn"]: + for rdepend in depgraph["rdepends-pn"][pn]: + print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file) + print("}", file=depends_file) + logger.info("PN dependencies saved to 'pn-depends.dot'") + + depends_file = file('package-depends.dot', 'w' ) + print("digraph depends {", file=depends_file) + for package in depgraph["packages"]: + pn = depgraph["packages"][package]["pn"] + fn = depgraph["packages"][package]["filename"] + version = depgraph["packages"][package]["version"] + if package == pn: + print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file) + else: + print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file) + for depend in depgraph["depends"][pn]: + print('"%s" -> "%s"' % (package, depend), file=depends_file) + for package in depgraph["rdepends-pkg"]: + for rdepend in depgraph["rdepends-pkg"][package]: + print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file) + for package in depgraph["rrecs-pkg"]: + for rdepend in depgraph["rrecs-pkg"][package]: + print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file) + print("}", file=depends_file) + logger.info("Package dependencies saved to 'package-depends.dot'") + + tdepends_file = file('task-depends.dot', 'w' ) + print("digraph depends {", file=tdepends_file) + for task in depgraph["tdepends"]: + (pn, taskname) = task.rsplit(".", 1) + fn = depgraph["pn"][pn]["filename"] + version = depgraph["pn"][pn]["version"] + print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file) + for dep in depgraph["tdepends"][task]: + print('"%s" -> "%s"' % (task, dep), file=tdepends_file) + print("}", file=tdepends_file) + logger.info("Task dependencies saved to 'task-depends.dot'") + + def show_appends_with_no_recipes( self ): + appends_without_recipes = [self.collection.appendlist[recipe] + for recipe in self.collection.appendlist + if recipe not in self.collection.appliedappendlist] + if appends_without_recipes: + appendlines = (' %s' % append + for appends in appends_without_recipes + for append in appends) + msg = 'No recipes available for:\n%s' % '\n'.join(appendlines) + warn_only = data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \ + self.data, False) or "no" + if warn_only.lower() in ("1", "yes", "true"): + bb.warn(msg) + else: + bb.fatal(msg) + + def handlePrefProviders(self): + + localdata = data.createCopy(self.data) + bb.data.update_data(localdata) + bb.data.expandKeys(localdata) + + # Handle PREFERRED_PROVIDERS + for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split(): + try: + (providee, provider) = p.split(':') + except: + providerlog.critical("Malformed option in PREFERRED_PROVIDERS 
variable: %s" % p) + continue + if providee in self.recipecache.preferred and self.recipecache.preferred[providee] != provider: + providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecache.preferred[providee]) + self.recipecache.preferred[providee] = provider + + def findCoreBaseFiles(self, subdir, configfile): + corebase = self.data.getVar('COREBASE', True) or "" + paths = [] + for root, dirs, files in os.walk(corebase + '/' + subdir): + for d in dirs: + configfilepath = os.path.join(root, d, configfile) + if os.path.exists(configfilepath): + paths.append(os.path.join(root, d)) + + if paths: + bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.data) + + def findConfigFilePath(self, configfile): + """ + Find the location on disk of configfile and if it exists and was parsed by BitBake + emit the ConfigFilePathFound event with the path to the file. + """ + path = bb.cookerdata.findConfigFile(configfile, self.data) + if not path: + return + + # Generate a list of parsed configuration files by searching the files + # listed in the __depends and __base_depends variables with a .conf suffix. + conffiles = [] + dep_files = self.data.getVar('__base_depends') or [] + dep_files = dep_files + (self.data.getVar('__depends') or []) + + for f in dep_files: + if f[0].endswith(".conf"): + conffiles.append(f[0]) + + _, conf, conffile = path.rpartition("conf/") + match = os.path.join(conf, conffile) + # Try and find matches for conf/conffilename.conf as we don't always + # have the full path to the file. + for cfg in conffiles: + if cfg.endswith(match): + bb.event.fire(bb.event.ConfigFilePathFound(path), + self.data) + break + + def findFilesMatchingInDir(self, filepattern, directory): + """ + Searches for files matching the regex 'pattern' which are children of + 'directory' in each BBPATH. i.e. to find all rootfs package classes available + to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes') + or to find all machine configuration files one could call: + findFilesMatchingInDir(self, 'conf/machines', 'conf') + """ + + matches = [] + p = re.compile(re.escape(filepattern)) + bbpaths = self.data.getVar('BBPATH', True).split(':') + for path in bbpaths: + dirpath = os.path.join(path, directory) + if os.path.exists(dirpath): + for root, dirs, files in os.walk(dirpath): + for f in files: + if p.search(f): + matches.append(f) + + if matches: + bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data) + + def findConfigFiles(self, varname): + """ + Find config files which are appropriate values for varname. + i.e. 
MACHINE, DISTRO + """ + possible = [] + var = varname.lower() + + data = self.data + # iterate configs + bbpaths = data.getVar('BBPATH', True).split(':') + for path in bbpaths: + confpath = os.path.join(path, "conf", var) + if os.path.exists(confpath): + for root, dirs, files in os.walk(confpath): + # get all child files, these are appropriate values + for f in files: + val, sep, end = f.rpartition('.') + if end == 'conf': + possible.append(val) + + if possible: + bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data) + + def findInheritsClass(self, klass): + """ + Find all recipes which inherit the specified class + """ + pkg_list = [] + + for pfn in self.recipecache.pkg_fn: + inherits = self.recipecache.inherits.get(pfn, None) + if inherits and inherits.count(klass) > 0: + pkg_list.append(self.recipecache.pkg_fn[pfn]) + + return pkg_list + + def generateTargetsTree(self, klass=None, pkgs=[]): + """ + Generate a dependency tree of buildable targets + Generate an event with the result + """ + # if the caller hasn't specified a pkgs list default to universe + if not len(pkgs): + pkgs = ['universe'] + # if inherited_class passed ensure all recipes which inherit the + # specified class are included in pkgs + if klass: + extra_pkgs = self.findInheritsClass(klass) + pkgs = pkgs + extra_pkgs + + # generate a dependency tree for all our packages + tree = self.generatePkgDepTreeData(pkgs, 'build') + bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data) + + def buildWorldTargetList(self): + """ + Build package list for "bitbake world" + """ + parselog.debug(1, "collating packages for \"world\"") + for f in self.recipecache.possible_world: + terminal = True + pn = self.recipecache.pkg_fn[f] + + for p in self.recipecache.pn_provides[pn]: + if p.startswith('virtual/'): + parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p) + terminal = False + break + for pf in self.recipecache.providers[p]: + if self.recipecache.pkg_fn[pf] != pn: + parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p) + terminal = False + break + if terminal: + self.recipecache.world_target.add(pn) + + def interactiveMode( self ): + """Drop off into a shell""" + try: + from bb import shell + except ImportError: + parselog.exception("Interactive mode not available") + sys.exit(1) + else: + shell.start( self ) + + + def handleCollections( self, collections ): + """Handle collections""" + errors = False + self.recipecache.bbfile_config_priorities = [] + if collections: + collection_priorities = {} + collection_depends = {} + collection_list = collections.split() + min_prio = 0 + for c in collection_list: + # Get collection priority if defined explicitly + priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True) + if priority: + try: + prio = int(priority) + except ValueError: + parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority) + errors = True + if min_prio == 0 or prio < min_prio: + min_prio = prio + collection_priorities[c] = prio + else: + collection_priorities[c] = None + + # Check dependencies and store information for priority calculation + deps = self.data.getVar("LAYERDEPENDS_%s" % c, True) + if deps: + depnamelist = [] + deplist = deps.split() + for dep in deplist: + depsplit = dep.split(':') + if len(depsplit) > 1: + try: + depver = int(depsplit[1]) + except ValueError: + parselog.error("invalid version value in LAYERDEPENDS_%s: \"%s\"", c, dep) + errors = True + continue + else: + depver = None + dep = 
depsplit[0] + depnamelist.append(dep) + + if dep in collection_list: + if depver: + layerver = self.data.getVar("LAYERVERSION_%s" % dep, True) + if layerver: + try: + lver = int(layerver) + except ValueError: + parselog.error("invalid value for LAYERVERSION_%s: \"%s\"", c, layerver) + errors = True + continue + if lver != depver: + parselog.error("Layer '%s' depends on version %d of layer '%s', but version %d is enabled in your configuration", c, depver, dep, lver) + errors = True + else: + parselog.error("Layer '%s' depends on version %d of layer '%s', which exists in your configuration but does not specify a version", c, depver, dep) + errors = True + else: + parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep) + errors = True + collection_depends[c] = depnamelist + else: + collection_depends[c] = [] + + # Recursively work out collection priorities based on dependencies + def calc_layer_priority(collection): + if not collection_priorities[collection]: + max_depprio = min_prio + for dep in collection_depends[collection]: + calc_layer_priority(dep) + depprio = collection_priorities[dep] + if depprio > max_depprio: + max_depprio = depprio + max_depprio += 1 + parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio) + collection_priorities[collection] = max_depprio + + # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities + for c in collection_list: + calc_layer_priority(c) + regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True) + if regex == None: + parselog.error("BBFILE_PATTERN_%s not defined" % c) + errors = True + continue + try: + cre = re.compile(regex) + except re.error: + parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex) + errors = True + continue + self.recipecache.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c])) + if errors: + # We've already printed the actual error(s) + raise CollectionError("Errors during parsing layer configuration") + + def buildSetVars(self): + """ + Setup any variables needed before starting a build + """ + if not self.data.getVar("BUILDNAME"): + self.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M')) + self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime())) + + def matchFiles(self, bf): + """ + Find the .bb files which match the expression in 'buildfile'. + """ + if bf.startswith("/") or bf.startswith("../"): + bf = os.path.abspath(bf) + + self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities) + filelist, masked = self.collection.collect_bbfiles(self.data, self.event_data) + try: + os.stat(bf) + bf = os.path.abspath(bf) + return [bf] + except OSError: + regexp = re.compile(bf) + matches = [] + for f in filelist: + if regexp.search(f) and os.path.isfile(f): + matches.append(f) + return matches + + def matchFile(self, buildfile): + """ + Find the .bb file which matches the expression in 'buildfile'. 
+ Raise an error if multiple files + """ + matches = self.matchFiles(buildfile) + if len(matches) != 1: + if matches: + msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches)) + if matches: + for f in matches: + msg += "\n %s" % f + parselog.error(msg) + else: + parselog.error("Unable to find any recipe file matching '%s'" % buildfile) + raise NoSpecificMatch + return matches[0] + + def buildFile(self, buildfile, task): + """ + Build the file matching regexp buildfile + """ + + # Too many people use -b because they think it's how you normally + # specify a target to be built, so show a warning + bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.") + + # Parse the configuration here. We need to do it explicitly here since + # buildFile() doesn't use the cache + self.parseConfiguration() + + # If we are told to do the None task then query the default task + if (task == None): + task = self.configuration.cmd + + fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile) + fn = self.matchFile(fn) + + self.buildSetVars() + + infos = bb.cache.Cache.parse(fn, self.collection.get_file_appends(fn), \ + self.data, + self.caches_array) + infos = dict(infos) + + fn = bb.cache.Cache.realfn2virtual(fn, cls) + try: + info_array = infos[fn] + except KeyError: + bb.fatal("%s does not exist" % fn) + + if info_array[0].skipped: + bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason)) + + self.recipecache.add_from_recipeinfo(fn, info_array) + + # Tweak some variables + item = info_array[0].pn + self.recipecache.ignored_dependencies = set() + self.recipecache.bbfile_priority[fn] = 1 + + # Remove external dependencies + self.recipecache.task_deps[fn]['depends'] = {} + self.recipecache.deps[fn] = [] + self.recipecache.rundeps[fn] = [] + self.recipecache.runrecs[fn] = [] + + # Invalidate task for target if force mode active + if self.configuration.force: + logger.verbose("Invalidate task %s, %s", task, fn) + bb.parse.siggen.invalidate_task('do_%s' % task, self.recipecache, fn) + + # Setup taskdata structure + taskdata = bb.taskdata.TaskData(self.configuration.abort) + taskdata.add_provider(self.data, self.recipecache, item) + + buildname = self.data.getVar("BUILDNAME") + bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.event_data) + + # Execute the runqueue + runlist = [[item, "do_%s" % task]] + + rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist) + + def buildFileIdle(server, rq, abort): + + msg = None + if abort or self.state == state.forceshutdown: + rq.finish_runqueue(True) + msg = "Forced shutdown" + elif self.state == state.shutdown: + rq.finish_runqueue(False) + msg = "Stopped build" + failures = 0 + try: + retval = rq.execute_runqueue() + except runqueue.TaskFailure as exc: + failures += len(exc.args) + retval = False + except SystemExit as exc: + self.command.finishAsyncCommand() + return False + + if not retval: + bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.event_data) + self.command.finishAsyncCommand(msg) + return False + if retval is True: + return True + return retval + + self.configuration.server_register_idlecallback(buildFileIdle, rq) + + def buildTargets(self, targets, task): + """ + Attempt to build the targets specified + """ + + def buildTargetsIdle(server, rq, abort): + msg = None + if abort or self.state == state.forceshutdown: + rq.finish_runqueue(True) + msg = "Forced 
shutdown" + elif self.state == state.shutdown: + rq.finish_runqueue(False) + msg = "Stopped build" + failures = 0 + try: + retval = rq.execute_runqueue() + except runqueue.TaskFailure as exc: + failures += len(exc.args) + retval = False + except SystemExit as exc: + self.command.finishAsyncCommand() + return False + + if not retval: + bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures), self.data) + self.command.finishAsyncCommand(msg) + return False + if retval is True: + return True + return retval + + self.buildSetVars() + + taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort) + + buildname = self.data.getVar("BUILDNAME") + bb.event.fire(bb.event.BuildStarted(buildname, fulltargetlist), self.data) + + rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist) + if 'universe' in targets: + rq.rqdata.warn_multi_bb = True + + self.configuration.server_register_idlecallback(buildTargetsIdle, rq) + + + def getAllKeysWithFlags(self, flaglist): + dump = {} + for k in self.data.keys(): + try: + v = self.data.getVar(k, True) + if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart): + dump[k] = { + 'v' : v , + 'history' : self.data.varhistory.variable(k), + } + for d in flaglist: + dump[k][d] = self.data.getVarFlag(k, d) + except Exception as e: + print(e) + return dump + + + def generateNewImage(self, image, base_image, package_queue, timestamp, description): + ''' + Create a new image with a "require"/"inherit" base_image statement + ''' + if timestamp: + image_name = os.path.splitext(image)[0] + timestr = time.strftime("-%Y%m%d-%H%M%S") + dest = image_name + str(timestr) + ".bb" + else: + if not image.endswith(".bb"): + dest = image + ".bb" + else: + dest = image + + basename = False + if base_image: + with open(base_image, 'r') as f: + require_line = f.readline() + p = re.compile("IMAGE_BASENAME *=") + for line in f: + if p.search(line): + basename = True + + with open(dest, "w") as imagefile: + if base_image is None: + imagefile.write("inherit core-image\n") + else: + topdir = self.data.getVar("TOPDIR") + if topdir in base_image: + base_image = require_line.split()[1] + imagefile.write("require " + base_image + "\n") + image_install = "IMAGE_INSTALL = \"" + for package in package_queue: + image_install += str(package) + " " + image_install += "\"\n" + imagefile.write(image_install) + + description_var = "DESCRIPTION = \"" + description + "\"\n" + imagefile.write(description_var) + + if basename: + # If this is overwritten in a inherited image, reset it to default + image_basename = "IMAGE_BASENAME = \"${PN}\"\n" + imagefile.write(image_basename) + + self.state = state.initial + if timestamp: + return timestr + + # This is called for all async commands when self.state != running + def updateCache(self): + if self.state == state.running: + return + + if self.state in (state.shutdown, state.forceshutdown, state.error): + if hasattr(self.parser, 'shutdown'): + self.parser.shutdown(clean=False, force = True) + raise bb.BBHandledException() + + if self.state != state.parsing: + + # reload files for which we got notifications + for p in self.inotify_modified_files: + bb.parse.update_cache(p) + self.inotify_modified_files = [] + + if not self.baseconfig_valid: + logger.debug(1, "Reloading base configuration data") + self.initConfigurationData() + self.baseconfig_valid = True + self.parsecache_valid = False + + if self.state != state.parsing and not self.parsecache_valid: + 
self.parseConfiguration () + if CookerFeatures.SEND_SANITYEVENTS in self.featureset: + bb.event.fire(bb.event.SanityCheck(False), self.data) + + ignore = self.data.getVar("ASSUME_PROVIDED", True) or "" + self.recipecache.ignored_dependencies = set(ignore.split()) + + for dep in self.configuration.extra_assume_provided: + self.recipecache.ignored_dependencies.add(dep) + + self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities) + (filelist, masked) = self.collection.collect_bbfiles(self.data, self.event_data) + + self.data.renameVar("__depends", "__base_depends") + self.add_filewatch(self.data.getVar("__base_depends"), self.configwatcher) + + self.parser = CookerParser(self, filelist, masked) + self.parsecache_valid = True + + self.state = state.parsing + + if not self.parser.parse_next(): + collectlog.debug(1, "parsing complete") + if self.parser.error: + raise bb.BBHandledException() + self.show_appends_with_no_recipes() + self.handlePrefProviders() + self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn) + self.state = state.running + return None + + return True + + def checkPackages(self, pkgs_to_build): + + # Return a copy, don't modify the original + pkgs_to_build = pkgs_to_build[:] + + if len(pkgs_to_build) == 0: + raise NothingToBuild + + ignore = (self.data.getVar("ASSUME_PROVIDED", True) or "").split() + for pkg in pkgs_to_build: + if pkg in ignore: + parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg) + + if 'world' in pkgs_to_build: + self.buildWorldTargetList() + pkgs_to_build.remove('world') + for t in self.recipecache.world_target: + pkgs_to_build.append(t) + + if 'universe' in pkgs_to_build: + parselog.warn("The \"universe\" target is only intended for testing and may produce errors.") + parselog.debug(1, "collating packages for \"universe\"") + pkgs_to_build.remove('universe') + for t in self.recipecache.universe_target: + pkgs_to_build.append(t) + + return pkgs_to_build + + + + + def pre_serve(self): + # Empty the environment. The environment will be populated as + # necessary from the data store. 
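The PR (package revision) service that pre_serve() starts below is purely configuration driven; a minimal sketch of how it is usually enabled, assuming the conventional PRSERV_HOST variable in a local configuration file:

    # conf/local.conf (illustrative)
    PRSERV_HOST = "localhost:0"

With that set, prserv.serv.auto_start() is expected to bring up a local PR server on a free port and record its address in self.prhost; without such configuration the call should return without starting a server.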
+ #bb.utils.empty_environment() + try: + self.prhost = prserv.serv.auto_start(self.data) + except prserv.serv.PRServiceConfigError: + bb.event.fire(CookerExit(), self.event_data) + self.state = state.error + return + + def post_serve(self): + prserv.serv.auto_shutdown(self.data) + bb.event.fire(CookerExit(), self.event_data) + + def shutdown(self, force = False): + if force: + self.state = state.forceshutdown + else: + self.state = state.shutdown + + def finishcommand(self): + self.state = state.initial + + def reset(self): + self.initConfigurationData() + +def server_main(cooker, func, *args): + cooker.pre_serve() + + if cooker.configuration.profile: + try: + import cProfile as profile + except: + import profile + prof = profile.Profile() + + ret = profile.Profile.runcall(prof, func, *args) + + prof.dump_stats("profile.log") + bb.utils.process_profilelog("profile.log") + print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed") + + else: + ret = func(*args) + + cooker.post_serve() + + return ret + +class CookerExit(bb.event.Event): + """ + Notify clients of the Cooker shutdown + """ + + def __init__(self): + bb.event.Event.__init__(self) + + +class CookerCollectFiles(object): + def __init__(self, priorities): + self.appendlist = {} + self.appliedappendlist = [] + self.bbfile_config_priorities = priorities + + def calc_bbfile_priority( self, filename, matched = None ): + for _, _, regex, pri in self.bbfile_config_priorities: + if regex.match(filename): + if matched != None: + if not regex in matched: + matched.add(regex) + return pri + return 0 + + def get_bbfiles(self): + """Get list of default .bb files by reading out the current directory""" + path = os.getcwd() + contents = os.listdir(path) + bbfiles = [] + for f in contents: + if f.endswith(".bb"): + bbfiles.append(os.path.abspath(os.path.join(path, f))) + return bbfiles + + def find_bbfiles(self, path): + """Find all the .bb and .bbappend files in a directory""" + found = [] + for dir, dirs, files in os.walk(path): + for ignored in ('SCCS', 'CVS', '.svn'): + if ignored in dirs: + dirs.remove(ignored) + found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))] + + return found + + def collect_bbfiles(self, config, eventdata): + """Collect all available .bb build files""" + masked = 0 + + collectlog.debug(1, "collecting .bb files") + + files = (config.getVar( "BBFILES", True) or "").split() + config.setVar("BBFILES", " ".join(files)) + + # Sort files by priority + files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) ) + + if not len(files): + files = self.get_bbfiles() + + if not len(files): + collectlog.error("no recipe files to build, check your BBPATH and BBFILES?") + bb.event.fire(CookerExit(), eventdata) + + # Can't use set here as order is important + newfiles = [] + for f in files: + if os.path.isdir(f): + dirfiles = self.find_bbfiles(f) + for g in dirfiles: + if g not in newfiles: + newfiles.append(g) + else: + globbed = glob.glob(f) + if not globbed and os.path.exists(f): + globbed = [f] + for g in globbed: + if g not in newfiles: + newfiles.append(g) + + bbmask = config.getVar('BBMASK', True) + + if bbmask: + try: + bbmask_compiled = re.compile(bbmask) + except sre_constants.error: + collectlog.critical("BBMASK is not a valid regular expression, ignoring.") + return list(newfiles), 0 + + bbfiles = [] + bbappend = [] + for f in newfiles: + if bbmask and bbmask_compiled.search(f): + collectlog.debug(1, "skipping masked file %s", f) + 
masked += 1 + continue + if f.endswith('.bb'): + bbfiles.append(f) + elif f.endswith('.bbappend'): + bbappend.append(f) + else: + collectlog.debug(1, "skipping %s: unknown file extension", f) + + # Build a list of .bbappend files for each .bb file + for f in bbappend: + base = os.path.basename(f).replace('.bbappend', '.bb') + if not base in self.appendlist: + self.appendlist[base] = [] + if f not in self.appendlist[base]: + self.appendlist[base].append(f) + + # Find overlayed recipes + # bbfiles will be in priority order which makes this easy + bbfile_seen = dict() + self.overlayed = defaultdict(list) + for f in reversed(bbfiles): + base = os.path.basename(f) + if base not in bbfile_seen: + bbfile_seen[base] = f + else: + topfile = bbfile_seen[base] + self.overlayed[topfile].append(f) + + return (bbfiles, masked) + + def get_file_appends(self, fn): + """ + Returns a list of .bbappend files to apply to fn + """ + filelist = [] + f = os.path.basename(fn) + for bbappend in self.appendlist: + if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])): + self.appliedappendlist.append(bbappend) + for filename in self.appendlist[bbappend]: + filelist.append(filename) + return filelist + + def collection_priorities(self, pkgfns): + + priorities = {} + + # Calculate priorities for each file + matched = set() + for p in pkgfns: + realfn, cls = bb.cache.Cache.virtualfn2realfn(p) + priorities[p] = self.calc_bbfile_priority(realfn, matched) + + # Don't show the warning if the BBFILE_PATTERN did match .bbappend files + unmatched = set() + for _, _, regex, pri in self.bbfile_config_priorities: + if not regex in matched: + unmatched.add(regex) + + def findmatch(regex): + for bbfile in self.appendlist: + for append in self.appendlist[bbfile]: + if regex.match(append): + return True + return False + + for unmatch in unmatched.copy(): + if findmatch(unmatch): + unmatched.remove(unmatch) + + for collection, pattern, regex, _ in self.bbfile_config_priorities: + if regex in unmatched: + collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern)) + + return priorities + +class ParsingFailure(Exception): + def __init__(self, realexception, recipe): + self.realexception = realexception + self.recipe = recipe + Exception.__init__(self, realexception, recipe) + +class Feeder(multiprocessing.Process): + def __init__(self, jobs, to_parsers, quit): + self.quit = quit + self.jobs = jobs + self.to_parsers = to_parsers + multiprocessing.Process.__init__(self) + + def run(self): + while True: + try: + quit = self.quit.get_nowait() + except Queue.Empty: + pass + else: + if quit == 'cancel': + self.to_parsers.cancel_join_thread() + break + + try: + job = self.jobs.pop() + except IndexError: + break + + try: + self.to_parsers.put(job, timeout=0.5) + except Queue.Full: + self.jobs.insert(0, job) + continue + +class Parser(multiprocessing.Process): + def __init__(self, jobs, results, quit, init, profile): + self.jobs = jobs + self.results = results + self.quit = quit + self.init = init + multiprocessing.Process.__init__(self) + self.context = bb.utils.get_context().copy() + self.handlers = bb.event.get_class_handlers().copy() + self.profile = profile + + def run(self): + + if not self.profile: + self.realrun() + return + + try: + import cProfile as profile + except: + import profile + prof = profile.Profile() + try: + profile.Profile.runcall(prof, self.realrun) + finally: + logfile = "profile-parse-%s.log" % multiprocessing.current_process().name + prof.dump_stats(logfile) 
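Each Parser worker dumps its raw cProfile data to its own file; a minimal sketch of inspecting one of those dumps by hand with the standard pstats module (the exact file name depends on the worker process name and is illustrative here):

    import pstats

    stats = pstats.Stats("profile-parse-Parser-1.log")
    stats.sort_stats("cumulative").print_stats(20)   # show the 20 most expensive calls

The bb.utils.process_profilelog() call below performs a similar post-processing step automatically, writing the readable statistics next to the raw log as "<logfile>.processed".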
+ bb.utils.process_profilelog(logfile) + print("Raw profiling information saved to %s and processed statistics to %s.processed" % (logfile, logfile)) + + def realrun(self): + if self.init: + self.init() + + pending = [] + while True: + try: + self.quit.get_nowait() + except Queue.Empty: + pass + else: + self.results.cancel_join_thread() + break + + if pending: + result = pending.pop() + else: + try: + job = self.jobs.get(timeout=0.25) + except Queue.Empty: + continue + + if job is None: + break + result = self.parse(*job) + + try: + self.results.put(result, timeout=0.25) + except Queue.Full: + pending.append(result) + + def parse(self, filename, appends, caches_array): + try: + # Reset our environment and handlers to the original settings + bb.utils.set_context(self.context.copy()) + bb.event.set_class_handlers(self.handlers.copy()) + return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array) + except Exception as exc: + tb = sys.exc_info()[2] + exc.recipe = filename + exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3)) + return True, exc + # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown + # and for example a worker thread doesn't just exit on its own in response to + # a SystemExit event for example. + except BaseException as exc: + return True, ParsingFailure(exc, filename) + +class CookerParser(object): + def __init__(self, cooker, filelist, masked): + self.filelist = filelist + self.cooker = cooker + self.cfgdata = cooker.data + self.cfghash = cooker.data_hash + + # Accounting statistics + self.parsed = 0 + self.cached = 0 + self.error = 0 + self.masked = masked + + self.skipped = 0 + self.virtuals = 0 + self.total = len(filelist) + + self.current = 0 + self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or + multiprocessing.cpu_count()) + + self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array) + self.fromcache = [] + self.willparse = [] + for filename in self.filelist: + appends = self.cooker.collection.get_file_appends(filename) + if not self.bb_cache.cacheValid(filename, appends): + self.willparse.append((filename, appends, cooker.caches_array)) + else: + self.fromcache.append((filename, appends)) + self.toparse = self.total - len(self.fromcache) + self.progress_chunk = max(self.toparse / 100, 1) + + self.start() + self.haveshutdown = False + + def start(self): + self.results = self.load_cached() + self.processes = [] + if self.toparse: + bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata) + def init(): + Parser.cfg = self.cfgdata + multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cfgdata,), exitpriority=1) + multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, args=(self.cfgdata,), exitpriority=1) + + self.feeder_quit = multiprocessing.Queue(maxsize=1) + self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes) + self.jobs = multiprocessing.Queue(maxsize=self.num_processes) + self.result_queue = multiprocessing.Queue() + self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit) + self.feeder.start() + for i in range(0, self.num_processes): + parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile) + parser.start() + self.processes.append(parser) + + self.results = itertools.chain(self.results, self.parse_generator()) + + def shutdown(self, clean=True, force=False): + if not self.toparse: + return + if self.haveshutdown: + return + 
self.haveshutdown = True + + if clean: + event = bb.event.ParseCompleted(self.cached, self.parsed, + self.skipped, self.masked, + self.virtuals, self.error, + self.total) + + bb.event.fire(event, self.cfgdata) + self.feeder_quit.put(None) + for process in self.processes: + self.jobs.put(None) + else: + self.feeder_quit.put('cancel') + + self.parser_quit.cancel_join_thread() + for process in self.processes: + self.parser_quit.put(None) + + self.jobs.cancel_join_thread() + + for process in self.processes: + if force: + process.join(.1) + process.terminate() + else: + process.join() + self.feeder.join() + + sync = threading.Thread(target=self.bb_cache.sync) + sync.start() + multiprocessing.util.Finalize(None, sync.join, exitpriority=-100) + bb.codeparser.parser_cache_savemerge(self.cooker.data) + bb.fetch.fetcher_parse_done(self.cooker.data) + + def load_cached(self): + for filename, appends in self.fromcache: + cached, infos = self.bb_cache.load(filename, appends, self.cfgdata) + yield not cached, infos + + def parse_generator(self): + while True: + if self.parsed >= self.toparse: + break + + try: + result = self.result_queue.get(timeout=0.25) + except Queue.Empty: + pass + else: + value = result[1] + if isinstance(value, BaseException): + raise value + else: + yield result + + def parse_next(self): + result = [] + parsed = None + try: + parsed, result = self.results.next() + except StopIteration: + self.shutdown() + return False + except bb.BBHandledException as exc: + self.error += 1 + logger.error('Failed to parse recipe: %s' % exc.recipe) + self.shutdown(clean=False) + return False + except ParsingFailure as exc: + self.error += 1 + logger.error('Unable to parse %s: %s' % + (exc.recipe, bb.exceptions.to_string(exc.realexception))) + self.shutdown(clean=False) + return False + except bb.parse.ParseError as exc: + self.error += 1 + logger.error(str(exc)) + self.shutdown(clean=False) + return False + except bb.data_smart.ExpansionError as exc: + self.error += 1 + _, value, _ = sys.exc_info() + logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc)) + self.shutdown(clean=False) + return False + except SyntaxError as exc: + self.error += 1 + logger.error('Unable to parse %s', exc.recipe) + self.shutdown(clean=False) + return False + except Exception as exc: + self.error += 1 + etype, value, tb = sys.exc_info() + if hasattr(value, "recipe"): + logger.error('Unable to parse %s', value.recipe, + exc_info=(etype, value, exc.traceback)) + else: + # Most likely, an exception occurred during raising an exception + import traceback + logger.error('Exception during parse: %s' % traceback.format_exc()) + self.shutdown(clean=False) + return False + + self.current += 1 + self.virtuals += len(result) + if parsed: + self.parsed += 1 + if self.parsed % self.progress_chunk == 0: + bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse), + self.cfgdata) + else: + self.cached += 1 + + for virtualfn, info_array in result: + if info_array[0].skipped: + self.skipped += 1 + self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0]) + self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache, + parsed=parsed, watcher = self.cooker.add_filewatch) + return True + + def reparse(self, filename): + infos = self.bb_cache.parse(filename, + self.cooker.collection.get_file_appends(filename), + self.cfgdata, self.cooker.caches_array) + for vfn, info_array in infos: + self.cooker.recipecache.add_from_recipeinfo(vfn, info_array) diff --git a/bitbake/lib/bb/cookerdata.py 
b/bitbake/lib/bb/cookerdata.py new file mode 100644 index 0000000..2ceed2d --- /dev/null +++ b/bitbake/lib/bb/cookerdata.py @@ -0,0 +1,320 @@ +#!/usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# Copyright (C) 2003, 2004 Chris Larson +# Copyright (C) 2003, 2004 Phil Blundell +# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer +# Copyright (C) 2005 Holger Hans Peter Freyther +# Copyright (C) 2005 ROAD GmbH +# Copyright (C) 2006 Richard Purdie +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import os, sys +from functools import wraps +import logging +import bb +from bb import data +import bb.parse + +logger = logging.getLogger("BitBake") +parselog = logging.getLogger("BitBake.Parsing") + +class ConfigParameters(object): + def __init__(self): + self.options, targets = self.parseCommandLine() + self.environment = self.parseEnvironment() + + self.options.pkgs_to_build = targets or [] + + self.options.tracking = False + if hasattr(self.options, "show_environment") and self.options.show_environment: + self.options.tracking = True + + for key, val in self.options.__dict__.items(): + setattr(self, key, val) + + def parseCommandLine(self): + raise Exception("Caller must implement commandline option parsing") + + def parseEnvironment(self): + return os.environ.copy() + + def updateFromServer(self, server): + if not self.options.cmd: + defaulttask, error = server.runCommand(["getVariable", "BB_DEFAULT_TASK"]) + if error: + raise Exception("Unable to get the value of BB_DEFAULT_TASK from the server: %s" % error) + self.options.cmd = defaulttask or "build" + _, error = server.runCommand(["setConfig", "cmd", self.options.cmd]) + if error: + raise Exception("Unable to set configuration option 'cmd' on the server: %s" % error) + + if not self.options.pkgs_to_build: + bbpkgs, error = server.runCommand(["getVariable", "BBPKGS"]) + if error: + raise Exception("Unable to get the value of BBPKGS from the server: %s" % error) + if bbpkgs: + self.options.pkgs_to_build.extend(bbpkgs.split()) + + def updateToServer(self, server): + options = {} + for o in ["abort", "tryaltconfigs", "force", "invalidate_stamp", + "verbose", "debug", "dry_run", "dump_signatures", + "debug_domains", "extra_assume_provided", "profile"]: + options[o] = getattr(self.options, o) + + ret, error = server.runCommand(["updateConfig", options]) + if error: + raise Exception("Unable to update the server configuration with local parameters: %s" % error) + + def parseActions(self): + # Parse any commandline into actions + action = {'action':None, 'msg':None} + if self.options.show_environment: + if 'world' in self.options.pkgs_to_build: + action['msg'] = "'world' is not a valid target for --environment." + elif 'universe' in self.options.pkgs_to_build: + action['msg'] = "'universe' is not a valid target for --environment." 
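A minimal sketch of the action dictionaries parseActions() resolves to for two common invocations (target name hypothetical; the default task comes from BB_DEFAULT_TASK and falls back to "build"):

    # "bitbake -e busybox" -> show the environment of a single target
    action = {'action': ["showEnvironmentTarget", ["busybox"]], 'msg': None}

    # "bitbake busybox" -> build the requested targets with the default task
    action = {'action': ["buildTargets", ["busybox"], "build"], 'msg': None}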
+ elif len(self.options.pkgs_to_build) > 1: + action['msg'] = "Only one target can be used with the --environment option." + elif self.options.buildfile and len(self.options.pkgs_to_build) > 0: + action['msg'] = "No target should be used with the --environment and --buildfile options." + elif len(self.options.pkgs_to_build) > 0: + action['action'] = ["showEnvironmentTarget", self.options.pkgs_to_build] + else: + action['action'] = ["showEnvironment", self.options.buildfile] + elif self.options.buildfile is not None: + action['action'] = ["buildFile", self.options.buildfile, self.options.cmd] + elif self.options.revisions_changed: + action['action'] = ["compareRevisions"] + elif self.options.show_versions: + action['action'] = ["showVersions"] + elif self.options.parse_only: + action['action'] = ["parseFiles"] + elif self.options.dot_graph: + if self.options.pkgs_to_build: + action['action'] = ["generateDotGraph", self.options.pkgs_to_build, self.options.cmd] + else: + action['msg'] = "Please specify a package name for dependency graph generation." + else: + if self.options.pkgs_to_build: + action['action'] = ["buildTargets", self.options.pkgs_to_build, self.options.cmd] + else: + #action['msg'] = "Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information." + action = None + self.options.initialaction = action + return action + +class CookerConfiguration(object): + """ + Manages build options and configurations for one run + """ + + def __init__(self): + self.debug_domains = [] + self.extra_assume_provided = [] + self.prefile = [] + self.postfile = [] + self.debug = 0 + self.cmd = None + self.abort = True + self.force = False + self.profile = False + self.nosetscene = False + self.invalidate_stamp = False + self.dump_signatures = [] + self.dry_run = False + self.tracking = False + self.interface = [] + self.writeeventlog = False + + self.env = {} + + def setConfigParameters(self, parameters): + for key in self.__dict__.keys(): + if key in parameters.options.__dict__: + setattr(self, key, parameters.options.__dict__[key]) + self.env = parameters.environment.copy() + self.tracking = parameters.tracking + + def setServerRegIdleCallback(self, srcb): + self.server_register_idlecallback = srcb + + def __getstate__(self): + state = {} + for key in self.__dict__.keys(): + if key == "server_register_idlecallback": + state[key] = None + else: + state[key] = getattr(self, key) + return state + + def __setstate__(self,state): + for k in state: + setattr(self, k, state[k]) + + +def catch_parse_error(func): + """Exception handling bits for our parsing""" + @wraps(func) + def wrapped(fn, *args): + try: + return func(fn, *args) + except (IOError, bb.parse.ParseError, bb.data_smart.ExpansionError) as exc: + import traceback + parselog.critical( traceback.format_exc()) + parselog.critical("Unable to parse %s: %s" % (fn, exc)) + sys.exit(1) + return wrapped + +@catch_parse_error +def parse_config_file(fn, data, include=True): + return bb.parse.handle(fn, data, include) + +@catch_parse_error +def _inherit(bbclass, data): + bb.parse.BBHandler.inherit(bbclass, "configuration INHERITs", 0, data) + return data + +def findConfigFile(configfile, data): + search = [] + bbpath = data.getVar("BBPATH", True) + if bbpath: + for i in bbpath.split(":"): + search.append(os.path.join(i, "conf", configfile)) + path = os.getcwd() + while path != "/": + search.append(os.path.join(path, "conf", configfile)) + path, _ = os.path.split(path) + + for i in search: + if os.path.exists(i): + 
return i + + return None + +class CookerDataBuilder(object): + + def __init__(self, cookercfg, worker = False): + + self.prefiles = cookercfg.prefile + self.postfiles = cookercfg.postfile + self.tracking = cookercfg.tracking + + bb.utils.set_context(bb.utils.clean_context()) + bb.event.set_class_handlers(bb.event.clean_class_handlers()) + self.data = bb.data.init() + if self.tracking: + self.data.enableTracking() + + # Keep a datastore of the initial environment variables and their + # values from when BitBake was launched to enable child processes + # to use environment variables which have been cleaned from the + # BitBake processes env + self.savedenv = bb.data.init() + for k in cookercfg.env: + self.savedenv.setVar(k, cookercfg.env[k]) + + filtered_keys = bb.utils.approved_variables() + bb.data.inheritFromOS(self.data, self.savedenv, filtered_keys) + self.data.setVar("BB_ORIGENV", self.savedenv) + + if worker: + self.data.setVar("BB_WORKERCONTEXT", "1") + + def parseBaseConfiguration(self): + try: + self.parseConfigurationFiles(self.prefiles, self.postfiles) + except SyntaxError: + raise bb.BBHandledException + except bb.data_smart.ExpansionError as e: + logger.error(str(e)) + raise bb.BBHandledException + except Exception: + logger.exception("Error parsing configuration files") + raise bb.BBHandledException + + def _findLayerConf(self, data): + return findConfigFile("bblayers.conf", data) + + def parseConfigurationFiles(self, prefiles, postfiles): + data = self.data + bb.parse.init_parser(data) + + # Parse files for loading *before* bitbake.conf and any includes + for f in prefiles: + data = parse_config_file(f, data) + + layerconf = self._findLayerConf(data) + if layerconf: + parselog.debug(2, "Found bblayers.conf (%s)", layerconf) + # By definition bblayers.conf is in conf/ of TOPDIR. + # We may have been called with cwd somewhere else so reset TOPDIR + data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf))) + data = parse_config_file(layerconf, data) + + layers = (data.getVar('BBLAYERS', True) or "").split() + + data = bb.data.createCopy(data) + for layer in layers: + parselog.debug(2, "Adding layer %s", layer) + data.setVar('LAYERDIR', layer) + data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data) + data.expandVarref('LAYERDIR') + + data.delVar('LAYERDIR') + + if not data.getVar("BBPATH", True): + msg = "The BBPATH variable is not set" + if not layerconf: + msg += (" and bitbake did not find a conf/bblayers.conf file in" + " the expected location.\nMaybe you accidentally" + " invoked bitbake from the wrong directory?") + raise SystemExit(msg) + + data = parse_config_file(os.path.join("conf", "bitbake.conf"), data) + + # Parse files for loading *after* bitbake.conf and any includes + for p in postfiles: + data = parse_config_file(p, data) + + # Handle any INHERITs and inherit the base class + bbclasses = ["base"] + (data.getVar('INHERIT', True) or "").split() + for bbclass in bbclasses: + data = _inherit(bbclass, data) + + # Nomally we only register event handlers at the end of parsing .bb files + # We register any handlers we've found so far here... 
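The handlers registered in the loop that follows come from the metadata itself; a sketch of the kind of declaration that ends up in __BBHANDLERS (handler name and event mask are illustrative):

    # in a .conf or .bbclass file
    addhandler illustrative_build_handler
    illustrative_build_handler[eventmask] = "bb.event.BuildStarted bb.event.BuildCompleted"
    python illustrative_build_handler() {
        bb.note("saw event %s" % e.__class__.__name__)
    }

Parsing records each such name in __BBHANDLERS, and bb.event.register() is then called with the optional [eventmask] varflag so the handler only receives the event classes it asked for.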
+ for var in data.getVar('__BBHANDLERS') or []: + bb.event.register(var, data.getVar(var), (data.getVarFlag(var, "eventmask", True) or "").split()) + + if data.getVar("BB_WORKERCONTEXT", False) is None: + bb.fetch.fetcher_init(data) + bb.codeparser.parser_cache_init(data) + bb.event.fire(bb.event.ConfigParsed(), data) + + if data.getVar("BB_INVALIDCONF") is True: + data.setVar("BB_INVALIDCONF", False) + self.parseConfigurationFiles(self.prefiles, self.postfiles) + return + + bb.parse.init_parser(data) + data.setVar('BBINCLUDED',bb.parse.get_file_depends(data)) + self.data = data + self.data_hash = data.get_hash() + + + diff --git a/bitbake/lib/bb/daemonize.py b/bitbake/lib/bb/daemonize.py new file mode 100644 index 0000000..346a618 --- /dev/null +++ b/bitbake/lib/bb/daemonize.py @@ -0,0 +1,193 @@ +""" +Python Daemonizing helper + +Configurable daemon behaviors: + + 1.) The current working directory set to the "/" directory. + 2.) The current file creation mode mask set to 0. + 3.) Close all open files (1024). + 4.) Redirect standard I/O streams to "/dev/null". + +A failed call to fork() now raises an exception. + +References: + 1) Advanced Programming in the Unix Environment: W. Richard Stevens + http://www.apuebook.com/apue3e.html + 2) The Linux Programming Interface: Michael Kerrisk + http://man7.org/tlpi/index.html + 3) Unix Programming Frequently Asked Questions: + http://www.faqs.org/faqs/unix-faq/programmer/faq/ + +Modified to allow a function to be daemonized and return for +bitbake use by Richard Purdie +""" + +__author__ = "Chad J. Schroeder" +__copyright__ = "Copyright (C) 2005 Chad J. Schroeder" +__version__ = "0.2" + +# Standard Python modules. +import os # Miscellaneous OS interfaces. +import sys # System-specific parameters and functions. + +# Default daemon parameters. +# File mode creation mask of the daemon. +# For BitBake's children, we do want to inherit the parent umask. +UMASK = None + +# Default maximum for the number of available file descriptors. +MAXFD = 1024 + +# The standard I/O file descriptors are redirected to /dev/null by default. +if (hasattr(os, "devnull")): + REDIRECT_TO = os.devnull +else: + REDIRECT_TO = "/dev/null" + +def createDaemon(function, logfile): + """ + Detach a process from the controlling terminal and run it in the + background as a daemon, returning control to the caller. + """ + + try: + # Fork a child process so the parent can exit. This returns control to + # the command-line or shell. It also guarantees that the child will not + # be a process group leader, since the child receives a new process ID + # and inherits the parent's process group ID. This step is required + # to insure that the next call to os.setsid is successful. + pid = os.fork() + except OSError as e: + raise Exception("%s [%d]" % (e.strerror, e.errno)) + + if (pid == 0): # The first child. + # To become the session leader of this new session and the process group + # leader of the new process group, we call os.setsid(). The process is + # also guaranteed not to have a controlling terminal. + os.setsid() + + # Is ignoring SIGHUP necessary? + # + # It's often suggested that the SIGHUP signal should be ignored before + # the second fork to avoid premature termination of the process. The + # reason is that when the first child terminates, all processes, e.g. + # the second child, in the orphaned group will be sent a SIGHUP. 
+ # + # "However, as part of the session management system, there are exactly + # two cases where SIGHUP is sent on the death of a process: + # + # 1) When the process that dies is the session leader of a session that + # is attached to a terminal device, SIGHUP is sent to all processes + # in the foreground process group of that terminal device. + # 2) When the death of a process causes a process group to become + # orphaned, and one or more processes in the orphaned group are + # stopped, then SIGHUP and SIGCONT are sent to all members of the + # orphaned group." [2] + # + # The first case can be ignored since the child is guaranteed not to have + # a controlling terminal. The second case isn't so easy to dismiss. + # The process group is orphaned when the first child terminates and + # POSIX.1 requires that every STOPPED process in an orphaned process + # group be sent a SIGHUP signal followed by a SIGCONT signal. Since the + # second child is not STOPPED though, we can safely forego ignoring the + # SIGHUP signal. In any case, there are no ill-effects if it is ignored. + # + # import signal # Set handlers for asynchronous events. + # signal.signal(signal.SIGHUP, signal.SIG_IGN) + + try: + # Fork a second child and exit immediately to prevent zombies. This + # causes the second child process to be orphaned, making the init + # process responsible for its cleanup. And, since the first child is + # a session leader without a controlling terminal, it's possible for + # it to acquire one by opening a terminal in the future (System V- + # based systems). This second fork guarantees that the child is no + # longer a session leader, preventing the daemon from ever acquiring + # a controlling terminal. + pid = os.fork() # Fork a second child. + except OSError as e: + raise Exception("%s [%d]" % (e.strerror, e.errno)) + + if (pid == 0): # The second child. + # We probably don't want the file mode creation mask inherited from + # the parent, so we give the child complete control over permissions. + if UMASK is not None: + os.umask(UMASK) + else: + # Parent (the first child) of the second child. + os._exit(0) + else: + # exit() or _exit()? + # _exit is like exit(), but it doesn't call any functions registered + # with atexit (and on_exit) or any registered signal handlers. It also + # closes any open file descriptors. Using exit() may cause all stdio + # streams to be flushed twice and any temporary files may be unexpectedly + # removed. It's therefore recommended that child branches of a fork() + # and the parent branch(es) of a daemon use _exit(). + return + + # Close all open file descriptors. This prevents the child from keeping + # open any file descriptors inherited from the parent. There is a variety + # of methods to accomplish this task. Three are listed below. + # + # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum + # number of open file descriptors to close. If it doesn't exist, use + # the default value (configurable). + # + # try: + # maxfd = os.sysconf("SC_OPEN_MAX") + # except (AttributeError, ValueError): + # maxfd = MAXFD + # + # OR + # + # if (os.sysconf_names.has_key("SC_OPEN_MAX")): + # maxfd = os.sysconf("SC_OPEN_MAX") + # else: + # maxfd = MAXFD + # + # OR + # + # Use the getrlimit method to retrieve the maximum file descriptor number + # that can be opened by this process. If there is no limit on the + # resource, use the default value. + # + import resource # Resource usage information. 
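    # Note: resource.getrlimit(resource.RLIMIT_NOFILE) returns a (soft, hard)
    # pair; the hard limit is what gets used below, with MAXFD as the fallback
    # when the limit is reported as RLIM_INFINITY.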
+ maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1] + if (maxfd == resource.RLIM_INFINITY): + maxfd = MAXFD + + # Iterate through and close all file descriptors. +# for fd in range(0, maxfd): +# try: +# os.close(fd) +# except OSError: # ERROR, fd wasn't open to begin with (ignored) +# pass + + # Redirect the standard I/O file descriptors to the specified file. Since + # the daemon has no controlling terminal, most daemons redirect stdin, + # stdout, and stderr to /dev/null. This is done to prevent side-effects + # from reads and writes to the standard I/O file descriptors. + + # This call to open is guaranteed to return the lowest file descriptor, + # which will be 0 (stdin), since it was closed above. +# os.open(REDIRECT_TO, os.O_RDWR) # standard input (0) + + # Duplicate standard input to standard output and standard error. +# os.dup2(0, 1) # standard output (1) +# os.dup2(0, 2) # standard error (2) + + + si = file('/dev/null', 'r') + so = file(logfile, 'w') + se = so + + + # Replace those fds with our own + os.dup2(si.fileno(), sys.stdin.fileno()) + os.dup2(so.fileno(), sys.stdout.fileno()) + os.dup2(se.fileno(), sys.stderr.fileno()) + + function() + + os._exit(0) diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py new file mode 100644 index 0000000..82eefef --- /dev/null +++ b/bitbake/lib/bb/data.py @@ -0,0 +1,446 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Data' implementations + +Functions for interacting with the data structure used by the +BitBake build tools. + +The expandKeys and update_data are the most expensive +operations. At night the cookie monster came by and +suggested 'give me cookies on setting the variables and +things will work out'. Taking this suggestion into account +applying the skills from the not yet passed 'Entwurf und +Analyse von Algorithmen' lecture and the cookie +monster seems to be right. We will track setVar more carefully +to have faster update_data and expandKeys operations. + +This is a trade-off between speed and memory again but +the speed is more critical here. +""" + +# Copyright (C) 2003, 2004 Chris Larson +# Copyright (C) 2005 Holger Hans Peter Freyther +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
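Much of the module introduced in this hunk is about rendering variables so a shell can source them; a minimal sketch of what emit_var() produces for an exported variable (names and values hypothetical, assuming the bb modules are importable):

    import sys
    import bb.data

    d = bb.data.init()
    d.setVar("CFLAGS", "-O2 -pipe")
    d.setVarFlag("CFLAGS", "export", "1")
    bb.data.emit_var("CFLAGS", sys.stdout, d)   # writes: export CFLAGS="-O2 -pipe"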
+# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +import sys, os, re +if sys.argv[0][-5:] == "pydoc": + path = os.path.dirname(os.path.dirname(sys.argv[1])) +else: + path = os.path.dirname(os.path.dirname(sys.argv[0])) +sys.path.insert(0, path) +from itertools import groupby + +from bb import data_smart +from bb import codeparser +import bb + +logger = data_smart.logger +_dict_type = data_smart.DataSmart + +def init(): + """Return a new object representing the Bitbake data""" + return _dict_type() + +def init_db(parent = None): + """Return a new object representing the Bitbake data, + optionally based on an existing object""" + if parent is not None: + return parent.createCopy() + else: + return _dict_type() + +def createCopy(source): + """Link the source set to the destination + If one does not find the value in the destination set, + search will go on to the source set to get the value. + Value from source are copy-on-write. i.e. any try to + modify one of them will end up putting the modified value + in the destination set. + """ + return source.createCopy() + +def initVar(var, d): + """Non-destructive var init for data structure""" + d.initVar(var) + + +def setVar(var, value, d): + """Set a variable to a given value""" + d.setVar(var, value) + + +def getVar(var, d, exp = 0): + """Gets the value of a variable""" + return d.getVar(var, exp) + + +def renameVar(key, newkey, d): + """Renames a variable from key to newkey""" + d.renameVar(key, newkey) + +def delVar(var, d): + """Removes a variable from the data set""" + d.delVar(var) + +def appendVar(var, value, d): + """Append additional value to a variable""" + d.appendVar(var, value) + +def setVarFlag(var, flag, flagvalue, d): + """Set a flag for a given variable to a given value""" + d.setVarFlag(var, flag, flagvalue) + +def getVarFlag(var, flag, d): + """Gets given flag from given var""" + return d.getVarFlag(var, flag) + +def delVarFlag(var, flag, d): + """Removes a given flag from the variable's flags""" + d.delVarFlag(var, flag) + +def setVarFlags(var, flags, d): + """Set the flags for a given variable + + Note: + setVarFlags will not clear previous + flags. Think of this method as + addVarFlags + """ + d.setVarFlags(var, flags) + +def getVarFlags(var, d): + """Gets a variable's flags""" + return d.getVarFlags(var) + +def delVarFlags(var, d): + """Removes a variable's flags""" + d.delVarFlags(var) + +def keys(d): + """Return a list of keys in d""" + return d.keys() + + +__expand_var_regexp__ = re.compile(r"\${[^{}]+}") +__expand_python_regexp__ = re.compile(r"\${@.+?}") + +def expand(s, d, varname = None): + """Variable expansion using the data store""" + return d.expand(s, varname) + +def expandKeys(alterdata, readdata = None): + if readdata == None: + readdata = alterdata + + todolist = {} + for key in alterdata: + if not '${' in key: + continue + + ekey = expand(key, readdata) + if key == ekey: + continue + todolist[key] = ekey + + # These two for loops are split for performance to maximise the + # usefulness of the expand cache + + for key in todolist: + ekey = todolist[key] + newval = alterdata.getVar(ekey, 0) + if newval: + val = alterdata.getVar(key, 0) + if val is not None and newval is not None: + bb.warn("Variable key %s (%s) replaces original key %s (%s)." 
% (key, val, ekey, newval)) + alterdata.renameVar(key, ekey) + +def inheritFromOS(d, savedenv, permitted): + """Inherit variables from the initial environment.""" + exportlist = bb.utils.preserved_envvars_exported() + for s in savedenv.keys(): + if s in permitted: + try: + d.setVar(s, getVar(s, savedenv, True), op = 'from env') + if s in exportlist: + d.setVarFlag(s, "export", True, op = 'auto env export') + except TypeError: + pass + +def emit_var(var, o=sys.__stdout__, d = init(), all=False): + """Emit a variable to be sourced by a shell.""" + if getVarFlag(var, "python", d): + return 0 + + export = getVarFlag(var, "export", d) + unexport = getVarFlag(var, "unexport", d) + func = getVarFlag(var, "func", d) + if not all and not export and not unexport and not func: + return 0 + + try: + if all: + oval = getVar(var, d, 0) + val = getVar(var, d, 1) + except (KeyboardInterrupt, bb.build.FuncFailed): + raise + except Exception as exc: + o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc))) + return 0 + + if all: + d.varhistory.emit(var, oval, val, o) + + if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all: + return 0 + + varExpanded = expand(var, d) + + if unexport: + o.write('unset %s\n' % varExpanded) + return 0 + + if val is None: + return 0 + + val = str(val) + + if varExpanded.startswith("BASH_FUNC_"): + varExpanded = varExpanded[10:-2] + val = val[3:] # Strip off "() " + o.write("%s() %s\n" % (varExpanded, val)) + o.write("export -f %s\n" % (varExpanded)) + return 1 + + if func: + # NOTE: should probably check for unbalanced {} within the var + o.write("%s() {\n%s\n}\n" % (varExpanded, val)) + return 1 + + if export: + o.write('export ') + + # if we're going to output this within doublequotes, + # to a shell, we need to escape the quotes in the var + alter = re.sub('"', '\\"', val) + alter = re.sub('\n', ' \\\n', alter) + alter = re.sub('\\$', '\\\\$', alter) + o.write('%s="%s"\n' % (varExpanded, alter)) + return 0 + +def emit_env(o=sys.__stdout__, d = init(), all=False): + """Emits all items in the data store in a format such that it can be sourced by a shell.""" + + isfunc = lambda key: bool(d.getVarFlag(key, "func")) + keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc) + grouped = groupby(keys, isfunc) + for isfunc, keys in grouped: + for key in keys: + emit_var(key, o, d, all and not isfunc) and o.write('\n') + +def exported_keys(d): + return (key for key in d.keys() if not key.startswith('__') and + d.getVarFlag(key, 'export') and + not d.getVarFlag(key, 'unexport')) + +def exported_vars(d): + for key in exported_keys(d): + try: + value = d.getVar(key, True) + except Exception: + pass + + if value is not None: + yield key, str(value) + +def emit_func(func, o=sys.__stdout__, d = init()): + """Emits all items in the data store in a format such that it can be sourced by a shell.""" + + keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func")) + for key in keys: + emit_var(key, o, d, False) and o.write('\n') + + emit_var(func, o, d, False) and o.write('\n') + newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True)) + newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split()) + seen = set() + while newdeps: + deps = newdeps + seen |= deps + newdeps = set() + for dep in deps: + if d.getVarFlag(dep, "func") and not d.getVarFlag(dep, "python"): + emit_var(dep, o, d, False) and 
o.write('\n') + newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep, True)) + newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split()) + newdeps -= seen + +_functionfmt = """ +def {function}(d): +{body}""" + +def emit_func_python(func, o=sys.__stdout__, d = init()): + """Emits all items in the data store in a format such that it can be sourced by a shell.""" + + def write_func(func, o, call = False): + body = d.getVar(func, True) + if not body.startswith("def"): + body = _functionfmt.format(function=func, body=body) + + o.write(body.strip() + "\n\n") + if call: + o.write(func + "(d)" + "\n\n") + + write_func(func, o, True) + pp = bb.codeparser.PythonParser(func, logger) + pp.parse_python(d.getVar(func, True)) + newdeps = pp.execs + newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split()) + seen = set() + while newdeps: + deps = newdeps + seen |= deps + newdeps = set() + for dep in deps: + if d.getVarFlag(dep, "func") and d.getVarFlag(dep, "python"): + write_func(dep, o) + pp = bb.codeparser.PythonParser(dep, logger) + pp.parse_python(d.getVar(dep, True)) + newdeps |= pp.execs + newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split()) + newdeps -= seen + +def update_data(d): + """Performs final steps upon the datastore, including application of overrides""" + d.finalize(parent = True) + +def build_dependencies(key, keys, shelldeps, varflagsexcl, d): + deps = set() + try: + if key[-1] == ']': + vf = key[:-1].split('[') + value = d.getVarFlag(vf[0], vf[1], False) + parser = d.expandWithRefs(value, key) + deps |= parser.references + deps = deps | (keys & parser.execs) + return deps, value + varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "vardepvalueexclude", "postfuncs", "prefuncs"]) or {} + vardeps = varflags.get("vardeps") + value = d.getVar(key, False) + + def handle_contains(value, contains, d): + newvalue = "" + for k in sorted(contains): + l = (d.getVar(k, True) or "").split() + for word in sorted(contains[k]): + if word in l: + newvalue += "\n%s{%s} = Set" % (k, word) + else: + newvalue += "\n%s{%s} = Unset" % (k, word) + if not newvalue: + return value + if not value: + return newvalue + return value + newvalue + + if "vardepvalue" in varflags: + value = varflags.get("vardepvalue") + elif varflags.get("func"): + if varflags.get("python"): + parsedvar = d.expandWithRefs(value, key) + parser = bb.codeparser.PythonParser(key, logger) + if parsedvar.value and "\t" in parsedvar.value: + logger.warn("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True))) + parser.parse_python(parsedvar.value) + deps = deps | parser.references + value = handle_contains(value, parser.contains, d) + else: + parsedvar = d.expandWithRefs(value, key) + parser = bb.codeparser.ShellParser(key, logger) + parser.parse_shell(parsedvar.value) + deps = deps | shelldeps + if vardeps is None: + parser.log.flush() + if "prefuncs" in varflags: + deps = deps | set(varflags["prefuncs"].split()) + if "postfuncs" in varflags: + deps = deps | set(varflags["postfuncs"].split()) + deps = deps | parsedvar.references + deps = deps | (keys & parser.execs) | (keys & parsedvar.execs) + value = handle_contains(value, parsedvar.contains, d) + else: + parser = d.expandWithRefs(value, key) + deps |= parser.references + deps = deps | (keys & parser.execs) + value = handle_contains(value, parser.contains, d) + + if "vardepvalueexclude" in varflags: + exclude = varflags.get("vardepvalueexclude") + for excl in exclude.split('|'): + 
if excl: + value = value.replace(excl, '') + + # Add varflags, assuming an exclusion list is set + if varflagsexcl: + varfdeps = [] + for f in varflags: + if f not in varflagsexcl: + varfdeps.append('%s[%s]' % (key, f)) + if varfdeps: + deps |= set(varfdeps) + + deps |= set((vardeps or "").split()) + deps -= set(varflags.get("vardepsexclude", "").split()) + except Exception as e: + raise bb.data_smart.ExpansionError(key, None, e) + return deps, value + #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs))) + #d.setVarFlag(key, "vardeps", deps) + +def generate_dependencies(d): + + keys = set(key for key in d if not key.startswith("__")) + shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export") and not d.getVarFlag(key, "unexport")) + varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS', True) + + deps = {} + values = {} + + tasklist = d.getVar('__BBTASKS') or [] + for task in tasklist: + deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d) + newdeps = deps[task] + seen = set() + while newdeps: + nextdeps = newdeps + seen |= nextdeps + newdeps = set() + for dep in nextdeps: + if dep not in deps: + deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, varflagsexcl, d) + newdeps |= deps[dep] + newdeps -= seen + #print "For %s: %s" % (task, str(deps[task])) + return tasklist, deps, values + +def inherits_class(klass, d): + val = getVar('__inherit_cache', d) or [] + needle = os.path.join('classes', '%s.bbclass' % klass) + for v in val: + if v.endswith(needle): + return True + return False diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py new file mode 100644 index 0000000..d4bb98d --- /dev/null +++ b/bitbake/lib/bb/data_smart.py @@ -0,0 +1,811 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake Smart Dictionary Implementation + +Functions for interacting with the data structure used by the +BitBake build tools. + +""" + +# Copyright (C) 2003, 2004 Chris Larson +# Copyright (C) 2004, 2005 Seb Frankengul +# Copyright (C) 2005, 2006 Holger Hans Peter Freyther +# Copyright (C) 2005 Uli Luckas +# Copyright (C) 2005 ROAD GmbH +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
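
The bb/data.py module added above is a thin functional facade: every helper simply forwards to the corresponding method on the DataSmart datastore implemented below. A minimal usage sketch, assuming bitbake/lib from this tree is on sys.path; FOO and BAR are invented variable names and the results noted in the comments are what these helpers are expected to produce:

    import sys
    from bb import data

    d = data.init()                       # fresh DataSmart datastore
    data.setVar("FOO", "world", d)
    data.setVar("BAR", "hello ${FOO}", d)
    print(data.getVar("BAR", d, 1))       # expanded lookup -> "hello world"
    print(data.expand("${BAR}!", d))      # ad-hoc expansion -> "hello world!"

    data.setVarFlag("BAR", "export", True, d)
    data.emit_var("BAR", sys.stdout, d)   # writes: export BAR="hello world"
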
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +import copy, re, sys, traceback +from collections import MutableMapping +import logging +import hashlib +import bb, bb.codeparser +from bb import utils +from bb.COW import COWDictBase + +logger = logging.getLogger("BitBake.Data") + +__setvar_keyword__ = ["_append", "_prepend", "_remove"] +__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>.*))?$') +__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t ]+}") +__expand_python_regexp__ = re.compile(r"\${@.+?}") + +def infer_caller_details(loginfo, parent = False, varval = True): + """Save the caller the trouble of specifying everything.""" + # Save effort. + if 'ignore' in loginfo and loginfo['ignore']: + return + # If nothing was provided, mark this as possibly unneeded. + if not loginfo: + loginfo['ignore'] = True + return + # Infer caller's likely values for variable (var) and value (value), + # to reduce clutter in the rest of the code. + if varval and ('variable' not in loginfo or 'detail' not in loginfo): + try: + raise Exception + except Exception: + tb = sys.exc_info()[2] + if parent: + above = tb.tb_frame.f_back.f_back + else: + above = tb.tb_frame.f_back + lcls = above.f_locals.items() + for k, v in lcls: + if k == 'value' and 'detail' not in loginfo: + loginfo['detail'] = v + if k == 'var' and 'variable' not in loginfo: + loginfo['variable'] = v + # Infer file/line/function from traceback + if 'file' not in loginfo: + depth = 3 + if parent: + depth = 4 + file, line, func, text = traceback.extract_stack(limit = depth)[0] + loginfo['file'] = file + loginfo['line'] = line + if func not in loginfo: + loginfo['func'] = func + +class VariableParse: + def __init__(self, varname, d, val = None): + self.varname = varname + self.d = d + self.value = val + + self.references = set() + self.execs = set() + self.contains = {} + + def var_sub(self, match): + key = match.group()[2:-1] + if self.varname and key: + if self.varname == key: + raise Exception("variable %s references itself!" 
% self.varname) + if key in self.d.expand_cache: + varparse = self.d.expand_cache[key] + var = varparse.value + else: + var = self.d.getVarFlag(key, "_content", True) + self.references.add(key) + if var is not None: + return var + else: + return match.group() + + def python_sub(self, match): + code = match.group()[3:-1] + codeobj = compile(code.strip(), self.varname or "<expansion>", "eval") + + parser = bb.codeparser.PythonParser(self.varname, logger) + parser.parse_python(code) + if self.varname: + vardeps = self.d.getVarFlag(self.varname, "vardeps", True) + if vardeps is None: + parser.log.flush() + else: + parser.log.flush() + self.references |= parser.references + self.execs |= parser.execs + + for k in parser.contains: + if k not in self.contains: + self.contains[k] = parser.contains[k].copy() + else: + self.contains[k].update(parser.contains[k]) + value = utils.better_eval(codeobj, DataContext(self.d)) + return str(value) + + +class DataContext(dict): + def __init__(self, metadata, **kwargs): + self.metadata = metadata + dict.__init__(self, **kwargs) + self['d'] = metadata + + def __missing__(self, key): + value = self.metadata.getVar(key, True) + if value is None or self.metadata.getVarFlag(key, 'func'): + raise KeyError(key) + else: + return value + +class ExpansionError(Exception): + def __init__(self, varname, expression, exception): + self.expression = expression + self.variablename = varname + self.exception = exception + if varname: + if expression: + self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception) + else: + self.msg = "Failure expanding variable %s: %s: %s" % (varname, type(exception).__name__, exception) + else: + self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception) + Exception.__init__(self, self.msg) + self.args = (varname, expression, exception) + def __str__(self): + return self.msg + +class IncludeHistory(object): + def __init__(self, parent = None, filename = '[TOP LEVEL]'): + self.parent = parent + self.filename = filename + self.children = [] + self.current = self + + def copy(self): + new = IncludeHistory(self.parent, self.filename) + for c in self.children: + new.children.append(c) + return new + + def include(self, filename): + newfile = IncludeHistory(self.current, filename) + self.current.children.append(newfile) + self.current = newfile + return self + + def __enter__(self): + pass + + def __exit__(self, a, b, c): + if self.current.parent: + self.current = self.current.parent + else: + bb.warn("Include log: Tried to finish '%s' at top level." 
% filename) + return False + + def emit(self, o, level = 0): + """Emit an include history file, and its children.""" + if level: + spaces = " " * (level - 1) + o.write("# %s%s" % (spaces, self.filename)) + if len(self.children) > 0: + o.write(" includes:") + else: + o.write("#\n# INCLUDE HISTORY:\n#") + level = level + 1 + for child in self.children: + o.write("\n") + child.emit(o, level) + +class VariableHistory(object): + def __init__(self, dataroot): + self.dataroot = dataroot + self.variables = COWDictBase.copy() + + def copy(self): + new = VariableHistory(self.dataroot) + new.variables = self.variables.copy() + return new + + def record(self, *kwonly, **loginfo): + if not self.dataroot._tracking: + return + if len(kwonly) > 0: + raise TypeError + infer_caller_details(loginfo, parent = True) + if 'ignore' in loginfo and loginfo['ignore']: + return + if 'op' not in loginfo or not loginfo['op']: + loginfo['op'] = 'set' + if 'detail' in loginfo: + loginfo['detail'] = str(loginfo['detail']) + if 'variable' not in loginfo or 'file' not in loginfo: + raise ValueError("record() missing variable or file.") + var = loginfo['variable'] + + if var not in self.variables: + self.variables[var] = [] + self.variables[var].append(loginfo.copy()) + + def variable(self, var): + if var in self.variables: + return self.variables[var] + else: + return [] + + def emit(self, var, oval, val, o): + history = self.variable(var) + commentVal = re.sub('\n', '\n#', str(oval)) + if history: + if len(history) == 1: + o.write("#\n# $%s\n" % var) + else: + o.write("#\n# $%s [%d operations]\n" % (var, len(history))) + for event in history: + # o.write("# %s\n" % str(event)) + if 'func' in event: + # If we have a function listed, this is internal + # code, not an operation in a config file, and the + # full path is distracting. 
+ event['file'] = re.sub('.*/', '', event['file']) + display_func = ' [%s]' % event['func'] + else: + display_func = '' + if 'flag' in event: + flag = '[%s] ' % (event['flag']) + else: + flag = '' + o.write("# %s %s:%s%s\n# %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', event['detail']))) + if len(history) > 1: + o.write("# pre-expansion value:\n") + o.write('# "%s"\n' % (commentVal)) + else: + o.write("#\n# $%s\n# [no history recorded]\n#\n" % var) + o.write('# "%s"\n' % (commentVal)) + + def get_variable_files(self, var): + """Get the files where operations are made on a variable""" + var_history = self.variable(var) + files = [] + for event in var_history: + files.append(event['file']) + return files + + def get_variable_lines(self, var, f): + """Get the line where a operation is made on a variable in file f""" + var_history = self.variable(var) + lines = [] + for event in var_history: + if f== event['file']: + line = event['line'] + lines.append(line) + return lines + + def del_var_history(self, var, f=None, line=None): + """If file f and line are not given, the entire history of var is deleted""" + if var in self.variables: + if f and line: + self.variables[var] = [ x for x in self.variables[var] if x['file']!=f and x['line']!=line] + else: + self.variables[var] = [] + +class DataSmart(MutableMapping): + def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ): + self.dict = {} + + self.inchistory = IncludeHistory() + self.varhistory = VariableHistory(self) + self._tracking = False + + # cookie monster tribute + self._special_values = special + self._seen_overrides = seen + + self.expand_cache = {} + + def enableTracking(self): + self._tracking = True + + def disableTracking(self): + self._tracking = False + + def expandWithRefs(self, s, varname): + + if not isinstance(s, basestring): # sanity check + return VariableParse(varname, self, s) + + if varname and varname in self.expand_cache: + return self.expand_cache[varname] + + varparse = VariableParse(varname, self) + + while s.find('${') != -1: + olds = s + try: + s = __expand_var_regexp__.sub(varparse.var_sub, s) + s = __expand_python_regexp__.sub(varparse.python_sub, s) + if s == olds: + break + except ExpansionError: + raise + except bb.parse.SkipRecipe: + raise + except Exception as exc: + raise ExpansionError(varname, s, exc) + + varparse.value = s + + if varname: + self.expand_cache[varname] = varparse + + return varparse + + def expand(self, s, varname = None): + return self.expandWithRefs(s, varname).value + + + def finalize(self, parent = False): + """Performs final steps upon the datastore, including application of overrides""" + + overrides = (self.getVar("OVERRIDES", True) or "").split(":") or [] + finalize_caller = { + 'op': 'finalize', + } + infer_caller_details(finalize_caller, parent = parent, varval = False) + + # + # Well let us see what breaks here. We used to iterate + # over each variable and apply the override and then + # do the line expanding. + # If we have bad luck - which we will have - the keys + # where in some order that is so important for this + # method which we don't have anymore. + # Anyway we will fix that and write test cases this + # time. + + # + # First we apply all overrides + # Then we will handle _append and _prepend and store the _remove + # information for later. + # + + # We only want to report finalization once per variable overridden. 
+ finalizes_reported = {} + + for o in overrides: + # calculate '_'+override + l = len(o) + 1 + + # see if one should even try + if o not in self._seen_overrides: + continue + + vars = self._seen_overrides[o].copy() + for var in vars: + name = var[:-l] + try: + # Report only once, even if multiple changes. + if name not in finalizes_reported: + finalizes_reported[name] = True + finalize_caller['variable'] = name + finalize_caller['detail'] = 'was: ' + str(self.getVar(name, False)) + self.varhistory.record(**finalize_caller) + # Copy history of the override over. + for event in self.varhistory.variable(var): + loginfo = event.copy() + loginfo['variable'] = name + loginfo['op'] = 'override[%s]:%s' % (o, loginfo['op']) + self.varhistory.record(**loginfo) + self.setVar(name, self.getVar(var, False), op = 'finalize', file = 'override[%s]' % o, line = '') + self.delVar(var) + except Exception: + logger.info("Untracked delVar") + + # now on to the appends and prepends, and stashing the removes + for op in __setvar_keyword__: + if op in self._special_values: + appends = self._special_values[op] or [] + for append in appends: + keep = [] + for (a, o) in self.getVarFlag(append, op) or []: + match = True + if o: + for o2 in o.split("_"): + if not o2 in overrides: + match = False + if not match: + keep.append((a ,o)) + continue + + if op == "_append": + sval = self.getVar(append, False) or "" + sval += a + self.setVar(append, sval) + elif op == "_prepend": + sval = a + (self.getVar(append, False) or "") + self.setVar(append, sval) + elif op == "_remove": + removes = self.getVarFlag(append, "_removeactive", False) or [] + removes.extend(a.split()) + self.setVarFlag(append, "_removeactive", removes, ignore=True) + + # We save overrides that may be applied at some later stage + if keep: + self.setVarFlag(append, op, keep, ignore=True) + else: + self.delVarFlag(append, op, ignore=True) + + def initVar(self, var): + self.expand_cache = {} + if not var in self.dict: + self.dict[var] = {} + + def _findVar(self, var): + dest = self.dict + while dest: + if var in dest: + return dest[var] + + if "_data" not in dest: + break + dest = dest["_data"] + + def _makeShadowCopy(self, var): + if var in self.dict: + return + + local_var = self._findVar(var) + + if local_var: + self.dict[var] = copy.copy(local_var) + else: + self.initVar(var) + + + def setVar(self, var, value, **loginfo): + #print("var=" + str(var) + " val=" + str(value)) + if 'op' not in loginfo: + loginfo['op'] = "set" + self.expand_cache = {} + match = __setvar_regexp__.match(var) + if match and match.group("keyword") in __setvar_keyword__: + base = match.group('base') + keyword = match.group("keyword") + override = match.group('add') + l = self.getVarFlag(base, keyword) or [] + l.append([value, override]) + self.setVarFlag(base, keyword, l, ignore=True) + # And cause that to be recorded: + loginfo['detail'] = value + loginfo['variable'] = base + if override: + loginfo['op'] = '%s[%s]' % (keyword, override) + else: + loginfo['op'] = keyword + self.varhistory.record(**loginfo) + # todo make sure keyword is not __doc__ or __module__ + # pay the cookie monster + try: + self._special_values[keyword].add(base) + except KeyError: + self._special_values[keyword] = set() + self._special_values[keyword].add(base) + + return + + if not var in self.dict: + self._makeShadowCopy(var) + + # more cookies for the cookie monster + if '_' in var: + self._setvar_update_overrides(var) + + # setting var + self.dict[var]["_content"] = value + 
self.varhistory.record(**loginfo) + + def _setvar_update_overrides(self, var): + # aka pay the cookie monster + override = var[var.rfind('_')+1:] + shortvar = var[:var.rfind('_')] + while override: + if override not in self._seen_overrides: + self._seen_overrides[override] = set() + self._seen_overrides[override].add( var ) + override = None + if "_" in shortvar: + override = var[shortvar.rfind('_')+1:] + shortvar = var[:shortvar.rfind('_')] + + def getVar(self, var, expand=False, noweakdefault=False): + return self.getVarFlag(var, "_content", expand, noweakdefault) + + def renameVar(self, key, newkey, **loginfo): + """ + Rename the variable key to newkey + """ + val = self.getVar(key, 0) + if val is not None: + loginfo['variable'] = newkey + loginfo['op'] = 'rename from %s' % key + loginfo['detail'] = val + self.varhistory.record(**loginfo) + self.setVar(newkey, val, ignore=True) + + for i in (__setvar_keyword__): + src = self.getVarFlag(key, i) + if src is None: + continue + + dest = self.getVarFlag(newkey, i) or [] + dest.extend(src) + self.setVarFlag(newkey, i, dest, ignore=True) + + if i in self._special_values and key in self._special_values[i]: + self._special_values[i].remove(key) + self._special_values[i].add(newkey) + + loginfo['variable'] = key + loginfo['op'] = 'rename (to)' + loginfo['detail'] = newkey + self.varhistory.record(**loginfo) + self.delVar(key, ignore=True) + + def appendVar(self, var, value, **loginfo): + loginfo['op'] = 'append' + self.varhistory.record(**loginfo) + newvalue = (self.getVar(var, False) or "") + value + self.setVar(var, newvalue, ignore=True) + + def prependVar(self, var, value, **loginfo): + loginfo['op'] = 'prepend' + self.varhistory.record(**loginfo) + newvalue = value + (self.getVar(var, False) or "") + self.setVar(var, newvalue, ignore=True) + + def delVar(self, var, **loginfo): + loginfo['detail'] = "" + loginfo['op'] = 'del' + self.varhistory.record(**loginfo) + self.expand_cache = {} + self.dict[var] = {} + if '_' in var: + override = var[var.rfind('_')+1:] + if override and override in self._seen_overrides and var in self._seen_overrides[override]: + self._seen_overrides[override].remove(var) + + def setVarFlag(self, var, flag, value, **loginfo): + if 'op' not in loginfo: + loginfo['op'] = "set" + loginfo['flag'] = flag + self.varhistory.record(**loginfo) + if not var in self.dict: + self._makeShadowCopy(var) + self.dict[var][flag] = value + + if flag == "defaultval" and '_' in var: + self._setvar_update_overrides(var) + + if flag == "unexport" or flag == "export": + if not "__exportlist" in self.dict: + self._makeShadowCopy("__exportlist") + if not "_content" in self.dict["__exportlist"]: + self.dict["__exportlist"]["_content"] = set() + self.dict["__exportlist"]["_content"].add(var) + + def getVarFlag(self, var, flag, expand=False, noweakdefault=False): + local_var = self._findVar(var) + value = None + if local_var is not None: + if flag in local_var: + value = copy.copy(local_var[flag]) + elif flag == "_content" and "defaultval" in local_var and not noweakdefault: + value = copy.copy(local_var["defaultval"]) + if expand and value: + # Only getvar (flag == _content) hits the expand cache + cachename = None + if flag == "_content": + cachename = var + else: + cachename = var + "[" + flag + "]" + value = self.expand(value, cachename) + if value and flag == "_content" and local_var is not None and "_removeactive" in local_var: + removes = [self.expand(r).split() for r in local_var["_removeactive"]] + removes = reduce(lambda a, b: a+b, 
removes, []) + filtered = filter(lambda v: v not in removes, + value.split()) + value = " ".join(filtered) + if expand: + # We need to ensure the expand cache has the correct value + # flag == "_content" here + self.expand_cache[var].value = value + return value + + def delVarFlag(self, var, flag, **loginfo): + local_var = self._findVar(var) + if not local_var: + return + if not var in self.dict: + self._makeShadowCopy(var) + + if var in self.dict and flag in self.dict[var]: + loginfo['detail'] = "" + loginfo['op'] = 'delFlag' + loginfo['flag'] = flag + self.varhistory.record(**loginfo) + + del self.dict[var][flag] + + def appendVarFlag(self, var, flag, value, **loginfo): + loginfo['op'] = 'append' + loginfo['flag'] = flag + self.varhistory.record(**loginfo) + newvalue = (self.getVarFlag(var, flag, False) or "") + value + self.setVarFlag(var, flag, newvalue, ignore=True) + + def prependVarFlag(self, var, flag, value, **loginfo): + loginfo['op'] = 'prepend' + loginfo['flag'] = flag + self.varhistory.record(**loginfo) + newvalue = value + (self.getVarFlag(var, flag, False) or "") + self.setVarFlag(var, flag, newvalue, ignore=True) + + def setVarFlags(self, var, flags, **loginfo): + infer_caller_details(loginfo) + if not var in self.dict: + self._makeShadowCopy(var) + + for i in flags: + if i == "_content": + continue + loginfo['flag'] = i + loginfo['detail'] = flags[i] + self.varhistory.record(**loginfo) + self.dict[var][i] = flags[i] + + def getVarFlags(self, var, expand = False, internalflags=False): + local_var = self._findVar(var) + flags = {} + + if local_var: + for i in local_var: + if i.startswith("_") and not internalflags: + continue + flags[i] = local_var[i] + if expand and i in expand: + flags[i] = self.expand(flags[i], var + "[" + i + "]") + if len(flags) == 0: + return None + return flags + + + def delVarFlags(self, var, **loginfo): + if not var in self.dict: + self._makeShadowCopy(var) + + if var in self.dict: + content = None + + loginfo['op'] = 'delete flags' + self.varhistory.record(**loginfo) + + # try to save the content + if "_content" in self.dict[var]: + content = self.dict[var]["_content"] + self.dict[var] = {} + self.dict[var]["_content"] = content + else: + del self.dict[var] + + + def createCopy(self): + """ + Create a copy of self by setting _data to self + """ + # we really want this to be a DataSmart... 
+ data = DataSmart(seen=self._seen_overrides.copy(), special=self._special_values.copy()) + data.dict["_data"] = self.dict + data.varhistory = self.varhistory.copy() + data.varhistory.datasmart = data + data.inchistory = self.inchistory.copy() + + data._tracking = self._tracking + + return data + + def expandVarref(self, variable, parents=False): + """Find all references to variable in the data and expand it + in place, optionally descending to parent datastores.""" + + if parents: + keys = iter(self) + else: + keys = self.localkeys() + + ref = '${%s}' % variable + value = self.getVar(variable, False) + for key in keys: + referrervalue = self.getVar(key, False) + if referrervalue and ref in referrervalue: + self.setVar(key, referrervalue.replace(ref, value)) + + def localkeys(self): + for key in self.dict: + if key != '_data': + yield key + + def __iter__(self): + def keylist(d): + klist = set() + for key in d: + if key == "_data": + continue + if not d[key]: + continue + klist.add(key) + + if "_data" in d: + klist |= keylist(d["_data"]) + + return klist + + for k in keylist(self.dict): + yield k + + def __len__(self): + return len(frozenset(self)) + + def __getitem__(self, item): + value = self.getVar(item, False) + if value is None: + raise KeyError(item) + else: + return value + + def __setitem__(self, var, value): + self.setVar(var, value) + + def __delitem__(self, var): + self.delVar(var) + + def get_hash(self): + data = {} + d = self.createCopy() + bb.data.expandKeys(d) + bb.data.update_data(d) + + config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST", True) or "").split()) + keys = set(key for key in iter(d) if not key.startswith("__")) + for key in keys: + if key in config_whitelist: + continue + + value = d.getVar(key, False) or "" + data.update({key:value}) + + varflags = d.getVarFlags(key, internalflags = True) + if not varflags: + continue + for f in varflags: + if f == "_content": + continue + data.update({'%s[%s]' % (key, f):varflags[f]}) + + for key in ["__BBTASKS", "__BBANONFUNCS", "__BBHANDLERS"]: + bb_list = d.getVar(key, False) or [] + bb_list.sort() + data.update({key:str(bb_list)}) + + if key == "__BBANONFUNCS": + for i in bb_list: + value = d.getVar(i, True) or "" + data.update({i:value}) + + data_str = str([(k, data[k]) for k in sorted(data.keys())]) + return hashlib.md5(data_str).hexdigest() diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py new file mode 100644 index 0000000..6cbd0d5 --- /dev/null +++ b/bitbake/lib/bb/event.py @@ -0,0 +1,639 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Event' implementation + +Classes and functions for manipulating 'events' in the +BitBake build tools. +""" + +# Copyright (C) 2003, 2004 Chris Larson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
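
The DataSmart class defined above is the datastore object behind that facade: a copy-on-write dictionary with ${} expansion, override and append handling, and per-variable history. A rough, illustrative sketch of the core behaviour, under the same sys.path assumption; MYVAR and the "local" override are invented names, and the expected results are noted in the comments:

    from bb.data_smart import DataSmart

    d = DataSmart()
    d.setVar("OVERRIDES", "local")          # colon-separated list of active overrides
    d.setVar("MYVAR", "base")
    d.setVar("MYVAR_local", "overridden")   # conditional value, applied by finalize()
    d.setVar("MYVAR_append", " tail")       # stashed, appended by finalize()

    child = d.createCopy()                  # copy-on-write view of d
    child.setVar("MYVAR", "child-only")     # shadows the value in the child only

    d.finalize()                            # apply overrides, then _append/_prepend
    print(d.getVar("MYVAR", True))          # -> "overridden tail"
    print(child.getVar("MYVAR", True))      # -> "child-only"
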
+ +import os, sys +import warnings +try: + import cPickle as pickle +except ImportError: + import pickle +import logging +import atexit +import traceback +import bb.utils +import bb.compat +import bb.exceptions + +# This is the pid for which we should generate the event. This is set when +# the runqueue forks off. +worker_pid = 0 +worker_fire = None + +logger = logging.getLogger('BitBake.Event') + +class Event(object): + """Base class for events""" + + def __init__(self): + self.pid = worker_pid + +Registered = 10 +AlreadyRegistered = 14 + +def get_class_handlers(): + return _handlers + +def set_class_handlers(h): + global _handlers + _handlers = h + +def clean_class_handlers(): + return bb.compat.OrderedDict() + +# Internal +_handlers = clean_class_handlers() +_ui_handlers = {} +_ui_logfilters = {} +_ui_handler_seq = 0 +_event_handler_map = {} +_catchall_handlers = {} + +def execute_handler(name, handler, event, d): + event.data = d + try: + ret = handler(event) + except (bb.parse.SkipRecipe, bb.BBHandledException): + raise + except Exception: + etype, value, tb = sys.exc_info() + logger.error("Execution of event handler '%s' failed" % name, + exc_info=(etype, value, tb.tb_next)) + raise + except SystemExit as exc: + if exc.code != 0: + logger.error("Execution of event handler '%s' failed" % name) + raise + finally: + del event.data + +def fire_class_handlers(event, d): + if isinstance(event, logging.LogRecord): + return + + eid = str(event.__class__)[8:-2] + evt_hmap = _event_handler_map.get(eid, {}) + for name, handler in _handlers.iteritems(): + if name in _catchall_handlers or name in evt_hmap: + execute_handler(name, handler, event, d) + +ui_queue = [] +@atexit.register +def print_ui_queue(): + """If we're exiting before a UI has been spawned, display any queued + LogRecords to the console.""" + logger = logging.getLogger("BitBake") + if not _ui_handlers: + from bb.msg import BBLogFormatter + console = logging.StreamHandler(sys.stdout) + console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s")) + logger.handlers = [console] + + # First check to see if we have any proper messages + msgprint = False + for event in ui_queue: + if isinstance(event, logging.LogRecord): + if event.levelno > logging.DEBUG: + logger.handle(event) + msgprint = True + if msgprint: + return + + # Nope, so just print all of the messages we have (including debug messages) + for event in ui_queue: + if isinstance(event, logging.LogRecord): + logger.handle(event) + +def fire_ui_handlers(event, d): + if not _ui_handlers: + # No UI handlers registered yet, queue up the messages + ui_queue.append(event) + return + + errors = [] + for h in _ui_handlers: + #print "Sending event %s" % event + try: + if not _ui_logfilters[h].filter(event): + continue + # We use pickle here since it better handles object instances + # which xmlrpc's marshaller does not. Events *must* be serializable + # by pickle. + if hasattr(_ui_handlers[h].event, "sendpickle"): + _ui_handlers[h].event.sendpickle((pickle.dumps(event))) + else: + _ui_handlers[h].event.send(event) + except: + errors.append(h) + for h in errors: + del _ui_handlers[h] + +def fire(event, d): + """Fire off an Event""" + + # We can fire class handlers in the worker process context and this is + # desired so they get the task based datastore. + # UI handlers need to be fired in the server context so we defer this. They + # don't have a datastore so the datastore context isn't a problem. 
+ + fire_class_handlers(event, d) + if worker_fire: + worker_fire(event, d) + else: + fire_ui_handlers(event, d) + +def fire_from_worker(event, d): + fire_ui_handlers(event, d) + +noop = lambda _: None +def register(name, handler, mask=[]): + """Register an Event handler""" + + # already registered + if name in _handlers: + return AlreadyRegistered + + if handler is not None: + # handle string containing python code + if isinstance(handler, basestring): + tmp = "def %s(e):\n%s" % (name, handler) + try: + code = compile(tmp, "%s(e)" % name, "exec") + except SyntaxError: + logger.error("Unable to register event handler '%s':\n%s", name, + ''.join(traceback.format_exc(limit=0))) + _handlers[name] = noop + return + env = {} + bb.utils.better_exec(code, env) + func = bb.utils.better_eval(name, env) + _handlers[name] = func + else: + _handlers[name] = handler + + if not mask or '*' in mask: + _catchall_handlers[name] = True + else: + for m in mask: + if _event_handler_map.get(m, None) is None: + _event_handler_map[m] = {} + _event_handler_map[m][name] = True + + return Registered + +def remove(name, handler): + """Remove an Event handler""" + _handlers.pop(name) + +def register_UIHhandler(handler): + bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1 + _ui_handlers[_ui_handler_seq] = handler + level, debug_domains = bb.msg.constructLogOptions() + _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains) + return _ui_handler_seq + +def unregister_UIHhandler(handlerNum): + if handlerNum in _ui_handlers: + del _ui_handlers[handlerNum] + return + +# Class to allow filtering of events and specific filtering of LogRecords *before* we put them over the IPC +class UIEventFilter(object): + def __init__(self, level, debug_domains): + self.update(None, level, debug_domains) + + def update(self, eventmask, level, debug_domains): + self.eventmask = eventmask + self.stdlevel = level + self.debug_domains = debug_domains + + def filter(self, event): + if isinstance(event, logging.LogRecord): + if event.levelno >= self.stdlevel: + return True + if event.name in self.debug_domains and event.levelno >= self.debug_domains[event.name]: + return True + return False + eid = str(event.__class__)[8:-2] + if self.eventmask and eid not in self.eventmask: + return False + return True + +def set_UIHmask(handlerNum, level, debug_domains, mask): + if not handlerNum in _ui_handlers: + return False + if '*' in mask: + _ui_logfilters[handlerNum].update(None, level, debug_domains) + else: + _ui_logfilters[handlerNum].update(mask, level, debug_domains) + return True + +def getName(e): + """Returns the name of a class or class instance""" + if getattr(e, "__name__", None) == None: + return e.__class__.__name__ + else: + return e.__name__ + +class OperationStarted(Event): + """An operation has begun""" + def __init__(self, msg = "Operation Started"): + Event.__init__(self) + self.msg = msg + +class OperationCompleted(Event): + """An operation has completed""" + def __init__(self, total, msg = "Operation Completed"): + Event.__init__(self) + self.total = total + self.msg = msg + +class OperationProgress(Event): + """An operation is in progress""" + def __init__(self, current, total, msg = "Operation in Progress"): + Event.__init__(self) + self.current = current + self.total = total + self.msg = msg + ": %s/%s" % (current, total); + +class ConfigParsed(Event): + """Configuration Parsing Complete""" + +class RecipeEvent(Event): + def __init__(self, fn): + self.fn = fn + Event.__init__(self) + +class 
RecipePreFinalise(RecipeEvent): + """ Recipe Parsing Complete but not yet finialised""" + +class RecipeParsed(RecipeEvent): + """ Recipe Parsing Complete """ + +class StampUpdate(Event): + """Trigger for any adjustment of the stamp files to happen""" + + def __init__(self, targets, stampfns): + self._targets = targets + self._stampfns = stampfns + Event.__init__(self) + + def getStampPrefix(self): + return self._stampfns + + def getTargets(self): + return self._targets + + stampPrefix = property(getStampPrefix) + targets = property(getTargets) + +class BuildBase(Event): + """Base class for bbmake run events""" + + def __init__(self, n, p, failures = 0): + self._name = n + self._pkgs = p + Event.__init__(self) + self._failures = failures + + def getPkgs(self): + return self._pkgs + + def setPkgs(self, pkgs): + self._pkgs = pkgs + + def getName(self): + return self._name + + def setName(self, name): + self._name = name + + def getCfg(self): + return self.data + + def setCfg(self, cfg): + self.data = cfg + + def getFailures(self): + """ + Return the number of failed packages + """ + return self._failures + + pkgs = property(getPkgs, setPkgs, None, "pkgs property") + name = property(getName, setName, None, "name property") + cfg = property(getCfg, setCfg, None, "cfg property") + + + + + +class BuildStarted(BuildBase, OperationStarted): + """bbmake build run started""" + def __init__(self, n, p, failures = 0): + OperationStarted.__init__(self, "Building Started") + BuildBase.__init__(self, n, p, failures) + +class BuildCompleted(BuildBase, OperationCompleted): + """bbmake build run completed""" + def __init__(self, total, n, p, failures = 0): + if not failures: + OperationCompleted.__init__(self, total, "Building Succeeded") + else: + OperationCompleted.__init__(self, total, "Building Failed") + BuildBase.__init__(self, n, p, failures) + +class DiskFull(Event): + """Disk full case build aborted""" + def __init__(self, dev, type, freespace, mountpoint): + Event.__init__(self) + self._dev = dev + self._type = type + self._free = freespace + self._mountpoint = mountpoint + +class NoProvider(Event): + """No Provider for an Event""" + + def __init__(self, item, runtime=False, dependees=None, reasons=[], close_matches=[]): + Event.__init__(self) + self._item = item + self._runtime = runtime + self._dependees = dependees + self._reasons = reasons + self._close_matches = close_matches + + def getItem(self): + return self._item + + def isRuntime(self): + return self._runtime + +class MultipleProviders(Event): + """Multiple Providers""" + + def __init__(self, item, candidates, runtime = False): + Event.__init__(self) + self._item = item + self._candidates = candidates + self._is_runtime = runtime + + def isRuntime(self): + """ + Is this a runtime issue? + """ + return self._is_runtime + + def getItem(self): + """ + The name for the to be build item + """ + return self._item + + def getCandidates(self): + """ + Get the possible Candidates for a PROVIDER. 
+ """ + return self._candidates + +class ParseStarted(OperationStarted): + """Recipe parsing for the runqueue has begun""" + def __init__(self, total): + OperationStarted.__init__(self, "Recipe parsing Started") + self.total = total + +class ParseCompleted(OperationCompleted): + """Recipe parsing for the runqueue has completed""" + def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total): + OperationCompleted.__init__(self, total, "Recipe parsing Completed") + self.cached = cached + self.parsed = parsed + self.skipped = skipped + self.virtuals = virtuals + self.masked = masked + self.errors = errors + self.sofar = cached + parsed + +class ParseProgress(OperationProgress): + """Recipe parsing progress""" + def __init__(self, current, total): + OperationProgress.__init__(self, current, total, "Recipe parsing") + + +class CacheLoadStarted(OperationStarted): + """Loading of the dependency cache has begun""" + def __init__(self, total): + OperationStarted.__init__(self, "Loading cache Started") + self.total = total + +class CacheLoadProgress(OperationProgress): + """Cache loading progress""" + def __init__(self, current, total): + OperationProgress.__init__(self, current, total, "Loading cache") + +class CacheLoadCompleted(OperationCompleted): + """Cache loading is complete""" + def __init__(self, total, num_entries): + OperationCompleted.__init__(self, total, "Loading cache Completed") + self.num_entries = num_entries + +class TreeDataPreparationStarted(OperationStarted): + """Tree data preparation started""" + def __init__(self): + OperationStarted.__init__(self, "Preparing tree data Started") + +class TreeDataPreparationProgress(OperationProgress): + """Tree data preparation is in progress""" + def __init__(self, current, total): + OperationProgress.__init__(self, current, total, "Preparing tree data") + +class TreeDataPreparationCompleted(OperationCompleted): + """Tree data preparation completed""" + def __init__(self, total): + OperationCompleted.__init__(self, total, "Preparing tree data Completed") + +class DepTreeGenerated(Event): + """ + Event when a dependency tree has been generated + """ + + def __init__(self, depgraph): + Event.__init__(self) + self._depgraph = depgraph + +class TargetsTreeGenerated(Event): + """ + Event when a set of buildable targets has been generated + """ + def __init__(self, model): + Event.__init__(self) + self._model = model + +class FilesMatchingFound(Event): + """ + Event when a list of files matching the supplied pattern has + been generated + """ + def __init__(self, pattern, matches): + Event.__init__(self) + self._pattern = pattern + self._matches = matches + +class CoreBaseFilesFound(Event): + """ + Event when a list of appropriate config files has been generated + """ + def __init__(self, paths): + Event.__init__(self) + self._paths = paths + +class ConfigFilesFound(Event): + """ + Event when a list of appropriate config files has been generated + """ + def __init__(self, variable, values): + Event.__init__(self) + self._variable = variable + self._values = values + +class ConfigFilePathFound(Event): + """ + Event when a path for a config file has been found + """ + def __init__(self, path): + Event.__init__(self) + self._path = path + +class MsgBase(Event): + """Base class for messages""" + + def __init__(self, msg): + self._message = msg + Event.__init__(self) + +class MsgDebug(MsgBase): + """Debug Message""" + +class MsgNote(MsgBase): + """Note Message""" + +class MsgWarn(MsgBase): + """Warning Message""" + +class 
MsgError(MsgBase): + """Error Message""" + +class MsgFatal(MsgBase): + """Fatal Message""" + +class MsgPlain(MsgBase): + """General output""" + +class LogExecTTY(Event): + """Send event containing program to spawn on tty of the logger""" + def __init__(self, msg, prog, sleep_delay, retries): + Event.__init__(self) + self.msg = msg + self.prog = prog + self.sleep_delay = sleep_delay + self.retries = retries + +class LogHandler(logging.Handler): + """Dispatch logging messages as bitbake events""" + + def emit(self, record): + if record.exc_info: + etype, value, tb = record.exc_info + if hasattr(tb, 'tb_next'): + tb = list(bb.exceptions.extract_traceback(tb, context=3)) + record.bb_exc_info = (etype, value, tb) + record.exc_info = None + fire(record, None) + + def filter(self, record): + record.taskpid = worker_pid + return True + +class RequestPackageInfo(Event): + """ + Event to request package information + """ + +class PackageInfo(Event): + """ + Package information for GUI + """ + def __init__(self, pkginfolist): + Event.__init__(self) + self._pkginfolist = pkginfolist + +class MetadataEvent(Event): + """ + Generic event that target for OE-Core classes + to report information during asynchrous execution + """ + def __init__(self, eventtype, eventdata): + Event.__init__(self) + self.type = eventtype + self._localdata = eventdata + +class SanityCheck(Event): + """ + Event to run sanity checks, either raise errors or generate events as return status. + """ + def __init__(self, generateevents = True): + Event.__init__(self) + self.generateevents = generateevents + +class SanityCheckPassed(Event): + """ + Event to indicate sanity check has passed + """ + +class SanityCheckFailed(Event): + """ + Event to indicate sanity check has failed + """ + def __init__(self, msg, network_error=False): + Event.__init__(self) + self._msg = msg + self._network_error = network_error + +class NetworkTest(Event): + """ + Event to run network connectivity tests, either raise errors or generate events as return status. 
+ """ + def __init__(self, generateevents = True): + Event.__init__(self) + self.generateevents = generateevents + +class NetworkTestPassed(Event): + """ + Event to indicate network test has passed + """ + +class NetworkTestFailed(Event): + """ + Event to indicate network test has failed + """ + diff --git a/bitbake/lib/bb/exceptions.py b/bitbake/lib/bb/exceptions.py new file mode 100644 index 0000000..f182c8f --- /dev/null +++ b/bitbake/lib/bb/exceptions.py @@ -0,0 +1,91 @@ +from __future__ import absolute_import +import inspect +import traceback +import bb.namedtuple_with_abc +from collections import namedtuple + + +class TracebackEntry(namedtuple.abc): + """Pickleable representation of a traceback entry""" + _fields = 'filename lineno function args code_context index' + _header = ' File "{0.filename}", line {0.lineno}, in {0.function}{0.args}' + + def format(self, formatter=None): + if not self.code_context: + return self._header.format(self) + '\n' + + formatted = [self._header.format(self) + ':\n'] + + for lineindex, line in enumerate(self.code_context): + if formatter: + line = formatter(line) + + if lineindex == self.index: + formatted.append(' >%s' % line) + else: + formatted.append(' %s' % line) + return formatted + + def __str__(self): + return ''.join(self.format()) + +def _get_frame_args(frame): + """Get the formatted arguments and class (if available) for a frame""" + arginfo = inspect.getargvalues(frame) + + try: + if not arginfo.args: + return '', None + # There have been reports from the field of python 2.6 which doesn't + # return a namedtuple here but simply a tuple so fallback gracefully if + # args isn't present. + except AttributeError: + return '', None + + firstarg = arginfo.args[0] + if firstarg == 'self': + self = arginfo.locals['self'] + cls = self.__class__.__name__ + + arginfo.args.pop(0) + del arginfo.locals['self'] + else: + cls = None + + formatted = inspect.formatargvalues(*arginfo) + return formatted, cls + +def extract_traceback(tb, context=1): + frames = inspect.getinnerframes(tb, context) + for frame, filename, lineno, function, code_context, index in frames: + formatted_args, cls = _get_frame_args(frame) + if cls: + function = '%s.%s' % (cls, function) + yield TracebackEntry(filename, lineno, function, formatted_args, + code_context, index) + +def format_extracted(extracted, formatter=None, limit=None): + if limit: + extracted = extracted[-limit:] + + formatted = [] + for tracebackinfo in extracted: + formatted.extend(tracebackinfo.format(formatter)) + return formatted + + +def format_exception(etype, value, tb, context=1, limit=None, formatter=None): + formatted = ['Traceback (most recent call last):\n'] + + if hasattr(tb, 'tb_next'): + tb = extract_traceback(tb, context) + + formatted.extend(format_extracted(tb, formatter, limit)) + formatted.extend(traceback.format_exception_only(etype, value)) + return formatted + +def to_string(exc): + if isinstance(exc, SystemExit): + if not isinstance(exc.code, basestring): + return 'Exited with "%d"' % exc.code + return str(exc) diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py new file mode 100644 index 0000000..378d41e --- /dev/null +++ b/bitbake/lib/bb/fetch2/__init__.py @@ -0,0 +1,1585 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementations + +Classes for obtaining upstream sources for the +BitBake build tools. 
+""" + +# Copyright (C) 2003, 2004 Chris Larson +# Copyright (C) 2012 Intel Corporation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +from __future__ import absolute_import +from __future__ import print_function +import os, re +import signal +import glob +import logging +import urllib +import urlparse +import operator +import bb.persist_data, bb.utils +import bb.checksum +from bb import data +import bb.process +import subprocess + +__version__ = "2" +_checksum_cache = bb.checksum.FileChecksumCache() + +logger = logging.getLogger("BitBake.Fetcher") + +class BBFetchException(Exception): + """Class all fetch exceptions inherit from""" + def __init__(self, message): + self.msg = message + Exception.__init__(self, message) + + def __str__(self): + return self.msg + +class MalformedUrl(BBFetchException): + """Exception raised when encountering an invalid url""" + def __init__(self, url, message=''): + if message: + msg = message + else: + msg = "The URL: '%s' is invalid and cannot be interpreted" % url + self.url = url + BBFetchException.__init__(self, msg) + self.args = (url,) + +class FetchError(BBFetchException): + """General fetcher exception when something happens incorrectly""" + def __init__(self, message, url = None): + if url: + msg = "Fetcher failure for URL: '%s'. %s" % (url, message) + else: + msg = "Fetcher failure: %s" % message + self.url = url + BBFetchException.__init__(self, msg) + self.args = (message, url) + +class ChecksumError(FetchError): + """Exception when mismatched checksum encountered""" + def __init__(self, message, url = None, checksum = None): + self.checksum = checksum + FetchError.__init__(self, message, url) + +class NoChecksumError(FetchError): + """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set""" + +class UnpackError(BBFetchException): + """General fetcher exception when something happens incorrectly when unpacking""" + def __init__(self, message, url): + msg = "Unpack failure for URL: '%s'. 
%s" % (url, message) + self.url = url + BBFetchException.__init__(self, msg) + self.args = (message, url) + +class NoMethodError(BBFetchException): + """Exception raised when there is no method to obtain a supplied url or set of urls""" + def __init__(self, url): + msg = "Could not find a fetcher which supports the URL: '%s'" % url + self.url = url + BBFetchException.__init__(self, msg) + self.args = (url,) + +class MissingParameterError(BBFetchException): + """Exception raised when a fetch method is missing a critical parameter in the url""" + def __init__(self, missing, url): + msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing) + self.url = url + self.missing = missing + BBFetchException.__init__(self, msg) + self.args = (missing, url) + +class ParameterError(BBFetchException): + """Exception raised when a url cannot be proccessed due to invalid parameters.""" + def __init__(self, message, url): + msg = "URL: '%s' has invalid parameters. %s" % (url, message) + self.url = url + BBFetchException.__init__(self, msg) + self.args = (message, url) + +class NetworkAccess(BBFetchException): + """Exception raised when network access is disabled but it is required.""" + def __init__(self, url, cmd): + msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url) + self.url = url + self.cmd = cmd + BBFetchException.__init__(self, msg) + self.args = (url, cmd) + +class NonLocalMethod(Exception): + def __init__(self): + Exception.__init__(self) + + +class URI(object): + """ + A class representing a generic URI, with methods for + accessing the URI components, and stringifies to the + URI. + + It is constructed by calling it with a URI, or setting + the attributes manually: + + uri = URI("http://example.com/") + + uri = URI() + uri.scheme = 'http' + uri.hostname = 'example.com' + uri.path = '/' + + It has the following attributes: + + * scheme (read/write) + * userinfo (authentication information) (read/write) + * username (read/write) + * password (read/write) + + Note, password is deprecated as of RFC 3986. + + * hostname (read/write) + * port (read/write) + * hostport (read only) + "hostname:port", if both are set, otherwise just "hostname" + * path (read/write) + * path_quoted (read/write) + A URI quoted version of path + * params (dict) (read/write) + * query (dict) (read/write) + * relative (bool) (read only) + True if this is a "relative URI", (e.g. file:foo.diff) + + It stringifies to the URI itself. + + Some notes about relative URIs: while it's specified that + a URI beginning with <scheme>:// should either be directly + followed by a hostname or a /, the old URI handling of the + fetch2 library did not comform to this. Therefore, this URI + class has some kludges to make sure that URIs are parsed in + a way comforming to bitbake's current usage. This URI class + supports the following: + + file:relative/path.diff (IETF compliant) + git:relative/path.git (IETF compliant) + git:///absolute/path.git (IETF compliant) + file:///absolute/path.diff (IETF compliant) + + file://relative/path.diff (not IETF compliant) + + But it does not support the following: + + file://hostname/absolute/path.diff (would be IETF compliant) + + Note that the last case only applies to a list of + "whitelisted" schemes (currently only file://), that requires + its URIs to not have a network location. 
+ """ + + _relative_schemes = ['file', 'git'] + _netloc_forbidden = ['file'] + + def __init__(self, uri=None): + self.scheme = '' + self.userinfo = '' + self.hostname = '' + self.port = None + self._path = '' + self.params = {} + self.query = {} + self.relative = False + + if not uri: + return + + # We hijack the URL parameters, since the way bitbake uses + # them are not quite RFC compliant. + uri, param_str = (uri.split(";", 1) + [None])[:2] + + urlp = urlparse.urlparse(uri) + self.scheme = urlp.scheme + + reparse = 0 + + # Coerce urlparse to make URI scheme use netloc + if not self.scheme in urlparse.uses_netloc: + urlparse.uses_params.append(self.scheme) + reparse = 1 + + # Make urlparse happy(/ier) by converting local resources + # to RFC compliant URL format. E.g.: + # file://foo.diff -> file:foo.diff + if urlp.scheme in self._netloc_forbidden: + uri = re.sub("(?<=:)//(?!/)", "", uri, 1) + reparse = 1 + + if reparse: + urlp = urlparse.urlparse(uri) + + # Identify if the URI is relative or not + if urlp.scheme in self._relative_schemes and \ + re.compile("^\w+:(?!//)").match(uri): + self.relative = True + + if not self.relative: + self.hostname = urlp.hostname or '' + self.port = urlp.port + + self.userinfo += urlp.username or '' + + if urlp.password: + self.userinfo += ':%s' % urlp.password + + self.path = urllib.unquote(urlp.path) + + if param_str: + self.params = self._param_str_split(param_str, ";") + if urlp.query: + self.query = self._param_str_split(urlp.query, "&") + + def __str__(self): + userinfo = self.userinfo + if userinfo: + userinfo += '@' + + return "%s:%s%s%s%s%s%s" % ( + self.scheme, + '' if self.relative else '//', + userinfo, + self.hostport, + self.path_quoted, + self._query_str(), + self._param_str()) + + def _param_str(self): + return ( + ''.join([';', self._param_str_join(self.params, ";")]) + if self.params else '') + + def _query_str(self): + return ( + ''.join(['?', self._param_str_join(self.query, "&")]) + if self.query else '') + + def _param_str_split(self, string, elmdelim, kvdelim="="): + ret = {} + for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]: + ret[k] = v + return ret + + def _param_str_join(self, dict_, elmdelim, kvdelim="="): + return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()]) + + @property + def hostport(self): + if not self.port: + return self.hostname + return "%s:%d" % (self.hostname, self.port) + + @property + def path_quoted(self): + return urllib.quote(self.path) + + @path_quoted.setter + def path_quoted(self, path): + self.path = urllib.unquote(path) + + @property + def path(self): + return self._path + + @path.setter + def path(self, path): + self._path = path + + if re.compile("^/").match(path): + self.relative = False + else: + self.relative = True + + @property + def username(self): + if self.userinfo: + return (self.userinfo.split(":", 1))[0] + return '' + + @username.setter + def username(self, username): + password = self.password + self.userinfo = username + if password: + self.userinfo += ":%s" % password + + @property + def password(self): + if self.userinfo and ":" in self.userinfo: + return (self.userinfo.split(":", 1))[1] + return '' + + @password.setter + def password(self, password): + self.userinfo = "%s:%s" % (self.username, password) + +def decodeurl(url): + """Decodes an URL into the tokens (scheme, network location, path, + user, password, parameters). 
+ """ + + m = re.compile('(?P<type>[^:]*)://((?P<user>[^/]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) + if not m: + raise MalformedUrl(url) + + type = m.group('type') + location = m.group('location') + if not location: + raise MalformedUrl(url) + user = m.group('user') + parm = m.group('parm') + + locidx = location.find('/') + if locidx != -1 and type.lower() != 'file': + host = location[:locidx] + path = location[locidx:] + else: + host = "" + path = location + if user: + m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user) + if m: + user = m.group('user') + pswd = m.group('pswd') + else: + user = '' + pswd = '' + + p = {} + if parm: + for s in parm.split(';'): + if s: + if not '=' in s: + raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s)) + s1, s2 = s.split('=') + p[s1] = s2 + + return type, host, urllib.unquote(path), user, pswd, p + +def encodeurl(decoded): + """Encodes a URL from tokens (scheme, network location, path, + user, password, parameters). + """ + + type, host, path, user, pswd, p = decoded + + if not path: + raise MissingParameterError('path', "encoded from the data %s" % str(decoded)) + if not type: + raise MissingParameterError('type', "encoded from the data %s" % str(decoded)) + url = '%s://' % type + if user and type != "file": + url += "%s" % user + if pswd: + url += ":%s" % pswd + url += "@" + if host and type != "file": + url += "%s" % host + # Standardise path to ensure comparisons work + while '//' in path: + path = path.replace("//", "/") + url += "%s" % urllib.quote(path) + if p: + for parm in p: + url += ";%s=%s" % (parm, p[parm]) + + return url + +def uri_replace(ud, uri_find, uri_replace, replacements, d): + if not ud.url or not uri_find or not uri_replace: + logger.error("uri_replace: passed an undefined value, not replacing") + return None + uri_decoded = list(decodeurl(ud.url)) + uri_find_decoded = list(decodeurl(uri_find)) + uri_replace_decoded = list(decodeurl(uri_replace)) + logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) + result_decoded = ['', '', '', '', '', {}] + for loc, i in enumerate(uri_find_decoded): + result_decoded[loc] = uri_decoded[loc] + regexp = i + if loc == 0 and regexp and not regexp.endswith("$"): + # Leaving the type unanchored can mean "https" matching "file" can become "files" + # which is clearly undesirable. 
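+            # Illustrative (hypothetical) PREMIRRORS/MIRRORS rule:
+            #     git://.*/.*  http://downloads.example.net/mirror/
+            # decodes to a type regexp of "git"; anchoring it as "git$"
+            # stops the rule from also being applied to e.g. "gitsm" URLs.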
+ regexp += "$" + if loc == 5: + # Handle URL parameters + if i: + # Any specified URL parameters must match + for k in uri_replace_decoded[loc]: + if uri_decoded[loc][k] != uri_replace_decoded[loc][k]: + return None + # Overwrite any specified replacement parameters + for k in uri_replace_decoded[loc]: + for l in replacements: + uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l]) + result_decoded[loc][k] = uri_replace_decoded[loc][k] + elif (re.match(regexp, uri_decoded[loc])): + if not uri_replace_decoded[loc]: + result_decoded[loc] = "" + else: + for k in replacements: + uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k]) + #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc])) + result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc]) + if loc == 2: + # Handle path manipulations + basename = None + if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball: + # If the source and destination url types differ, must be a mirrortarball mapping + basename = os.path.basename(ud.mirrortarball) + # Kill parameters, they make no sense for mirror tarballs + uri_decoded[5] = {} + elif ud.localpath and ud.method.supports_checksum(ud): + basename = os.path.basename(ud.localpath) + if basename and not result_decoded[loc].endswith(basename): + result_decoded[loc] = os.path.join(result_decoded[loc], basename) + else: + return None + result = encodeurl(result_decoded) + if result == ud.url: + return None + logger.debug(2, "For url %s returning %s" % (ud.url, result)) + return result + +methods = [] +urldata_cache = {} +saved_headrevs = {} + +def fetcher_init(d): + """ + Called to initialize the fetchers once the configuration data is known. + Calls before this must not hit the cache. + """ + # When to drop SCM head revisions controlled by user policy + srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear" + if srcrev_policy == "cache": + logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) + elif srcrev_policy == "clear": + logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) + revs = bb.persist_data.persist('BB_URI_HEADREVS', d) + try: + bb.fetch2.saved_headrevs = revs.items() + except: + pass + revs.clear() + else: + raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) + + _checksum_cache.init_cache(d) + + for m in methods: + if hasattr(m, "init"): + m.init(d) + +def fetcher_parse_save(d): + _checksum_cache.save_extras(d) + +def fetcher_parse_done(d): + _checksum_cache.save_merge(d) + +def fetcher_compare_revisions(d): + """ + Compare the revisions in the persistant cache with current values and + return true/false on whether they've changed. + """ + + data = bb.persist_data.persist('BB_URI_HEADREVS', d).items() + data2 = bb.fetch2.saved_headrevs + + changed = False + for key in data: + if key not in data2 or data2[key] != data[key]: + logger.debug(1, "%s changed", key) + changed = True + return True + else: + logger.debug(2, "%s did not change", key) + return False + +def mirror_from_string(data): + return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ] + +def verify_checksum(ud, d): + """ + verify the MD5 and SHA256 checksum for downloaded src + + Raises a FetchError if one or both of the SRC_URI checksums do not match + the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no + checksums specified. 
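+
+    An illustrative recipe snippet (hypothetical file and placeholder
+    digests) that satisfies this check:
+
+        SRC_URI = "http://example.com/foo-1.0.tar.gz"
+        SRC_URI[md5sum] = "<32 hex digits>"
+        SRC_URI[sha256sum] = "<64 hex digits>"
+
+    With ;name=foo in the URL the flag names become foo.md5sum and
+    foo.sha256sum instead.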
+ + """ + + if not ud.method.supports_checksum(ud): + return + + md5data = bb.utils.md5_file(ud.localpath) + sha256data = bb.utils.sha256_file(ud.localpath) + + if ud.method.recommends_checksum(ud): + # If strict checking enabled and neither sum defined, raise error + strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0" + if (strict == "1") and not (ud.md5_expected or ud.sha256_expected): + logger.error('No checksum specified for %s, please add at least one to the recipe:\n' + 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' % + (ud.localpath, ud.md5_name, md5data, + ud.sha256_name, sha256data)) + raise NoChecksumError('Missing SRC_URI checksum', ud.url) + + # Log missing sums so user can more easily add them + if not ud.md5_expected: + logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n' + 'SRC_URI[%s] = "%s"', + ud.localpath, ud.md5_name, md5data) + + if not ud.sha256_expected: + logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n' + 'SRC_URI[%s] = "%s"', + ud.localpath, ud.sha256_name, sha256data) + + md5mismatch = False + sha256mismatch = False + + if ud.md5_expected != md5data: + md5mismatch = True + + if ud.sha256_expected != sha256data: + sha256mismatch = True + + # We want to alert the user if a checksum is defined in the recipe but + # it does not match. + msg = "" + mismatch = False + if md5mismatch and ud.md5_expected: + msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected) + mismatch = True; + + if sha256mismatch and ud.sha256_expected: + msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected) + mismatch = True; + + if mismatch: + msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data) + + if len(msg): + raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data) + + +def update_stamp(ud, d): + """ + donestamp is file stamp indicating the whole fetching is done + this function update the stamp after verifying the checksum + """ + if os.path.exists(ud.donestamp): + # Touch the done stamp file to show active use of the download + try: + os.utime(ud.donestamp, None) + except: + # Errors aren't fatal here + pass + else: + verify_checksum(ud, d) + open(ud.donestamp, 'w').close() + +def subprocess_setup(): + # Python installs a SIGPIPE handler by default. This is usually not what + # non-Python subprocesses expect. + # SIGPIPE errors are known issues with gzip/bash + signal.signal(signal.SIGPIPE, signal.SIG_DFL) + +def get_autorev(d): + # only not cache src rev in autorev case + if d.getVar('BB_SRCREV_POLICY', True) != "cache": + d.setVar('__BB_DONT_CACHE', '1') + return "AUTOINC" + +def get_srcrev(d): + """ + Return the version string for the current package + (usually to be used as PV) + Most packages usually only have one SCM so we just pass on the call. + In the multi SCM case, we build a value based on SRCREV_FORMAT which must + have been set. 
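+
+    Illustrative multi-SCM case (hypothetical recipe values):
+
+        SRC_URI = "git://host/a.git;name=machine git://host/b.git;name=meta"
+        SRCREV_FORMAT = "machine_meta"
+
+    Each name in SRCREV_FORMAT is replaced by that SCM's revision
+    (truncated to 10 characters), and the result gains an "AUTOINC+"
+    prefix if any backend reports an auto-incremented revision.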
+ """ + + scms = [] + fetcher = Fetch(d.getVar('SRC_URI', True).split(), d) + urldata = fetcher.ud + for u in urldata: + if urldata[u].method.supports_srcrev(): + scms.append(u) + + if len(scms) == 0: + raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") + + if len(scms) == 1 and len(urldata[scms[0]].names) == 1: + autoinc, rev = urldata[scms[0]].method.sortable_revision(urldata[scms[0]], d, urldata[scms[0]].names[0]) + if len(rev) > 10: + rev = rev[:10] + if autoinc: + return "AUTOINC+" + rev + return rev + + # + # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT + # + format = d.getVar('SRCREV_FORMAT', True) + if not format: + raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.") + + seenautoinc = False + for scm in scms: + ud = urldata[scm] + for name in ud.names: + autoinc, rev = ud.method.sortable_revision(ud, d, name) + seenautoinc = seenautoinc or autoinc + if len(rev) > 10: + rev = rev[:10] + format = format.replace(name, rev) + if seenautoinc: + format = "AUTOINC+" + format + + return format + +def localpath(url, d): + fetcher = bb.fetch2.Fetch([url], d) + return fetcher.localpath(url) + +def runfetchcmd(cmd, d, quiet = False, cleanup = []): + """ + Run cmd returning the command output + Raise an error if interrupted or cmd fails + Optionally echo command output to stdout + Optionally remove the files/directories listed in cleanup upon failure + """ + + # Need to export PATH as binary could be in metadata paths + # rather than host provided + # Also include some other variables. + # FIXME: Should really include all export varaiables? + exportvars = ['HOME', 'PATH', + 'HTTP_PROXY', 'http_proxy', + 'HTTPS_PROXY', 'https_proxy', + 'FTP_PROXY', 'ftp_proxy', + 'FTPS_PROXY', 'ftps_proxy', + 'NO_PROXY', 'no_proxy', + 'ALL_PROXY', 'all_proxy', + 'GIT_PROXY_COMMAND', + 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', + 'SOCKS5_USER', 'SOCKS5_PASSWD'] + + for var in exportvars: + val = d.getVar(var, True) + if val: + cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd) + + logger.debug(1, "Running %s", cmd) + + success = False + error_message = "" + + try: + (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE) + success = True + except bb.process.NotFoundError as e: + error_message = "Fetch command %s" % (e.command) + except bb.process.ExecutionError as e: + if e.stdout: + output = "output:\n%s\n%s" % (e.stdout, e.stderr) + elif e.stderr: + output = "output:\n%s" % e.stderr + else: + output = "no output" + error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output) + except bb.process.CmdError as e: + error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg) + if not success: + for f in cleanup: + try: + bb.utils.remove(f, True) + except OSError: + pass + + raise FetchError(error_message) + + return output + +def check_network_access(d, info = "", url = None): + """ + log remote network access, and error if BB_NO_NETWORK is set + """ + if d.getVar("BB_NO_NETWORK", True) == "1": + raise NetworkAccess(url, info) + else: + logger.debug(1, "Fetcher accessed the network with the command %s" % info) + +def build_mirroruris(origud, mirrors, ld): + uris = [] + uds = [] + + replacements = {} + replacements["TYPE"] = origud.type + replacements["HOST"] = origud.host + replacements["PATH"] = origud.path + replacements["BASENAME"] = origud.path.split("/")[-1] + replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.') + + def adduri(ud, uris, 
uds): + for line in mirrors: + try: + (find, replace) = line + except ValueError: + continue + newuri = uri_replace(ud, find, replace, replacements, ld) + if not newuri or newuri in uris or newuri == origud.url: + continue + try: + newud = FetchData(newuri, ld) + newud.setup_localpath(ld) + except bb.fetch2.BBFetchException as e: + logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url)) + logger.debug(1, str(e)) + try: + ud.method.clean(ud, ld) + except UnboundLocalError: + pass + continue + uris.append(newuri) + uds.append(newud) + + adduri(newud, uris, uds) + + adduri(origud, uris, uds) + + return uris, uds + +def rename_bad_checksum(ud, suffix): + """ + Renames files to have suffix from parameter + """ + + if ud.localpath is None: + return + + new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix) + bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath)) + bb.utils.movefile(ud.localpath, new_localpath) + + +def try_mirror_url(origud, ud, ld, check = False): + # Return of None or a value means we're finished + # False means try another url + try: + if check: + found = ud.method.checkstatus(ud, ld) + if found: + return found + return False + + os.chdir(ld.getVar("DL_DIR", True)) + + if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld): + ud.method.download(ud, ld) + if hasattr(ud.method,"build_mirror_data"): + ud.method.build_mirror_data(ud, ld) + + if not ud.localpath or not os.path.exists(ud.localpath): + return False + + if ud.localpath == origud.localpath: + return ud.localpath + + # We may be obtaining a mirror tarball which needs further processing by the real fetcher + # If that tarball is a local file:// we need to provide a symlink to it + dldir = ld.getVar("DL_DIR", True) + if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \ + and os.path.basename(ud.localpath) != os.path.basename(origud.localpath): + bb.utils.mkdirhier(os.path.dirname(ud.donestamp)) + open(ud.donestamp, 'w').close() + dest = os.path.join(dldir, os.path.basename(ud.localpath)) + if not os.path.exists(dest): + os.symlink(ud.localpath, dest) + if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld): + origud.method.download(origud, ld) + if hasattr(origud.method,"build_mirror_data"): + origud.method.build_mirror_data(origud, ld) + return ud.localpath + # Otherwise the result is a local file:// and we symlink to it + if not os.path.exists(origud.localpath): + if os.path.islink(origud.localpath): + # Broken symbolic link + os.unlink(origud.localpath) + + os.symlink(ud.localpath, origud.localpath) + update_stamp(origud, ld) + return ud.localpath + + except bb.fetch2.NetworkAccess: + raise + + except bb.fetch2.BBFetchException as e: + if isinstance(e, ChecksumError): + logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url)) + logger.warn(str(e)) + rename_bad_checksum(ud, e.checksum) + elif isinstance(e, NoChecksumError): + raise + else: + logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url)) + logger.debug(1, str(e)) + try: + ud.method.clean(ud, ld) + except UnboundLocalError: + pass + return False + +def try_mirrors(d, origud, mirrors, check = False): + """ + Try to use a mirrored version of the sources. + This method will be automatically called before the fetchers go. 
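+
+    The mirrors argument is the list form produced by mirror_from_string(),
+    e.g. for a hypothetical MIRRORS value:
+
+        [['git://.*/.*', 'http://downloads.example.net/mirror/'],
+         ['ftp://.*/.*', 'http://downloads.example.net/mirror/']]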
+ + d Is a bb.data instance + uri is the original uri we're trying to download + mirrors is the list of mirrors we're going to try + """ + ld = d.createCopy() + + uris, uds = build_mirroruris(origud, mirrors, ld) + + for index, uri in enumerate(uris): + ret = try_mirror_url(origud, uds[index], ld, check) + if ret != False: + return ret + return None + +def srcrev_internal_helper(ud, d, name): + """ + Return: + a) a source revision if specified + b) latest revision if SRCREV="AUTOINC" + c) None if not specified + """ + + srcrev = None + pn = d.getVar("PN", True) + attempts = [] + if name != '' and pn: + attempts.append("SRCREV_%s_pn-%s" % (name, pn)) + if name != '': + attempts.append("SRCREV_%s" % name) + if pn: + attempts.append("SRCREV_pn-%s" % pn) + attempts.append("SRCREV") + + for a in attempts: + srcrev = d.getVar(a, True) + if srcrev and srcrev != "INVALID": + break + + if 'rev' in ud.parm and 'tag' in ud.parm: + raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url)) + + if 'rev' in ud.parm or 'tag' in ud.parm: + if 'rev' in ud.parm: + parmrev = ud.parm['rev'] + else: + parmrev = ud.parm['tag'] + if srcrev == "INVALID" or not srcrev: + return parmrev + if srcrev != parmrev: + raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please spcify one valid value" % (srcrev, parmrev)) + return parmrev + + if srcrev == "INVALID" or not srcrev: + raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url) + if srcrev == "AUTOINC": + srcrev = ud.method.latest_revision(ud, d, name) + + return srcrev + +def get_checksum_file_list(d): + """ Get a list of files checksum in SRC_URI + + Returns the resolved local paths of all local file entries in + SRC_URI as a space-separated string + """ + fetch = Fetch([], d, cache = False, localonly = True) + + dl_dir = d.getVar('DL_DIR', True) + filelist = [] + for u in fetch.urls: + ud = fetch.ud[u] + + if ud and isinstance(ud.method, local.Local): + paths = ud.method.localpaths(ud, d) + for f in paths: + pth = ud.decodedurl + if '*' in pth: + f = os.path.join(os.path.abspath(f), pth) + if f.startswith(dl_dir): + # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else + if os.path.exists(f): + bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f))) + else: + bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f))) + filelist.append(f + ":" + str(os.path.exists(f))) + + return " ".join(filelist) + +def get_file_checksums(filelist, pn): + """Get a list of the checksums for a list of local files + + Returns the checksums for a list of local files, caching the results as + it proceeds + + """ + + def checksum_file(f): + try: + checksum = _checksum_cache.get_checksum(f) + except OSError as e: + bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e)) + return None + return checksum + + def checksum_dir(pth): + # Handle directories recursively + dirchecksums = [] + for root, dirs, files in os.walk(pth): + for name in files: + fullpth = os.path.join(root, name) + checksum = checksum_file(fullpth) + if checksum: + dirchecksums.append((fullpth, checksum)) + return dirchecksums + + checksums = [] + for pth in filelist.split(): + exist = pth.split(":")[1] + if 
exist == "False": + continue + pth = pth.split(":")[0] + if '*' in pth: + # Handle globs + for f in glob.glob(pth): + if os.path.isdir(f): + checksums.extend(checksum_dir(f)) + else: + checksum = checksum_file(f) + checksums.append((f, checksum)) + elif os.path.isdir(pth): + checksums.extend(checksum_dir(pth)) + else: + checksum = checksum_file(pth) + checksums.append((pth, checksum)) + + checksums.sort(key=operator.itemgetter(1)) + return checksums + + +class FetchData(object): + """ + A class which represents the fetcher state for a given URI. + """ + def __init__(self, url, d, localonly = False): + # localpath is the location of a downloaded result. If not set, the file is local. + self.donestamp = None + self.localfile = "" + self.localpath = None + self.lockfile = None + self.mirrortarball = None + self.basename = None + self.basepath = None + (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d)) + self.date = self.getSRCDate(d) + self.url = url + if not self.user and "user" in self.parm: + self.user = self.parm["user"] + if not self.pswd and "pswd" in self.parm: + self.pswd = self.parm["pswd"] + self.setup = False + + if "name" in self.parm: + self.md5_name = "%s.md5sum" % self.parm["name"] + self.sha256_name = "%s.sha256sum" % self.parm["name"] + else: + self.md5_name = "md5sum" + self.sha256_name = "sha256sum" + if self.md5_name in self.parm: + self.md5_expected = self.parm[self.md5_name] + elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]: + self.md5_expected = None + else: + self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name) + if self.sha256_name in self.parm: + self.sha256_expected = self.parm[self.sha256_name] + elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]: + self.sha256_expected = None + else: + self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name) + + self.names = self.parm.get("name",'default').split(',') + + self.method = None + for m in methods: + if m.supports(self, d): + self.method = m + break + + if not self.method: + raise NoMethodError(url) + + if localonly and not isinstance(self.method, local.Local): + raise NonLocalMethod() + + if self.parm.get("proto", None) and "protocol" not in self.parm: + logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True)) + self.parm["protocol"] = self.parm.get("proto", None) + + if hasattr(self.method, "urldata_init"): + self.method.urldata_init(self, d) + + if "localpath" in self.parm: + # if user sets localpath for file, use it instead. + self.localpath = self.parm["localpath"] + self.basename = os.path.basename(self.localpath) + elif self.localfile: + self.localpath = self.method.localpath(self, d) + + dldir = d.getVar("DL_DIR", True) + # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be. 
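+        # Illustrative example (hypothetical values): with DL_DIR = "/dl" and
+        # SRC_URI = "http://example.com/foo-1.0.tar.gz", localpath is
+        # "/dl/foo-1.0.tar.gz", so the stamps become
+        # "/dl/foo-1.0.tar.gz.done" and "/dl/foo-1.0.tar.gz.lock".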
+ if self.localpath and self.localpath.startswith(dldir): + basepath = self.localpath + elif self.localpath: + basepath = dldir + os.sep + os.path.basename(self.localpath) + else: + basepath = dldir + os.sep + (self.basepath or self.basename) + self.donestamp = basepath + '.done' + self.lockfile = basepath + '.lock' + + def setup_revisons(self, d): + self.revisions = {} + for name in self.names: + self.revisions[name] = srcrev_internal_helper(self, d, name) + + # add compatibility code for non name specified case + if len(self.names) == 1: + self.revision = self.revisions[self.names[0]] + + def setup_localpath(self, d): + if not self.localpath: + self.localpath = self.method.localpath(self, d) + + def getSRCDate(self, d): + """ + Return the SRC Date for the component + + d the bb.data module + """ + if "srcdate" in self.parm: + return self.parm['srcdate'] + + pn = d.getVar("PN", True) + + if pn: + return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True) + + return d.getVar("SRCDATE", True) or d.getVar("DATE", True) + +class FetchMethod(object): + """Base class for 'fetch'ing data""" + + def __init__(self, urls = []): + self.urls = [] + + def supports(self, urldata, d): + """ + Check to see if this fetch class supports a given url. + """ + return 0 + + def localpath(self, urldata, d): + """ + Return the local filename of a given url assuming a successful fetch. + Can also setup variables in urldata for use in go (saving code duplication + and duplicate code execution) + """ + return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile) + + def supports_checksum(self, urldata): + """ + Is localpath something that can be represented by a checksum? + """ + + # We cannot compute checksums for directories + if os.path.isdir(urldata.localpath) == True: + return False + if urldata.localpath.find("*") != -1: + return False + + return True + + def recommends_checksum(self, urldata): + """ + Is the backend on where checksumming is recommended (should warnings + be displayed if there is no checksum)? + """ + return False + + def _strip_leading_slashes(self, relpath): + """ + Remove leading slash as os.path.join can't cope + """ + while os.path.isabs(relpath): + relpath = relpath[1:] + return relpath + + def setUrls(self, urls): + self.__urls = urls + + def getUrls(self): + return self.__urls + + urls = property(getUrls, setUrls, None, "Urls property") + + def need_update(self, ud, d): + """ + Force a fetch, even if localpath exists? 
+ """ + if os.path.exists(ud.localpath): + return False + return True + + def supports_srcrev(self): + """ + The fetcher supports auto source revisions (SRCREV) + """ + return False + + def download(self, urldata, d): + """ + Fetch urls + Assumes localpath was called first + """ + raise NoMethodError(url) + + def unpack(self, urldata, rootdir, data): + iterate = False + file = urldata.localpath + + try: + unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True) + except ValueError as exc: + bb.fatal("Invalid value for 'unpack' parameter for %s: %s" % + (file, urldata.parm.get('unpack'))) + + dots = file.split(".") + if dots[-1] in ['gz', 'bz2', 'Z', 'xz']: + efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1]))) + else: + efile = file + cmd = None + + if unpack: + if file.endswith('.tar'): + cmd = 'tar x --no-same-owner -f %s' % file + elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'): + cmd = 'tar xz --no-same-owner -f %s' % file + elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'): + cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file + elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'): + cmd = 'gzip -dc %s > %s' % (file, efile) + elif file.endswith('.bz2'): + cmd = 'bzip2 -dc %s > %s' % (file, efile) + elif file.endswith('.tar.xz'): + cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file + elif file.endswith('.xz'): + cmd = 'xz -dc %s > %s' % (file, efile) + elif file.endswith('.zip') or file.endswith('.jar'): + try: + dos = bb.utils.to_boolean(urldata.parm.get('dos'), False) + except ValueError as exc: + bb.fatal("Invalid value for 'dos' parameter for %s: %s" % + (file, urldata.parm.get('dos'))) + cmd = 'unzip -q -o' + if dos: + cmd = '%s -a' % cmd + cmd = "%s '%s'" % (cmd, file) + elif file.endswith('.rpm') or file.endswith('.srpm'): + if 'extract' in urldata.parm: + unpack_file = urldata.parm.get('extract') + cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file) + iterate = True + iterate_file = unpack_file + else: + cmd = 'rpm2cpio.sh %s | cpio -id' % (file) + elif file.endswith('.deb') or file.endswith('.ipk'): + cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file + + if not unpack or not cmd: + # If file == dest, then avoid any copies, as we already put the file into dest! + dest = os.path.join(rootdir, os.path.basename(file)) + if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)): + if os.path.isdir(file): + # If for example we're asked to copy file://foo/bar, we need to unpack the result into foo/bar + basepath = getattr(urldata, "basepath", None) + destdir = "." + if basepath and basepath.endswith("/"): + basepath = basepath.rstrip("/") + elif basepath: + basepath = os.path.dirname(basepath) + if basepath and basepath.find("/") != -1: + destdir = basepath[:basepath.rfind('/')] + destdir = destdir.strip('/') + if destdir != "." and not os.access("%s/%s" % (rootdir, destdir), os.F_OK): + os.makedirs("%s/%s" % (rootdir, destdir)) + cmd = 'cp -fpPR %s %s/%s/' % (file, rootdir, destdir) + #cmd = 'tar -cf - -C "%d" -ps . | tar -xf - -C "%s/%s/"' % (file, rootdir, destdir) + else: + # The "destdir" handling was specifically done for FILESPATH + # items. So, only do so for file:// entries. 
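+                    # Illustrative (hypothetical) case: file://scripts/init.sh
+                    # is copied into <rootdir>/scripts/, and adding
+                    # ;subdir=custom to the URL prepends "custom/" to that
+                    # destination directory.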
+ if urldata.type == "file" and urldata.path.find("/") != -1: + destdir = urldata.path.rsplit("/", 1)[0] + if urldata.parm.get('subdir') != None: + destdir = urldata.parm.get('subdir') + "/" + destdir + else: + if urldata.parm.get('subdir') != None: + destdir = urldata.parm.get('subdir') + else: + destdir = "." + bb.utils.mkdirhier("%s/%s" % (rootdir, destdir)) + cmd = 'cp -f %s %s/%s/' % (file, rootdir, destdir) + + if not cmd: + return + + # Change to subdir before executing command + save_cwd = os.getcwd(); + os.chdir(rootdir) + if 'subdir' in urldata.parm: + newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir'))) + bb.utils.mkdirhier(newdir) + os.chdir(newdir) + + path = data.getVar('PATH', True) + if path: + cmd = "PATH=\"%s\" %s" % (path, cmd) + bb.note("Unpacking %s to %s/" % (file, os.getcwd())) + ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True) + + os.chdir(save_cwd) + + if ret != 0: + raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url) + + if iterate is True: + iterate_urldata = urldata + iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file) + self.unpack(urldata, rootdir, data) + + return + + def clean(self, urldata, d): + """ + Clean any existing full or partial download + """ + bb.utils.remove(urldata.localpath) + + def try_premirror(self, urldata, d): + """ + Should premirrors be used? + """ + return True + + def checkstatus(self, urldata, d): + """ + Check the status of a URL + Assumes localpath was called first + """ + logger.info("URL %s could not be checked for status since no method exists.", url) + return True + + def latest_revision(self, ud, d, name): + """ + Look in the cache for the latest revision, if not present ask the SCM. + """ + if not hasattr(self, "_latest_revision"): + raise ParameterError("The fetcher for this URL does not support _latest_revision", url) + + revs = bb.persist_data.persist('BB_URI_HEADREVS', d) + key = self.generate_revision_key(ud, d, name) + try: + return revs[key] + except KeyError: + revs[key] = rev = self._latest_revision(ud, d, name) + return rev + + def sortable_revision(self, ud, d, name): + latest_rev = self._build_revision(ud, d, name) + return True, str(latest_rev) + + def generate_revision_key(self, ud, d, name): + key = self._revision_key(ud, d, name) + return "%s-%s" % (key, d.getVar("PN", True) or "") + +class Fetch(object): + def __init__(self, urls, d, cache = True, localonly = False): + if localonly and cache: + raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time") + + if len(urls) == 0: + urls = d.getVar("SRC_URI", True).split() + self.urls = urls + self.d = d + self.ud = {} + + fn = d.getVar('FILE', True) + if cache and fn and fn in urldata_cache: + self.ud = urldata_cache[fn] + + for url in urls: + if url not in self.ud: + try: + self.ud[url] = FetchData(url, d, localonly) + except NonLocalMethod: + if localonly: + self.ud[url] = None + pass + + if fn and cache: + urldata_cache[fn] = self.ud + + def localpath(self, url): + if url not in self.urls: + self.ud[url] = FetchData(url, self.d) + + self.ud[url].setup_localpath(self.d) + return self.d.expand(self.ud[url].localpath) + + def localpaths(self): + """ + Return a list of the local filenames, assuming successful fetch + """ + local = [] + + for u in self.urls: + ud = self.ud[u] + ud.setup_localpath(self.d) + local.append(ud.localpath) + + return local + + def download(self, urls = []): + """ + Fetch all urls + """ + if len(urls) == 0: + urls = self.urls + + network 
= self.d.getVar("BB_NO_NETWORK", True) + premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1") + + for u in urls: + ud = self.ud[u] + ud.setup_localpath(self.d) + m = ud.method + localpath = "" + + lf = bb.utils.lockfile(ud.lockfile) + + try: + self.d.setVar("BB_NO_NETWORK", network) + + if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d): + localpath = ud.localpath + elif m.try_premirror(ud, self.d): + logger.debug(1, "Trying PREMIRRORS") + mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True)) + localpath = try_mirrors(self.d, ud, mirrors, False) + + if premirroronly: + self.d.setVar("BB_NO_NETWORK", "1") + + os.chdir(self.d.getVar("DL_DIR", True)) + + firsterr = None + if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)): + try: + logger.debug(1, "Trying Upstream") + m.download(ud, self.d) + if hasattr(m, "build_mirror_data"): + m.build_mirror_data(ud, self.d) + localpath = ud.localpath + # early checksum verify, so that if checksum mismatched, + # fetcher still have chance to fetch from mirror + update_stamp(ud, self.d) + + except bb.fetch2.NetworkAccess: + raise + + except BBFetchException as e: + if isinstance(e, ChecksumError): + logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u) + logger.debug(1, str(e)) + rename_bad_checksum(ud, e.checksum) + elif isinstance(e, NoChecksumError): + raise + else: + logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u) + logger.debug(1, str(e)) + firsterr = e + # Remove any incomplete fetch + m.clean(ud, self.d) + logger.debug(1, "Trying MIRRORS") + mirrors = mirror_from_string(self.d.getVar('MIRRORS', True)) + localpath = try_mirrors (self.d, ud, mirrors) + + if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1): + if firsterr: + logger.error(str(firsterr)) + raise FetchError("Unable to fetch URL from any source.", u) + + update_stamp(ud, self.d) + + except BBFetchException as e: + if isinstance(e, ChecksumError): + logger.error("Checksum failure fetching %s" % u) + raise + + finally: + bb.utils.unlockfile(lf) + + def checkstatus(self, urls = []): + """ + Check all urls exist upstream + """ + + if len(urls) == 0: + urls = self.urls + + for u in urls: + ud = self.ud[u] + ud.setup_localpath(self.d) + m = ud.method + logger.debug(1, "Testing URL %s", u) + # First try checking uri, u, from PREMIRRORS + mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True)) + ret = try_mirrors(self.d, ud, mirrors, True) + if not ret: + # Next try checking from the original uri, u + try: + ret = m.checkstatus(ud, self.d) + except: + # Finally, try checking uri, u, from MIRRORS + mirrors = mirror_from_string(self.d.getVar('MIRRORS', True)) + ret = try_mirrors(self.d, ud, mirrors, True) + + if not ret: + raise FetchError("URL %s doesn't work" % u, u) + + def unpack(self, root, urls = []): + """ + Check all urls exist upstream + """ + + if len(urls) == 0: + urls = self.urls + + for u in urls: + ud = self.ud[u] + ud.setup_localpath(self.d) + + if self.d.expand(self.localpath) is None: + continue + + if ud.lockfile: + lf = bb.utils.lockfile(ud.lockfile) + + ud.method.unpack(ud, root, self.d) + + if ud.lockfile: + bb.utils.unlockfile(lf) + + def clean(self, urls = []): + """ + Clean files that the fetcher gets or places + """ + + if len(urls) == 0: + urls = self.urls + + for url in urls: + if url not in self.ud: + self.ud[url] = FetchData(url, d) + ud = self.ud[url] + 
ud.setup_localpath(self.d) + + if not ud.localfile and ud.localpath is None: + continue + + if ud.lockfile: + lf = bb.utils.lockfile(ud.lockfile) + + ud.method.clean(ud, self.d) + if ud.donestamp: + bb.utils.remove(ud.donestamp) + + if ud.lockfile: + bb.utils.unlockfile(lf) + +from . import cvs +from . import git +from . import gitsm +from . import gitannex +from . import local +from . import svn +from . import wget +from . import ssh +from . import sftp +from . import perforce +from . import bzr +from . import hg +from . import osc +from . import repo +from . import clearcase + +methods.append(local.Local()) +methods.append(wget.Wget()) +methods.append(svn.Svn()) +methods.append(git.Git()) +methods.append(gitsm.GitSM()) +methods.append(gitannex.GitANNEX()) +methods.append(cvs.Cvs()) +methods.append(ssh.SSH()) +methods.append(sftp.SFTP()) +methods.append(perforce.Perforce()) +methods.append(bzr.Bzr()) +methods.append(hg.Hg()) +methods.append(osc.Osc()) +methods.append(repo.Repo()) +methods.append(clearcase.ClearCase()) diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py new file mode 100644 index 0000000..03e9ac4 --- /dev/null +++ b/bitbake/lib/bb/fetch2/bzr.py @@ -0,0 +1,143 @@ +""" +BitBake 'Fetch' implementation for bzr. + +""" + +# Copyright (C) 2007 Ross Burton +# Copyright (C) 2007 Richard Purdie +# +# Classes for obtaining upstream sources for the +# BitBake build tools. +# Copyright (C) 2003, 2004 Chris Larson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
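+#
+# Illustrative recipe usage (hypothetical host, path and revision):
+#
+#   SRC_URI = "bzr://bzr.example.org/project/trunk;protocol=http"
+#   SRCREV = "42"
+#
+# Setting SRCREV to "AUTOINC" (usually via ${AUTOREV}) makes the fetcher
+# run "bzr revno" to resolve the latest upstream revision instead.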
+ +import os +import sys +import logging +import bb +from bb import data +from bb.fetch2 import FetchMethod +from bb.fetch2 import FetchError +from bb.fetch2 import runfetchcmd +from bb.fetch2 import logger + +class Bzr(FetchMethod): + def supports(self, ud, d): + return ud.type in ['bzr'] + + def urldata_init(self, ud, d): + """ + init bzr specific variable within url data + """ + # Create paths to bzr checkouts + relpath = self._strip_leading_slashes(ud.path) + ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath) + + ud.setup_revisons(d) + + if not ud.revision: + ud.revision = self.latest_revision(ud, d) + + ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d) + + def _buildbzrcommand(self, ud, d, command): + """ + Build up an bzr commandline based on ud + command is "fetch", "update", "revno" + """ + + basecmd = data.expand('${FETCHCMD_bzr}', d) + + proto = ud.parm.get('protocol', 'http') + + bzrroot = ud.host + ud.path + + options = [] + + if command == "revno": + bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) + else: + if ud.revision: + options.append("-r %s" % ud.revision) + + if command == "fetch": + bzrcmd = "%s branch %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) + elif command == "update": + bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options)) + else: + raise FetchError("Invalid bzr command %s" % command, ud.url) + + return bzrcmd + + def download(self, ud, d): + """Fetch url""" + + if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): + bzrcmd = self._buildbzrcommand(ud, d, "update") + logger.debug(1, "BZR Update %s", ud.url) + bb.fetch2.check_network_access(d, bzrcmd, ud.url) + os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) + runfetchcmd(bzrcmd, d) + else: + bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) + bzrcmd = self._buildbzrcommand(ud, d, "fetch") + bb.fetch2.check_network_access(d, bzrcmd, ud.url) + logger.debug(1, "BZR Checkout %s", ud.url) + bb.utils.mkdirhier(ud.pkgdir) + os.chdir(ud.pkgdir) + logger.debug(1, "Running %s", bzrcmd) + runfetchcmd(bzrcmd, d) + + os.chdir(ud.pkgdir) + + scmdata = ud.parm.get("scmdata", "") + if scmdata == "keep": + tar_flags = "" + else: + tar_flags = "--exclude '.bzr' --exclude '.bzrtags'" + + # tar them up to a defined filename + runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath]) + + def supports_srcrev(self): + return True + + def _revision_key(self, ud, d, name): + """ + Return a unique key for the url + """ + return "bzr:" + ud.pkgdir + + def _latest_revision(self, ud, d, name): + """ + Return the latest upstream revision number + """ + logger.debug(2, "BZR fetcher hitting network for %s", ud.url) + + bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url) + + output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True) + + return output.strip() + + def sortable_revision(self, ud, d, name): + """ + Return a sortable revision number which in our case is the revision number + """ + + return False, self._build_revision(ud, d) + + def _build_revision(self, ud, d): + return ud.revision diff --git a/bitbake/lib/bb/fetch2/clearcase.py b/bitbake/lib/bb/fetch2/clearcase.py new file mode 100644 index 0000000..bfca2f7 --- /dev/null +++ b/bitbake/lib/bb/fetch2/clearcase.py @@ -0,0 +1,263 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; 
c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' clearcase implementation + +The clearcase fetcher is used to retrieve files from a ClearCase repository. + +Usage in the recipe: + + SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module" + SRCREV = "EXAMPLE_CLEARCASE_TAG" + PV = "${@d.getVar("SRCREV").replace("/", "+")}" + +The fetcher uses the rcleartool or cleartool remote client, depending on which one is available. + +Supported SRC_URI options are: + +- vob + (required) The name of the clearcase VOB (with prepending "/") + +- module + The module in the selected VOB (with prepending "/") + + The module and vob parameters are combined to create + the following load rule in the view config spec: + load <vob><module> + +- proto + http or https + +Related variables: + + CCASE_CUSTOM_CONFIG_SPEC + Write a config spec to this variable in your recipe to use it instead + of the default config spec generated by this fetcher. + Please note that the SRCREV loses its functionality if you specify + this variable. SRCREV is still used to label the archive after a fetch, + but it doesn't define what's fetched. + +User credentials: + cleartool: + The login of cleartool is handled by the system. No special steps needed. + + rcleartool: + In order to use rcleartool with authenticated users an `rcleartool login` is + necessary before using the fetcher. +""" +# Copyright (C) 2014 Siemens AG +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# + +import os +import sys +import shutil +import bb +from bb import data +from bb.fetch2 import FetchMethod +from bb.fetch2 import FetchError +from bb.fetch2 import runfetchcmd +from bb.fetch2 import logger +from distutils import spawn + +class ClearCase(FetchMethod): + """Class to fetch urls via 'clearcase'""" + def init(self, d): + pass + + def supports(self, ud, d): + """ + Check to see if a given url can be fetched with Clearcase. + """ + return ud.type in ['ccrc'] + + def debug(self, msg): + logger.debug(1, "ClearCase: %s", msg) + + def urldata_init(self, ud, d): + """ + init ClearCase specific variable within url data + """ + ud.proto = "https" + if 'protocol' in ud.parm: + ud.proto = ud.parm['protocol'] + if not ud.proto in ('http', 'https'): + raise fetch2.ParameterError("Invalid protocol type", ud.url) + + ud.vob = '' + if 'vob' in ud.parm: + ud.vob = ud.parm['vob'] + else: + msg = ud.url+": vob must be defined so the fetcher knows what to get." + raise MissingParameterError('vob', msg) + + if 'module' in ud.parm: + ud.module = ud.parm['module'] + else: + ud.module = "" + + ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool") + + if data.getVar("SRCREV", d, True) == "INVALID": + raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. 
SRCREV = \"/main/LATEST\" or any other label of your choice.") + + ud.label = d.getVar("SRCREV") + ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True) + + ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path) + + ud.identifier = "clearcase-%s%s-%s" % ( ud.vob.replace("/", ""), + ud.module.replace("/", "."), + ud.label.replace("/", ".")) + + ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True)) + ud.csname = "%s-config-spec" % (ud.identifier) + ud.ccasedir = os.path.join(data.getVar("DL_DIR", d, True), ud.type) + ud.viewdir = os.path.join(ud.ccasedir, ud.viewname) + ud.configspecfile = os.path.join(ud.ccasedir, ud.csname) + ud.localfile = "%s.tar.gz" % (ud.identifier) + + self.debug("host = %s" % ud.host) + self.debug("path = %s" % ud.path) + self.debug("server = %s" % ud.server) + self.debug("proto = %s" % ud.proto) + self.debug("type = %s" % ud.type) + self.debug("vob = %s" % ud.vob) + self.debug("module = %s" % ud.module) + self.debug("basecmd = %s" % ud.basecmd) + self.debug("label = %s" % ud.label) + self.debug("ccasedir = %s" % ud.ccasedir) + self.debug("viewdir = %s" % ud.viewdir) + self.debug("viewname = %s" % ud.viewname) + self.debug("configspecfile = %s" % ud.configspecfile) + self.debug("localfile = %s" % ud.localfile) + + ud.localfile = os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) + + def _build_ccase_command(self, ud, command): + """ + Build up a commandline based on ud + command is: mkview, setcs, rmview + """ + options = [] + + if "rcleartool" in ud.basecmd: + options.append("-server %s" % ud.server) + + basecmd = "%s %s" % (ud.basecmd, command) + + if command is 'mkview': + if not "rcleartool" in ud.basecmd: + # Cleartool needs a -snapshot view + options.append("-snapshot") + options.append("-tag %s" % ud.viewname) + options.append(ud.viewdir) + + elif command is 'rmview': + options.append("-force") + options.append("%s" % ud.viewdir) + + elif command is 'setcs': + options.append("-overwrite") + options.append(ud.configspecfile) + + else: + raise FetchError("Invalid ccase command %s" % command) + + ccasecmd = "%s %s" % (basecmd, " ".join(options)) + self.debug("ccasecmd = %s" % ccasecmd) + return ccasecmd + + def _write_configspec(self, ud, d): + """ + Create config spec file (ud.configspecfile) for ccase view + """ + config_spec = "" + custom_config_spec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", d) + if custom_config_spec is not None: + for line in custom_config_spec.split("\\n"): + config_spec += line+"\n" + bb.warn("A custom config spec has been set, SRCREV is only relevant for the tarball name.") + else: + config_spec += "element * CHECKEDOUT\n" + config_spec += "element * %s\n" % ud.label + config_spec += "load %s%s\n" % (ud.vob, ud.module) + + logger.info("Using config spec: \n%s" % config_spec) + + with open(ud.configspecfile, 'w') as f: + f.write(config_spec) + + def _remove_view(self, ud, d): + if os.path.exists(ud.viewdir): + os.chdir(ud.ccasedir) + cmd = self._build_ccase_command(ud, 'rmview'); + logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname) + bb.fetch2.check_network_access(d, cmd, ud.url) + output = runfetchcmd(cmd, d) + logger.info("rmview output: %s", output) + + def need_updat