author Tudor Florea <tudor.florea@enea.com> 2015-10-08 22:51:41 +0200
committer Tudor Florea <tudor.florea@enea.com> 2015-10-08 22:51:41 +0200
commit 1219bf8a90a7bf8cd3a5363551ef635d51e8fc8e (patch)
tree a21a5fc103bb3bd65ecd85ed22be5228fc54e447 /meta-oe/recipes-connectivity
download meta-openembedded-1219bf8a90a7bf8cd3a5363551ef635d51e8fc8e.tar.gz
initial commit for Enea Linux 5.0 arm
Signed-off-by: Tudor Florea <tudor.florea@enea.com>
Diffstat (limited to 'meta-oe/recipes-connectivity')
-rw-r--r-- meta-oe/recipes-connectivity/daq/daq_2.0.2.bb | 25
-rw-r--r-- meta-oe/recipes-connectivity/daq/files/disable-run-test-program-while-cross-compiling.patch | 29
-rw-r--r-- meta-oe/recipes-connectivity/gammu/gammu/gammu-smsdrc | 80
-rw-r--r-- meta-oe/recipes-connectivity/gammu/gammu/gammurc | 173
-rw-r--r-- meta-oe/recipes-connectivity/gammu/gammu_1.32.0.bb | 55
-rw-r--r-- meta-oe/recipes-connectivity/gateone/gateone/0001-configuration.py-Hack-around-broken-gethostname-thin.patch | 26
-rw-r--r-- meta-oe/recipes-connectivity/gateone/gateone/80oe.conf | 23
-rw-r--r-- meta-oe/recipes-connectivity/gateone/gateone/gateone-avahi.service | 10
-rw-r--r-- meta-oe/recipes-connectivity/gateone/gateone/gateone-init | 47
-rw-r--r-- meta-oe/recipes-connectivity/gateone/gateone/gateone.service | 10
-rw-r--r-- meta-oe/recipes-connectivity/gateone/gateone_git.bb | 82
-rw-r--r-- meta-oe/recipes-connectivity/gnokii/gnokii_0.6.31.bb | 33
-rw-r--r-- meta-oe/recipes-connectivity/gnuradio/gnuradio/0001-buildsys-don-t-add-usr-include-and-usr-lib-to-config.patch | 58
-rw-r--r-- meta-oe/recipes-connectivity/hostapd/hostapd-2.2/defconfig | 145
-rw-r--r-- meta-oe/recipes-connectivity/hostapd/hostapd-2.2/hostapd.service | 11
-rw-r--r-- meta-oe/recipes-connectivity/hostapd/hostapd-2.2/init | 58
-rw-r--r-- meta-oe/recipes-connectivity/hostapd/hostapd_2.2.bb | 48
-rw-r--r-- meta-oe/recipes-connectivity/irssi/irssi_0.8.16-rc1.bb | 22
-rw-r--r-- meta-oe/recipes-connectivity/iw/iw/0001-iw-version.sh-don-t-use-git-describe-for-versioning.patch | 43
-rw-r--r-- meta-oe/recipes-connectivity/iw/iw_3.15.bb | 23
-rw-r--r-- meta-oe/recipes-connectivity/krb5/krb5/0001-Return-only-new-keys-in-randkey-CVE-2014-5351.patch | 92
-rw-r--r-- meta-oe/recipes-connectivity/krb5/krb5/0001-aclocal-Add-parameter-to-disable-keyutils-detection.patch | 33
-rw-r--r-- meta-oe/recipes-connectivity/krb5/krb5/crosscompile_nm.patch | 29
-rw-r--r-- meta-oe/recipes-connectivity/krb5/krb5/debian-suppress-usr-lib-in-krb5-config.patch | 48
-rw-r--r-- meta-oe/recipes-connectivity/krb5/krb5/etc/default/krb5-admin-server | 6
-rw-r--r-- meta-oe/recipes-connectivity/krb5/krb5/etc/default/krb5-kdc | 5
-rwxr-xr-x meta-oe/recipes-connectivity/krb5/krb5/etc/init.d/krb5-admin-server | 140
-rwxr-xr-x meta-oe/recipes-connectivity/krb5/krb5/etc/init.d/krb5-kdc | 133
-rw-r--r-- meta-oe/recipes-connectivity/krb5/krb5_1.12.2.bb | 77
-rw-r--r-- meta-oe/recipes-connectivity/libimobiledevice/libimobiledevice_1.1.4.bb | 26
-rw-r--r-- meta-oe/recipes-connectivity/libmtp/libmtp-1.1.5/69-libmtp.rules | 999
-rw-r--r-- meta-oe/recipes-connectivity/libmtp/libmtp-1.1.5/glibc-2.20.patch | 36
-rw-r--r-- meta-oe/recipes-connectivity/libmtp/libmtp_1.1.5.bb | 61
-rw-r--r-- meta-oe/recipes-connectivity/libnet/libnet_1.2-rc3.bb | 19
-rw-r--r-- meta-oe/recipes-connectivity/libtorrent/libtorrent/don-t-run-code-while-configuring-package.patch | 94
-rw-r--r-- meta-oe/recipes-connectivity/libtorrent/libtorrent_0.13.3.bb | 17
-rw-r--r-- meta-oe/recipes-connectivity/linuxptp/linuxptp/build-Allow-CC-and-prefix-to-be-overriden.patch | 37
-rw-r--r-- meta-oe/recipes-connectivity/linuxptp/linuxptp_1.4.bb | 20
-rw-r--r-- meta-oe/recipes-connectivity/loudmouth/loudmouth-1.4.3/04-use-pkg-config-for-gnutls.patch | 23
-rw-r--r-- meta-oe/recipes-connectivity/loudmouth/loudmouth-1.4.3/glib-2.32.patch | 10
-rw-r--r-- meta-oe/recipes-connectivity/loudmouth/loudmouth_1.4.3.bb | 20
-rw-r--r-- meta-oe/recipes-connectivity/mosh/mosh_1.2.4.bb | 38
-rw-r--r-- meta-oe/recipes-connectivity/networkmanager/modemmanager_0.6.0.0.bb | 29
-rw-r--r-- meta-oe/recipes-connectivity/networkmanager/networkmanager/0001-configure.ac-Check-only-for-libsystemd-not-libsystem.patch | 30
-rw-r--r-- meta-oe/recipes-connectivity/networkmanager/networkmanager/0001-don-t-try-to-run-sbin-dhclient-to-get-the-version-nu.patch | 36
-rw-r--r-- meta-oe/recipes-connectivity/networkmanager/networkmanager_0.9.8.10.bb | 111
-rw-r--r-- meta-oe/recipes-connectivity/obex/obex-data-server_0.4.6.bb | 15
-rw-r--r-- meta-oe/recipes-connectivity/obex/openobex-1.5/disable-cable-test.patch | 16
-rw-r--r-- meta-oe/recipes-connectivity/obex/openobex-1.5/libusb_crosscompile_check.patch | 11
-rw-r--r-- meta-oe/recipes-connectivity/obex/openobex-1.5/separate_builddir.patch | 16
-rw-r--r-- meta-oe/recipes-connectivity/obex/openobex_1.5.bb | 33
-rw-r--r-- meta-oe/recipes-connectivity/obexftp/obexftp/Remove_some_printf_in_obexftpd.patch | 34
-rw-r--r-- meta-oe/recipes-connectivity/obexftp/obexftp_0.23.bb | 16
-rw-r--r-- meta-oe/recipes-connectivity/phonet-utils/phonet-utils_git.bb | 13
-rw-r--r-- meta-oe/recipes-connectivity/rabbitmq-c/rabbitmq-c_0.5.0.bb | 17
-rw-r--r-- meta-oe/recipes-connectivity/rfkill/rfkill/0001-rfkill-makefile-don-t-use-t-the-OE-install-wrapper-d.patch | 30
-rw-r--r-- meta-oe/recipes-connectivity/rfkill/rfkill/dont.call.git.rev-parse.on.parent.dir.patch | 31
-rw-r--r-- meta-oe/recipes-connectivity/rfkill/rfkill_0.4.bb | 27
-rw-r--r-- meta-oe/recipes-connectivity/rtorrent/rtorrent/don-t-run-code-while-configuring-package.patch | 94
-rw-r--r-- meta-oe/recipes-connectivity/rtorrent/rtorrent_0.9.3.bb | 15
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_221618_precise-64bit-prototype.patch | 20
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_387266_upstream_4104_mention-kerberos-in-smbspool-manpage.patch | 34
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_598313_upstream_7499-nss_wins-dont-clobber-daemons-logs.patch | 47
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_601406_fix-perl-path-in-example.patch | 15
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_604768_upstream_7826_drop-using-samba-link.patch | 21
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_604768_upstream_7826_fix-WHATSNEW-link.patch | 18
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/configure-disable-core_pattern-cross-check.patch | 20
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/configure-disable-getaddrinfo-cross.patch | 11
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/configure-libunwind.patch | 82
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/documentation.patch | 302
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/documentation2.patch | 314
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/dont-build-VFS-examples.patch | 31
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/fhs-filespaths.patch | 65
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/installswat.sh.patch | 23
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/libutil_drop_AI_ADDRCONFIG.patch | 54
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/only_export_public_symbols.patch | 21
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/pam-examples.patch | 17
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/shadow_copy2_backport.patch | 2101
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/smbclient-pager.patch | 18
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/smbtar-bashism.patch | 19
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/smbtorture-manpage.patch | 94
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/undefined-symbols.patch | 24
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/usershare.patch | 38
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-3.6.24/waf-as-source.patch | 18534
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba-basic.inc | 55
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba.inc | 159
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/0001-PIDL-fix-parsing-linemarkers-in-preprocessor-output.patch | 68
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/Managing-Samba.txt | 40
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/cifs.patch | 10
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/config-h.patch | 12
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/config-lfs.patch | 47
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/configure-3.3.0.patch | 85
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/init.samba | 58
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/init.winbind | 38
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/mtab.patch | 11
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/nmb.service | 12
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/quota.patch | 11
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/smb.conf | 266
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/smb.service | 13
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/tdb.pc | 11
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/tdbheaderfix.patch | 14
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/volatiles.03_samba | 2
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba/winbind.service | 12
-rw-r--r-- meta-oe/recipes-connectivity/samba/samba_3.6.24.bb | 67
-rw-r--r-- meta-oe/recipes-connectivity/ser2net/ser2net_2.9.1.bb | 15
-rw-r--r-- meta-oe/recipes-connectivity/soft66/files/fix-ar.patch | 13
-rw-r--r-- meta-oe/recipes-connectivity/soft66/soft66_git.bb | 20
-rw-r--r-- meta-oe/recipes-connectivity/umip/umip_1.0.bb | 18
-rw-r--r-- meta-oe/recipes-connectivity/usbmuxd/usbmuxd_git.bb | 24
-rw-r--r-- meta-oe/recipes-connectivity/wvdial/wvdial/typo_pon.wvdial.1.patch | 20
-rw-r--r-- meta-oe/recipes-connectivity/wvdial/wvdial_1.61.bb | 30
-rw-r--r-- meta-oe/recipes-connectivity/wvdial/wvstreams/04_signed_request.diff | 13
-rw-r--r-- meta-oe/recipes-connectivity/wvdial/wvstreams/05_gcc.diff | 41
-rw-r--r-- meta-oe/recipes-connectivity/wvdial/wvstreams/06_gcc-4.7.diff | 18
-rw-r--r-- meta-oe/recipes-connectivity/wvdial/wvstreams/07_buildflags.diff | 32
-rw-r--r-- meta-oe/recipes-connectivity/wvdial/wvstreams_4.6.1.bb | 45
-rw-r--r-- meta-oe/recipes-connectivity/zeroc-ice/zeroc-ice-3.5.1/0002-Modify-Makefile-for-cross-compile.patch | 292
-rw-r--r-- meta-oe/recipes-connectivity/zeroc-ice/zeroc-ice_3.5.1.bb | 87
-rw-r--r-- meta-oe/recipes-connectivity/zeromq/cppzmq_git.bb | 19
-rwxr-xr-x meta-oe/recipes-connectivity/zeromq/files/run-ptest | 10
-rw-r--r-- meta-oe/recipes-connectivity/zeromq/zeromq_4.0.4.bb | 24
121 files changed, 27173 insertions, 0 deletions
diff --git a/meta-oe/recipes-connectivity/daq/daq_2.0.2.bb b/meta-oe/recipes-connectivity/daq/daq_2.0.2.bb
new file mode 100644
index 000000000..4cf3f1ceb
--- /dev/null
+++ b/meta-oe/recipes-connectivity/daq/daq_2.0.2.bb
@@ -0,0 +1,25 @@
1SUMMARY = "The dump DAQ tests the various inline mode features"
2HOMEPAGE = "http://www.snort.org"
3LICENSE = "GPLv2"
4LIC_FILES_CHKSUM = "file://COPYING;md5=f9ce51a65dd738dc1ae631d8b21c40e0"
5
6PARALLEL_MAKE = ""
7
8DEPENDS = "libpcap libpcre libdnet"
9
10SRC_URI = "http://fossies.org/linux/misc/daq-${PV}.tar.gz \
11 file://disable-run-test-program-while-cross-compiling.patch "
12
13# these two modules create an undeclared dependency on libdnet and libnetfilter-queue from meta-networking
14# this is the error reported by the test-dependencies script:
15# daq/daq/latest lost dependency on libdnet libmnl libnetfilter-queue libnfnetlink
16#
17# never look in /usr/local while cross compiling
18
19EXTRA_OECONF = "--disable-nfq-module --disable-ipq-module --includedir=${includedir} \
20 --with-libpcap-includes=${STAGING_INCDIR} --with-dnet-includes=${STAGING_LIBDIR}"
21
22SRC_URI[md5sum] = "865bf9b750a2a2ca632591a3c70b0ea0"
23SRC_URI[sha256sum] = "d65d1e67c4994e02c3142c49a648642e780b7e3d942b4a51f605309beac269a8"
24
25inherit autotools
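A quick post-build sanity check for the options above (an illustrative sketch only; the work-directory path and library name are assumptions, not taken from this commit) is to list the NEEDED entries of the cross-built library and confirm that the disabled nfq/ipq modules pulled in no extra libraries:

# hypothetical image path under tmp/work; adjust for the actual MACHINE and recipe revision
readelf -d tmp/work/*/daq/2.0.2-r0/image/usr/lib/libdaq.so.* | grep NEEDED
# only libpcap/libdnet are expected here; libnetfilter_queue should be absent
# when --disable-nfq-module and --disable-ipq-module were honoured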
diff --git a/meta-oe/recipes-connectivity/daq/files/disable-run-test-program-while-cross-compiling.patch b/meta-oe/recipes-connectivity/daq/files/disable-run-test-program-while-cross-compiling.patch
new file mode 100644
index 000000000..12a6cba60
--- /dev/null
+++ b/meta-oe/recipes-connectivity/daq/files/disable-run-test-program-while-cross-compiling.patch
@@ -0,0 +1,29 @@
1Upstream-Status: Inappropriate [embedded specific]
2
3fix the below error:
4configure: error: cannot run test program while cross compiling
5
6Signed-off-by: Chunrong Guo <B40290@freescale.com>
7
8--- a/m4/sf.m4old 2013-08-30 04:08:18.768409680 -0500
9+++ b/m4/sf.m4 2013-08-30 04:09:24.591367890 -0500
10@@ -135,19 +135,6 @@
11 exit 1
12 fi
13 AC_MSG_CHECKING([for libpcap version >= $1])
14- AC_RUN_IFELSE(
15- [AC_LANG_PROGRAM(
16- [[
17- #include <pcap.h>
18- #include <string.h>
19- extern char pcap_version[];
20- ]],
21- [[
22- if (strcmp(pcap_version, $1) < 0)
23- return 1;
24- ]])],
25- [libpcap_version_1x="yes"],
26- [libpcap_version_1x="no"])
27 if test "x$libpcap_version_1x" = "xno"; then
28 AC_MSG_RESULT(no)
29 echo
diff --git a/meta-oe/recipes-connectivity/gammu/gammu/gammu-smsdrc b/meta-oe/recipes-connectivity/gammu/gammu/gammu-smsdrc
new file mode 100644
index 000000000..da413c2d3
--- /dev/null
+++ b/meta-oe/recipes-connectivity/gammu/gammu/gammu-smsdrc
@@ -0,0 +1,80 @@
1# This is a sample Gammu SMSD configuration file. It's required for gammu-smsd,
2# see gammu-smsdrc(5) for documentation.
3
4# Gammu configuration, this section is like section "gammu" in "gammurc" file,
5# see gammurc(5) for documentation.
6[gammu]
7device = /dev/ttyUSB0
8model = at115200
9#connection = dlr3
10#synchronizetime = yes
11#logfile = gammulog # this is not used at all in SMSD mode
12#logformat = textall
13#use_locking = yes
14#gammuloc = gammu.us
15#startinfo = yes
16
17# When you uncomment this section and insert numbers here, smsd will process
18# incoming sms only from the numbers written here (incoming sms from all other
19# numbers will be deleted)
20#[include_numbers]
21#number1 = 1234
22
23# When you uncomment this section and insert numbers here, smsd will process
24# incoming sms from all numbers not written here (incoming sms from numbers
25# written here will be deleted). This is a "black" list.
26# Note: if the "include_numbers" section is used, this one will be ignored
27#[exclude_numbers]
28#number1 = 1234
29
30# General SMSD settings, see gammu-smsdrc(5) for detailed description.
31[smsd]
32# SMSD service to use, one of FILES, MYSQL, PGSQL, DBI
33service = files
34# PIN for SIM card
35PIN = 0000
36# File (or stderr, syslog, eventlog) where information will be logged
37logfile = smsdlog
38# Amount of information being logged, each bit mean one level
39debuglevel = 0
40# Configuration for using more phones on same database
41#phoneid = MyPhone1
42# Script to be executed when new message has been received
43#runonreceive = /some/script
44# Communication frequency settings
45commtimeout = 30
46sendtimeout = 30
47#receivefrequency = 0
48
49# Phone communication settings
50#checksecurity = 1
51#resetfrequency = 0
52
53# Delivery report configuration
54#deliveryreport = no
55#deliveryreportdelay = 10
56
57# Ignoring broken SMSC
58#skipsmscnumber = +48602123456
59
60# Database backends configuration
61user = gammu
62password = gammupassword
63pc = localhost
64# pc can also contain port or socket path after colon (eg. localhost:/path/to/socket)
65database = sms
66
67# DBI configuration
68driver = sqlite
69# driverspath = /usr/lib/dbd/
70# Database directory for sqlite
71# dbdir = /var/lib/smsd
72
73# Files backend configuration
74inboxpath = /var/spool/sms/inbox/
75outboxpath = /var/spool/sms/outbox/
76sentsmspath = /var/spool/sms/sent/
77errorsmspath = /var/spool/sms/error/
78inboxformat = detail
79transmitformat = auto
80outboxformat = detail
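For the files backend configured above, a minimal usage sketch (assuming gammu-smsd is already running with this configuration; the recipient number below is a placeholder):

# create the spool directories referenced by inboxpath/outboxpath/... above
mkdir -p /var/spool/sms/inbox /var/spool/sms/outbox /var/spool/sms/sent /var/spool/sms/error
# queue an outgoing message; the running daemon picks it up from the outbox
gammu-smsd-inject -c /etc/gammu-smsdrc TEXT +15551234567 -text "test message"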
diff --git a/meta-oe/recipes-connectivity/gammu/gammu/gammurc b/meta-oe/recipes-connectivity/gammu/gammu/gammurc
new file mode 100644
index 000000000..b55d739fa
--- /dev/null
+++ b/meta-oe/recipes-connectivity/gammu/gammu/gammurc
@@ -0,0 +1,173 @@
1; This is a sample ~/.gammurc file.
2; In Unix/Linux copy it into your home directory and name it .gammurc
3; or into /etc and name it gammurc
4; In Win32 copy it into directory with Gammu.exe and name gammurc
5; More about parameters later
6; Anything behind ; or # is comment.
7; -----------------------------------------------------------------------------
8
9[gammu]
10
11device = /dev/ttyUSB0
12connection = at115200
13; Do not use model configuration unless you really need it
14;model = 6110
15;synchronizetime = yes
16;logfile = gammulog
17;logformat = textall
18;use_locking = yes
19;gammuloc = locfile
20;startinfo = yes
21;gammucoding = utf8
22;usephonedb = yes
23
24;[gammu1]
25
26;device = com8:
27;model = 6110
28;connection = fbusblue
29;synchronizetime = yes
30;logfile = gammulog
31;logformat = textall
32;use_locking = yes
33;gammuloc = locfile
34;startinfo = yes
35;gammucoding = utf8
36
37; Step 1. Please find the required Connection parameter and note the device
38; type assigned to it. With some Connections you must set a concrete model
39
40; ================================================================ cables =====
41; New Nokia protocol for FBUS/DAU9P
42; Connection "fbus", device type serial
43; New Nokia protocol for DLR3/DLR3P
44; Connection "fbusdlr3"/"dlr3", device type serial
45; New Nokia protocol for DKU2 (and phone with USB converter on phone mainboard
46; like 6230)
47; Connection "dku2phonet"/"dku2", device type dku2 on Windows
48; Connection "fbususb" on Linux
49; New Nokia protocol for DKU5 (and phone without USB converter on phone
50; mainboard like 5100)
51; Connection "dku5fbus"/"dku5", device type dku5
52; New Nokia protocol for PL2303 USB cable (and phone without USB converter
53; on phone mainboard like 5100)
54; Connection "fbuspl2303", device type usb
55; Old Nokia protocol for MBUS/DAU9P
56; Connection "mbus", device type serial
57; Variants:
58; You can modify a bit behaviour of connection using additional flags
59; specified just after connection name like connection-variant.
60; If you're using ARK3116 cable (or any other which does not like dtr
61; handling), you might need -nodtr variant of connection, eg. dlr3-nodtr.
62; If cable you use is not powered over DTR/RTS, try using -nopower variant of
63; connection, eg. fbus-nopower.
64; -----------------------------------------------------------------------------
65; AT commands for DLR3, DKU5 or other AT compatible cable (8 bits, None
66; parity, no flow control, 1 stop bit). Used with Nokia, Alcatel, Siemens, etc.
67; Connection "at19200"/"at115200"/.., device type serial
68; AT commands for DKU2 cable
69; Connection "dku2at", device type dku2
70; ============================================================== infrared =====
71; Nokia protocol for infrared with Nokia 6110/6130/6150
72; Connection "fbusirda"/"infrared", device type serial
73; Nokia protocol for infrared with other Nokia models
74; Connection "irdaphonet"/"irda", device type irda
75; -----------------------------------------------------------------------------
76; AT commands for infrared. Used with Nokia, Alcatel, Siemens, etc.
77; Connection "irdaat", device type irda
78; -----------------------------------------------------------------------------
79; OBEX for infrared
80; Connection "irdaobex", device type irda.
81; ============================================================= Bluetooth =====
82; Nokia protocol with serial device set in BT stack (WidComm, other) from
83; adequate service and Nokia 6210
84; Connection "fbusblue", device type serial
85; Nokia protocol with serial device set in BT stack (WidComm, other) from
86; adequate service and other Nokia models
87; Connection "phonetblue", device type serial
88; -----------------------------------------------------------------------------
89; Nokia protocol for Bluetooth stack with Nokia 6210
90; Connection "bluerffbus", device type BT
91; Nokia protocol for Bluetooth stack with DCT4 Nokia models, which don't inform
92; about services correctly (6310, 6310i with firmware lower than 5.50, 8910,..)
93; Connection "bluerfphonet", device type BT
94; Nokia protocol for Bluetooth stack with other DCT4 Nokia models
95; Connection "bluephonet", device type BT
96; -----------------------------------------------------------------------------
97; AT commands for Bluetooth stack and 6210 / DCT4 Nokia models, which don't
98; inform about BT services correctly (6310, 6310i with firmware lower
99; than 5.50, 8910,..)
100; Connection "bluerfat", device type BT
101; AT commands for Bluetooth stack with other phones (Siemens, other Nokia,etc.)
102; Connection "blueat", device type BT
103; -----------------------------------------------------------------------------
104; OBEX for Bluetooth stack with DCT4 Nokia models, which don't inform about
105; BT services correctly (6310, 6310i with firmware lower than 5.50, 8910,...)
106; Connection "bluerfobex", device type BT
107; OBEX for Bluetooth stack with other phones (Siemens, other Nokia, etc.)
108; Connection "blueobex", device type BT.
109; -----------------------------------------------------------------------------
110; Connection "bluerfgnapbus", device type BT, model "gnap"
111; Connection "irdagnapbus", device type irda, model "gnap"
112
113; Step 2. According to the device type from Step 1 and the OS used, set the Port parameter
114
115; -----------------------------------------------------------------------------
116; Port type | "Port" parameter in Windows/DOS | "Port" parameter in Linux/Unix
117; ----------|---------------------------------|--------------------------------
118; serial | "com*:" | "/dev/ttyS*"
119; | (example "com1:") | (example "/dev/ttyS1")
120; | | or "/dev/tts/**" (with DevFS)
121; | | virtual serial ports like
122; | | "/dev/ircomm*" or "/dev/rfcomm*"
123; ----------|---------------------------------|--------------------------------
124; irda | ignored (can be empty) | ignored (can be empty)
125; ----------|---------------------------------|--------------------------------
126; BT | Bluetooth device address (example "00:11:22:33:44:55").
127; | Optionally you can also include channel after slash
128; | (example "00:11:22:33:44:55/12"). Can be also empty.
129; ----------|---------------------------------|--------------------------------
130; dku2 | ignored (can be empty) | /dev/ttyUSB* or /dev/ttyACM*
131; ----------|---------------------------------|--------------------------------
132; dku5 | ignored (can be empty) | connection with it not possible
133; ----------|---------------------------------|--------------------------------
134; usb | connection with it not possible | "/dev/ttyUSB*"
135
136; Step 3. Set other config parameters
137
138; -----------------------------------------------------------------------------
139; Parameter name | Description
140; ----------------|------------------------------------------------------------
141; Model | Should not be used unless you have a good reason to do so.
142; | If Gammu doesn't recognize your phone model, put it here.
143; | Example values: "6110", "6150", "6210", "8210"
144; SynchronizeTime | if you want to set time from computer to phone during
145; | starting connection. Rather do not use this option when
146; | you reset the phone during connection (some phones need
147; | the time set again after restart)
148; GammuLoc | name of localisation file
149; StartInfo | this option allow to set, that you want (setting "yes")
150; | to see message on the phone screen or phone should enable
151; | light for a moment during starting connection. Phone
152; | WON'T beep during starting connection with this option.
153; GammuCoding | forces using specified codepage (in win32 - for example
154; | "1250" will force CP1250) or UTF8 (in Linux - "utf8")
155; ----------------|------------------------------------------------------------
156; Logfile | Use, when want to have logfile from communication.
157; Logformat | What debug info and format should be used:
158; | "nothing" - no debug level (default)
159; | "text" - transmission dump in text format
160; | "textall" - all possible info in text format
161; | "errors" - errors in text format
162; | "binary" - transmission dump in binary format
163; ----------------|------------------------------------------------------------
164; Features | Custom features for phone. This can be used as override
165; | when values coded in common/gsmphones.c are bad or
166; | missing. Consult include/gammu-info.h for possible values
167; | (all Feature values without leading F_ prefix).
168; | Please report correct values to Gammu authors.
169; ----------------|------------------------------------------------------------
170; Use_Locking | under Unix/Linux use "yes", if want to lock used device
171; | to prevent using it by other applications. In win32 ignored
172
173; vim: et ts=4 sw=4 sts=4 tw=78 spell spelllang=en_us
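Once the device and connection values in the [gammu] section above match the attached modem, the configuration can be checked from the command line (a hedged sketch; "identify" simply asks the phone for its model and IMEI):

# use the config file installed by the recipe below
gammu -c /etc/gammurc identify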
diff --git a/meta-oe/recipes-connectivity/gammu/gammu_1.32.0.bb b/meta-oe/recipes-connectivity/gammu/gammu_1.32.0.bb
new file mode 100644
index 000000000..0dac7cba3
--- /dev/null
+++ b/meta-oe/recipes-connectivity/gammu/gammu_1.32.0.bb
@@ -0,0 +1,55 @@
1SUMMARY = "GNU All Mobile Management Utilities"
2SECTION = "console/network"
3DEPENDS = "cmake-native virtual/libiconv libdbi mysql5 glib-2.0 udev"
4LICENSE = "GPLv2"
5LIC_FILES_CHKSUM = "file://COPYING;md5=a17cb0a873d252440acfdf9b3d0e7fbf"
6HOMEPAGE = "http://www.gammu.org/"
7
8SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BPN}/${PV}/${BP}.tar.bz2 \
9 file://gammurc \
10 file://gammu-smsdrc \
11"
12
13SRC_URI[md5sum] = "8ea16c6b3cc48097a8e62311fe0e25b9"
14SRC_URI[sha256sum] = "de67caa102aa4c8fbed5300e5a0262e40411c4cc79f4379a8d34eed797968fc3"
15
16inherit distutils cmake gettext
17
18do_install_append() {
19 # these files seem to only be used by symbian and trigger QA warnings
20 rm -rf ${D}/usr/share/gammu
21 #install default configuration files
22 install -d ${D}${sysconfdir}
23 install -m 0644 ${WORKDIR}/gammurc ${D}${sysconfdir}/gammurc
24 install -m 0644 ${WORKDIR}/gammu-smsdrc ${D}${sysconfdir}/gammu-smsdrc
25}
26
27EXTRA_OECONF = " \
28 --enable-shared \
29 --enable-backup \
30 --enable-protection \
31"
32
33EXTRA_OECMAKE = " \
34 -DWITH_CURL=OFF \
35 -DWITH_BLUETOOTH=OFF \
36 -DWITH_NOKIA_SUPPORT=OFF \
37 -DWITH_IRDA=OFF \
38 -DWITH_PYTHON=OFF \
39 -DWITH_MySQL=ON \
40 -DWITH_Postgres=OFF \
41"
42
43PACKAGES =+ "${PN}-smsd libgammu libgsmsd python-${PN}"
44
45FILES_${PN} = "${bindir}/gammu ${bindir}/jadmaker ${sysconfdir}/bash_completion.d/gammu \
46 ${bindir}/gammu-detect ${sysconfdir}/gammurc"
47CONFFILES_${PN} = "${sysconfdir}/gammurc"
48FILES_${PN}-smsd = "${bindir}/gammu-smsd* ${sysconfdir}/gammu-smsdrc"
49CONFFILES_${PN}-smsd = "${sysconfdir}/gammu-smsdrc"
50FILES_${PN}-dev += "${bindir}/gammu-config ${libdir}/*.so"
51FILES_${PN}-dbg += "${bindir}/.debug ${libdir}/.debug ${PYTHON_SITEPACKAGES_DIR}/gammu/.debug"
52FILES_libgammu = "${libdir}/libGammu.so.*"
53FILES_libgsmsd = "${libdir}/libgsmsd.so.*"
54FILES_python-${PN} = "${PYTHON_SITEPACKAGES_DIR}/gammu/*.??"
55
diff --git a/meta-oe/recipes-connectivity/gateone/gateone/0001-configuration.py-Hack-around-broken-gethostname-thin.patch b/meta-oe/recipes-connectivity/gateone/gateone/0001-configuration.py-Hack-around-broken-gethostname-thin.patch
new file mode 100644
index 000000000..5f5fa9117
--- /dev/null
+++ b/meta-oe/recipes-connectivity/gateone/gateone/0001-configuration.py-Hack-around-broken-gethostname-thin.patch
@@ -0,0 +1,26 @@
1From d811d3bdf06d78c93c48bef762c19c392c879077 Mon Sep 17 00:00:00 2001
2From: Koen Kooi <koen@dominion.thruhere.net>
3Date: Tue, 26 Nov 2013 15:54:10 +0100
4Subject: [PATCH] configuration.py: Hack around broken gethostname thingy
5
6Signed-off-by: Koen Kooi <koen@dominion.thruhere.net>
7---
8 gateone/core/configuration.py | 2 +-
9 1 file changed, 1 insertion(+), 1 deletion(-)
10
11diff --git a/gateone/core/configuration.py b/gateone/core/configuration.py
12index da41cbd..d1c8222 100644
13--- a/gateone/core/configuration.py
14+++ b/gateone/core/configuration.py
15@@ -162,7 +162,7 @@ def define_options(installed=True):
16 ]
17 # Used both http and https above to demonstrate that both are acceptable
18 try:
19- additional_origins = socket.gethostbyname_ex(socket.gethostname())
20+ additional_origins = [] # additional_origins = socket.gethostbyname_ex(socket.gethostname())
21 except socket.gaierror:
22 # Couldn't get any IPs from the hostname
23 additional_origins = []
24--
251.8.4.2
26
diff --git a/meta-oe/recipes-connectivity/gateone/gateone/80oe.conf b/meta-oe/recipes-connectivity/gateone/gateone/80oe.conf
new file mode 100644
index 000000000..cde98bc10
--- /dev/null
+++ b/meta-oe/recipes-connectivity/gateone/gateone/80oe.conf
@@ -0,0 +1,23 @@
1// Some custom Gate One settings for OpenEmbedded
2{
3 "*": {
4 "gateone": { // These settings apply to all of Gate One
5 "log_file_max_size": 5242880, // 5 megabyte logs for OE by default (default would normally be 100Mb)
6 "log_file_num_backups": 2, // Default is normally 10
7 "origins": ["*"], // Every device has a unique origin
8 "logging": "info",
9 "pid_file": "/var/run/gateone.pid",
10 "session_dir": "/tmp/gateone",
11 "user_dir": "/var/lib/gateone/users"
12 },
13 "terminal": {
14 // Disabling session logging for embedded devices is a good idea (limited/slow storage)
15 "session_logging": false,
16 "syslog_session_logging": false
17// "commands": {
18// // For some reason this doesn't work (never asks for the password)
19// "login": "setsid /bin/login" // Normally this would emulate logging into the host console
20// }
21 }
22 }
23}
diff --git a/meta-oe/recipes-connectivity/gateone/gateone/gateone-avahi.service b/meta-oe/recipes-connectivity/gateone/gateone/gateone-avahi.service
new file mode 100644
index 000000000..0f613d18e
--- /dev/null
+++ b/meta-oe/recipes-connectivity/gateone/gateone/gateone-avahi.service
@@ -0,0 +1,10 @@
1<?xml version="1.0" standalone='no'?><!--*-nxml-*-->
2<!DOCTYPE service-group SYSTEM "avahi-service.dtd">
3
4<service-group>
5 <name replace-wildcards="yes">GateOne on %h</name>
6 <service>
7 <type>_https._tcp</type>
8 <port>443</port>
9 </service>
10</service-group>
diff --git a/meta-oe/recipes-connectivity/gateone/gateone/gateone-init b/meta-oe/recipes-connectivity/gateone/gateone/gateone-init
new file mode 100644
index 000000000..a1cf5ce7e
--- /dev/null
+++ b/meta-oe/recipes-connectivity/gateone/gateone/gateone-init
@@ -0,0 +1,47 @@
1#!/bin/sh -e
2### BEGIN INIT INFO
3# Provides: gateone
4# Required-Start: networking
5# Default-Start: 2 3 4 5
6# Default-Stop: 0 1 6
7# Short-Description: Gateone HTML5 ssh client
8# Description: Gateone HTML5 terminal emulator and SSH client.
9### END INIT INFO
10
11. /etc/init.d/functions
12
13NAME=gateone
14DAEMON=/var/lib/gateone/gateone.py
15PIDFILE=/var/run/gateone.pid
16WORKDIR=/var/lib/gateone
17
18do_start() {
19 cd $WORKDIR
20 /usr/bin/python $DAEMON > /dev/null 2>&1 &
21 cd $OLDPWD
22}
23
24do_stop() {
25 kill -TERM `cat $PIDFILE`
26}
27
28case "$1" in
29 start)
30 echo "Starting gateone"
31 do_start
32 ;;
33 stop)
34 echo "Stopping gateone"
35 do_stop
36 ;;
37 restart|force-reload)
38 echo "Restart gateone"
39 do_stop
40 sleep 1
41 do_start
42 ;;
43 *)
44 echo "Usage: $0 {start|stop|restart|force-reload}" >&2
45 exit 1
46 ;;
47esac
diff --git a/meta-oe/recipes-connectivity/gateone/gateone/gateone.service b/meta-oe/recipes-connectivity/gateone/gateone/gateone.service
new file mode 100644
index 000000000..7b14d3e6d
--- /dev/null
+++ b/meta-oe/recipes-connectivity/gateone/gateone/gateone.service
@@ -0,0 +1,10 @@
1[Unit]
2Description=GateOne daemon
3ConditionPathExists=|/var/lib/gateone
4
5[Service]
6WorkingDirectory=/var/lib/gateone
7ExecStart=/usr/bin/python /usr/bin/gateone
8
9[Install]
10WantedBy=multi-user.target
diff --git a/meta-oe/recipes-connectivity/gateone/gateone_git.bb b/meta-oe/recipes-connectivity/gateone/gateone_git.bb
new file mode 100644
index 000000000..edc997721
--- /dev/null
+++ b/meta-oe/recipes-connectivity/gateone/gateone_git.bb
@@ -0,0 +1,82 @@
1SUMMARY = "HTML5 (plugin-free) web-based terminal emulator and SSH client"
2LICENSE = "AGPL-3.0"
3LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=834cbc6995db88433db17cdf8953a428"
4HOMEPAGE = "http://liftoffsoftware.com/Products/GateOne"
5
6PV = "1.2"
7SRCREV = "1528d324088fc1c180b7fdf50f5b5c1af057eef6"
8SRC_URI = "git://github.com/liftoff/GateOne.git \
9 file://0001-configuration.py-Hack-around-broken-gethostname-thin.patch \
10 file://gateone-avahi.service \
11 file://80oe.conf \
12 file://gateone.service \
13 file://gateone-init \
14"
15
16S = "${WORKDIR}/git"
17
18inherit distutils python-dir systemd update-rc.d
19export prefix = "${localstatedir}"
20
21DISTUTILS_INSTALL_ARGS = "--root=${D} \
22 --prefix=${prefix} \
23 --install-lib=${PYTHON_SITEPACKAGES_DIR} \
24 --install-data=${PYTHON_SITEPACKAGES_DIR} \
25 --install-scripts=${bindir} \
26 --skip_init_scripts"
27
28do_install_append() {
29
30 # fix up hardcoded paths
31 sed -i -e s:/usr/bin:${bindir}:g ${WORKDIR}/gateone.service
32
33 install -d ${D}${systemd_unitdir}/system
34 install -m 0644 ${WORKDIR}/gateone.service ${D}${systemd_unitdir}/system
35
36 install -d ${D}${sysconfdir}/init.d
37 install -m 0755 ${WORKDIR}/gateone-init ${D}${sysconfdir}/init.d/gateone
38
39 install -m 0755 -d ${D}${sysconfdir}/avahi/services/
40 install -m 0644 ${WORKDIR}/gateone-avahi.service ${D}${sysconfdir}/avahi/services/
41
42 install -m 0755 -d ${D}${sysconfdir}/gateone/conf.d/
43 install -m 0644 ${WORKDIR}/80oe.conf ${D}${sysconfdir}/gateone/conf.d/80oe.conf
44
45 install -d ${D}${localstatedir}/lib/gateone
46}
47
48FILES_${PN} = "${localstatedir}/lib ${bindir} ${base_libdir} ${sysconfdir} ${PYTHON_SITEPACKAGES_DIR}"
49RDEPENDS_${PN} = "mime-support \
50 openssh-ssh \
51 python-compression \
52 python-crypt \
53 python-datetime \
54 python-email \
55 python-fcntl \
56 python-futures \
57 python-html \
58 python-imaging \
59 python-io \
60 python-json \
61 python-logging \
62 python-misc \
63 python-multiprocessing \
64 python-netclient \
65 python-pkgutil \
66 python-pyopenssl \
67 python-re \
68 python-readline \
69 python-setuptools \
70 python-shell \
71 python-simplejson \
72 python-subprocess \
73 python-syslog \
74 python-terminal \
75 python-textutils \
76 python-tornado \
77 python-unixadmin \
78 python-xml \
79"
80
81SYSTEMD_SERVICE_${PN} = "gateone.service"
82INITSCRIPT_NAME = "gateone"
diff --git a/meta-oe/recipes-connectivity/gnokii/gnokii_0.6.31.bb b/meta-oe/recipes-connectivity/gnokii/gnokii_0.6.31.bb
new file mode 100644
index 000000000..482645286
--- /dev/null
+++ b/meta-oe/recipes-connectivity/gnokii/gnokii_0.6.31.bb
@@ -0,0 +1,33 @@
1SUMMARY = "Cellphone tools and driver software"
2SECTION = "console/network"
3LICENSE = "GPLv2+"
4LIC_FILES_CHKSUM = "file://COPYING;md5=0636e73ff0215e8d672dc4c32c317bb3"
5
6SRC_URI = "http://www.gnokii.org/download/gnokii/gnokii-${PV}.tar.bz2"
7
8DEPENDS = "glib-2.0"
9X11DEPENDS = " libxpm gtk+"
10
11PACKAGECONFIG ??= "${@base_contains('DISTRO_FEATURES', 'x11', 'x11', '', d)}"
12PACKAGECONFIG[bluez] = "--enable-bluetooth,--disable-bluetooth,bluez4"
13PACKAGECONFIG[libical] = "--enable-libical,--disable-libical,libical"
14PACKAGECONFIG[pcsc-lite] = "--enable-libpcsclite,--disable-libpcsclite,pcsc-lite"
15PACKAGECONFIG[readline] = "--with-readline,--without-readline,readline"
16PACKAGECONFIG[usb] = "--enable-libusb,--disable-libusb,virtual/libusb0"
17PACKAGECONFIG[x11] = ",--without-x,${X11DEPENDS}"
18
19inherit autotools pkgconfig
20
21PACKAGES += "libgnokii libgnokii-dev"
22
23EXTRA_OECONF = "--disable-smsd"
24
25FILES_${PN} = "${bindir} ${sbindir}"
26FILES_libgnokii-dev = "${includedir} ${libdir}/lib*.so ${libdir}/*.la \
27 ${libdir}/*.a ${libdir}/*.o ${libdir}/pkgconfig \
28 /lib/*.a /lib/*.o ${datadir}/aclocal"
29FILES_${PN}-dev = ""
30FILES_libgnokii = "${libdir}/libgnokii.so.*"
31
32SRC_URI[md5sum] = "d9627f4a1152d3ea7806df4532850d5f"
33SRC_URI[sha256sum] = "8f5a083b05c1a66a3402ca5cd80084e14c2c0632c991bb53b03c78e9adb02501"
diff --git a/meta-oe/recipes-connectivity/gnuradio/gnuradio/0001-buildsys-don-t-add-usr-include-and-usr-lib-to-config.patch b/meta-oe/recipes-connectivity/gnuradio/gnuradio/0001-buildsys-don-t-add-usr-include-and-usr-lib-to-config.patch
new file mode 100644
index 000000000..7caaf51e6
--- /dev/null
+++ b/meta-oe/recipes-connectivity/gnuradio/gnuradio/0001-buildsys-don-t-add-usr-include-and-usr-lib-to-config.patch
@@ -0,0 +1,58 @@
1From a130153ae84d8bb5914879cef94df09f06825c10 Mon Sep 17 00:00:00 2001
2From: Koen Kooi <koen@dominion.thruhere.net>
3Date: Sat, 18 Jun 2011 11:25:36 +0200
4Subject: [PATCH] buildsys: don't add /usr/include and /usr/lib to configure tests for libusb
5
6Signed-off-by: Koen Kooi <koen@dominion.thruhere.net>
7---
8 config/usrp_libusb.m4 | 13 -------------
9 1 files changed, 0 insertions(+), 13 deletions(-)
10
11diff --git a/config/usrp_libusb.m4 b/config/usrp_libusb.m4
12index cc3410f..b421820 100644
13--- a/config/usrp_libusb.m4
14+++ b/config/usrp_libusb.m4
15@@ -117,10 +117,6 @@ AC_DEFUN([USRP_LIBUSB], [
16 dnl configured to find this header.
17 AC_LANG_PUSH(C)
18 save_CPPFLAGS="$CPPFLAGS"
19- if test x$USB_INCLUDEDIR != x; then
20- USB_INCLUDES="-I$USB_INCLUDEDIR"
21- CPPFLAGS="$USB_INCLUDES"
22- fi
23 AC_MSG_CHECKING([$libusb_name for header $usb_header])
24 AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
25 #include "$usb_header"
26@@ -172,9 +168,6 @@ AC_DEFUN([USRP_LIBUSB], [
27 *)
28 AC_LANG_PUSH(C)
29 save_CPPFLAGS="$CPPFLAGS"
30- if test x$USB_INCLUDEDIR != x; then
31- CPPFLAGS="$USB_INCLUDES"
32- fi
33 save_LIBS="$LIBS"
34 LIBS="$USB_LIBS"
35 AC_MSG_CHECKING([$libusb_name for symbol usb_debug in library $usb_lib_name])
36@@ -217,9 +210,6 @@ AC_DEFUN([USRP_LIBUSB], [
37 dnl final error checking, mostly to create #define's
38 AC_LANG_PUSH(C)
39 save_CPPFLAGS="$CPPFLAGS"
40- if test x$USB_INCLUDEDIR != x; then
41- CPPFLAGS="$USB_INCLUDES"
42- fi
43 dnl Check for the header.
44 AC_CHECK_HEADERS([$usb_header], [], [libusbok=no])
45 CPPFLAGS="$save_CPPFLAGS"
46@@ -231,9 +221,6 @@ AC_DEFUN([USRP_LIBUSB], [
47 dnl check for the library (again)
48 AC_LANG_PUSH(C)
49 save_CPPFLAGS="$CPPFLAGS"
50- if test x$USB_INCLUDEDIR != x; then
51- CPPFLAGS="$USB_INCLUDES"
52- fi
53 save_LIBS="$LIBS"
54 LIBS="$USB_LIBS"
55 AC_CHECK_LIB([$usb_lib_name], [$usb_lib_func], [], [
56--
571.6.6.1
58
diff --git a/meta-oe/recipes-connectivity/hostapd/hostapd-2.2/defconfig b/meta-oe/recipes-connectivity/hostapd/hostapd-2.2/defconfig
new file mode 100644
index 000000000..278964017
--- /dev/null
+++ b/meta-oe/recipes-connectivity/hostapd/hostapd-2.2/defconfig
@@ -0,0 +1,145 @@
1# Example hostapd build time configuration
2#
3# This file lists the configuration options that are used when building the
4# hostapd binary. All lines starting with # are ignored. Configuration option
5# lines must be commented out completely if they are not to be included, i.e.,
6# just setting VARIABLE=n is not disabling that variable.
7#
8# This file is included in Makefile, so variables like CFLAGS and LIBS can also
9# be modified from here. In most cases, these lines should use += in order not
10# to override previous values of the variables.
11
12# Driver interface for Host AP driver
13CONFIG_DRIVER_HOSTAP=y
14
15# Driver interface for wired authenticator
16CONFIG_DRIVER_WIRED=y
17
18# Driver interface for madwifi driver
19#CONFIG_DRIVER_MADWIFI=y
20#CFLAGS += -I../../madwifi # change to the madwifi source directory
21
22# Driver interface for Prism54 driver
23CONFIG_DRIVER_PRISM54=y
24
25# Driver interface for drivers using the nl80211 kernel interface
26CONFIG_DRIVER_NL80211=y
27CONFIG_LIBNL32=y
28# driver_nl80211.c requires a rather new libnl (version 1.1) which may not be
29# shipped with your distribution yet. If that is the case, you need to build
30# newer libnl version and point the hostapd build to use it.
31#LIBNL=/usr/src/libnl
32#CFLAGS += -I$(LIBNL)/include
33#LIBS += -L$(LIBNL)/lib
34
35# Driver interface for FreeBSD net80211 layer (e.g., Atheros driver)
36#CONFIG_DRIVER_BSD=y
37#CFLAGS += -I/usr/local/include
38#LIBS += -L/usr/local/lib
39
40# Driver interface for no driver (e.g., RADIUS server only)
41#CONFIG_DRIVER_NONE=y
42
43# IEEE 802.11F/IAPP
44CONFIG_IAPP=y
45
46# WPA2/IEEE 802.11i RSN pre-authentication
47CONFIG_RSN_PREAUTH=y
48
49# PeerKey handshake for Station to Station Link (IEEE 802.11e DLS)
50CONFIG_PEERKEY=y
51
52# IEEE 802.11w (management frame protection)
53# This version is an experimental implementation based on IEEE 802.11w/D1.0
54# draft and is subject to change since the standard has not yet been finalized.
55# Driver support is also needed for IEEE 802.11w.
56#CONFIG_IEEE80211W=y
57
58# Integrated EAP server
59CONFIG_EAP=y
60
61# EAP-MD5 for the integrated EAP server
62CONFIG_EAP_MD5=y
63
64# EAP-TLS for the integrated EAP server
65CONFIG_EAP_TLS=y
66
67# EAP-MSCHAPv2 for the integrated EAP server
68CONFIG_EAP_MSCHAPV2=y
69
70# EAP-PEAP for the integrated EAP server
71CONFIG_EAP_PEAP=y
72
73# EAP-GTC for the integrated EAP server
74CONFIG_EAP_GTC=y
75
76# EAP-TTLS for the integrated EAP server
77CONFIG_EAP_TTLS=y
78
79# EAP-SIM for the integrated EAP server
80#CONFIG_EAP_SIM=y
81
82# EAP-AKA for the integrated EAP server
83#CONFIG_EAP_AKA=y
84
85# EAP-AKA' for the integrated EAP server
86# This requires CONFIG_EAP_AKA to be enabled, too.
87#CONFIG_EAP_AKA_PRIME=y
88
89# EAP-PAX for the integrated EAP server
90#CONFIG_EAP_PAX=y
91
92# EAP-PSK for the integrated EAP server (this is _not_ needed for WPA-PSK)
93#CONFIG_EAP_PSK=y
94
95# EAP-SAKE for the integrated EAP server
96#CONFIG_EAP_SAKE=y
97
98# EAP-GPSK for the integrated EAP server
99#CONFIG_EAP_GPSK=y
100# Include support for optional SHA256 cipher suite in EAP-GPSK
101#CONFIG_EAP_GPSK_SHA256=y
102
103# EAP-FAST for the integrated EAP server
104# Note: Default OpenSSL package does not include support for all the
105# functionality needed for EAP-FAST. If EAP-FAST is enabled with OpenSSL,
106# the OpenSSL library must be patched (openssl-0.9.9-session-ticket.patch)
107# to add the needed functions.
108#CONFIG_EAP_FAST=y
109
110# Wi-Fi Protected Setup (WPS)
111CONFIG_WPS=y
112# Enable UPnP support for external WPS Registrars
113#CONFIG_WPS_UPNP=y
114
115# EAP-IKEv2
116#CONFIG_EAP_IKEV2=y
117
118# Trusted Network Connect (EAP-TNC)
119#CONFIG_EAP_TNC=y
120
121# PKCS#12 (PFX) support (used to read private key and certificate file from
122# a file that usually has extension .p12 or .pfx)
123CONFIG_PKCS12=y
124
125# RADIUS authentication server. This provides access to the integrated EAP
126# server from external hosts using RADIUS.
127CONFIG_RADIUS_SERVER=y
128
129# Build IPv6 support for RADIUS operations
130CONFIG_IPV6=y
131
132# IEEE Std 802.11r-2008 (Fast BSS Transition)
133#CONFIG_IEEE80211R=y
134
135# Use the hostapd's IEEE 802.11 authentication (ACL), but without
136# the IEEE 802.11 Management capability (e.g., madwifi or FreeBSD/net80211)
137CONFIG_DRIVER_RADIUS_ACL=y
138
139# IEEE 802.11n (High Throughput) support
140CONFIG_IEEE80211N=y
141
142# Remove debugging code that is printing out debug messages to stdout.
143# This can be used to reduce the size of the hostapd considerably if debugging
144# code is not needed.
145#CONFIG_NO_STDOUT_DEBUG=y
diff --git a/meta-oe/recipes-connectivity/hostapd/hostapd-2.2/hostapd.service b/meta-oe/recipes-connectivity/hostapd/hostapd-2.2/hostapd.service
new file mode 100644
index 000000000..151c0504f
--- /dev/null
+++ b/meta-oe/recipes-connectivity/hostapd/hostapd-2.2/hostapd.service
@@ -0,0 +1,11 @@
1[Unit]
2Description=Hostapd IEEE 802.11 AP, IEEE 802.1X/WPA/WPA2/EAP/RADIUS Authenticator
3After=network.target
4
5[Service]
6Type=forking
7PIDFile=/run/hostapd.pid
8ExecStart=@SBINDIR@/hostapd @SYSCONFDIR@/hostapd.conf -P /run/hostapd.pid -B
9
10[Install]
11WantedBy=multi-user.target
diff --git a/meta-oe/recipes-connectivity/hostapd/hostapd-2.2/init b/meta-oe/recipes-connectivity/hostapd/hostapd-2.2/init
new file mode 100644
index 000000000..8ba4e0794
--- /dev/null
+++ b/meta-oe/recipes-connectivity/hostapd/hostapd-2.2/init
@@ -0,0 +1,58 @@
1#!/bin/sh
2DAEMON=/usr/sbin/hostapd
3NAME=hostapd
4DESC="HOSTAP Daemon"
5ARGS="/etc/hostapd.conf -B"
6
7test -f $DAEMON || exit 0
8
9set -e
10
11# source function library
12. /etc/init.d/functions
13
14delay_stop() {
15 count=0
16 while [ $count -lt 9 ] ; do
17 if pidof $DAEMON >/dev/null; then
18 sleep 1
19 else
20 return 0
21 fi
22 count=`expr $count + 1`
23 done
24 echo "Failed to stop $DESC."
25 return 1
26}
27
28case "$1" in
29 start)
30 echo -n "Starting $DESC: "
31 start-stop-daemon -S -x $DAEMON -- $ARGS
32 echo "$NAME."
33 ;;
34 stop)
35 echo -n "Stopping $DESC: "
36 start-stop-daemon -K --oknodo -x $DAEMON
37 echo "$NAME."
38 ;;
39 restart)
40 $0 stop
41 delay_stop && $0 start
42 ;;
43 reload)
44 echo -n "Reloading $DESC: "
45 killall -HUP $(basename ${DAEMON})
46 echo "$NAME."
47 ;;
48 status)
49 status $DAEMON
50 exit $?
51 ;;
52 *)
53 echo "Usage: $0 {start|stop|restart|reload|status}"
54 exit 1
55 ;;
56esac
57
58exit 0
diff --git a/meta-oe/recipes-connectivity/hostapd/hostapd_2.2.bb b/meta-oe/recipes-connectivity/hostapd/hostapd_2.2.bb
new file mode 100644
index 000000000..b75f2b9a7
--- /dev/null
+++ b/meta-oe/recipes-connectivity/hostapd/hostapd_2.2.bb
@@ -0,0 +1,48 @@
1HOMEPAGE = "http://hostap.epitest.fi"
2SECTION = "kernel/userland"
3LICENSE = "GPLv2 | BSD"
4LIC_FILES_CHKSUM = "file://README;md5=0854a4da34ac3990770794d771fac7fd"
5DEPENDS = "libnl openssl"
6SUMMARY = "User space daemon for extended IEEE 802.11 management"
7
8inherit update-rc.d systemd
9INITSCRIPT_NAME = "hostapd"
10
11SYSTEMD_SERVICE_${PN} = "hostapd.service"
12SYSTEMD_AUTO_ENABLE_${PN} = "disable"
13
14DEFAULT_PREFERENCE = "-1"
15
16SRC_URI = " \
17 http://hostap.epitest.fi/releases/hostapd-${PV}.tar.gz \
18 file://defconfig \
19 file://init \
20 file://hostapd.service \
21"
22
23S = "${WORKDIR}/hostapd-${PV}/hostapd"
24
25
26do_configure() {
27 install -m 0644 ${WORKDIR}/defconfig ${S}/.config
28}
29
30do_compile() {
31 export CFLAGS="-MMD -O2 -Wall -g -I${STAGING_INCDIR}/libnl3"
32 make
33}
34
35do_install() {
36 install -d ${D}${sbindir} ${D}${sysconfdir}/init.d ${D}${systemd_unitdir}/system/
37 install -m 0644 ${S}/hostapd.conf ${D}${sysconfdir}
38 install -m 0755 ${S}/hostapd ${D}${sbindir}
39 install -m 0755 ${S}/hostapd_cli ${D}${sbindir}
40 install -m 755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/hostapd
41 install -m 0644 ${WORKDIR}/hostapd.service ${D}${systemd_unitdir}/system/
42 sed -i -e 's,@SBINDIR@,${sbindir},g' -e 's,@SYSCONFDIR@,${sysconfdir},g' ${D}${systemd_unitdir}/system/hostapd.service
43}
44
45CONFFILES_${PN} += "${sysconfdir}/hostapd.conf"
46
47SRC_URI[md5sum] = "23c1f78a693c3288802d516adb7fd289"
48SRC_URI[sha256sum] = "f15b6bcb434378860ea5b88dffed7f54d8cb71fff2146de0f006977a5e25a882"
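For reference, the sed in do_install above expands the @SBINDIR@ and @SYSCONFDIR@ placeholders of the hostapd.service template. A sketch of the same expansion with the common values /usr/sbin and /etc (typical defaults, not read from this commit):

sed -e 's,@SBINDIR@,/usr/sbin,g' -e 's,@SYSCONFDIR@,/etc,g' hostapd.service
# the installed unit's ExecStart line then reads:
#   ExecStart=/usr/sbin/hostapd /etc/hostapd.conf -P /run/hostapd.pid -B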
diff --git a/meta-oe/recipes-connectivity/irssi/irssi_0.8.16-rc1.bb b/meta-oe/recipes-connectivity/irssi/irssi_0.8.16-rc1.bb
new file mode 100644
index 000000000..54d881d1c
--- /dev/null
+++ b/meta-oe/recipes-connectivity/irssi/irssi_0.8.16-rc1.bb
@@ -0,0 +1,22 @@
1SUMMARY = "ncurses IRC client"
2DESCRIPTION = "Irssi is an ncurses IRC client"
3LICENSE = "GPLv2"
4LIC_FILES_CHKSUM = "file://COPYING;md5=55fdc1113306167d6ea2561404ce02f8"
5
6DEPENDS = "glib-2.0 ncurses openssl"
7
8REALPV = "0.8.16-rc1"
9PV = "0.8.15+${REALPV}"
10
11SRC_URI = "http://irssi.org/files/irssi-${REALPV}.tar.gz"
12
13SRC_URI[md5sum] = "769fec4df8e633c583c411ccd2cd563a"
14SRC_URI[sha256sum] = "bb6c0125db30b697f80837941c17372b7484c64d57a6920b8bfa7ee3def92de3"
15
16S = "${WORKDIR}/irssi-${REALPV}"
17
18inherit autotools pkgconfig
19
20EXTRA_OECONF = "--enable-ssl \
21 --with-ncurses=${STAGING_EXECPREFIXDIR} \
22"
diff --git a/meta-oe/recipes-connectivity/iw/iw/0001-iw-version.sh-don-t-use-git-describe-for-versioning.patch b/meta-oe/recipes-connectivity/iw/iw/0001-iw-version.sh-don-t-use-git-describe-for-versioning.patch
new file mode 100644
index 000000000..e64dd0ad9
--- /dev/null
+++ b/meta-oe/recipes-connectivity/iw/iw/0001-iw-version.sh-don-t-use-git-describe-for-versioning.patch
@@ -0,0 +1,43 @@
1From 5310abba864cfe3a8b65af130729447604190b29 Mon Sep 17 00:00:00 2001
2From: Koen Kooi <koen@dominion.thruhere.net>
3Date: Tue, 29 Nov 2011 17:03:27 +0100
4Subject: [PATCH] iw: version.sh: don't use git describe for versioning
5
6It will detect top-level git repositories like the Angstrom setup-scripts and break.
7
8Upstream-Status: Unknown
9
10Signed-off-by: Koen Kooi <koen@dominion.thruhere.net>
11---
12 version.sh | 16 +---------------
13 1 file changed, 1 insertion(+), 15 deletions(-)
14
15diff --git a/version.sh b/version.sh
16index 11d124b..5d423c4 100755
17--- a/version.sh
18+++ b/version.sh
19@@ -3,21 +3,7 @@
20 VERSION="3.15"
21 OUT="$1"
22
23-if [ -d .git ] && head=`git rev-parse --verify HEAD 2>/dev/null`; then
24- git update-index --refresh --unmerged > /dev/null
25- descr=$(git describe)
26-
27- # on git builds check that the version number above
28- # is correct...
29- [ "${descr%%-*}" = "v$VERSION" ] || exit 2
30-
31- v="${descr#v}"
32- if git diff-index --name-only HEAD | read dummy ; then
33- v="$v"-dirty
34- fi
35-else
36- v="$VERSION"
37-fi
38+v="$VERSION"
39
40 echo '#include "iw.h"' > "$OUT"
41 echo "const char iw_version[] = \"$v\";" >> "$OUT"
42--
431.7.7.3
diff --git a/meta-oe/recipes-connectivity/iw/iw_3.15.bb b/meta-oe/recipes-connectivity/iw/iw_3.15.bb
new file mode 100644
index 000000000..7800e905e
--- /dev/null
+++ b/meta-oe/recipes-connectivity/iw/iw_3.15.bb
@@ -0,0 +1,23 @@
1SUMMARY = "nl80211 based CLI configuration utility for wireless devices"
2DESCRIPTION = "iw is a new nl80211 based CLI configuration utility for \
3wireless devices. It supports almost all new drivers that have been added \
4to the kernel recently. "
5HOMEPAGE = "http://wireless.kernel.org/en/users/Documentation/iw"
6SECTION = "base"
7LICENSE = "BSD"
8LIC_FILES_CHKSUM = "file://COPYING;md5=878618a5c4af25e9b93ef0be1a93f774"
9
10DEPENDS = "libnl pkgconfig"
11
12SRC_URI = "http://www.kernel.org/pub/software/network/iw/${BP}.tar.gz \
13 file://0001-iw-version.sh-don-t-use-git-describe-for-versioning.patch \
14"
15
16SRC_URI[md5sum] = "ebb16e6c29b075e3a58b99552583fd79"
17SRC_URI[sha256sum] = "1223ebab68dc337f16ed80c45af37b78f112ea091e919eafe96a4cbd63942081"
18
19EXTRA_OEMAKE = ""
20
21do_install() {
22 oe_runmake DESTDIR=${D} install
23}
diff --git a/meta-oe/recipes-connectivity/krb5/krb5/0001-Return-only-new-keys-in-randkey-CVE-2014-5351.patch b/meta-oe/recipes-connectivity/krb5/krb5/0001-Return-only-new-keys-in-randkey-CVE-2014-5351.patch
new file mode 100644
index 000000000..08526610a
--- /dev/null
+++ b/meta-oe/recipes-connectivity/krb5/krb5/0001-Return-only-new-keys-in-randkey-CVE-2014-5351.patch
@@ -0,0 +1,92 @@
1From af0ed4df4dfae762ab5fb605f5a0c8f59cb4f6ca Mon Sep 17 00:00:00 2001
2From: Greg Hudson <ghudson@mit.edu>
3Date: Thu, 21 Aug 2014 13:52:07 -0400
4Subject: [PATCH] Return only new keys in randkey [CVE-2014-5351]
5
6In kadmind's randkey operation, if a client specifies the keepold
7flag, do not include the preserved old keys in the response.
8
9CVE-2014-5351:
10
11An authenticated remote attacker can retrieve the current keys for a
12service principal when generating a new set of keys for that
13principal. The attacker needs to be authenticated as a user who has
14the elevated privilege for randomizing the keys of other principals.
15
16Normally, when a Kerberos administrator randomizes the keys of a
17service principal, kadmind returns only the new keys. This prevents
18an administrator who lacks legitimate privileged access to a service
19from forging tickets to authenticate to that service. If the
20"keepold" flag to the kadmin randkey RPC operation is true, kadmind
21retains the old keys in the KDC database as intended, but also
22unexpectedly returns the old keys to the client, which exposes the
23service to ticket forgery attacks from the administrator.
24
25A mitigating factor is that legitimate clients of the affected service
26will start failing to authenticate to the service once they begin to
27receive service tickets encrypted in the new keys. The affected
28service will be unable to decrypt the newly issued tickets, possibly
29alerting the legitimate administrator of the affected service.
30
31CVSSv2: AV:N/AC:H/Au:S/C:P/I:N/A:N/E:POC/RL:OF/RC:C
32
33[tlyu@mit.edu: CVE description and CVSS score]
34
35ticket: 8018 (new)
36target_version: 1.13
37tags: pullup
38
39Upstream-Status: Backport
40---
41 src/lib/kadm5/srv/svr_principal.c | 21 ++++++++++++++++++---
42 1 files changed, 18 insertions(+), 3 deletions(-)
43
44diff --git a/lib/kadm5/srv/svr_principal.c b/lib/kadm5/srv/svr_principal.c
45index 5d358bd..d4e74cc 100644
46--- a/lib/kadm5/srv/svr_principal.c
47+++ b/lib/kadm5/srv/svr_principal.c
48@@ -344,6 +344,20 @@ check_1_6_dummy(kadm5_principal_ent_t entry, long mask,
49 *passptr = NULL;
50 }
51
52+/* Return the number of keys with the newest kvno. Assumes that all key data
53+ * with the newest kvno are at the front of the key data array. */
54+static int
55+count_new_keys(int n_key_data, krb5_key_data *key_data)
56+{
57+ int n;
58+
59+ for (n = 1; n < n_key_data; n++) {
60+ if (key_data[n - 1].key_data_kvno != key_data[n].key_data_kvno)
61+ return n;
62+ }
63+ return n_key_data;
64+}
65+
66 kadm5_ret_t
67 kadm5_create_principal(void *server_handle,
68 kadm5_principal_ent_t entry, long mask,
69@@ -1593,7 +1607,7 @@ kadm5_randkey_principal_3(void *server_handle,
70 osa_princ_ent_rec adb;
71 krb5_int32 now;
72 kadm5_policy_ent_rec pol;
73- int ret, last_pwd;
74+ int ret, last_pwd, n_new_keys;
75 krb5_boolean have_pol = FALSE;
76 kadm5_server_handle_t handle = server_handle;
77 krb5_keyblock *act_mkey;
78@@ -1686,8 +1700,9 @@ kadm5_randkey_principal_3(void *server_handle,
79 kdb->fail_auth_count = 0;
80
81 if (keyblocks) {
82- ret = decrypt_key_data(handle->context,
83- kdb->n_key_data, kdb->key_data,
84+ /* Return only the new keys added by krb5_dbe_crk. */
85+ n_new_keys = count_new_keys(kdb->n_key_data, kdb->key_data);
86+ ret = decrypt_key_data(handle->context, n_new_keys, kdb->key_data,
87 keyblocks, n_keys);
88 if (ret)
89 goto done;
90--
911.7.4.1
92
diff --git a/meta-oe/recipes-connectivity/krb5/krb5/0001-aclocal-Add-parameter-to-disable-keyutils-detection.patch b/meta-oe/recipes-connectivity/krb5/krb5/0001-aclocal-Add-parameter-to-disable-keyutils-detection.patch
new file mode 100644
index 000000000..f0c310c5e
--- /dev/null
+++ b/meta-oe/recipes-connectivity/krb5/krb5/0001-aclocal-Add-parameter-to-disable-keyutils-detection.patch
@@ -0,0 +1,33 @@
1From ecb62f3467f493cc0d679323f05367eebbf0fb67 Mon Sep 17 00:00:00 2001
2From: Martin Jansa <Martin.Jansa@gmail.com>
3Date: Tue, 1 Oct 2013 22:22:57 +0200
4Subject: [PATCH] aclocal: Add parameter to disable keyutils detection
5
6Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
7---
8 aclocal.m4 | 4 ++++
9 1 file changed, 4 insertions(+)
10
11diff --git a/aclocal.m4 b/aclocal.m4
12index 210c473..83b1f02 100644
13--- a/aclocal.m4
14+++ b/aclocal.m4
15@@ -1650,11 +1650,15 @@ fi
16 dnl
17 dnl If libkeyutils exists (on Linux) include it and use keyring ccache
18 AC_DEFUN(KRB5_AC_KEYRING_CCACHE,[
19+AC_ARG_ENABLE([keyutils],
20+AC_HELP_STRING([--disable-keyutils],don't enable using keyutils for keyring ccache @<:@enabled@:>@), , enable_keyutils=yes)
21+if test "$enable_keyutils" = yes; then
22 AC_CHECK_HEADERS([keyutils.h],
23 AC_CHECK_LIB(keyutils, add_key,
24 [dnl Pre-reqs were found
25 AC_DEFINE(USE_KEYRING_CCACHE, 1, [Define if the keyring ccache should be enabled])
26 LIBS="-lkeyutils $LIBS"
27 ]))
28+fi
29 ])dnl
30 dnl
31--
321.8.3.2
33
diff --git a/meta-oe/recipes-connectivity/krb5/krb5/crosscompile_nm.patch b/meta-oe/recipes-connectivity/krb5/krb5/crosscompile_nm.patch
new file mode 100644
index 000000000..d3e693751
--- /dev/null
+++ b/meta-oe/recipes-connectivity/krb5/krb5/crosscompile_nm.patch
@@ -0,0 +1,29 @@
1Modifies export-check.pl to look for $ENV{'NM'} before
2defaulting to using 'nm'
3
4Upstream-Status: Pending
5
6Signed-off-by: Amy Fong <amy.fong@windriver.com>
7---
8
9 export-check.pl | 7 ++++++-
10 1 file changed, 6 insertions(+), 1 deletion(-)
11
12Index: src/util/export-check.pl
13===================================================================
14--- src.orig/util/export-check.pl
15+++ src/util/export-check.pl
16@@ -38,7 +38,12 @@
17 my($exfile, $libfile) = @ARGV;
18
19 @missing = ();
20-open NM, "nm -Dg --defined-only $libfile |" || die "can't run nm on $libfile: $!";
21+if (defined($ENV{'NM'})) {
22+ $nm = $ENV{'NM'};
23+} else {
24+ $nm = "nm";
25+}
26+open NM, "$nm -Dg --defined-only $libfile |" || die "can't run nm on $libfile: $!";
27 open EXPORT, "< $exfile" || die "can't read $exfile: $!";
28
29 @export = <EXPORT>;
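Because the host nm usually cannot read target ELF objects, a cross build is expected to export the target toolchain's nm so the patched export-check.pl picks it up from the environment. A usage sketch (the toolchain triplet and file paths below are illustrative assumptions, not taken from this commit):

# point the script at the cross nm, then run it against an exports list and library
export NM=arm-poky-linux-gnueabi-nm
perl util/export-check.pl lib/krb5/libkrb5.exports lib/libkrb5.so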
diff --git a/meta-oe/recipes-connectivity/krb5/krb5/debian-suppress-usr-lib-in-krb5-config.patch b/meta-oe/recipes-connectivity/krb5/krb5/debian-suppress-usr-lib-in-krb5-config.patch
new file mode 100644
index 000000000..f0182ee5d
--- /dev/null
+++ b/meta-oe/recipes-connectivity/krb5/krb5/debian-suppress-usr-lib-in-krb5-config.patch
@@ -0,0 +1,48 @@
1Subject: [PATCH] debian: suppress /usr/lib in krb5-config
2
3Upstream-Status: Pending
4
5Handle multi-arch suppressions
6
7The patch is from Debian.
8
9Signed-off-by: Jackie Huang <jackie.huang@windriver.com>
10---
11 src/build-tools/krb5-config.in | 14 +++++++++-----
12 1 files changed, 9 insertions(+), 5 deletions(-)
13
14diff --git a/src/build-tools/krb5-config.in b/src/build-tools/krb5-config.in
15index f6184da..637bad7 100755
16--- a/src/build-tools/krb5-config.in
17+++ b/src/build-tools/krb5-config.in
18@@ -138,6 +138,7 @@ if test -n "$do_help"; then
19 echo " [--defktname] Show built-in default keytab name"
20 echo " [--defcktname] Show built-in default client keytab name"
21 echo " [--cflags] Compile time CFLAGS"
22+ echo " [--deps] Include dependent libraries"
23 echo " [--libs] List libraries required to link [LIBRARIES]"
24 echo "Libraries:"
25 echo " krb5 Kerberos 5 application"
26@@ -209,11 +210,14 @@ fi
27
28 if test -n "$do_libs"; then
29 # Assumes /usr/lib is the standard library directory everywhere...
30- if test "$libdir" = /usr/lib; then
31- libdirarg=
32- else
33- libdirarg="-L$libdir"
34- fi
35+ case $libdir in
36+ /usr/lib*)
37+ libdirarg=
38+ ;;
39+ *)
40+ libdirarg="-L$libdir"
41+ ;;
42+ esac
43 # Ugly gross hack for our build tree
44 lib_flags=`echo $CC_LINK | sed -e 's/\$(CC)//' \
45 -e 's/\$(PURE)//' \
46--
471.7.1
48
diff --git a/meta-oe/recipes-connectivity/krb5/krb5/etc/default/krb5-admin-server b/meta-oe/recipes-connectivity/krb5/krb5/etc/default/krb5-admin-server
new file mode 100644
index 000000000..283592913
--- /dev/null
+++ b/meta-oe/recipes-connectivity/krb5/krb5/etc/default/krb5-admin-server
@@ -0,0 +1,6 @@
1# Automatically generated. If you change anything in this file other than the
2# values of RUN_KADMIND or DAEMON_ARGS, first run dpkg-reconfigure
3# krb5-admin-server and disable managing the kadmin configuration with
4# debconf. Otherwise, changes will be overwritten.
5
6RUN_KADMIND=true
diff --git a/meta-oe/recipes-connectivity/krb5/krb5/etc/default/krb5-kdc b/meta-oe/recipes-connectivity/krb5/krb5/etc/default/krb5-kdc
new file mode 100644
index 000000000..310bfcf8a
--- /dev/null
+++ b/meta-oe/recipes-connectivity/krb5/krb5/etc/default/krb5-kdc
@@ -0,0 +1,5 @@
1# Automatically generated. Only the value of DAEMON_ARGS will be preserved.
2# If you change anything in this file other than DAEMON_ARGS, first run
3# dpkg-reconfigure krb5-kdc and disable managing the KDC configuration with
4# debconf. Otherwise, changes will be overwritten.
5
diff --git a/meta-oe/recipes-connectivity/krb5/krb5/etc/init.d/krb5-admin-server b/meta-oe/recipes-connectivity/krb5/krb5/etc/init.d/krb5-admin-server
new file mode 100755
index 000000000..79238d4f4
--- /dev/null
+++ b/meta-oe/recipes-connectivity/krb5/krb5/etc/init.d/krb5-admin-server
@@ -0,0 +1,140 @@
1#! /bin/sh
2### BEGIN INIT INFO
3# Provides: krb5-admin-server
4# Required-Start: $local_fs $remote_fs $network $syslog
5# Required-Stop: $local_fs $remote_fs $network $syslog
6# Should-Start: krb5-kdc
7# Should-Stop: krb5-kdc
8# Default-Start: 2 3 4 5
9# Default-Stop: 0 1 6
10# Short-Description: MIT Kerberos KDC administrative daemon
11# Description: Starts, stops, or restarts the MIT Kerberos KDC
12# administrative daemon (kadmind). This daemon answers
13# requests from kadmin clients and allows administrators
14# to create, delete, and modify principals in the KDC
15# database.
16### END INIT INFO
17
18# Author: Sam Hartman <hartmans@mit.edu>
19# Author: Russ Allbery <rra@debian.org>
20#
21# Based on the /etc/init.d/skeleton template as found in initscripts version
22# 2.86.ds1-15.
23
24# June, 2012: Adapted for Yocto <amy.fong@windriver.com>
25
26PATH=/usr/sbin:/usr/bin:/sbin:/bin
27DESC="Kerberos administrative servers"
28NAME=kadmind
29DAEMON=/usr/sbin/$NAME
30DAEMON_ARGS=""
31SCRIPTNAME=/etc/init.d/krb5-admin-server
32DEFAULT=/etc/default/krb5-admin-server
33
34# Exit if the package is not installed.
35[ -x "$DAEMON" ] || exit 0
36
37# Read configuration if it is present.
38[ -r "$DEFAULT" ] && . "$DEFAULT"
39
40# Get the setting of VERBOSE and other rcS variables.
41[ -f /etc/default/rcS ] && . /etc/default/rcS
42
43. /etc/init.d/functions
44
45ADMIN_SERVER_LOG=/var/log/kadmind.log
46[ -f $ADMIN_SERVER_LOG ] && (test ! -x /sbin/restorecon \
47 || /sbin/restorecon -F $ADMIN_SERVER_LOG)
48
49# Return
50# 0 if daemon has been started
51# 1 if daemon was already running
52# 2 if daemon could not be started
53do_start()
54{
55 start-stop-daemon --start --quiet --startas $DAEMON --name $NAME --test \
56 > /dev/null || return 1
57 start-stop-daemon --start --quiet --startas $DAEMON --name $NAME \
58 -- $DAEMON_ARGS || return 2
59}
60
61# Return
62# 0 if daemon has been stopped
63# 1 if daemon was already stopped
64# 2 if daemon could not be stopped
65# other if a failure occurred
66do_stop()
67{
68 start-stop-daemon --stop --quiet --retry=TERM/30/KILL/5 --name $NAME
69 RETVAL="$?"
70 [ "$RETVAL" = 2 ] && return 2
71 return "$RETVAL"
72}
73
74
75case "$1" in
76 start)
77 if [ "$RUN_KADMIND" = false ] ; then
78 if [ "$VERBOSE" != no ] ; then
79 echo "Not starting $DESC per configuration"
80 fi
81 exit 0
82 fi
83 [ "$VERBOSE" != no ] && echo "Starting $DESC" "$NAME"
84 do_start
85 case "$?" in
86 0|1) [ "$VERBOSE" != no ] && echo 0 ;;
87 2) [ "$VERBOSE" != no ] && echo 1 ;;
88 esac
89 ;;
90
91 stop)
92 [ "$VERBOSE" != no ] && echo "Stopping $DESC" "$NAME"
93 do_stop
94 case "$?" in
95 0|1) [ "$VERBOSE" != no ] && echo 0 ;;
96 2) [ "$VERBOSE" != no ] && echo 1 ;;
97 esac
98 ;;
99
100 restart|force-reload)
101 if [ "$RUN_KADMIND" = false ] ; then
102 if [ "$VERBOSE" != no ] ; then
103 echo "Not restarting $DESC per configuration"
104 fi
105 exit 0
106 fi
107 echo "Restarting $DESC" "$NAME"
108 do_stop
109 case "$?" in
110 0|1)
111 do_start
112 case "$?" in
113 0) [ "$VERBOSE" != no ] && echo 0 ;;
114 *) [ "$VERBOSE" != no ] && echo 1 ;;
115 esac
116 ;;
117 *)
118 echo 1
119 ;;
120 esac
121 ;;
122
123 status)
124 pidofproc "$DAEMON" >/dev/null
125 status=$?
126 if [ $status -eq 0 ]; then
127 echo "$NAME is running."
128 else
129 echo "$NAME is not running."
130 fi
131 exit $status
132 ;;
133
134 *)
135 echo "Usage: $SCRIPTNAME {start|stop|restart|force-reload|status}" >&2
136 exit 3
137 ;;
138esac
139
140:
diff --git a/meta-oe/recipes-connectivity/krb5/krb5/etc/init.d/krb5-kdc b/meta-oe/recipes-connectivity/krb5/krb5/etc/init.d/krb5-kdc
new file mode 100755
index 000000000..865d1b970
--- /dev/null
+++ b/meta-oe/recipes-connectivity/krb5/krb5/etc/init.d/krb5-kdc
@@ -0,0 +1,133 @@
1#! /bin/sh
2### BEGIN INIT INFO
3# Provides: krb5-kdc
4# Required-Start: $local_fs $remote_fs $network $syslog
5# Required-Stop: $local_fs $remote_fs $network $syslog
6# X-Start-Before: $x-display-manager
7# Default-Start: 2 3 4 5
8# Default-Stop: 0 1 6
9# Short-Description: MIT Kerberos KDC
10# Description: Starts, stops, or restarts the MIT Kerberos KDC. This
11# daemon responds to ticket requests from Kerberos
12# clients.
13### END INIT INFO
14
15# Author: Sam Hartman <hartmans@mit.edu>
16# Author: Russ Allbery <rra@debian.org>
17#
18# Based on the /etc/init.d/skeleton template as found in initscripts version
19# 2.86.ds1-15.
20
21# June, 2012: Adapted for Yocto <amy.fong@windriver.com>
22
23PATH=/usr/sbin:/usr/bin:/sbin:/bin
24DESC="Kerberos KDC"
25NAME=krb5kdc
26DAEMON=/usr/sbin/$NAME
27DAEMON_ARGS=""
28SCRIPTNAME=/etc/init.d/krb5-kdc
29
30# Exit if the package is not installed.
31[ -x "$DAEMON" ] || exit 0
32
33# Read configuration if it is present.
34[ -r /etc/default/krb5-kdc ] && . /etc/default/krb5-kdc
35
36# Get the setting of VERBOSE and other rcS variables.
37[ -f /etc/default/rcS ] && . /etc/default/rcS
38
39. /etc/init.d/functions
40
41# Return
42# 0 if daemon has been started
43# 1 if daemon was already running
44# 2 if daemon could not be started
45do_start_kdc()
46{
47 start-stop-daemon --start --quiet --startas $DAEMON --name $NAME --test \
48 > /dev/null || return 1
49 start-stop-daemon --start --quiet --startas $DAEMON --name $NAME \
50 -- $DAEMON_ARGS || return 2
51}
52
53
54# Return
55# 0 if daemon has been stopped
56# 1 if daemon was already stopped
57# 2 if daemon could not be stopped
58# other if a failure occurred
59do_stop_kdc()
60{
61 start-stop-daemon --stop --quiet --retry=TERM/30/KILL/5 --name $NAME
62 RETVAL="$?"
63 [ "$RETVAL" = 2 ] && return 2
64 return "$RETVAL"
65}
66
67
68case "$1" in
69 start)
70 [ "$VERBOSE" != no ] && echo "Starting $DESC" "$NAME"
71 do_start_kdc
72 case "$?" in
73 0|1)
74 [ "$VERBOSE" != no ] && echo 0
75 ;;
76 2)
77 [ "$VERBOSE" != no ] && echo 1
78 ;;
79 esac
80 ;;
81
82 stop)
83 [ "$VERBOSE" != no ] && echo "Stopping $DESC" "$NAME"
84 do_stop_kdc
85 case "$?" in
86 0|1)
87    [ "$VERBOSE" != no ] && echo 0
88 ;;
89 2)
90 [ "$VERBOSE" != no ] && echo 1
91 ;;
92 esac
93 ;;
94
95 restart|force-reload)
96 echo "Restarting $DESC" "$NAME"
97 do_stop_kdc
98 case "$?" in
99 0|1)
100 do_start_kdc
101 case "$?" in
102 0)
103 echo 0
104 ;;
105 1|2)
106 echo 1
107 ;;
108 esac
109 ;;
110 *)
111 echo 1
112 ;;
113 esac
114 ;;
115
116 status)
117 pidofproc "$DAEMON" >/dev/null
118 status=$?
119 if [ $status -eq 0 ]; then
120 echo "$NAME is running."
121 else
122 echo "$NAME is not running."
123 fi
124 exit $status
125 ;;
126
127 *)
128 echo "Usage: $SCRIPTNAME {start|stop|restart|force-reload|status}" >&2
129 exit 3
130 ;;
131esac
132
133:
diff --git a/meta-oe/recipes-connectivity/krb5/krb5_1.12.2.bb b/meta-oe/recipes-connectivity/krb5/krb5_1.12.2.bb
new file mode 100644
index 000000000..72de38b07
--- /dev/null
+++ b/meta-oe/recipes-connectivity/krb5/krb5_1.12.2.bb
@@ -0,0 +1,77 @@
1SUMMARY = "A network authentication protocol"
2DESCRIPTION = "Kerberos is a system for authenticating users and services on a network. \
3 Kerberos is a trusted third-party service. That means that there is a \
4 third party (the Kerberos server) that is trusted by all the entities on \
5 the network (users and services, usually called "principals"). \
6 . \
7 This is the MIT reference implementation of Kerberos V5. \
8 . \
9 This package contains the Kerberos key server (KDC). The KDC manages all \
10 authentication credentials for a Kerberos realm, holds the master keys \
11 for the realm, and responds to authentication requests. This package \
12 should be installed on both master and slave KDCs."
13
14HOMEPAGE = "http://web.mit.edu/Kerberos/"
15SECTION = "console/network"
16LICENSE = "MIT"
17LIC_FILES_CHKSUM = "file://${S}/../NOTICE;md5=450c80c6258ce03387bd09df37638ebc"
18DEPENDS = "ncurses util-linux e2fsprogs e2fsprogs-native"
19
20inherit autotools-brokensep binconfig perlnative
21
22SHRT_VER = "${@oe.utils.trim_version("${PV}", 2)}"
23SRC_URI = "http://web.mit.edu/kerberos/dist/${BPN}/${SHRT_VER}/${BP}-signed.tar \
24 file://0001-aclocal-Add-parameter-to-disable-keyutils-detection.patch \
25 file://0001-Return-only-new-keys-in-randkey-CVE-2014-5351.patch \
26 file://debian-suppress-usr-lib-in-krb5-config.patch;striplevel=2 \
27 file://crosscompile_nm.patch \
28 file://etc/init.d/krb5-kdc \
29 file://etc/init.d/krb5-admin-server \
30 file://etc/default/krb5-kdc \
31 file://etc/default/krb5-admin-server \
32"
33SRC_URI[md5sum] = "357f1312b7720a0a591e22db0f7829fe"
34SRC_URI[sha256sum] = "09bd180107b5c2b3b7378c57c023fb02a103d4cac39d6f2dd600275d7a4f3744"
35
36S = "${WORKDIR}/${BP}/src/"
37
38PACKAGECONFIG ??= "openssl"
39PACKAGECONFIG[libedit] = "--with-libedit,--without-libedit,libedit"
40PACKAGECONFIG[openssl] = "--with-pkinit-crypto-impl=openssl,,openssl"
41PACKAGECONFIG[keyutils] = "--enable-keyutils,--disable-keyutils,keyutils"
42PACKAGECONFIG[ldap] = "--with-ldap,--without-ldap,openldap"
43PACKAGECONFIG[readline] = "--with-readline,--without-readline,readline"
44
45EXTRA_OECONF += " --without-tcl --with-system-et --disable-rpath"
46CACHED_CONFIGUREVARS += "krb5_cv_attr_constructor_destructor=yes ac_cv_func_regcomp=yes \
47 ac_cv_printf_positional=yes ac_cv_file__etc_environment=yes \
48 ac_cv_file__etc_TIMEZONE=no"
49
50CFLAGS_append += "-DDESTRUCTOR_ATTR_WORKS=1 -I${STAGING_INCDIR}/et"
51LDFLAGS_append += "-lpthread"
52
53FILES_${PN} += "${datadir}/gnats"
54FILES_${PN}-doc += "${datadir}/examples"
55FILES_${PN}-dbg += "${libdir}/krb5/plugins/*/.debug"
56
57krb5_do_unpack() {
58 # ${P}-signed.tar contains ${P}.tar.gz.asc and ${P}.tar.gz
59 tar xzf ${WORKDIR}/${BP}.tar.gz -C ${WORKDIR}/
60}
61
62python do_unpack() {
63 bb.build.exec_func('base_do_unpack', d)
64 bb.build.exec_func('krb5_do_unpack', d)
65}
66
67do_configure() {
68 gnu-configize --force
69 autoreconf
70 oe_runconf
71}
72
73do_install_append() {
74 mkdir -p ${D}/etc/init.d ${D}/etc/default
75 install -m 0755 ${WORKDIR}/etc/init.d/* ${D}/etc/init.d
76 install -m 0644 ${WORKDIR}/etc/default/* ${D}/etc/default
77}
diff --git a/meta-oe/recipes-connectivity/libimobiledevice/libimobiledevice_1.1.4.bb b/meta-oe/recipes-connectivity/libimobiledevice/libimobiledevice_1.1.4.bb
new file mode 100644
index 000000000..5256ce0d2
--- /dev/null
+++ b/meta-oe/recipes-connectivity/libimobiledevice/libimobiledevice_1.1.4.bb
@@ -0,0 +1,26 @@
1SUMMARY = "A protocol library to access an iPhone or iPod Touch in Linux"
2LICENSE = "GPLv2 & LGPLv2.1"
3LIC_FILES_CHKSUM = "\
4 file://COPYING;md5=ebb5c50ab7cab4baeffba14977030c07 \
5 file://COPYING.LESSER;md5=6ab17b41640564434dda85c06b7124f7 \
6"
7
8#| configure:17888: checking for native large file support
9#| configure:17891: error: in `/home/jenkins/oe/shr-core-branches/shr-core/tmp-eglibc/work/core2-64-oe-linux/libimobiledevice/1.1.4-r0/libimobiledevice-1.1.4':
10#| configure:17893: error: cannot run test program while cross compiling
11PNBLACKLIST[libimobiledevice] ?= "cannot run test program while cross compiling"
12
13HOMEPAGE ="http://www.libimobiledevice.org/"
14
15DEPENDS = "libplist usbmuxd libtasn1 gnutls libgcrypt"
16
17SRC_URI = " \
18 http://www.libimobiledevice.org/downloads/libimobiledevice-${PV}.tar.bz2 \
19"
20
21SRC_URI[md5sum] = "3f28cbc6a2e30d34685049c0abde5183"
22SRC_URI[sha256sum] = "67499cfaa6172f566ee6b0783605acffe484fb7ddc3b09881ab7ac58667ee5b8"
23
24inherit autotools pkgconfig
25
26EXTRA_OECONF = " --without-cython "
diff --git a/meta-oe/recipes-connectivity/libmtp/libmtp-1.1.5/69-libmtp.rules b/meta-oe/recipes-connectivity/libmtp/libmtp-1.1.5/69-libmtp.rules
new file mode 100644
index 000000000..4cd27c762
--- /dev/null
+++ b/meta-oe/recipes-connectivity/libmtp/libmtp-1.1.5/69-libmtp.rules
@@ -0,0 +1,999 @@
1# UDEV-style hotplug map for libmtp
2# Put this file in /etc/udev/rules.d
3
4ACTION!="add", GOTO="libmtp_rules_end"
5ENV{MAJOR}!="?*", GOTO="libmtp_rules_end"
6SUBSYSTEM=="usb", GOTO="libmtp_usb_rules"
7GOTO="libmtp_rules_end"
8
9LABEL="libmtp_usb_rules"
10
11# Some sensitive devices we surely don't wanna probe
12# Color instruments
13ATTR{idVendor}=="0670", GOTO="libmtp_rules_end"
14ATTR{idVendor}=="0765", GOTO="libmtp_rules_end"
15ATTR{idVendor}=="085c", GOTO="libmtp_rules_end"
16ATTR{idVendor}=="0971", GOTO="libmtp_rules_end"
17# Canon scanners that look like MTP devices (PID 0x22nn)
18ATTR{idVendor}=="04a9", ATTR{idProduct}=="22*", GOTO="libmtp_rules_end"
19# Canon digital camera (EOS 3D) that looks like MTP device (PID 0x3113)
20ATTR{idVendor}=="04a9", ATTR{idProduct}=="3113", GOTO="libmtp_rules_end"
21# Sensitive Atheros devices that look like MTP devices
22ATTR{idVendor}=="0cf3", GOTO="libmtp_rules_end"
23# Sensitive Atmel JTAG programmers
24ATTR{idVendor}=="03eb", GOTO="libmtp_rules_end"
25# Creative ZEN Vision
26ATTR{idVendor}=="041e", ATTR{idProduct}=="411f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
27# Creative Portable Media Center
28ATTR{idVendor}=="041e", ATTR{idProduct}=="4123", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
29# Creative ZEN Xtra (MTP mode)
30ATTR{idVendor}=="041e", ATTR{idProduct}=="4128", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
31# Dell DJ (2nd generation)
32ATTR{idVendor}=="041e", ATTR{idProduct}=="412f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
33# Creative ZEN Micro (MTP mode)
34ATTR{idVendor}=="041e", ATTR{idProduct}=="4130", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
35# Creative ZEN Touch (MTP mode)
36ATTR{idVendor}=="041e", ATTR{idProduct}=="4131", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
37# Dell Dell Pocket DJ (MTP mode)
38ATTR{idVendor}=="041e", ATTR{idProduct}=="4132", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
39# Creative ZEN MicroPhoto (alternate version)
40ATTR{idVendor}=="041e", ATTR{idProduct}=="4133", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
41# Creative ZEN Sleek (MTP mode)
42ATTR{idVendor}=="041e", ATTR{idProduct}=="4137", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
43# Creative ZEN MicroPhoto
44ATTR{idVendor}=="041e", ATTR{idProduct}=="413c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
45# Creative ZEN Sleek Photo
46ATTR{idVendor}=="041e", ATTR{idProduct}=="413d", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
47# Creative ZEN Vision:M
48ATTR{idVendor}=="041e", ATTR{idProduct}=="413e", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
49# Creative ZEN V
50ATTR{idVendor}=="041e", ATTR{idProduct}=="4150", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
51# Creative ZEN Vision:M (DVP-HD0004)
52ATTR{idVendor}=="041e", ATTR{idProduct}=="4151", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
53# Creative ZEN V Plus
54ATTR{idVendor}=="041e", ATTR{idProduct}=="4152", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
55# Creative ZEN Vision W
56ATTR{idVendor}=="041e", ATTR{idProduct}=="4153", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
57# Creative ZEN
58ATTR{idVendor}=="041e", ATTR{idProduct}=="4157", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
59# Creative ZEN V 2GB
60ATTR{idVendor}=="041e", ATTR{idProduct}=="4158", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
61# Creative ZEN Mozaic
62ATTR{idVendor}=="041e", ATTR{idProduct}=="4161", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
63# Creative ZEN X-Fi
64ATTR{idVendor}=="041e", ATTR{idProduct}=="4162", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
65# Creative ZEN X-Fi 3
66ATTR{idVendor}=="041e", ATTR{idProduct}=="4169", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
67# ZiiLABS Zii EGG
68ATTR{idVendor}=="041e", ATTR{idProduct}=="6000", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
69# Samsung YP-900
70ATTR{idVendor}=="04e8", ATTR{idProduct}=="0409", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
71# Samsung I550W Phone
72ATTR{idVendor}=="04e8", ATTR{idProduct}=="04a4", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
73# Samsung Jet S8000
74ATTR{idVendor}=="04e8", ATTR{idProduct}=="4f1f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
75# Samsung YH-920 (501d)
76ATTR{idVendor}=="04e8", ATTR{idProduct}=="501d", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
77# Samsung YH-920 (5022)
78ATTR{idVendor}=="04e8", ATTR{idProduct}=="5022", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
79# Samsung YH-925GS
80ATTR{idVendor}=="04e8", ATTR{idProduct}=="5024", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
81# Samsung YH-820
82ATTR{idVendor}=="04e8", ATTR{idProduct}=="502e", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
83# Samsung YH-925(-GS)
84ATTR{idVendor}=="04e8", ATTR{idProduct}=="502f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
85# Samsung YH-J70J
86ATTR{idVendor}=="04e8", ATTR{idProduct}=="5033", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
87# Samsung YP-Z5
88ATTR{idVendor}=="04e8", ATTR{idProduct}=="503c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
89# Samsung YP-T7J
90ATTR{idVendor}=="04e8", ATTR{idProduct}=="5047", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
91# Samsung YP-U2J (YP-U2JXB/XAA)
92ATTR{idVendor}=="04e8", ATTR{idProduct}=="5054", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
93# Samsung YP-F2J
94ATTR{idVendor}=="04e8", ATTR{idProduct}=="5057", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
95# Samsung YP-K5
96ATTR{idVendor}=="04e8", ATTR{idProduct}=="505a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
97# Samsung YP-U3
98ATTR{idVendor}=="04e8", ATTR{idProduct}=="507d", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
99# Samsung YP-T9
100ATTR{idVendor}=="04e8", ATTR{idProduct}=="507f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
101# Samsung YP-K3
102ATTR{idVendor}=="04e8", ATTR{idProduct}=="5081", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
103# Samsung YP-P2
104ATTR{idVendor}=="04e8", ATTR{idProduct}=="5083", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
105# Samsung YP-T10
106ATTR{idVendor}=="04e8", ATTR{idProduct}=="508a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
107# Samsung YP-S5
108ATTR{idVendor}=="04e8", ATTR{idProduct}=="508b", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
109# Samsung YP-S3
110ATTR{idVendor}=="04e8", ATTR{idProduct}=="5091", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
111# Samsung YP-U4
112ATTR{idVendor}=="04e8", ATTR{idProduct}=="5093", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
113# Samsung YP-R1
114ATTR{idVendor}=="04e8", ATTR{idProduct}=="510f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
115# Samsung YP-Q1
116ATTR{idVendor}=="04e8", ATTR{idProduct}=="5115", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
117# Samsung YP-M1
118ATTR{idVendor}=="04e8", ATTR{idProduct}=="5118", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
119# Samsung YP-P3
120ATTR{idVendor}=="04e8", ATTR{idProduct}=="511a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
121# Samsung YP-Q2
122ATTR{idVendor}=="04e8", ATTR{idProduct}=="511d", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
123# Samsung YP-U5
124ATTR{idVendor}=="04e8", ATTR{idProduct}=="5121", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
125# Samsung YP-R0
126ATTR{idVendor}=="04e8", ATTR{idProduct}=="5125", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
127# Samsung YP-Q3
128ATTR{idVendor}=="04e8", ATTR{idProduct}=="5130", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
129# Samsung YP-Z3
130ATTR{idVendor}=="04e8", ATTR{idProduct}=="5137", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
131# Samsung YH-999 Portable Media Center/SGH-A707/SGH-L760V/SGH-U900/Verizon Intensity/Fascinate
132ATTR{idVendor}=="04e8", ATTR{idProduct}=="5a0f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
133# Samsung M7600 Beat/GT-S8300T/SGH-F490/S8300
134ATTR{idVendor}=="04e8", ATTR{idProduct}=="6642", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
135# Samsung X830 Mobile Phone
136ATTR{idVendor}=="04e8", ATTR{idProduct}=="6702", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
137# Samsung U600 Mobile Phone
138ATTR{idVendor}=="04e8", ATTR{idProduct}=="6709", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
139# Samsung F250 Mobile Phone
140ATTR{idVendor}=="04e8", ATTR{idProduct}=="6727", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
141# Samsung Juke (SCH-U470)
142ATTR{idVendor}=="04e8", ATTR{idProduct}=="6734", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
143# Samsung GT-B2700
144ATTR{idVendor}=="04e8", ATTR{idProduct}=="6752", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
145# Samsung SAMSUNG Trance
146ATTR{idVendor}=="04e8", ATTR{idProduct}=="6763", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
147# Samsung GT-S8500
148ATTR{idVendor}=="04e8", ATTR{idProduct}=="6819", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
149# Samsung Galaxy models (MTP+ADB)
150ATTR{idVendor}=="04e8", ATTR{idProduct}=="685c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
151# Samsung Galaxy Y
152ATTR{idVendor}=="04e8", ATTR{idProduct}=="685e", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
153# Samsung Galaxy models (MTP)
154ATTR{idVendor}=="04e8", ATTR{idProduct}=="6860", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
155# Samsung Galaxy models Kies mode
156ATTR{idVendor}=="04e8", ATTR{idProduct}=="6877", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
157# Samsung Vibrant SGH-T959/Captivate/Media player mode
158ATTR{idVendor}=="04e8", ATTR{idProduct}=="68a9", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
159# Samsung GT-B2710/Xcover 271
160ATTR{idVendor}=="04e8", ATTR{idProduct}=="68af", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
161# Samsung GT-S5230
162ATTR{idVendor}=="04e8", ATTR{idProduct}=="e20c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
163# Microsoft/Intel Bandon Portable Media Center
164ATTR{idVendor}=="045e", ATTR{idProduct}=="00c9", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
165# Microsoft Windows Phone
166ATTR{idVendor}=="045e", ATTR{idProduct}=="04ec", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
167# Microsoft Windows MTP Simulator
168ATTR{idVendor}=="045e", ATTR{idProduct}=="0622", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
169# Microsoft Zune HD
170ATTR{idVendor}=="045e", ATTR{idProduct}=="063e", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
171# Microsoft Kin 1
172ATTR{idVendor}=="045e", ATTR{idProduct}=="0640", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
173# Microsoft/Sharp/nVidia Kin TwoM
174ATTR{idVendor}=="045e", ATTR{idProduct}=="0641", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
175# Microsoft Zune
176ATTR{idVendor}=="045e", ATTR{idProduct}=="0710", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
177# JVC Alneo XA-HD500
178ATTR{idVendor}=="04f1", ATTR{idProduct}=="6105", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
179# Philips HDD6320/00 or HDD6330/17
180ATTR{idVendor}=="0471", ATTR{idProduct}=="014b", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
181# Philips HDD14XX,HDD1620 or HDD1630/17
182ATTR{idVendor}=="0471", ATTR{idProduct}=="014c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
183# Philips HDD085/00 or HDD082/17
184ATTR{idVendor}=="0471", ATTR{idProduct}=="014d", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
185# Philips GoGear SA9200
186ATTR{idVendor}=="0471", ATTR{idProduct}=="014f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
187# Philips SA1115/55
188ATTR{idVendor}=="0471", ATTR{idProduct}=="0164", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
189# Philips GoGear Audio
190ATTR{idVendor}=="0471", ATTR{idProduct}=="0165", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
191# Philips Shoqbox
192ATTR{idVendor}=="0471", ATTR{idProduct}=="0172", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
193# Philips PSA610
194ATTR{idVendor}=="0471", ATTR{idProduct}=="0181", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
195# Philips HDD6320
196ATTR{idVendor}=="0471", ATTR{idProduct}=="01eb", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
197# Philips GoGear SA6014/SA6015/SA6024/SA6025/SA6044/SA6045
198ATTR{idVendor}=="0471", ATTR{idProduct}=="084e", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
199# Philips GoGear SA5145
200ATTR{idVendor}=="0471", ATTR{idProduct}=="0857", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
201# Philips GoGear SA6125/SA6145/SA6185
202ATTR{idVendor}=="0471", ATTR{idProduct}=="2002", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
203# Philips GoGear SA3345
204ATTR{idVendor}=="0471", ATTR{idProduct}=="2004", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
205# Philips SA5285
206ATTR{idVendor}=="0471", ATTR{idProduct}=="2022", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
207# Philips GoGear ViBE SA1VBE04
208ATTR{idVendor}=="0471", ATTR{idProduct}=="2075", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
209# Philips GoGear Muse
210ATTR{idVendor}=="0471", ATTR{idProduct}=="2077", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
211# Philips GoGear ViBE SA1VBE04/08
212ATTR{idVendor}=="0471", ATTR{idProduct}=="207b", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
213# Philips GoGear Aria
214ATTR{idVendor}=="0471", ATTR{idProduct}=="207c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
215# Philips GoGear SA1VBE08KX/78
216ATTR{idVendor}=="0471", ATTR{idProduct}=="208e", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
217# Philips GoGear VIBE SA2VBE[08|16]K/02
218ATTR{idVendor}=="0471", ATTR{idProduct}=="20b7", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
219# Philips GoGear Ariaz
220ATTR{idVendor}=="0471", ATTR{idProduct}=="20b9", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
221# Philips GoGear Vibe/02
222ATTR{idVendor}=="0471", ATTR{idProduct}=="20e5", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
223# Philips PSA235
224ATTR{idVendor}=="0471", ATTR{idProduct}=="7e01", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
225# Acer Iconia TAB A500 (ID1)
226ATTR{idVendor}=="0502", ATTR{idProduct}=="3325", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
227# Acer Iconia TAB A500 (ID2)
228ATTR{idVendor}=="0502", ATTR{idProduct}=="3341", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
229# Acer Iconia TAB A501
230ATTR{idVendor}=="0502", ATTR{idProduct}=="3344", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
231# Acer Iconia TAB A100 (ID1)
232ATTR{idVendor}=="0502", ATTR{idProduct}=="3348", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
233# Acer Iconia TAB A100 (ID2)
234ATTR{idVendor}=="0502", ATTR{idProduct}=="3349", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
235# Acer Iconia TAB A700
236ATTR{idVendor}=="0502", ATTR{idProduct}=="3378", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
237# Acer Iconia TAB A200 (ID1)
238ATTR{idVendor}=="0502", ATTR{idProduct}=="337c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
239# Acer Iconia TAB A200 (ID2)
240ATTR{idVendor}=="0502", ATTR{idProduct}=="337d", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
241# Acer Iconia TAB A510
242ATTR{idVendor}=="0502", ATTR{idProduct}=="338a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
243# Acer E350 Liquid Gallant Duo
244ATTR{idVendor}=="0502", ATTR{idProduct}=="33c3", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
245# SanDisk Sansa m230/m240
246ATTR{idVendor}=="0781", ATTR{idProduct}=="7400", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
247# SanDisk Sansa m200-tcc (MTP mode)
248ATTR{idVendor}=="0781", ATTR{idProduct}=="7401", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
249# SanDisk Sansa c150
250ATTR{idVendor}=="0781", ATTR{idProduct}=="7410", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
251# SanDisk Sansa e200/e250/e260/e270/e280
252ATTR{idVendor}=="0781", ATTR{idProduct}=="7420", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
253# SanDisk Sansa e260/e280 v2
254ATTR{idVendor}=="0781", ATTR{idProduct}=="7422", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
255# SanDisk Sansa m240/m250
256ATTR{idVendor}=="0781", ATTR{idProduct}=="7430", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
257# SanDisk Sansa Clip
258ATTR{idVendor}=="0781", ATTR{idProduct}=="7432", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
259# SanDisk Sansa Clip v2
260ATTR{idVendor}=="0781", ATTR{idProduct}=="7434", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
261# SanDisk Sansa c240/c250
262ATTR{idVendor}=="0781", ATTR{idProduct}=="7450", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
263# SanDisk Sansa c250 v2
264ATTR{idVendor}=="0781", ATTR{idProduct}=="7452", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
265# SanDisk Sansa Express
266ATTR{idVendor}=="0781", ATTR{idProduct}=="7460", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
267# SanDisk Sansa Connect
268ATTR{idVendor}=="0781", ATTR{idProduct}=="7480", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
269# SanDisk Sansa View
270ATTR{idVendor}=="0781", ATTR{idProduct}=="74b0", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
271# SanDisk Sansa Fuze
272ATTR{idVendor}=="0781", ATTR{idProduct}=="74c0", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
273# SanDisk Sansa Fuze v2
274ATTR{idVendor}=="0781", ATTR{idProduct}=="74c2", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
275# SanDisk Sansa Clip+
276ATTR{idVendor}=="0781", ATTR{idProduct}=="74d0", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
277# SanDisk Sansa Fuze+
278ATTR{idVendor}=="0781", ATTR{idProduct}=="74e0", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
279# SanDisk Sansa Clip Zip
280ATTR{idVendor}=="0781", ATTR{idProduct}=="74e4", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
281# iRiver H300 Series MTP
282ATTR{idVendor}=="1006", ATTR{idProduct}=="3004", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
283# iRiver Portable Media Center
284ATTR{idVendor}=="1006", ATTR{idProduct}=="4002", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
285# iRiver Portable Media Center
286ATTR{idVendor}=="1006", ATTR{idProduct}=="4003", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
287# iRiver T7 Volcano
288ATTR{idVendor}=="1042", ATTR{idProduct}=="1143", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
289# iRiver iFP-880
290ATTR{idVendor}=="4102", ATTR{idProduct}=="1008", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
291# iRiver T10
292ATTR{idVendor}=="4102", ATTR{idProduct}=="1113", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
293# iRiver T20 FM
294ATTR{idVendor}=="4102", ATTR{idProduct}=="1114", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
295# iRiver T20
296ATTR{idVendor}=="4102", ATTR{idProduct}=="1115", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
297# iRiver U10
298ATTR{idVendor}=="4102", ATTR{idProduct}=="1116", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
299# iRiver T10a
300ATTR{idVendor}=="4102", ATTR{idProduct}=="1117", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
301# iRiver T20
302ATTR{idVendor}=="4102", ATTR{idProduct}=="1118", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
303# iRiver T30
304ATTR{idVendor}=="4102", ATTR{idProduct}=="1119", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
305# iRiver T10 2GB
306ATTR{idVendor}=="4102", ATTR{idProduct}=="1120", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
307# iRiver N12
308ATTR{idVendor}=="4102", ATTR{idProduct}=="1122", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
309# iRiver Clix2
310ATTR{idVendor}=="4102", ATTR{idProduct}=="1126", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
311# iRiver Clix
312ATTR{idVendor}=="4102", ATTR{idProduct}=="112a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
313# iRiver X20
314ATTR{idVendor}=="4102", ATTR{idProduct}=="1132", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
315# iRiver T60
316ATTR{idVendor}=="4102", ATTR{idProduct}=="1134", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
317# iRiver E100
318ATTR{idVendor}=="4102", ATTR{idProduct}=="1141", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
319# iRiver E100 v2/Lplayer
320ATTR{idVendor}=="4102", ATTR{idProduct}=="1142", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
321# iRiver Spinn
322ATTR{idVendor}=="4102", ATTR{idProduct}=="1147", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
323# iRiver E50
324ATTR{idVendor}=="4102", ATTR{idProduct}=="1151", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
325# iRiver T5
326ATTR{idVendor}=="4102", ATTR{idProduct}=="1153", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
327# iRiver E30
328ATTR{idVendor}=="4102", ATTR{idProduct}=="1167", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
329# iRiver H10 20GB
330ATTR{idVendor}=="4102", ATTR{idProduct}=="2101", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
331# iRiver H10 5GB
332ATTR{idVendor}=="4102", ATTR{idProduct}=="2102", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
333# iRiver H10 5.6GB
334ATTR{idVendor}=="4102", ATTR{idProduct}=="2105", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
335# Dell, Inc DJ Itty
336ATTR{idVendor}=="413c", ATTR{idProduct}=="4500", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
337# Dell, Inc Dell Streak 7
338ATTR{idVendor}=="413c", ATTR{idProduct}=="b10b", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
339# Toshiba Gigabeat MEGF-40
340ATTR{idVendor}=="0930", ATTR{idProduct}=="0009", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
341# Toshiba Gigabeat
342ATTR{idVendor}=="0930", ATTR{idProduct}=="000c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
343# Toshiba Gigabeat P20
344ATTR{idVendor}=="0930", ATTR{idProduct}=="000f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
345# Toshiba Gigabeat S
346ATTR{idVendor}=="0930", ATTR{idProduct}=="0010", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
347# Toshiba Gigabeat P10
348ATTR{idVendor}=="0930", ATTR{idProduct}=="0011", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
349# Toshiba Gigabeat V30
350ATTR{idVendor}=="0930", ATTR{idProduct}=="0014", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
351# Toshiba Gigabeat U
352ATTR{idVendor}=="0930", ATTR{idProduct}=="0016", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
353# Toshiba Gigabeat MEU202
354ATTR{idVendor}=="0930", ATTR{idProduct}=="0018", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
355# Toshiba Gigabeat T
356ATTR{idVendor}=="0930", ATTR{idProduct}=="0019", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
357# Toshiba Gigabeat MEU201
358ATTR{idVendor}=="0930", ATTR{idProduct}=="001a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
359# Toshiba Gigabeat MET401
360ATTR{idVendor}=="0930", ATTR{idProduct}=="001d", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
361# Toshiba Excite AT300
362ATTR{idVendor}=="0930", ATTR{idProduct}=="0963", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
363# Toshiba Thrive AT100/AT105
364ATTR{idVendor}=="0930", ATTR{idProduct}=="7100", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
365# Archos Gmini XS100
366ATTR{idVendor}=="0e79", ATTR{idProduct}=="1207", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
367# Archos XS202 (MTP mode)
368ATTR{idVendor}=="0e79", ATTR{idProduct}=="1208", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
369# Archos 104 (MTP mode)
370ATTR{idVendor}=="0e79", ATTR{idProduct}=="120a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
371# Archos 204 (MTP mode)
372ATTR{idVendor}=="0e79", ATTR{idProduct}=="120c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
373# Archos 404 (MTP mode)
374ATTR{idVendor}=="0e79", ATTR{idProduct}=="1301", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
375# Archos 404CAM (MTP mode)
376ATTR{idVendor}=="0e79", ATTR{idProduct}=="1303", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
377# Archos 504 (MTP mode)
378ATTR{idVendor}=="0e79", ATTR{idProduct}=="1307", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
379# Archos 604 (MTP mode)
380ATTR{idVendor}=="0e79", ATTR{idProduct}=="1309", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
381# Archos 604WIFI (MTP mode)
382ATTR{idVendor}=="0e79", ATTR{idProduct}=="130b", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
383# Archos 704 mobile dvr
384ATTR{idVendor}=="0e79", ATTR{idProduct}=="130d", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
385# Archos 704TV (MTP mode)
386ATTR{idVendor}=="0e79", ATTR{idProduct}=="130f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
387# Archos 405 (MTP mode)
388ATTR{idVendor}=="0e79", ATTR{idProduct}=="1311", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
389# Archos 605 (MTP mode)
390ATTR{idVendor}=="0e79", ATTR{idProduct}=="1313", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
391# Archos 605F (MTP mode)
392ATTR{idVendor}=="0e79", ATTR{idProduct}=="1315", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
393# Archos 705 (MTP mode)
394ATTR{idVendor}=="0e79", ATTR{idProduct}=="1319", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
395# Archos TV+ (MTP mode)
396ATTR{idVendor}=="0e79", ATTR{idProduct}=="131b", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
397# Archos 105 (MTP mode)
398ATTR{idVendor}=="0e79", ATTR{idProduct}=="131d", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
399# Archos 405HDD (MTP mode)
400ATTR{idVendor}=="0e79", ATTR{idProduct}=="1321", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
401# Archos 5 (MTP mode)
402ATTR{idVendor}=="0e79", ATTR{idProduct}=="1331", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
403# Archos 5 (MTP mode)
404ATTR{idVendor}=="0e79", ATTR{idProduct}=="1333", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
405# Archos 7 (MTP mode)
406ATTR{idVendor}=="0e79", ATTR{idProduct}=="1335", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
407# Archos SPOD (MTP mode)
408ATTR{idVendor}=="0e79", ATTR{idProduct}=="1341", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
409# Archos 5S IT (MTP mode)
410ATTR{idVendor}=="0e79", ATTR{idProduct}=="1351", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
411# Archos 5H IT (MTP mode)
412ATTR{idVendor}=="0e79", ATTR{idProduct}=="1357", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
413# Archos Arnova Childpad
414ATTR{idVendor}=="0e79", ATTR{idProduct}=="1458", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
415# Archos 8o G9 (MTP mode)
416ATTR{idVendor}=="0e79", ATTR{idProduct}=="1508", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
417# Archos 8o G9 Turbo (MTP mode)
418ATTR{idVendor}=="0e79", ATTR{idProduct}=="1509", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
419# Archos 80G9
420ATTR{idVendor}=="0e79", ATTR{idProduct}=="1518", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
421# Archos 101 G9
422ATTR{idVendor}=="0e79", ATTR{idProduct}=="1528", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
423# Archos 101 G9 (v2)
424ATTR{idVendor}=="0e79", ATTR{idProduct}=="1529", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
425# Archos 101 G9 Turbo 250 HD
426ATTR{idVendor}=="0e79", ATTR{idProduct}=="1538", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
427# Archos 101 G9 Turbo
428ATTR{idVendor}=="0e79", ATTR{idProduct}=="1539", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
429# Archos 70it2 (mode 1)
430ATTR{idVendor}=="0e79", ATTR{idProduct}=="1568", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
431# Archos 70it2 (mode 2)
432ATTR{idVendor}=="0e79", ATTR{idProduct}=="1569", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
433# Dunlop MP3 player 1GB / EGOMAN MD223AFD
434ATTR{idVendor}=="10d6", ATTR{idProduct}=="2200", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
435# Memorex or iRiver MMP 8585/8586 or iRiver E200
436ATTR{idVendor}=="10d6", ATTR{idProduct}=="2300", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
437# Sirius Stiletto
438ATTR{idVendor}=="18f6", ATTR{idProduct}=="0102", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
439# Sirius Stiletto 2
440ATTR{idVendor}=="18f6", ATTR{idProduct}=="0110", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
441# Canon Ixus Digital 700 (PTP/MTP mode)
442ATTR{idVendor}=="04a9", ATTR{idProduct}=="30f2", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
443# Canon PowerShot A640 (PTP/MTP mode)
444ATTR{idVendor}=="04a9", ATTR{idProduct}=="3139", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
445# Canon PowerShot SX20IS (PTP/MTP mode)
446ATTR{idVendor}=="04a9", ATTR{idProduct}=="31e4", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
447# Nokia N81 Mobile Phone
448ATTR{idVendor}=="0421", ATTR{idProduct}=="000a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
449# Nokia 6120c Classic Mobile Phone
450ATTR{idVendor}=="0421", ATTR{idProduct}=="002e", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
451# Nokia N96 Mobile Phone
452ATTR{idVendor}=="0421", ATTR{idProduct}=="0039", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
453# Nokia 6500c Classic Mobile Phone
454ATTR{idVendor}=="0421", ATTR{idProduct}=="003c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
455# Nokia 3110c Mobile Phone
456ATTR{idVendor}=="0421", ATTR{idProduct}=="005f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
457# Nokia 3109c Mobile Phone
458ATTR{idVendor}=="0421", ATTR{idProduct}=="0065", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
459# Nokia 5310 XpressMusic
460ATTR{idVendor}=="0421", ATTR{idProduct}=="006c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
461# Nokia N95 Mobile Phone 8GB
462ATTR{idVendor}=="0421", ATTR{idProduct}=="006e", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
463# Nokia N82 Mobile Phone
464ATTR{idVendor}=="0421", ATTR{idProduct}=="0074", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
465# Nokia N78 Mobile Phone
466ATTR{idVendor}=="0421", ATTR{idProduct}=="0079", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
467# Nokia 6220 Classic
468ATTR{idVendor}=="0421", ATTR{idProduct}=="008d", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
469# Nokia N85 Mobile Phone
470ATTR{idVendor}=="0421", ATTR{idProduct}=="0092", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
471# Nokia 6210 Navigator
472ATTR{idVendor}=="0421", ATTR{idProduct}=="0098", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
473# Nokia E71
474ATTR{idVendor}=="0421", ATTR{idProduct}=="00e4", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
475# Nokia E66
476ATTR{idVendor}=="0421", ATTR{idProduct}=="00e5", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
477# Nokia 5320 XpressMusic
478ATTR{idVendor}=="0421", ATTR{idProduct}=="00ea", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
479# Nokia 5800 XpressMusic
480ATTR{idVendor}=="0421", ATTR{idProduct}=="0154", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
481# Nokia 5800 XpressMusic v2
482ATTR{idVendor}=="0421", ATTR{idProduct}=="0155", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
483# Nokia 5800 XpressMusic v3
484ATTR{idVendor}=="0421", ATTR{idProduct}=="0159", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
485# Nokia E63
486ATTR{idVendor}=="0421", ATTR{idProduct}=="0179", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
487# Nokia N79
488ATTR{idVendor}=="0421", ATTR{idProduct}=="0186", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
489# Nokia E71x
490ATTR{idVendor}=="0421", ATTR{idProduct}=="01a1", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
491# Nokia E52
492ATTR{idVendor}=="0421", ATTR{idProduct}=="01cf", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
493# Nokia 3710
494ATTR{idVendor}=="0421", ATTR{idProduct}=="01ee", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
495# Nokia N97-1
496ATTR{idVendor}=="0421", ATTR{idProduct}=="01f4", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
497# Nokia N97
498ATTR{idVendor}=="0421", ATTR{idProduct}=="01f5", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
499# Nokia 5130 XpressMusic
500ATTR{idVendor}=="0421", ATTR{idProduct}=="0209", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
501# Nokia E72
502ATTR{idVendor}=="0421", ATTR{idProduct}=="0221", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
503# Nokia 5530
504ATTR{idVendor}=="0421", ATTR{idProduct}=="0229", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
505# Nokia N97 mini
506ATTR{idVendor}=="0421", ATTR{idProduct}=="026b", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
507# Nokia X6
508ATTR{idVendor}=="0421", ATTR{idProduct}=="0274", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
509# Nokia 6600i
510ATTR{idVendor}=="0421", ATTR{idProduct}=="0297", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
511# Nokia 2710
512ATTR{idVendor}=="0421", ATTR{idProduct}=="02c1", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
513# Nokia 5230
514ATTR{idVendor}=="0421", ATTR{idProduct}=="02e2", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
515# Nokia N8
516ATTR{idVendor}=="0421", ATTR{idProduct}=="02fe", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
517# Nokia N8 (Ovi mode)
518ATTR{idVendor}=="0421", ATTR{idProduct}=="0302", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
519# Nokia E7
520ATTR{idVendor}=="0421", ATTR{idProduct}=="0334", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
521# Nokia E7 (Ovi mode)
522ATTR{idVendor}=="0421", ATTR{idProduct}=="0335", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
523# Nokia C7
524ATTR{idVendor}=="0421", ATTR{idProduct}=="03c1", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
525# Nokia C7 (ID2)
526ATTR{idVendor}=="0421", ATTR{idProduct}=="03cd", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
527# Nokia N950
528ATTR{idVendor}=="0421", ATTR{idProduct}=="03d2", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
529# Nokia 3250 Mobile Phone
530ATTR{idVendor}=="0421", ATTR{idProduct}=="0462", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
531# Nokia N93 Mobile Phone
532ATTR{idVendor}=="0421", ATTR{idProduct}=="0478", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
533# Nokia 5500 Sport Mobile Phone
534ATTR{idVendor}=="0421", ATTR{idProduct}=="047e", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
535# Nokia N91 Mobile Phone
536ATTR{idVendor}=="0421", ATTR{idProduct}=="0485", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
537# Nokia 5700 XpressMusic Mobile Phone
538ATTR{idVendor}=="0421", ATTR{idProduct}=="04b4", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
539# Nokia 5300 Mobile Phone
540ATTR{idVendor}=="0421", ATTR{idProduct}=="04ba", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
541# Nokia 5200 Mobile Phone
542ATTR{idVendor}=="0421", ATTR{idProduct}=="04be", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
543# Nokia N73 Mobile Phone
544ATTR{idVendor}=="0421", ATTR{idProduct}=="04d1", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
545# Nokia N75 Mobile Phone
546ATTR{idVendor}=="0421", ATTR{idProduct}=="04e1", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
547# Nokia N93i Mobile Phone
548ATTR{idVendor}=="0421", ATTR{idProduct}=="04e5", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
549# Nokia N95 Mobile Phone
550ATTR{idVendor}=="0421", ATTR{idProduct}=="04ef", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
551# Nokia N80 Internet Edition (Media Player)
552ATTR{idVendor}=="0421", ATTR{idProduct}=="04f1", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
553# Nokia N9
554ATTR{idVendor}=="0421", ATTR{idProduct}=="051a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
555# Nokia 5530 Xpressmusic
556ATTR{idVendor}=="05c6", ATTR{idProduct}=="0229", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
557# Nokia/Verizon 6205 Balboa/Verizon Music Phone
558ATTR{idVendor}=="05c6", ATTR{idProduct}=="3196", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
559# Logik LOG DAX MP3 and DAB Player
560ATTR{idVendor}=="13d1", ATTR{idProduct}=="7002", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
561# Technika MP-709
562ATTR{idVendor}=="13d1", ATTR{idProduct}=="7017", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
563# Thomson EM28 Series
564ATTR{idVendor}=="069b", ATTR{idProduct}=="0774", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
565# Thomson / RCA Opal / Lyra MC4002
566ATTR{idVendor}=="069b", ATTR{idProduct}=="0777", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
567# Thomson Lyra MC5104B (M51 Series)
568ATTR{idVendor}=="069b", ATTR{idProduct}=="077c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
569# Thomson RCA H106
570ATTR{idVendor}=="069b", ATTR{idProduct}=="301a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
571# Thomson scenium E308
572ATTR{idVendor}=="069b", ATTR{idProduct}=="3028", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
573# Thomson / RCA Lyra HC308A
574ATTR{idVendor}=="069b", ATTR{idProduct}=="3035", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
575# FOMA F903iX HIGH-SPEED
576ATTR{idVendor}=="04c5", ATTR{idProduct}=="1140", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
577# NormSoft, Inc. Pocket Tunes
578ATTR{idVendor}=="1703", ATTR{idProduct}=="0001", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
579# NormSoft, Inc. Pocket Tunes 4
580ATTR{idVendor}=="1703", ATTR{idProduct}=="0002", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
581# TrekStor Vibez 8/12GB
582ATTR{idVendor}=="066f", ATTR{idProduct}=="842a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
583# Medion MD8333
584ATTR{idVendor}=="066f", ATTR{idProduct}=="8550", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
585# Medion MD8333
586ATTR{idVendor}=="066f", ATTR{idProduct}=="8588", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
587# Medion MD99000 (P9514)/Olivetti Olipad 110
588ATTR{idVendor}=="0408", ATTR{idProduct}=="b009", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
589# Medion Lifetab P9514
590ATTR{idVendor}=="0408", ATTR{idProduct}=="b00a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
591# Maxfield G-Flash NG 1GB
592ATTR{idVendor}=="066f", ATTR{idProduct}=="846c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
593# SigmaTel Inc. MTPMSCN Audio Player
594ATTR{idVendor}=="066f", ATTR{idProduct}=="a010", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
595# TrekStor i.Beat Sweez FM
596ATTR{idVendor}=="0402", ATTR{idProduct}=="0611", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
597# TrekStor i.Beat Organix 2.0
598ATTR{idVendor}=="1e68", ATTR{idProduct}=="0002", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
599# Disney MixMax
600ATTR{idVendor}=="0aa6", ATTR{idProduct}=="6021", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
601# Tevion MD 81488
602ATTR{idVendor}=="0aa6", ATTR{idProduct}=="3011", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
603# MyMusix PD-6070
604ATTR{idVendor}=="0aa6", ATTR{idProduct}=="9601", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
605# Cowon iAudio U3 (MTP mode)
606ATTR{idVendor}=="0e21", ATTR{idProduct}=="0701", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
607# Cowon iAudio 6 (MTP mode)
608ATTR{idVendor}=="0e21", ATTR{idProduct}=="0711", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
609# Cowon iAudio 7 (MTP mode)
610ATTR{idVendor}=="0e21", ATTR{idProduct}=="0751", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
611# Cowon iAudio U5 (MTP mode)
612ATTR{idVendor}=="0e21", ATTR{idProduct}=="0761", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
613# Cowon iAudio D2 (MTP mode)
614ATTR{idVendor}=="0e21", ATTR{idProduct}=="0801", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
615# Cowon iAudio D2+ FW 2.x (MTP mode)
616ATTR{idVendor}=="0e21", ATTR{idProduct}=="0861", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
617# Cowon iAudio D2+ DAB FW 4.x (MTP mode)
618ATTR{idVendor}=="0e21", ATTR{idProduct}=="0871", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
619# Cowon iAudio D2+ FW 3.x (MTP mode)
620ATTR{idVendor}=="0e21", ATTR{idProduct}=="0881", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
621# Cowon iAudio D2+ DMB FW 1.x (MTP mode)
622ATTR{idVendor}=="0e21", ATTR{idProduct}=="0891", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
623# Cowon iAudio S9 (MTP mode)
624ATTR{idVendor}=="0e21", ATTR{idProduct}=="0901", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
625# Cowon iAudio 9 (MTP mode)
626ATTR{idVendor}=="0e21", ATTR{idProduct}=="0911", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
627# Cowon iAudio J3 (MTP mode)
628ATTR{idVendor}=="0e21", ATTR{idProduct}=="0921", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
629# Cowon iAudio X7 (MTP mode)
630ATTR{idVendor}=="0e21", ATTR{idProduct}=="0931", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
631# Cowon iAudio C2 (MTP mode)
632ATTR{idVendor}=="0e21", ATTR{idProduct}=="0941", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
633# Cowon iAudio 10 (MTP mode)
634ATTR{idVendor}=="0e21", ATTR{idProduct}=="0952", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
635# Insignia NS-DV45
636ATTR{idVendor}=="19ff", ATTR{idProduct}=="0303", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
637# Insignia Sport Player
638ATTR{idVendor}=="19ff", ATTR{idProduct}=="0307", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
639# Insignia Pilot 4GB
640ATTR{idVendor}=="19ff", ATTR{idProduct}=="0309", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
641# LG Electronics Inc. T54
642ATTR{idVendor}=="043e", ATTR{idProduct}=="7040", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
643# LG Electronics Inc. UP3
644ATTR{idVendor}=="043e", ATTR{idProduct}=="70b1", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
645# LG Electronics Inc. VX8550 V CAST Mobile Phone
646ATTR{idVendor}=="1004", ATTR{idProduct}=="6010", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
647# LG Electronics Inc. KC910 Renoir Mobile Phone
648ATTR{idVendor}=="1004", ATTR{idProduct}=="608f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
649# LG Electronics Inc. GR-500 Music Player
650ATTR{idVendor}=="1004", ATTR{idProduct}=="611b", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
651# LG Electronics Inc. KM900
652ATTR{idVendor}=="1004", ATTR{idProduct}=="6132", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
653# LG Electronics Inc. LG8575
654ATTR{idVendor}=="1004", ATTR{idProduct}=="619a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
655# LG Electronics Inc. V909 G-Slate
656ATTR{idVendor}=="1004", ATTR{idProduct}=="61f9", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
657# LG Electronics Inc. LG-E617G/P700
658ATTR{idVendor}=="1004", ATTR{idProduct}=="631c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
659# Sony Walkman NWZ-A815/NWZ-A818
660ATTR{idVendor}=="054c", ATTR{idProduct}=="0325", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
661# Sony Walkman NWZ-S516
662ATTR{idVendor}=="054c", ATTR{idProduct}=="0326", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
663# Sony Walkman NWZ-S615F/NWZ-S616F/NWZ-S618F
664ATTR{idVendor}=="054c", ATTR{idProduct}=="0327", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
665# Sony Walkman NWZ-S716F
666ATTR{idVendor}=="054c", ATTR{idProduct}=="035a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
667# Sony Walkman NWZ-A826/NWZ-A828/NWZ-A829
668ATTR{idVendor}=="054c", ATTR{idProduct}=="035b", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
669# Sony Walkman NWZ-A726/NWZ-A728/NWZ-A768
670ATTR{idVendor}=="054c", ATTR{idProduct}=="035c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
671# Sony Walkman NWZ-B135
672ATTR{idVendor}=="054c", ATTR{idProduct}=="036e", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
673# Sony Walkman NWZ-E436F
674ATTR{idVendor}=="054c", ATTR{idProduct}=="0385", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
675# Sony Walkman NWZ-W202
676ATTR{idVendor}=="054c", ATTR{idProduct}=="0388", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
677# Sony Walkman NWZ-S739F
678ATTR{idVendor}=="054c", ATTR{idProduct}=="038c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
679# Sony Walkman NWZ-S638F
680ATTR{idVendor}=="054c", ATTR{idProduct}=="038e", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
681# Sony Walkman NWZ-X1050B/NWZ-X1060B
682ATTR{idVendor}=="054c", ATTR{idProduct}=="0397", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
683# Sony Walkman NWZ-X1051/NWZ-X1061
684ATTR{idVendor}=="054c", ATTR{idProduct}=="0398", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
685# Sony Walkman NWZ-B142F
686ATTR{idVendor}=="054c", ATTR{idProduct}=="03d8", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
687# Sony Walkman NWZ-E344
688ATTR{idVendor}=="054c", ATTR{idProduct}=="03fc", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
689# Sony Walkman NWZ-E445
690ATTR{idVendor}=="054c", ATTR{idProduct}=="03fd", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
691# Sony Walkman NWZ-S545
692ATTR{idVendor}=="054c", ATTR{idProduct}=="03fe", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
693# Sony Walkman NWZ-A845
694ATTR{idVendor}=="054c", ATTR{idProduct}=="0404", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
695# Sony Walkman NWZ-W252B
696ATTR{idVendor}=="054c", ATTR{idProduct}=="04bb", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
697# Sony Walkman NWZ-B153F
698ATTR{idVendor}=="054c", ATTR{idProduct}=="04be", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
699# Sony Walkman NWZ-E354
700ATTR{idVendor}=="054c", ATTR{idProduct}=="04cb", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
701# Sony Walkman NWZ-S754
702ATTR{idVendor}=="054c", ATTR{idProduct}=="04cc", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
703# Sony NWZ-B163F
704ATTR{idVendor}=="054c", ATTR{idProduct}=="059a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
705# Sony Walkman NWZ-E464
706ATTR{idVendor}=="054c", ATTR{idProduct}=="05a6", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
707# Sony NWZ-S765
708ATTR{idVendor}=="054c", ATTR{idProduct}=="05a8", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
709# Sony Sony Tablet S
710ATTR{idVendor}=="054c", ATTR{idProduct}=="05b3", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
711# Sony Sony Tablet S1
712ATTR{idVendor}=="054c", ATTR{idProduct}=="05b4", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
713# Sony DCR-SR75
714ATTR{idVendor}=="054c", ATTR{idProduct}=="1294", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
715# SonyEricsson K850i
716ATTR{idVendor}=="0fce", ATTR{idProduct}=="0075", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
717# SonyEricsson W910
718ATTR{idVendor}=="0fce", ATTR{idProduct}=="0076", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
719# SonyEricsson W890i
720ATTR{idVendor}=="0fce", ATTR{idProduct}=="00b3", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
721# SonyEricsson W760i
722ATTR{idVendor}=="0fce", ATTR{idProduct}=="00c6", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
723# SonyEricsson C902
724ATTR{idVendor}=="0fce", ATTR{idProduct}=="00d4", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
725# SonyEricsson C702
726ATTR{idVendor}=="0fce", ATTR{idProduct}=="00d9", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
727# SonyEricsson W980
728ATTR{idVendor}=="0fce", ATTR{idProduct}=="00da", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
729# SonyEricsson C905
730ATTR{idVendor}=="0fce", ATTR{idProduct}=="00ef", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
731# SonyEricsson W595
732ATTR{idVendor}=="0fce", ATTR{idProduct}=="00f3", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
733# SonyEricsson W902
734ATTR{idVendor}=="0fce", ATTR{idProduct}=="00f5", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
735# SonyEricsson T700
736ATTR{idVendor}=="0fce", ATTR{idProduct}=="00fb", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
737# SonyEricsson W705/W715
738ATTR{idVendor}=="0fce", ATTR{idProduct}=="0105", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
739# SonyEricsson W995
740ATTR{idVendor}=="0fce", ATTR{idProduct}=="0112", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
741# SonyEricsson U5
742ATTR{idVendor}=="0fce", ATTR{idProduct}=="0133", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
743# SonyEricsson U8i
744ATTR{idVendor}=="0fce", ATTR{idProduct}=="013a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
745# SonyEricsson j10i2 (Elm)
746ATTR{idVendor}=="0fce", ATTR{idProduct}=="0144", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
747# SonyEricsson j108i (Cedar)
748ATTR{idVendor}=="0fce", ATTR{idProduct}=="014e", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
749# SonyEricsson W302
750ATTR{idVendor}=="0fce", ATTR{idProduct}=="10c8", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
751# SonyEricsson j10i (Elm)
752ATTR{idVendor}=="0fce", ATTR{idProduct}=="d144", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
753# SonyEricsson K550i
754ATTR{idVendor}=="0fce", ATTR{idProduct}=="e000", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
755# SonyEricsson LT15i (Xperia arc S)
756ATTR{idVendor}=="0fce", ATTR{idProduct}=="014f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
757# SonyEricsson MT11i Xperia Neo
758ATTR{idVendor}=="0fce", ATTR{idProduct}=="0156", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
759# SonyEricsson MK16i Xperia
760ATTR{idVendor}=="0fce", ATTR{idProduct}=="015a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
761# SonyEricsson ST18a Xperia Ray
762ATTR{idVendor}=="0fce", ATTR{idProduct}=="0161", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
763# SonyEricsson SK17i Xperia Mini Pro
764ATTR{idVendor}=="0fce", ATTR{idProduct}=="0166", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
765# SonyEricsson ST15i Xperia Mini
766ATTR{idVendor}=="0fce", ATTR{idProduct}=="0167", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
767# SonyEricsson ST17i Xperia Active
768ATTR{idVendor}=="0fce", ATTR{idProduct}=="0168", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
769# SONY LT26i Xperia S
770ATTR{idVendor}=="0fce", ATTR{idProduct}=="0169", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
771# SONY WT19i Live Walkman
772ATTR{idVendor}=="0fce", ATTR{idProduct}=="016d", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
773# SONY ST21i Xperia Tipo
774ATTR{idVendor}=="0fce", ATTR{idProduct}=="0170", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
775# SONY ST15i Xperia U
776ATTR{idVendor}=="0fce", ATTR{idProduct}=="0171", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
777# SONY LT22i Xperia P
778ATTR{idVendor}=="0fce", ATTR{idProduct}=="0172", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
779# SONY LT26w Xperia Acro S
780ATTR{idVendor}=="0fce", ATTR{idProduct}=="0176", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
781# SonyEricsson ST17i Xperia Active (MTP+UMS mode)
782ATTR{idVendor}=="0fce", ATTR{idProduct}=="4168", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
783# SONY LT26i Xperia S (MTP+UMS mode)
784ATTR{idVendor}=="0fce", ATTR{idProduct}=="4169", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
785# SONY ST21i Xperia Tipo (MTP+UMS mode)
786ATTR{idVendor}=="0fce", ATTR{idProduct}=="4170", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
787# SONY ST25i Xperia U (MTP+UMS mode)
788ATTR{idVendor}=="0fce", ATTR{idProduct}=="4171", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
789# SONY LT22i Xperia P (MTP+UMS mode)
790ATTR{idVendor}=="0fce", ATTR{idProduct}=="4172", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
791# SONY LT26w Xperia Acro S (MTP+UMS mode)
792ATTR{idVendor}=="0fce", ATTR{idProduct}=="4176", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
793# SonyEricsson LT15i Xperia Arc (MTP+ADB mode)
794ATTR{idVendor}=="0fce", ATTR{idProduct}=="514f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
795# SonyEricsson MT11i Xperia Neo (MTP+ADB mode)
796ATTR{idVendor}=="0fce", ATTR{idProduct}=="5156", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
797# SonyEricsson ST17i Xperia Active (MTP+ADB mode)
798ATTR{idVendor}=="0fce", ATTR{idProduct}=="5168", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
799# SONY LT26i Xperia S (MTP+ADB mode)
800ATTR{idVendor}=="0fce", ATTR{idProduct}=="5169", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
801# SonyEricsson MK16i Xperia (MTP+ADB mode)
802ATTR{idVendor}=="0fce", ATTR{idProduct}=="515a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
803# SonyEricsson ST18i Xperia Ray (MTP+ADB mode)
804ATTR{idVendor}=="0fce", ATTR{idProduct}=="5161", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
805# SonyEricsson SK17i Xperia Mini Pro (MTP+ADB mode)
806ATTR{idVendor}=="0fce", ATTR{idProduct}=="5166", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
807# SonyEricsson ST15i Xperia Mini (MTP+ADB mode)
808ATTR{idVendor}=="0fce", ATTR{idProduct}=="5167", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
809# SonyEricsson SK17i Xperia Mini Pro (MTP+ADB mode)
810ATTR{idVendor}=="0fce", ATTR{idProduct}=="516d", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
811# SONY ST21i Xperia Tipo (MTP+ADB mode)
812ATTR{idVendor}=="0fce", ATTR{idProduct}=="5170", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
813# SONY ST25i Xperia U (MTP+ADB mode)
814ATTR{idVendor}=="0fce", ATTR{idProduct}=="5171", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
815# SONY LT22i Xperia P (MTP+ADB mode)
816ATTR{idVendor}=="0fce", ATTR{idProduct}=="5172", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
817# SONY LT26w Xperia Acro S (MTP+ADB mode)
818ATTR{idVendor}=="0fce", ATTR{idProduct}=="5176", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
819# SONY MT27i Xperia Sola (MTP+UMS+? mode)
820ATTR{idVendor}=="0fce", ATTR{idProduct}=="a173", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
821# SONY ST27i Xperia Go (MTP+UMS+? mode)
822ATTR{idVendor}=="0fce", ATTR{idProduct}=="a17e", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
823# Motorola V3m/V750 verizon
824ATTR{idVendor}=="22b8", ATTR{idProduct}=="2a65", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
825# Motorola Xoom 2 Media Edition (ID2)
826ATTR{idVendor}=="22b8", ATTR{idProduct}=="41cf", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
827# Motorola Droid X/MB525 (Defy)
828ATTR{idVendor}=="22b8", ATTR{idProduct}=="41d6", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
829# Motorola Milestone / Verizon Droid
830ATTR{idVendor}=="22b8", ATTR{idProduct}=="41dc", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
831# Motorola DROID2
832ATTR{idVendor}=="22b8", ATTR{idProduct}=="42a7", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
833# Motorola Xoom 2 Media Edition
834ATTR{idVendor}=="22b8", ATTR{idProduct}=="4311", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
835# Motorola XT912/XT928
836ATTR{idVendor}=="22b8", ATTR{idProduct}=="4362", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
837# Motorola DROID4
838ATTR{idVendor}=="22b8", ATTR{idProduct}=="437f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
839# Motorola IdeaPad K1
840ATTR{idVendor}=="22b8", ATTR{idProduct}=="4811", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
841# Motorola A1200
842ATTR{idVendor}=="22b8", ATTR{idProduct}=="60ca", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
843# Motorola MTP Test Command Interface
844ATTR{idVendor}=="22b8", ATTR{idProduct}=="6413", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
845# Motorola RAZR2 V8/U9/Z6
846ATTR{idVendor}=="22b8", ATTR{idProduct}=="6415", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
847# Motorola Xoom (Factory test)
848ATTR{idVendor}=="22b8", ATTR{idProduct}=="70a3", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
849# Motorola Xoom (MTP)
850ATTR{idVendor}=="22b8", ATTR{idProduct}=="70a8", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
851# Motorola Xoom (MTP+ADB)
852ATTR{idVendor}=="22b8", ATTR{idProduct}=="70a9", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
853# Motorola Milestone X2
854ATTR{idVendor}=="22b8", ATTR{idProduct}=="70ca", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
855# Google Inc (for Sony) S1
856ATTR{idVendor}=="18d1", ATTR{idProduct}=="05b3", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
857# Google Inc (for Barnes & Noble) Nook Color
858ATTR{idVendor}=="18d1", ATTR{idProduct}=="2d02", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
859# Google Inc (for Asus) TF101 Transformer
860ATTR{idVendor}=="18d1", ATTR{idProduct}=="4e0f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
861# Google Inc (for Samsung) Nexus S
862ATTR{idVendor}=="18d1", ATTR{idProduct}=="4e21", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
863# Google Inc (for Asus) Nexus 7 (MTP)
864ATTR{idVendor}=="18d1", ATTR{idProduct}=="4e41", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
865# Google Inc (for Asus) Nexus 7 (MTP+ADB)
866ATTR{idVendor}=="18d1", ATTR{idProduct}=="4e42", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
867# Google Inc (for Motorola) Xoom (MZ604)
868ATTR{idVendor}=="18d1", ATTR{idProduct}=="70a8", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
869# Google Inc (for Toshiba) Thrive 7/AT105
870ATTR{idVendor}=="18d1", ATTR{idProduct}=="7102", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
871# Google Inc (for Lenovo) Ideapad K1
872ATTR{idVendor}=="18d1", ATTR{idProduct}=="740a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
873# Google Inc (for Medion) MD99000 (P9514)
874ATTR{idVendor}=="18d1", ATTR{idProduct}=="b00a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
875# Google Inc (for LG Electronics) P990/Optimus (Cyanogen)
876ATTR{idVendor}=="18d1", ATTR{idProduct}=="d109", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
877# Google Inc (for LG Electronics) P990/Optimus
878ATTR{idVendor}=="18d1", ATTR{idProduct}=="d10a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
879# Kenwood Media Keg HD10GB7 Sport Player
880ATTR{idVendor}=="0b28", ATTR{idProduct}=="100c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
881# Micro-Star International P610/Model MS-5557
882ATTR{idVendor}=="0db0", ATTR{idProduct}=="5572", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
883# FOMA D905i
884ATTR{idVendor}=="06d3", ATTR{idProduct}=="21ba", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
885# Haier Ibiza Rhapsody
886ATTR{idVendor}=="1302", ATTR{idProduct}=="1016", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
887# Haier Ibiza Rhapsody
888ATTR{idVendor}=="1302", ATTR{idProduct}=="1017", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
889# Panasonic P905i
890ATTR{idVendor}=="04da", ATTR{idProduct}=="2145", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
891# Panasonic P906i
892ATTR{idVendor}=="04da", ATTR{idProduct}=="2158", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
893# Polaroid Freescape/MPU-433158
894ATTR{idVendor}=="0546", ATTR{idProduct}=="2035", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
895# Pioneer XMP3
896ATTR{idVendor}=="08e4", ATTR{idProduct}=="0148", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
897# Slacker Inc. Slacker Portable Media Player
898ATTR{idVendor}=="1bdc", ATTR{idProduct}=="fabf", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
899# Conceptronic CMTD2
900ATTR{idVendor}=="1e53", ATTR{idProduct}=="0005", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
901# O2 Sistemas ZoltarTV
902ATTR{idVendor}=="1e53", ATTR{idProduct}=="0006", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
903# Wyplay Wyplayer
904ATTR{idVendor}=="1e53", ATTR{idProduct}=="0007", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
905# Perception Digital, Ltd Gigaware GX400
906ATTR{idVendor}=="0aa6", ATTR{idProduct}=="9702", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
907# RIM BlackBerry Storm/9650
908ATTR{idVendor}=="0fca", ATTR{idProduct}=="8007", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
909# Nextar MA715A-8R
910ATTR{idVendor}=="0402", ATTR{idProduct}=="5668", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
911# Coby COBY MP705
912ATTR{idVendor}=="1e74", ATTR{idProduct}=="6512", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
913# Apple iPhone
914ATTR{idVendor}=="05ac", ATTR{idProduct}=="1290", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
915# Apple iPod Touch 1st Gen
916ATTR{idVendor}=="05ac", ATTR{idProduct}=="1291", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
917# Apple iPhone 3G
918ATTR{idVendor}=="05ac", ATTR{idProduct}=="1292", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
919# Apple iPod Touch 2nd Gen
920ATTR{idVendor}=="05ac", ATTR{idProduct}=="1293", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
921# Apple iPhone 3GS
922ATTR{idVendor}=="05ac", ATTR{idProduct}=="1294", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
923# Apple 0x1296
924ATTR{idVendor}=="05ac", ATTR{idProduct}=="1296", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
925# Apple 0x1297
926ATTR{idVendor}=="05ac", ATTR{idProduct}=="1297", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
927# Apple 0x1298
928ATTR{idVendor}=="05ac", ATTR{idProduct}=="1298", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
929# Apple iPod Touch 3rd Gen
930ATTR{idVendor}=="05ac", ATTR{idProduct}=="1299", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
931# Apple iPad
932ATTR{idVendor}=="05ac", ATTR{idProduct}=="129a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
933# Curitel Communications, Inc. Verizon Wireless Device
934ATTR{idVendor}=="106c", ATTR{idProduct}=="3215", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
935# Pantech Crux
936ATTR{idVendor}=="106c", ATTR{idProduct}=="f003", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
937# Asus TF300 Transformer
938ATTR{idVendor}=="0b05", ATTR{idProduct}=="4c80", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
939# Asus TF300 Transformer (USB debug mode)
940ATTR{idVendor}=="0b05", ATTR{idProduct}=="4c81", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
941# Asus TF700 Transformer
942ATTR{idVendor}=="0b05", ATTR{idProduct}=="4c90", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
943# Asus TF201 Transformer Prime (keyboard dock)
944ATTR{idVendor}=="0b05", ATTR{idProduct}=="4d00", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
945# Asus TF201 Transformer Prime (tablet only)
946ATTR{idVendor}=="0b05", ATTR{idProduct}=="4d01", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
947# Asus TFXXX Transformer Prime (unknown version)
948ATTR{idVendor}=="0b05", ATTR{idProduct}=="4d04", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
949# Asus TF101 Eeepad Slider
950ATTR{idVendor}=="0b05", ATTR{idProduct}=="4e01", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
951# Asus TF101 Eeepad Transformer
952ATTR{idVendor}=="0b05", ATTR{idProduct}=="4e0f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
953# Asus TF101 Eeepad Transformer (debug mode)
954ATTR{idVendor}=="0b05", ATTR{idProduct}=="4e1f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
955# Lenovo K1
956ATTR{idVendor}=="17ef", ATTR{idProduct}=="740a", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
957# Lenovo ThinkPad Tablet
958ATTR{idVendor}=="17ef", ATTR{idProduct}=="741c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
959# Lenovo P700
960ATTR{idVendor}=="17ef", ATTR{idProduct}=="7497", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
961# Lenovo Lifetab S9512
962ATTR{idVendor}=="17ef", ATTR{idProduct}=="74cc", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
963# Huawei Honor U8860
964ATTR{idVendor}=="12d1", ATTR{idProduct}=="1051", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
965# Huawei Mediapad (mode 0)
966ATTR{idVendor}=="12d1", ATTR{idProduct}=="360f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
967# Huawei Mediapad (mode 1)
968ATTR{idVendor}=="12d1", ATTR{idProduct}=="361f", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
969# ZTE V55 ID 1
970ATTR{idVendor}=="19d2", ATTR{idProduct}=="0244", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
971# ZTE V55 ID 2
972ATTR{idVendor}=="19d2", ATTR{idProduct}=="0245", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
973# HTC Zopo ZP100 (ID1)
974ATTR{idVendor}=="0bb4", ATTR{idProduct}=="0c02", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
975# HTC EVO 4G LTE
976ATTR{idVendor}=="0bb4", ATTR{idProduct}=="0c93", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
977# HTC EVO 4G LTE (second ID)
978ATTR{idVendor}=="0bb4", ATTR{idProduct}=="0ca8", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
979# Hewlett-Packard HP Touchpad
980ATTR{idVendor}=="0bb4", ATTR{idProduct}=="685c", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
981# Hewlett-Packard HP Touchpad (debug mode)
982ATTR{idVendor}=="0bb4", ATTR{idProduct}=="6860", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
983# HTC Zopo ZP100 (ID2)
984ATTR{idVendor}=="0bb4", ATTR{idProduct}=="2008", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
985# NEC FOMA N01A
986ATTR{idVendor}=="0409", ATTR{idProduct}=="0242", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
987# nVidia CM9-Adam
988ATTR{idVendor}=="0955", ATTR{idProduct}=="70a9", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
989# Vizio VTAB1008
990ATTR{idVendor}=="0489", ATTR{idProduct}=="e040", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
991# Various Viewpia DR/bq Kepler
992ATTR{idVendor}=="2207", ATTR{idProduct}=="0001", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
993# Isabella Her Prototype
994ATTR{idVendor}=="0b20", ATTR{idProduct}=="ddee", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
995
996# Autoprobe vendor-specific, communication and PTP devices
997ENV{ID_MTP_DEVICE}!="1", ENV{MTP_NO_PROBE}!="1", ENV{COLOR_MEASUREMENT_DEVICE}!="1", ENV{libsane_matched}!="yes", ATTR{bDeviceClass}=="00|02|06|ef|ff", PROGRAM="mtp-probe /sys$env{DEVPATH} $attr{busnum} $attr{devnum}", RESULT=="1", SYMLINK+="libmtp-%k", MODE="660", GROUP="audio", ENV{ID_MTP_DEVICE}="1", ENV{ID_MEDIA_PLAYER}="1"
998
999LABEL="libmtp_rules_end"
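The final autoprobe rule above relies on mtp-probe inspecting vendor-specific, communication and PTP class interfaces and only tags the device when the probe returns 1. A quick target-side way to confirm that a plugged-in device was caught by these rules is to query its udev properties; this is only an illustrative check, and the 001/004 bus and device numbers are placeholders taken from lsusb output:

  # Show the properties udev attached to the USB device node.
  udevadm info --query=env --name=/dev/bus/usb/001/004 | grep -E 'ID_MTP_DEVICE|ID_MEDIA_PLAYER'

  # Replay rule processing for the same device to see which libmtp rule fired.
  udevadm test "$(udevadm info --query=path --name=/dev/bus/usb/001/004)" 2>&1 | grep -i libmtp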
diff --git a/meta-oe/recipes-connectivity/libmtp/libmtp-1.1.5/glibc-2.20.patch b/meta-oe/recipes-connectivity/libmtp/libmtp-1.1.5/glibc-2.20.patch
new file mode 100644
index 000000000..38b45c2a9
--- /dev/null
+++ b/meta-oe/recipes-connectivity/libmtp/libmtp-1.1.5/glibc-2.20.patch
@@ -0,0 +1,36 @@
1
2Include config.h so we get the defines available for subsequent
3include files
4
5Fixes errors like
6
7| In file included from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/sysroots/beaglebone/usr/include/string.h:634:0,
8| from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/work/cortexa8t2hf-vfp-neon-oe-linux-gnueabi/libmtp/1.1.5-r0/libmtp-1.1.5/src/util.c:36:
9| /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/work/cortexa8t2hf-vfp-neon-oe-linux-gnueabi/libmtp/1.1.5-r0/libmtp-1.1.5/src/util.h:29:7: error: expected identifier or '(' before '__extension
10__'
11| char *strndup (const char *s, size_t n);
12| ^
13| /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/work/cortexa8t2hf-vfp-neon-oe-linux-gnueabi/libmtp/1.1.5-r0/libmtp-1.1.5/src/util.c:111:7: error: expected identifier or '(' before '__extensio
14n__'
15| char *strndup (const char *s, size_t n)
16| ^
17| make[2]: *** [libmtp_la-util.lo] Error 1
18
19
20Signed-off-by: Khem Raj <raj.khem@gmail.com>
21
22Upstream-Status: Pending
23
24Index: libmtp-1.1.5/src/util.c
25===================================================================
26--- libmtp-1.1.5.orig/src/util.c 2011-01-10 05:37:21.000000000 -0800
27+++ libmtp-1.1.5/src/util.c 2014-09-03 23:50:44.703563888 -0700
28@@ -22,6 +22,8 @@
29 * Boston, MA 02111-1307, USA.
30 */
31
32+#include "config.h"
33+
34 /* MSVC does not have these */
35 #ifndef _MSC_VER
36 #include <sys/time.h>
diff --git a/meta-oe/recipes-connectivity/libmtp/libmtp_1.1.5.bb b/meta-oe/recipes-connectivity/libmtp/libmtp_1.1.5.bb
new file mode 100644
index 000000000..de63c72f5
--- /dev/null
+++ b/meta-oe/recipes-connectivity/libmtp/libmtp_1.1.5.bb
@@ -0,0 +1,61 @@
1# TODO: include debian's mtp-tools man page (needs xsltproc-native and
2# docbook-xsl-native, or we pregenerate it), add support for doxygen
3# generation fully with -natives
4DESCRIPTION = "libmtp is an Initiator implementation of the Media Transfer \
5Protocol (MTP) in the form of a library suitable primarily for POSIX \
6compliant operating systems"
7SUMMARY = "libmtp is an Initiator implementation of the Media Transfer Protocol (MTP)"
8HOMEPAGE = "http://libmtp.sourceforge.net/"
9LICENSE = "LGPL-2.1+"
10LIC_FILES_CHKSUM = "\
11 file://COPYING;md5=0448d3676bc0de00406af227d341a4d1 \
12 file://src/ptp.c;beginline=3;endline=22;md5=dafe6cfd1782f56471bb94ab06624c1f \
13 file://examples/albums.c;beginline=5;endline=21;md5=84f4e55dfec49e898b7f68a828c15620 \
14"
15
16DEPENDS += "libusb1 gettext-native"
17
18SCM_URI = "git://git.code.sf.net/p/libmtp/code"
19SRC_URI = "\
20 ${SOURCEFORGE_MIRROR}/${BPN}/${BPN}-${PV}.tar.gz \
21 file://69-libmtp.rules \
22 file://glibc-2.20.patch \
23"
24SRC_URI[md5sum] = "f80e45c0e6e5798c434bb1c26a7b602d"
25SRC_URI[sha256sum] = "787679171baf8b3cf2fcc03196c705ab4d7cbc969bd71f9d3696be1ce7f1c63a"
26
27# Currently we use a pregenerated rules file produced by mtp-hotplug, rather
28# than having to depend upon libmtp-native or run mtp-hotplug in a postinst.
29do_unpack[vardeps] += "skip_udev_rules_generation"
30do_unpack[postfuncs] += "skip_udev_rules_generation"
31
32skip_udev_rules_generation () {
33 sed -i -e '/^noinst_DATA=/,/util\/mtp-hotplug -H/d' ${S}/Makefile.am
34 cp ${WORKDIR}/69-libmtp.rules ${S}/
35}
36
37inherit autotools pkgconfig lib_package
38
39EXTRA_OECONF += "--disable-rpath"
40
41PACKAGECONFIG ?= "\
42 ${@base_contains('DISTRO_FEATURES', 'largefile', 'largefile', '', d)} \
43"
44PACKAGECONFIG[doxygen] = "--enable-doxygen,--disable-doxygen"
45PACKAGECONFIG[largefile] = "--enable-largefile,--disable-largefile"
46PACKAGECONFIG[mtpz] = "--enable-mtpz,--disable-mtpz,libgcrypt"
47
48PACKAGES =+ "libmtp-common libmtp-runtime mtp-tools"
49
50RDEPENDS_${PN} += "libmtp-common"
51RRECOMMENDS_${PN} += "libmtp-runtime mtp-tools"
52FILES_${PN}-dbg += "${base_libdir}/udev/.debug/*"
53PKG_${PN}-bin = "mtp-tools"
54SUMMARY_${PN}-bin = "Tools for communicating with MTP devices"
55DESCRIPTION_${PN}-bin = "${DESCRIPTION}\nThis package contains tools for communicating with MTP devices."
56FILES_libmtp-common = "${base_libdir}/udev/rules.d/*"
57SUMMARY_libmtp-common = "The udev rules file for MTP devices"
58FILES_libmtp-runtime = "${base_libdir}/udev/mtp-probe"
59DEPENDS_libmtp-runtime = "libmtp-common"
60SUMMARY_libmtp-runtime = "mtp-probe, used for the MTP udev rules"
61DESCRIPTION_libmtp-runtime = "This package provides mtp-probe, a program to probe newly connected device interfaces from userspace to determine if they are MTP devices, used for udev rules."
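Once an image built with this recipe includes the mtp-tools package, a simple smoke test of the whole stack (udev rules, mtp-probe and the library) is to run the bundled command-line tools against a connected device. This is an illustrative target-side check, not part of the build:

  # Enumerate MTP devices on the USB bus and dump their capabilities.
  mtp-detect

  # List the objects stored on the first detected device.
  mtp-files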
diff --git a/meta-oe/recipes-connectivity/libnet/libnet_1.2-rc3.bb b/meta-oe/recipes-connectivity/libnet/libnet_1.2-rc3.bb
new file mode 100644
index 000000000..a558e8096
--- /dev/null
+++ b/meta-oe/recipes-connectivity/libnet/libnet_1.2-rc3.bb
@@ -0,0 +1,19 @@
1SUMMARY = "A packet dissection and creation library"
2# libnet at packetfactory.net is dead
3HOMEPAGE = "https://github.com/sam-github/libnet"
4SECTION = "libs"
5LICENSE = "BSD"
6LIC_FILES_CHKSUM = "file://doc/COPYING;md5=3ec839e00408b484d33b472a86b7c266"
7DEPENDS = "libpcap"
8# There are major API changes between libnet v1.0 and libnet v1.1
9PROVIDES = "libnet-1.2rc2"
10
11SRC_URI = "${SOURCEFORGE_MIRROR}/libnet-dev/${BPN}-${PV}.tar.gz"
12
13SRC_URI[md5sum] = "f051e6e5bdecddb90f77c701c2ca1804"
14SRC_URI[sha256sum] = "72c380785ad44183005e654b47cc12485ee0228d7fa6b0a87109ff7614be4a63"
15
16S = "${WORKDIR}/${BPN}-${PV}"
17
18inherit autotools binconfig
19
diff --git a/meta-oe/recipes-connectivity/libtorrent/libtorrent/don-t-run-code-while-configuring-package.patch b/meta-oe/recipes-connectivity/libtorrent/libtorrent/don-t-run-code-while-configuring-package.patch
new file mode 100644
index 000000000..79d4f29fa
--- /dev/null
+++ b/meta-oe/recipes-connectivity/libtorrent/libtorrent/don-t-run-code-while-configuring-package.patch
@@ -0,0 +1,94 @@
1Using AC_RUN_IFELSE prevents people from configuring the package for
2cross-compiling. Don't run code while configuring the package.
3
4Upstream-Status: Pending
5Signed-off-by: Andrei Gherzan <andrei@gherzan.ro>
6
7Index: libtorrent-0.13.3/scripts/checks.m4
8===================================================================
9--- libtorrent-0.13.3.orig/scripts/checks.m4 2012-05-14 14:17:04.000000000 +0300
10+++ libtorrent-0.13.3/scripts/checks.m4 2013-02-10 15:28:37.414445524 +0200
11@@ -95,40 +95,6 @@
12
13 AC_DEFUN([TORRENT_CHECK_KQUEUE_SOCKET_ONLY], [
14 AC_MSG_CHECKING(whether kqueue supports pipes and ptys)
15-
16- AC_RUN_IFELSE([AC_LANG_SOURCE([
17- #include <fcntl.h>
18- #include <stdlib.h>
19- #include <unistd.h>
20- #include <sys/event.h>
21- #include <sys/time.h>
22- int main() {
23- struct kevent ev@<:@2@:>@, ev_out@<:@2@:>@;
24- struct timespec ts = { 0, 0 };
25- int pfd@<:@2@:>@, pty@<:@2@:>@, kfd, n;
26- char buffer@<:@9001@:>@;
27- if (pipe(pfd) == -1) return 1;
28- if (fcntl(pfd@<:@1@:>@, F_SETFL, O_NONBLOCK) == -1) return 2;
29- while ((n = write(pfd@<:@1@:>@, buffer, sizeof(buffer))) == sizeof(buffer));
30- if ((pty@<:@0@:>@=posix_openpt(O_RDWR | O_NOCTTY)) == -1) return 3;
31- if ((pty@<:@1@:>@=grantpt(pty@<:@0@:>@)) == -1) return 4;
32- EV_SET(ev+0, pfd@<:@1@:>@, EVFILT_WRITE, EV_ADD | EV_ENABLE, 0, 0, NULL);
33- EV_SET(ev+1, pty@<:@1@:>@, EVFILT_READ, EV_ADD | EV_ENABLE, 0, 0, NULL);
34- if ((kfd = kqueue()) == -1) return 5;
35- if ((n = kevent(kfd, ev, 2, NULL, 0, NULL)) == -1) return 6;
36- if (ev_out@<:@0@:>@.flags & EV_ERROR) return 7;
37- if (ev_out@<:@1@:>@.flags & EV_ERROR) return 8;
38- read(pfd@<:@0@:>@, buffer, sizeof(buffer));
39- if ((n = kevent(kfd, NULL, 0, ev_out, 2, &ts)) < 1) return 9;
40- return 0;
41- }
42- ])],
43- [
44- AC_MSG_RESULT(yes)
45- ], [
46- AC_DEFINE(KQUEUE_SOCKET_ONLY, 1, kqueue only supports sockets.)
47- AC_MSG_RESULT(no)
48- ])
49 ])
50
51 AC_DEFUN([TORRENT_WITH_KQUEUE], [
52Index: libtorrent-0.13.3/scripts/common.m4
53===================================================================
54--- libtorrent-0.13.3.orig/scripts/common.m4 2012-05-14 14:17:04.000000000 +0300
55+++ libtorrent-0.13.3/scripts/common.m4 2013-02-10 15:27:55.874446741 +0200
56@@ -222,38 +222,10 @@
57
58 AC_DEFUN([TORRENT_CHECK_EXECINFO], [
59 AC_MSG_CHECKING(for execinfo.h)
60-
61- AC_RUN_IFELSE([AC_LANG_SOURCE([
62- #include <execinfo.h>
63- int main() { backtrace((void**)0, 0); backtrace_symbols((char**)0, 0); return 0;}
64- ])],
65- [
66- AC_MSG_RESULT(yes)
67- AC_DEFINE(USE_EXECINFO, 1, Use execinfo.h)
68- ], [
69- AC_MSG_RESULT(no)
70- ])
71 ])
72
73 AC_DEFUN([TORRENT_CHECK_ALIGNED], [
74 AC_MSG_CHECKING(the byte alignment)
75-
76- AC_RUN_IFELSE([AC_LANG_SOURCE([
77- #include <inttypes.h>
78- int main() {
79- char buf@<:@8@:>@ = { 0, 0, 0, 0, 1, 0, 0, 0 };
80- int i;
81- for (i = 1; i < 4; ++i)
82- if (*(uint32_t*)(buf + i) == 0) return -1;
83- return 0;
84- }
85- ])],
86- [
87- AC_MSG_RESULT(none needed)
88- ], [
89- AC_DEFINE(USE_ALIGNED, 1, Require byte alignment)
90- AC_MSG_RESULT(required)
91- ])
92 ])
93
94
diff --git a/meta-oe/recipes-connectivity/libtorrent/libtorrent_0.13.3.bb b/meta-oe/recipes-connectivity/libtorrent/libtorrent_0.13.3.bb
new file mode 100644
index 000000000..4af3a6a6a
--- /dev/null
+++ b/meta-oe/recipes-connectivity/libtorrent/libtorrent_0.13.3.bb
@@ -0,0 +1,17 @@
1DESCRIPTION = "libTorrent is a BitTorrent library written in C++ for *nix, \
2with a focus on high performance and good code."
3HOMEPAGE = "http://libtorrent.rakshasa.no/"
4LICENSE = "GPL-2.0"
5LIC_FILES_CHKSUM = "file://COPYING;md5=393a5ca445f6965873eca0259a17f833"
6
7DEPENDS = "libsigc++-2.0 openssl cppunit"
8
9SRC_URI = "http://libtorrent.rakshasa.no/downloads/${BP}.tar.gz \
10 file://don-t-run-code-while-configuring-package.patch \
11"
12
13SRC_URI[md5sum] = "e94f6c590bb02aaf4d58618f738a85f2"
14SRC_URI[sha256sum] = "34317d6783b7f8d0805274c9467475b5432a246c0de8e28fc16e3b0b43f35677"
15
16inherit autotools pkgconfig
17
diff --git a/meta-oe/recipes-connectivity/linuxptp/linuxptp/build-Allow-CC-and-prefix-to-be-overriden.patch b/meta-oe/recipes-connectivity/linuxptp/linuxptp/build-Allow-CC-and-prefix-to-be-overriden.patch
new file mode 100644
index 000000000..b1d96ae5a
--- /dev/null
+++ b/meta-oe/recipes-connectivity/linuxptp/linuxptp/build-Allow-CC-and-prefix-to-be-overriden.patch
@@ -0,0 +1,37 @@
1From af485c638c61fa883212ea424e676fbf90bee594 Mon Sep 17 00:00:00 2001
2From: Otavio Salvador <otavio@ossystems.com.br>
3Date: Tue, 1 Jul 2014 17:37:31 -0300
4Subject: [PATCH] build: Allow CC and prefix to be overriden
5
6Upstream-Status: Pending
7
8Signed-off-by: Otavio Salvador <otavio@ossystems.com.br>
9---
10 makefile | 4 ++--
11 1 file changed, 2 insertions(+), 2 deletions(-)
12
13diff --git a/makefile b/makefile
14index 22e7d0d..809cc8f 100644
15--- a/makefile
16+++ b/makefile
17@@ -18,7 +18,7 @@
18 KBUILD_OUTPUT =
19
20 DEBUG =
21-CC = $(CROSS_COMPILE)gcc
22+CC ?= $(CROSS_COMPILE)gcc
23 VER = -DVER=$(version)
24 CFLAGS = -Wall $(VER) $(incdefs) $(DEBUG) $(EXTRA_CFLAGS)
25 LDLIBS = -lm -lrt $(EXTRA_LDFLAGS)
26@@ -35,7 +35,7 @@ incdefs := $(shell $(srcdir)/incdefs.sh)
27 version := $(shell $(srcdir)/version.sh $(srcdir))
28 VPATH = $(srcdir)
29
30-prefix = /usr/local
31+prefix ?= /usr/local
32 sbindir = $(prefix)/sbin
33 mandir = $(prefix)/man
34 man8dir = $(mandir)/man8
35--
361.7.10.4
37
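
For context on why the two "?=" changes above matter: a plain "=" assignment in the makefile overrides whatever CC the build environment exports, while "?=" only assigns when the variable is not already set, so an exported cross-compiler (or a prefix passed in by the packaging system) is honoured. A rough illustration, with a hypothetical toolchain name:

    # With 'CC = $(CROSS_COMPILE)gcc' the exported compiler below is ignored;
    # after the patch ('CC ?= ...') it is picked up automatically.
    export CC="arm-poky-linux-gnueabi-gcc --sysroot=/path/to/sysroot"
    make                        # builds with the exported cross-compiler
    make prefix=/usr install    # 'prefix ?= /usr/local' can likewise be overridden
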
diff --git a/meta-oe/recipes-connectivity/linuxptp/linuxptp_1.4.bb b/meta-oe/recipes-connectivity/linuxptp/linuxptp_1.4.bb
new file mode 100644
index 000000000..4ee0c8873
--- /dev/null
+++ b/meta-oe/recipes-connectivity/linuxptp/linuxptp_1.4.bb
@@ -0,0 +1,20 @@
1DESCRIPTION = "Precision Time Protocol (PTP) according to IEEE standard 1588 for Linux"
2LICENSE = "GPLv2"
3LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
4
5SRC_URI = "http://sourceforge.net/projects/linuxptp/files/v${PV}/linuxptp-${PV}.tgz \
6 file://build-Allow-CC-and-prefix-to-be-overriden.patch"
7
8SRC_URI[md5sum] = "a37ad2b2ef7d1ebc4d64a66d3fe55cdf"
9SRC_URI[sha256sum] = "6cfd5291fb7394cc9f25458927874a203971b66b76d1c9d6568e007d0cbd81f2"
10
11EXTRA_OEMAKE = "ARCH=${TARGET_ARCH} \
12 EXTRA_CFLAGS='-D_GNU_SOURCE -DHAVE_CLOCK_ADJTIME -DHAVE_ONESTEP_SYNC ${CFLAGS}'"
13
14do_install () {
15 install -d ${D}/${bindir}
16 install -p ${S}/ptp4l ${D}/${bindir}
17 install -p ${S}/pmc ${D}/${bindir}
18 install -p ${S}/phc2sys ${D}/${bindir}
19 install -p ${S}/hwstamp_ctl ${D}/${bindir}
20}
diff --git a/meta-oe/recipes-connectivity/loudmouth/loudmouth-1.4.3/04-use-pkg-config-for-gnutls.patch b/meta-oe/recipes-connectivity/loudmouth/loudmouth-1.4.3/04-use-pkg-config-for-gnutls.patch
new file mode 100644
index 000000000..20f388e89
--- /dev/null
+++ b/meta-oe/recipes-connectivity/loudmouth/loudmouth-1.4.3/04-use-pkg-config-for-gnutls.patch
@@ -0,0 +1,23 @@
1Description: use pkg-config to detect gnutls
2Debian: http://bugs.debian.org/529835
3Origin: http://groups.google.com/group/loudmouth-dev/browse_thread/thread/3f78255837048daf#
4
5--- a/configure.ac.orig 2009-08-16 20:29:36.000000000 +0200
6+++ b/configure.ac 2009-08-16 20:30:43.000000000 +0200
7@@ -146,10 +146,12 @@ AC_ARG_WITH(openssl-libs,
8 enable_ssl=no
9 if test "x$ac_ssl" = "xgnutls"; then
10 dnl Look for GnuTLS
11- AM_PATH_LM_LIBGNUTLS($GNUTLS_REQUIRED, have_libgnutls=yes, have_libgnutls=no)
12- if test "x$have_libgnutls" = "xyes"; then
13- CFLAGS="$CFLAGS $LIBGNUTLS_CFLAGS"
14- LIBS="$LIBS $LIBGNUTLS_LIBS"
15+ PKG_CHECK_MODULES(GNUTLS, gnutls >= $GNUTLS_REQUIRED, have_gnutls=yes, have_gnutls=no)
16+ if test "x$have_gnutls" = "xyes"; then
17+ AC_SUBST(ASYNCNS_CFLAGS)
18+ AC_SUBST(ASYNCNS_LIBS)
19+ CFLAGS="$CFLAGS $GNUTLS_CFLAGS"
20+ LIBS="$LIBS $GNUTLS_LIBS"
21 AC_DEFINE(HAVE_GNUTLS, 1, [whether to use GnuTSL support.])
22 enable_ssl=GnuTLS
23 else
diff --git a/meta-oe/recipes-connectivity/loudmouth/loudmouth-1.4.3/glib-2.32.patch b/meta-oe/recipes-connectivity/loudmouth/loudmouth-1.4.3/glib-2.32.patch
new file mode 100644
index 000000000..b6c9422e8
--- /dev/null
+++ b/meta-oe/recipes-connectivity/loudmouth/loudmouth-1.4.3/glib-2.32.patch
@@ -0,0 +1,10 @@
1--- loudmouth-1.4.3.orig/loudmouth/lm-error.c 2008-10-29 21:48:15.000000000 +0100
2+++ loudmouth-1.4.3/loudmouth/lm-error.c 2012-05-06 08:27:07.455739440 +0200
3@@ -19,7 +19,6 @@
4 */
5
6 #include <config.h>
7-#include <glib/gerror.h>
8 #include "lm-error.h"
9
10 /**
diff --git a/meta-oe/recipes-connectivity/loudmouth/loudmouth_1.4.3.bb b/meta-oe/recipes-connectivity/loudmouth/loudmouth_1.4.3.bb
new file mode 100644
index 000000000..e51241909
--- /dev/null
+++ b/meta-oe/recipes-connectivity/loudmouth/loudmouth_1.4.3.bb
@@ -0,0 +1,20 @@
1DESCRIPTION = "Loudmouth is a lightweight and easy-to-use C library for programming with the Jabber protocol."
2HOMEPAGE = "http://www.loudmouth-project.org/"
3
4LICENSE = "LGPLv2.1"
5LIC_FILES_CHKSUM = "file://COPYING;md5=c4f38aef94828f6b280e00d1173be689"
6
7DEPENDS = "glib-2.0 libcheck openssl libidn"
8
9inherit gnomebase gtk-doc
10
11PR = "r2"
12
13SRC_URI += "file://04-use-pkg-config-for-gnutls.patch \
14 file://glib-2.32.patch"
15
16SRC_URI[archive.md5sum] = "55339ca42494690c3942ee1465a96937"
17SRC_URI[archive.sha256sum] = "95a93f5d009b71ea8193d994aa11f311bc330a3efe1b7cd74dc48f11c7f929e3"
18
19EXTRA_OECONF = "--with-ssl=openssl"
20
diff --git a/meta-oe/recipes-connectivity/mosh/mosh_1.2.4.bb b/meta-oe/recipes-connectivity/mosh/mosh_1.2.4.bb
new file mode 100644
index 000000000..5a8e336b9
--- /dev/null
+++ b/meta-oe/recipes-connectivity/mosh/mosh_1.2.4.bb
@@ -0,0 +1,38 @@
1# NOTE: mosh-server requires a UTF-8 locale, but there's no way to add
2# an explicit dependency for this so you need to ensure this is in your
3# image yourself when you install mosh-server.
4
5SUMMARY = "Remote shell supporting roaming and high-latency connections"
6DESCRIPTION = "Remote terminal application that allows roaming, supports \
7intermittent connectivity, and provides intelligent local echo and line \
8editing of user keystrokes. Mosh is a replacement for SSH. It's more \
9robust and responsive, especially over Wi-Fi, cellular, and \
10long-distance links."
11HOMEPAGE = "http://mosh.mit.edu"
12LICENSE = "GPLv3+"
13LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
14
15DEPENDS = "protobuf-native protobuf ncurses zlib libio-pty-perl openssl"
16
17SRC_URI = "http://mosh.mit.edu/mosh-${PV}.tar.gz"
18
19SRC_URI[md5sum] = "c2d918f4d91fdc32546e2e089f9281b2"
20SRC_URI[sha256sum] = "e74d0d323226046e402dd469a176075fc2013b69b0e67cea49762c957175df46"
21
22inherit autotools
23
24PACKAGE_BEFORE_PN += "${PN}-server"
25FILES_${PN}-server = "${bindir}/mosh-server"
26
27NEEDED_PERL_MODULES = "\
28 perl-module-socket \
29 perl-module-getopt-long \
30 perl-module-errno \
31 perl-module-io-socket-inet \
32 perl-module-posix \
33"
34
35# mosh uses SSH to authenticate and the client uses OpenSSH-specific features
36RDEPENDS_${PN} += "openssh-ssh ${NEEDED_PERL_MODULES}"
37# The server seemed not to work with dropbear either
38RDEPENDS_${PN}-server += "openssh-sshd ${NEEDED_PERL_MODULES}"
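
The NOTE at the top of the mosh recipe leaves the UTF-8 locale requirement to the image author. A minimal sketch of one way to satisfy it from local.conf or an image recipe, assuming a glibc-based image and the en_US locale (names and values are illustrative):

    # local.conf or image recipe -- illustrative only
    GLIBC_GENERATE_LOCALES = "en_US.UTF-8"
    IMAGE_LINGUAS = "en-us"
    # or simply install a prebuilt locale package alongside the server:
    IMAGE_INSTALL_append = " mosh-server locale-base-en-us"
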
diff --git a/meta-oe/recipes-connectivity/networkmanager/modemmanager_0.6.0.0.bb b/meta-oe/recipes-connectivity/networkmanager/modemmanager_0.6.0.0.bb
new file mode 100644
index 000000000..5ff18da05
--- /dev/null
+++ b/meta-oe/recipes-connectivity/networkmanager/modemmanager_0.6.0.0.bb
@@ -0,0 +1,29 @@
1DEPENDS = "ppp udev glib-2.0 dbus-glib"
2
3LICENSE = "GPLv2"
4LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
5
6
7inherit gnomebase gettext
8
9SRC_URI = "${GNOME_MIRROR}/ModemManager/${@gnome_verdir("${PV}")}/ModemManager-${PV}.tar.xz"
10
11SRC_URI[md5sum] = "f32640f6672d997ec0887307186e9639"
12SRC_URI[sha256sum] = "d4468300cf4aa7baf21c8564fa515e578056f34de5a64f452b053331f89e8ae2"
13
14S = "${WORKDIR}/ModemManager-${PV}"
15
16FILES_${PN}-dbg += "${libdir}/ModemManager/.debug \
17 ${libdir}/pppd/*/.debug"
18FILES_${PN}-dev += "${datadir}/dbus-1/interfaces \
19 ${libdir}/pppd/*/*.la \
20 ${libdir}/ModemManager/*.la"
21FILES_${PN}-staticdev += "\
22 ${libdir}/ModemManager/*.a \
23 ${libdir}/pppd/*/*.a"
24FILES_${PN} += "${datadir}/dbus-1/*services/ \
25 ${libdir}/ModemManager/*.so \
26 ${libdir}/pppd/*/*.so \
27 ${base_libdir}/udev"
28
29RRECOMMENDS_${PN} += "ppp"
diff --git a/meta-oe/recipes-connectivity/networkmanager/networkmanager/0001-configure.ac-Check-only-for-libsystemd-not-libsystem.patch b/meta-oe/recipes-connectivity/networkmanager/networkmanager/0001-configure.ac-Check-only-for-libsystemd-not-libsystem.patch
new file mode 100644
index 000000000..e356e9b25
--- /dev/null
+++ b/meta-oe/recipes-connectivity/networkmanager/networkmanager/0001-configure.ac-Check-only-for-libsystemd-not-libsystem.patch
@@ -0,0 +1,30 @@
1From 10427a19302e871daeeb245581e930dc58195492 Mon Sep 17 00:00:00 2001
2From: Martin Jansa <Martin.Jansa@gmail.com>
3Date: Thu, 27 Feb 2014 12:59:03 +0100
4Subject: [PATCH] configure.ac: Check only for libsystemd not libsystemd-login
5
6* they were merged into libsystemd in systemd-209
7
8Upstream-Status: Pending (it would need to be conditional on systemd version for upstream to accept this)
9
10Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
11---
12 configure.ac | 2 +-
13 1 file changed, 1 insertion(+), 1 deletion(-)
14
15diff --git a/configure.ac b/configure.ac
16index cc66e9b..3fe958e 100644
17--- a/configure.ac
18+++ b/configure.ac
19@@ -283,7 +283,7 @@ AS_IF([! (echo "$with_session_tracking" | grep -q -E "^(systemd|consolekit|no)$"
20 AM_CONDITIONAL(SESSION_TRACKING_CK, test "$with_session_tracking" = "consolekit")
21 AM_CONDITIONAL(SESSION_TRACKING_SYSTEMD, test "xwith_session_tracking" = "systemd")
22 if test "$with_session_tracking" = "systemd"; then
23- PKG_CHECK_MODULES(SYSTEMD_LOGIN, [libsystemd-login])
24+ PKG_CHECK_MODULES(SYSTEMD_LOGIN, [libsystemd])
25 AC_SUBST(SYSTEMD_LOGIN_CFLAGS)
26 AC_SUBST(SYSTEMD_LOGIN_LIBS)
27 fi
28--
291.9.0
30
diff --git a/meta-oe/recipes-connectivity/networkmanager/networkmanager/0001-don-t-try-to-run-sbin-dhclient-to-get-the-version-nu.patch b/meta-oe/recipes-connectivity/networkmanager/networkmanager/0001-don-t-try-to-run-sbin-dhclient-to-get-the-version-nu.patch
new file mode 100644
index 000000000..5c9ed92b8
--- /dev/null
+++ b/meta-oe/recipes-connectivity/networkmanager/networkmanager/0001-don-t-try-to-run-sbin-dhclient-to-get-the-version-nu.patch
@@ -0,0 +1,36 @@
1From 7dd40db6606c3b3559365a03944cb99aee5ceabc Mon Sep 17 00:00:00 2001
2From: =?UTF-8?q?Andreas=20M=C3=BCller?= <schnitzeltony@googlemail.com>
3Date: Thu, 4 Apr 2013 12:57:58 +0200
4Subject: [PATCH] don't try to run /sbin/dhclient to get the version number,
5 this breaks cross-compiling
6MIME-Version: 1.0
7Content-Type: text/plain; charset=UTF-8
8Content-Transfer-Encoding: 8bit
9
10Upstream-Status: Inappropriate [build system specific]
11
12Signed-off-by: Andreas Müller <schnitzeltony@googlemail.com>
13---
14 configure.ac | 6 ------
15 1 files changed, 0 insertions(+), 6 deletions(-)
16
17diff --git a/configure.ac b/configure.ac
18index cc66e9b..7163287 100644
19--- a/configure.ac
20+++ b/configure.ac
21@@ -488,12 +488,6 @@ AS_IF([test -z "$with_dhcpcd"], with_dhcpcd=yes)
22 # Search and check the executables
23 if test "$with_dhclient" = "yes"; then
24 AC_PATH_PROGS(with_dhclient, dhclient, no, /sbin:/usr/sbin:/usr/local/sbin)
25- if test "$with_dhclient" != "no"; then
26- if ! $with_dhclient --version 2>&1 | grep -q "^isc-dhclient-4\."; then
27- AC_MSG_WARN([Cannot use dhclient, version 4.x is required])
28- with_dhclient=no
29- fi
30- fi
31 fi
32 if test "$with_dhcpcd" = "yes"; then
33 AC_PATH_PROGS(with_dhcpcd, dhcpcd, no, /sbin:/usr/sbin:/usr/local/sbin)
34--
351.7.6.5
36
diff --git a/meta-oe/recipes-connectivity/networkmanager/networkmanager_0.9.8.10.bb b/meta-oe/recipes-connectivity/networkmanager/networkmanager_0.9.8.10.bb
new file mode 100644
index 000000000..8d37c980e
--- /dev/null
+++ b/meta-oe/recipes-connectivity/networkmanager/networkmanager_0.9.8.10.bb
@@ -0,0 +1,111 @@
1SUMMARY = "NetworkManager"
2SECTION = "net/misc"
3
4LICENSE = "GPLv2+"
5LIC_FILES_CHKSUM = "file://COPYING;md5=cbbffd568227ada506640fe950a4823b"
6
7DEPENDS = "libnl dbus dbus-glib udev wireless-tools nss util-linux ppp"
8
9inherit gnome gettext systemd
10
11SRC_URI = " \
12 ${GNOME_MIRROR}/NetworkManager/${@gnome_verdir("${PV}")}/NetworkManager-${PV}.tar.xz \
13 file://0001-don-t-try-to-run-sbin-dhclient-to-get-the-version-nu.patch \
14 file://0001-configure.ac-Check-only-for-libsystemd-not-libsystem.patch \
15"
16SRC_URI[md5sum] = "aad2558887e25417c52eb2deaade2f85"
17SRC_URI[sha256sum] = "064d27223d3824859df12e1fb25b787fec1c68bbc864dc52a0289b9211c4c972"
18
19
20S = "${WORKDIR}/NetworkManager-${PV}"
21
22EXTRA_OECONF = " \
23 --enable-ifupdown \
24 --disable-ifcfg-rh \
25 --disable-ifnet \
26 --disable-ifcfg-suse \
27 --with-netconfig \
28 --with-crypto=nss \
29 --disable-more-warnings \
30 --with-dhclient=${base_sbindir}/dhclient \
31 --with-iptables=${sbindir}/iptables \
32 --with-tests \
33 --with-dnsmasq=${bindir}/dnsmasq \
34"
35
36PACKAGECONFIG ??= "${@base_contains('DISTRO_FEATURES','systemd','systemd','consolekit',d)}"
37PACKAGECONFIG[systemd] = " \
38 --with-systemdsystemunitdir=${systemd_unitdir}/system --with-session-tracking=systemd --enable-polkit, \
39 --without-systemdsystemunitdir, \
40 polkit \
41"
42# consolekit is not picked by shlibs, so add it to RDEPENDS too
43PACKAGECONFIG[consolekit] = "--with-session-tracking=consolekit,,consolekit,consolekit"
44PACKAGECONFIG[concheck] = "--enable-concheck,--disable-concheck,libsoup-2.4"
45
46# Work around dbus permission problems since we lack a proper at_console
47do_install_prepend() {
48 sed -i 's:deny send_destination:allow send_destination:g' ${S}/src/org.freedesktop.NetworkManager.conf
49 sed -i 's:deny send_destination:allow send_destination:g' ${S}/callouts/nm-dispatcher.conf
50 sed -i 's:deny send_destination:allow send_destination:g' ${S}/callouts/nm-dhcp-client.conf
51 sed -i 's:deny send_destination:allow send_destination:g' ${S}/callouts/nm-avahi-autoipd.conf
52}
53
54do_install_append () {
55 install -d ${D}${sysconfdir}/dbus-1/event.d
56 # Additional test binaries
57 install -d ${D}${bindir}
58 install -m 0755 ${B}/test/.libs/libnm* ${D}${bindir}
59
60 # Install an empty VPN folder as nm-connection-editor will happily segfault without it :o.
61 # With or without VPN support built in ;).
62 install -d ${D}${sysconfdir}/NetworkManager/VPN
63
64 rm -rf "${D}${localstatedir}/run"
65 rmdir --ignore-fail-on-non-empty "${D}${localstatedir}"
66}
67
68PACKAGES =+ "libnmutil libnmglib libnmglib-vpn ${PN}-tests ${PN}-bash-completion"
69
70FILES_libnmutil += "${libdir}/libnm-util.so.*"
71FILES_libnmglib += "${libdir}/libnm-glib.so.*"
72FILES_libnmglib-vpn += "${libdir}/libnm-glib-vpn.so.*"
73
74FILES_${PN} += " \
75 ${libexecdir} \
76 ${libdir}/pppd/*/nm-pppd-plugin.so \
77 ${libdir}/NetworkManager/*.so \
78 ${datadir}/polkit-1 \
79 ${datadir}/dbus-1 \
80 ${base_libdir}/udev/* \
81 ${systemd_unitdir}/system/NetworkManager-wait-online.service \
82"
83
84RRECOMMENDS_${PN} += "iptables dnsmasq"
85RCONFLICTS_${PN} = "connman"
86RDEPENDS_${PN} = " \
87 wpa-supplicant \
88 dhcp-client \
89 ${@base_contains('COMBINED_FEATURES', '3gmodem', 'ppp', '', d)} \
90"
91
92FILES_${PN}-dbg += " \
93 ${libdir}/NetworkManager/.debug/ \
94 ${libdir}/pppd/*/.debug/ \
95"
96
97FILES_${PN}-dev += " \
98 ${datadir}/NetworkManager/gdb-cmd \
99 ${libdir}/pppd/*/*.la \
100 ${libdir}/NetworkManager/*.la \
101"
102
103FILES_${PN}-tests = " \
104 ${bindir}/nm-tool \
105 ${bindir}/libnm-glib-test \
106 ${bindir}/nm-online \
107"
108
109FILES_${PN}-bash-completion = "${datadir}/bash-completion"
110
111SYSTEMD_SERVICE_${PN} = "NetworkManager.service"
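
As a reading aid for the PACKAGECONFIG lines above: each bracketed flag expands to comma-separated fields in a fixed order -- configure arguments when the flag is enabled, configure arguments when it is disabled, build-time DEPENDS, then runtime RDEPENDS -- which is why the comment notes that consolekit must be listed again as a runtime dependency. A schematic entry with a hypothetical flag name:

    # PACKAGECONFIG[<flag>] = "<enable args>,<disable args>,<DEPENDS>,<RDEPENDS>"
    PACKAGECONFIG[somefeature] = "--enable-somefeature,--disable-somefeature,libfoo,foo-daemon"
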
diff --git a/meta-oe/recipes-connectivity/obex/obex-data-server_0.4.6.bb b/meta-oe/recipes-connectivity/obex/obex-data-server_0.4.6.bb
new file mode 100644
index 000000000..afe610689
--- /dev/null
+++ b/meta-oe/recipes-connectivity/obex/obex-data-server_0.4.6.bb
@@ -0,0 +1,15 @@
1DESCRIPTION = "obex-data-server is a D-Bus service providing high-level OBEX client and server side functionality"
2LICENSE = "GPLv2"
3LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
4
5DEPENDS = "gtk+ bluez4 dbus-glib imagemagick openobex"
6RCONFLICTS_${PN} = "bluez5"
7
8SRC_URI = "http://tadas.dailyda.com/software/obex-data-server-${PV}.tar.gz"
9SRC_URI[md5sum] = "961ca5db6fe9c97024e133cc6203cc4d"
10SRC_URI[sha256sum] = "b399465ddbd6d0217abedd9411d9d74a820effa0a6a142adc448268d3920094f"
11
12inherit autotools-brokensep pkgconfig
13
14FILES_${PN} += "${datadir}/dbus-1/"
15
diff --git a/meta-oe/recipes-connectivity/obex/openobex-1.5/disable-cable-test.patch b/meta-oe/recipes-connectivity/obex/openobex-1.5/disable-cable-test.patch
new file mode 100644
index 000000000..95b636dd8
--- /dev/null
+++ b/meta-oe/recipes-connectivity/obex/openobex-1.5/disable-cable-test.patch
@@ -0,0 +1,16 @@
1
2#
3# Patch managed by http://www.holgerschurig.de/patcher.html
4#
5
6--- openobex-1.2/apps/Makefile.am~disable-cable-test
7+++ openobex-1.2/apps/Makefile.am
8@@ -6,7 +6,7 @@
9 obex_io.c obex_io.h \
10 obex_put_common.c obex_put_common.h
11
12-bin_PROGRAMS = irxfer obex_tcp irobex_palm3 obex_test
13+bin_PROGRAMS = irxfer obex_tcp irobex_palm3
14
15 obex_test_SOURCES = \
16 obex_test.c obex_test.h \
diff --git a/meta-oe/recipes-connectivity/obex/openobex-1.5/libusb_crosscompile_check.patch b/meta-oe/recipes-connectivity/obex/openobex-1.5/libusb_crosscompile_check.patch
new file mode 100644
index 000000000..1177dfeac
--- /dev/null
+++ b/meta-oe/recipes-connectivity/obex/openobex-1.5/libusb_crosscompile_check.patch
@@ -0,0 +1,11 @@
1--- /tmp/acinclude.m4 2009-04-12 10:32:15.000000000 +0200
2+++ openobex-1.5/acinclude.m4 2009-04-12 10:32:38.000000000 +0200
3@@ -158,7 +158,7 @@
4 ;;
5 *)
6 PKG_CHECK_MODULES(USB, libusb, usb_lib_found=yes, AC_MSG_RESULT(no))
7- AC_CHECK_FILE(${prefix}/lib/pkgconfig/libusb.pc, REQUIRES="libusb")
8+ REQUIRES="libusb"
9 ;;
10 esac
11 AC_SUBST(USB_CFLAGS)
diff --git a/meta-oe/recipes-connectivity/obex/openobex-1.5/separate_builddir.patch b/meta-oe/recipes-connectivity/obex/openobex-1.5/separate_builddir.patch
new file mode 100644
index 000000000..8abf8ae24
--- /dev/null
+++ b/meta-oe/recipes-connectivity/obex/openobex-1.5/separate_builddir.patch
@@ -0,0 +1,16 @@
1Fix detection of IrDA failing with B!=S
2
3Upstream-Status: Pending
4
5Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
6
7--- a/acinclude.m4 2014-05-24 14:05:41.757796816 +0100
8+++ b/acinclude.m4 2014-05-24 14:03:06.556795536 +0100
9@@ -54,6 +54,7 @@
10 ])
11
12 AC_DEFUN([AC_PATH_IRDA_LINUX], [
13+ CPPFLAGS="${CPPFLAGS} -I${srcdir}"
14 AC_CACHE_CHECK([for IrDA support], irda_found, [
15 AC_TRY_COMPILE([
16 #include <sys/socket.h>
diff --git a/meta-oe/recipes-connectivity/obex/openobex_1.5.bb b/meta-oe/recipes-connectivity/obex/openobex_1.5.bb
new file mode 100644
index 000000000..b07779dd5
--- /dev/null
+++ b/meta-oe/recipes-connectivity/obex/openobex_1.5.bb
@@ -0,0 +1,33 @@
1DESCRIPTION = "The Openobex project is an open source implementation of the \
2Object Exchange (OBEX) protocol."
3HOMEPAGE = "http://openobex.triq.net"
4SECTION = "libs"
5DEPENDS = "virtual/libusb0 bluez4"
6LICENSE = "GPLv2 & LGPLv2.1"
7LIC_FILES_CHKSUM = "file://COPYING;md5=eb723b61539feef013de476e68b5c50a \
8 file://COPYING.LIB;md5=a6f89e2100d9b6cdffcea4f398e37343 \
9"
10
11SRC_URI = "http://www.kernel.org/pub/linux/bluetooth/openobex-${PV}.tar.gz \
12 file://disable-cable-test.patch \
13 file://libusb_crosscompile_check.patch \
14 file://separate_builddir.patch"
15
16SRC_URI[md5sum] = "0d83dc86445a46a1b9750107ba7ab65c"
17SRC_URI[sha256sum] = "e602047570799a47ecb028420bda8f2cef41310e5a99d084de10aa9422935e65"
18
19inherit autotools binconfig pkgconfig
20
21EXTRA_OECONF = "--enable-apps --enable-syslog"
22
23do_install_append() {
24 install -d ${D}${datadir}/aclocal
25 install -m 0644 ${S}/openobex.m4 ${D}${datadir}/aclocal
26}
27
28PACKAGES += "openobex-apps"
29FILES_${PN} = "${libdir}/lib*.so.*"
30FILES_${PN}-dev += "${bindir}/openobex-config"
31FILES_${PN}-apps = "${bindir}/*"
32DEBIAN_NOAUTONAME_${PN}-apps = "1"
33
diff --git a/meta-oe/recipes-connectivity/obexftp/obexftp/Remove_some_printf_in_obexftpd.patch b/meta-oe/recipes-connectivity/obexftp/obexftp/Remove_some_printf_in_obexftpd.patch
new file mode 100644
index 000000000..8b73af32e
--- /dev/null
+++ b/meta-oe/recipes-connectivity/obexftp/obexftp/Remove_some_printf_in_obexftpd.patch
@@ -0,0 +1,34 @@
1Signed-off-by: Jun Zhu <R01007@freescale.com>
2
3Upstream-Status: Not applicable
4
5diff -Nru obexftp-0.23/apps.orig/obexftpd.c obexftp-0.23/apps/obexftpd.c
6--- obexftp-0.23/apps.orig/obexftpd.c 2013-11-08 10:42:20.816631583 +0800
7+++ obexftp-0.23/apps/obexftpd.c 2013-11-08 10:44:18.628634893 +0800
8@@ -639,7 +639,7 @@
9 struct stat statbuf;
10 //char *namebuf = NULL;
11
12- fprintf(stderr, "put_done>>>\n");
13+ //fprintf(stderr, "put_done>>>\n");
14 while(OBEX_ObjectGetNextHeader(handle, object, &hi, &hv, &hlen)) {
15 switch(hi) {
16 case OBEX_HDR_BODY:
17@@ -671,7 +671,7 @@
18 }
19 }
20 if(!body) {
21- printf("Got a PUT without a body\n");
22+ //printf("Got a PUT without a body\n");
23 OBEX_ObjectSetRsp(object, OBEX_RSP_CONTINUE, OBEX_RSP_SUCCESS);
24 }
25 if(!name) {
26@@ -807,7 +807,7 @@
27
28 switch(obex_cmd) {
29 case OBEX_CMD_PUT:
30- fprintf(stderr, "obex_ev_progress: obex_cmd_put\n");
31+ //fprintf(stderr, "obex_ev_progress: obex_cmd_put\n");
32 put_done(handle, obj, 0);
33 break;
34 default:
diff --git a/meta-oe/recipes-connectivity/obexftp/obexftp_0.23.bb b/meta-oe/recipes-connectivity/obexftp/obexftp_0.23.bb
new file mode 100644
index 000000000..b6bdfb7e4
--- /dev/null
+++ b/meta-oe/recipes-connectivity/obexftp/obexftp_0.23.bb
@@ -0,0 +1,16 @@
1DESCRIPTION = "A tool for transferring files to/from any OBEX-enabled device"
2LICENSE = "GPLv2 & LGPLv2 & LGPLv2.1"
3LIC_FILES_CHKSUM = "file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552"
4
5DEPENDS += "openobex"
6
7SRC_URI = "http://sourceforge.net/projects/openobex/files/obexftp/${PV}/obexftp-${PV}.tar.bz2 \
8 file://Remove_some_printf_in_obexftpd.patch "
9
10SRC_URI[md5sum] = "f20762061b68bc921e80be4aebc349eb"
11SRC_URI[sha256sum] = "44a74ff288d38c0f75354d6bc2efe7d6dec10112eaff2e7b10e292b0d2105b36"
12
13inherit autotools pkgconfig
14
15EXTRA_OECONF += "--disable-tcl --disable-perl --disable-python --disable-ruby"
16
diff --git a/meta-oe/recipes-connectivity/phonet-utils/phonet-utils_git.bb b/meta-oe/recipes-connectivity/phonet-utils/phonet-utils_git.bb
new file mode 100644
index 000000000..558996391
--- /dev/null
+++ b/meta-oe/recipes-connectivity/phonet-utils/phonet-utils_git.bb
@@ -0,0 +1,13 @@
1SUMMARY = "This small package provides a few command line tools for Linux Phonet"
2HOMEPAGE = ""
3LICENSE = "GPLv2"
4LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe"
5SRC_URI = "git://gitorious.org/meego-cellular/phonet-utils.git;branch=master"
6PR = "r2"
7S = "${WORKDIR}/git"
8SRCREV = "4acfa720fd37d178a048fc2be17180137d4a70ea"
9PV = "0.0.0+gitr${SRCPV}"
10
11inherit autotools-brokensep
12
13FILES_${PN} += "${base_libdir}/udev/rules.d/85-phonet-utils.rules"
diff --git a/meta-oe/recipes-connectivity/rabbitmq-c/rabbitmq-c_0.5.0.bb b/meta-oe/recipes-connectivity/rabbitmq-c/rabbitmq-c_0.5.0.bb
new file mode 100644
index 000000000..91acb1d3c
--- /dev/null
+++ b/meta-oe/recipes-connectivity/rabbitmq-c/rabbitmq-c_0.5.0.bb
@@ -0,0 +1,17 @@
1DESCRIPTION = "A C-language AMQP client library for use with v2.0+ of the RabbitMQ broker"
2HOMEPAGE = "https://github.com/alanxz/rabbitmq-c"
3LIC_FILES_CHKSUM = "file://LICENSE-MIT;md5=6b7424f9db80cfb11fdd5c980b583f53"
4LICENSE = "MIT"
5
6SRC_URI = "https://github.com/alanxz/${BPN}/releases/download/v${PV}/${BP}.tar.gz"
7SRC_URI[md5sum] = "b1f902c658c772cda464754678d8deb6"
8SRC_URI[sha256sum] = "53702ea2ab809af0f923e387458e2cad191d9549f50410035fe82ce5e6ccc4fa"
9
10DEPENDS = "popt openssl"
11
12EXTRA_OECONF = "--disable-examples --enable-tools --disable-docs"
13
14inherit autotools pkgconfig
15
16PACKAGE_BEFORE_PN += "${PN}-tools"
17FILES_${PN}-tools = "${bindir}"
diff --git a/meta-oe/recipes-connectivity/rfkill/rfkill/0001-rfkill-makefile-don-t-use-t-the-OE-install-wrapper-d.patch b/meta-oe/recipes-connectivity/rfkill/rfkill/0001-rfkill-makefile-don-t-use-t-the-OE-install-wrapper-d.patch
new file mode 100644
index 000000000..c6f60d0ed
--- /dev/null
+++ b/meta-oe/recipes-connectivity/rfkill/rfkill/0001-rfkill-makefile-don-t-use-t-the-OE-install-wrapper-d.patch
@@ -0,0 +1,30 @@
1From db764080e54f8f998c28ef8dab78da8b8d1d1420 Mon Sep 17 00:00:00 2001
2From: Koen Kooi <koen@dominion.thruhere.net>
3Date: Fri, 11 Feb 2011 10:35:40 +0100
4Subject: [PATCH] rfkill makefile: don't use -t, the OE install wrapper dislikes it
5
6Signed-off-by: Koen Kooi <koen@dominion.thruhere.net>
7---
8 Makefile | 4 ++--
9 1 files changed, 2 insertions(+), 2 deletions(-)
10
11diff --git a/Makefile b/Makefile
12index 8d6c700..3652690 100644
13--- a/Makefile
14+++ b/Makefile
15@@ -53,10 +53,10 @@ check:
16 install: rfkill rfkill.8.gz
17 @$(NQ) ' INST rfkill'
18 $(Q)$(MKDIR) $(DESTDIR)$(SBINDIR)
19- $(Q)$(INSTALL) -m 755 -t $(DESTDIR)$(SBINDIR) rfkill
20+ $(Q)$(INSTALL) -m 755 rfkill $(DESTDIR)$(SBINDIR)
21 @$(NQ) ' INST rfkill.8'
22 $(Q)$(MKDIR) $(DESTDIR)$(MANDIR)/man8/
23- $(Q)$(INSTALL) -m 644 -t $(DESTDIR)$(MANDIR)/man8/ rfkill.8.gz
24+ $(Q)$(INSTALL) -m 644 rfkill.8.gz $(DESTDIR)$(MANDIR)/man8/
25
26 clean:
27 $(Q)rm -f rfkill *.o *~ *.gz version.c *-stamp
28--
291.6.6.1
30
diff --git a/meta-oe/recipes-connectivity/rfkill/rfkill/dont.call.git.rev-parse.on.parent.dir.patch b/meta-oe/recipes-connectivity/rfkill/rfkill/dont.call.git.rev-parse.on.parent.dir.patch
new file mode 100644
index 000000000..2b8368912
--- /dev/null
+++ b/meta-oe/recipes-connectivity/rfkill/rfkill/dont.call.git.rev-parse.on.parent.dir.patch
@@ -0,0 +1,31 @@
1When WORKDIR is nested inside some other git checkout, version.sh calls git rev-parse
2and gets a description from that enclosing checkout even when rfkill is being
3built from a release tarball.
4
5When the returned description doesn't match the expected v0.4, version.sh exits
6without creating version.c
7 # on git builds check that the version number above
8 # is correct...
9 [ "${descr%%-*}" = "v$VERSION" ] || exit 2
10
11and build fails a bit later:
12 | NOTE: make -j 32 -e MAKEFLAGS=
13 | CC rfkill.o
14 | GEN version.c
15 | make: *** [version.c] Error 2
16 | make: *** Waiting for unfinished jobs....
17 | ERROR: oe_runmake failed
18
19Don't try git rev-parse if there isn't a .git directory in ${S}.
20
21--- a/version.sh 2013-11-15 03:43:12.587744366 -0800
22+++ b/version.sh 2013-11-15 03:42:40.699743320 -0800
23@@ -12,7 +12,7 @@
24
25 if test "x$SUFFIX" != 'x'; then
26 v="$VERSION$SUFFIX"
27-elif head=`git rev-parse --verify HEAD 2>/dev/null`; then
28+elif test -d .git && head=`git rev-parse --verify HEAD 2>/dev/null`; then
29 git update-index --refresh --unmerged > /dev/null
30 descr=$(git describe 2>/dev/null || echo "v$VERSION")
31
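
To make the failure mode described above concrete, a rough reproduction with the unpatched version.sh (paths are illustrative):

    cd ~/src/some-unrelated-git-checkout       # WORKDIR nested inside a foreign repo
    tar xjf rfkill-0.4.tar.bz2 && cd rfkill-0.4
    git rev-parse --verify HEAD                # resolves against the outer repo
    ./version.sh                               # description != "v0.4" -> exit 2, no version.c

With the guard added by the patch, a tree without .git falls through to the release-version branch and version.c is generated as expected.
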
diff --git a/meta-oe/recipes-connectivity/rfkill/rfkill_0.4.bb b/meta-oe/recipes-connectivity/rfkill/rfkill_0.4.bb
new file mode 100644
index 000000000..3b2943c99
--- /dev/null
+++ b/meta-oe/recipes-connectivity/rfkill/rfkill_0.4.bb
@@ -0,0 +1,27 @@
1SUMMARY = "rfkill CLI utility"
2HOMEPAGE = "http://linuxwireless.org/en/users/Documentation/rfkill"
3SECTION = "base"
4LICENSE = "BSD"
5LIC_FILES_CHKSUM = "file://COPYING;md5=c6036d0eb7edbfced28c4160e5d3fa94"
6PR = "r1"
7
8SRC_URI = "http://www.kernel.org/pub/software/network/${BPN}/${BP}.tar.bz2 \
9 file://0001-rfkill-makefile-don-t-use-t-the-OE-install-wrapper-d.patch \
10 file://dont.call.git.rev-parse.on.parent.dir.patch"
11
12SRC_URI[md5sum] = "727892c0fb35c80ee3849fbe89b45350"
13SRC_URI[sha256sum] = "ca10e4827a5f0a36e093aee6ad81b5febf81f8097d7d858889ac51ff364168c1"
14
15do_compile() {
16 oe_runmake
17}
18do_install() {
19 oe_runmake DESTDIR=${D} install
20}
21
22inherit update-alternatives
23
24ALTERNATIVE_${PN} = "rfkill"
25ALTERNATIVE_PRIORITY = "60"
26ALTERNATIVE_LINK_NAME[rfkill] = "${sbindir}/rfkill"
27
diff --git a/meta-oe/recipes-connectivity/rtorrent/rtorrent/don-t-run-code-while-configuring-package.patch b/meta-oe/recipes-connectivity/rtorrent/rtorrent/don-t-run-code-while-configuring-package.patch
new file mode 100644
index 000000000..79d4f29fa
--- /dev/null
+++ b/meta-oe/recipes-connectivity/rtorrent/rtorrent/don-t-run-code-while-configuring-package.patch
@@ -0,0 +1,94 @@
1Using AC_RUN_IFELSE prevents people from configuring the package for
2cross-compiling. Don't run code while configuring the package.
3
4Upstream-Status: Pending
5Signed-off-by: Andrei Gherzan <andrei@gherzan.ro>
6
7Index: libtorrent-0.13.3/scripts/checks.m4
8===================================================================
9--- libtorrent-0.13.3.orig/scripts/checks.m4 2012-05-14 14:17:04.000000000 +0300
10+++ libtorrent-0.13.3/scripts/checks.m4 2013-02-10 15:28:37.414445524 +0200
11@@ -95,40 +95,6 @@
12
13 AC_DEFUN([TORRENT_CHECK_KQUEUE_SOCKET_ONLY], [
14 AC_MSG_CHECKING(whether kqueue supports pipes and ptys)
15-
16- AC_RUN_IFELSE([AC_LANG_SOURCE([
17- #include <fcntl.h>
18- #include <stdlib.h>
19- #include <unistd.h>
20- #include <sys/event.h>
21- #include <sys/time.h>
22- int main() {
23- struct kevent ev@<:@2@:>@, ev_out@<:@2@:>@;
24- struct timespec ts = { 0, 0 };
25- int pfd@<:@2@:>@, pty@<:@2@:>@, kfd, n;
26- char buffer@<:@9001@:>@;
27- if (pipe(pfd) == -1) return 1;
28- if (fcntl(pfd@<:@1@:>@, F_SETFL, O_NONBLOCK) == -1) return 2;
29- while ((n = write(pfd@<:@1@:>@, buffer, sizeof(buffer))) == sizeof(buffer));
30- if ((pty@<:@0@:>@=posix_openpt(O_RDWR | O_NOCTTY)) == -1) return 3;
31- if ((pty@<:@1@:>@=grantpt(pty@<:@0@:>@)) == -1) return 4;
32- EV_SET(ev+0, pfd@<:@1@:>@, EVFILT_WRITE, EV_ADD | EV_ENABLE, 0, 0, NULL);
33- EV_SET(ev+1, pty@<:@1@:>@, EVFILT_READ, EV_ADD | EV_ENABLE, 0, 0, NULL);
34- if ((kfd = kqueue()) == -1) return 5;
35- if ((n = kevent(kfd, ev, 2, NULL, 0, NULL)) == -1) return 6;
36- if (ev_out@<:@0@:>@.flags & EV_ERROR) return 7;
37- if (ev_out@<:@1@:>@.flags & EV_ERROR) return 8;
38- read(pfd@<:@0@:>@, buffer, sizeof(buffer));
39- if ((n = kevent(kfd, NULL, 0, ev_out, 2, &ts)) < 1) return 9;
40- return 0;
41- }
42- ])],
43- [
44- AC_MSG_RESULT(yes)
45- ], [
46- AC_DEFINE(KQUEUE_SOCKET_ONLY, 1, kqueue only supports sockets.)
47- AC_MSG_RESULT(no)
48- ])
49 ])
50
51 AC_DEFUN([TORRENT_WITH_KQUEUE], [
52Index: libtorrent-0.13.3/scripts/common.m4
53===================================================================
54--- libtorrent-0.13.3.orig/scripts/common.m4 2012-05-14 14:17:04.000000000 +0300
55+++ libtorrent-0.13.3/scripts/common.m4 2013-02-10 15:27:55.874446741 +0200
56@@ -222,38 +222,10 @@
57
58 AC_DEFUN([TORRENT_CHECK_EXECINFO], [
59 AC_MSG_CHECKING(for execinfo.h)
60-
61- AC_RUN_IFELSE([AC_LANG_SOURCE([
62- #include <execinfo.h>
63- int main() { backtrace((void**)0, 0); backtrace_symbols((char**)0, 0); return 0;}
64- ])],
65- [
66- AC_MSG_RESULT(yes)
67- AC_DEFINE(USE_EXECINFO, 1, Use execinfo.h)
68- ], [
69- AC_MSG_RESULT(no)
70- ])
71 ])
72
73 AC_DEFUN([TORRENT_CHECK_ALIGNED], [
74 AC_MSG_CHECKING(the byte alignment)
75-
76- AC_RUN_IFELSE([AC_LANG_SOURCE([
77- #include <inttypes.h>
78- int main() {
79- char buf@<:@8@:>@ = { 0, 0, 0, 0, 1, 0, 0, 0 };
80- int i;
81- for (i = 1; i < 4; ++i)
82- if (*(uint32_t*)(buf + i) == 0) return -1;
83- return 0;
84- }
85- ])],
86- [
87- AC_MSG_RESULT(none needed)
88- ], [
89- AC_DEFINE(USE_ALIGNED, 1, Require byte alignment)
90- AC_MSG_RESULT(required)
91- ])
92 ])
93
94
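
The patch above simply drops the run-time probes, so KQUEUE_SOCKET_ONLY, USE_EXECINFO and USE_ALIGNED are left undefined for every build. If one wanted to keep, say, execinfo detection while still supporting cross-compilation, a compile-time probe would do; an illustrative sketch, not what the patch does:

    dnl Cross-compile-safe alternative (illustrative only): detect the
    dnl header at compile time instead of running a test program.
    AC_DEFUN([TORRENT_CHECK_EXECINFO], [
      AC_CHECK_HEADERS([execinfo.h],
        [AC_DEFINE(USE_EXECINFO, 1, Use execinfo.h)])
    ])
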
diff --git a/meta-oe/recipes-connectivity/rtorrent/rtorrent_0.9.3.bb b/meta-oe/recipes-connectivity/rtorrent/rtorrent_0.9.3.bb
new file mode 100644
index 000000000..eb0a39af5
--- /dev/null
+++ b/meta-oe/recipes-connectivity/rtorrent/rtorrent_0.9.3.bb
@@ -0,0 +1,15 @@
1SUMMARY = "Torrent client"
2HOMEPAGE = "http://libtorrent.rakshasa.no/"
3LICENSE = "GPL-2.0"
4LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
5
6DEPENDS = "libsigc++-2.0 curl cppunit libtorrent ncurses"
7
8SRC_URI = "http://libtorrent.rakshasa.no/downloads/${BP}.tar.gz \
9 file://don-t-run-code-while-configuring-package.patch \
10"
11
12SRC_URI[md5sum] = "0bf2f262faa8c8c8d3b11ce286ea2bf2"
13SRC_URI[sha256sum] = "9e93ca41beb1afe74ad7ad8013e0d53ae3586c9b0e97263d722f721535cc7310"
14
15inherit autotools pkgconfig
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_221618_precise-64bit-prototype.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_221618_precise-64bit-prototype.patch
new file mode 100644
index 000000000..31108f2e8
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_221618_precise-64bit-prototype.patch
@@ -0,0 +1,20 @@
1Description: 64 bit fix for libsmbclient
2Author: Christian Perrier <bubulle@debian.org>
3Bug-Debian: http://bugs.debian.org/221618
4Forwarded: http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=221618#27
5
6Index: samba/source3/include/libsmbclient.h
7===================================================================
8--- samba.orig/source3/include/libsmbclient.h
9+++ samba/source3/include/libsmbclient.h
10@@ -79,6 +79,10 @@
11 #include <fcntl.h>
12 #include <utime.h>
13
14+ /* Debian bug #221618 */
15+#define _LARGEFILE64_SOURCE
16+#define _FILE_OFFSET_BITS 64
17+
18 #define SMBC_BASE_FD 10000 /* smallest file descriptor returned */
19
20 #define SMBC_WORKGROUP 1
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_387266_upstream_4104_mention-kerberos-in-smbspool-manpage.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_387266_upstream_4104_mention-kerberos-in-smbspool-manpage.patch
new file mode 100644
index 000000000..d9cc633d4
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_387266_upstream_4104_mention-kerberos-in-smbspool-manpage.patch
@@ -0,0 +1,34 @@
1Description: Mention the Kerberos use of the user argument in the smbspool manpage
2Author: Christian Perrier <bubulle@debian.org>
3Bug-Debian: http://bugs.debian.org/387266
4Forwarded: yes
5Bug: https://bugzilla.samba.org/show_bug.cgi?id=4104
6
7Index: samba/docs-xml/manpages-3/smbspool.8.xml
8===================================================================
9--- samba.orig/docs-xml/manpages-3/smbspool.8.xml
10+++ samba/docs-xml/manpages-3/smbspool.8.xml
11@@ -73,7 +73,9 @@
12 </para></listitem>
13
14 <listitem><para>The user argument (argv[2]) contains the
15- print user's name and is presently not used by smbspool.
16+ print user's name and is presently not used by smbspool
17+ except in Kerberos environments to access the user's
18+ ticket cache.
19 </para></listitem>
20
21 <listitem><para>The title argument (argv[3]) contains the
22Index: samba/docs/manpages/smbspool.8
23===================================================================
24--- samba.orig/docs/manpages/smbspool.8
25+++ samba/docs/manpages/smbspool.8
26@@ -114,7 +114,7 @@
27 .sp -1
28 .IP \(bu 2.3
29 .\}
30-The user argument (argv[2]) contains the print user\*(Aqs name and is presently not used by smbspool\&.
31+The user argument (argv[2]) contains the print user\*(Aqs name and is presently not used by smbspool except in Kerberos environments to access the user\'s ticket cache\&.
32 .RE
33 .sp
34 .RS 4
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_598313_upstream_7499-nss_wins-dont-clobber-daemons-logs.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_598313_upstream_7499-nss_wins-dont-clobber-daemons-logs.patch
new file mode 100644
index 000000000..dcd94e425
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_598313_upstream_7499-nss_wins-dont-clobber-daemons-logs.patch
@@ -0,0 +1,47 @@
1Description: Stop nss_wins from clobbering other daemons' logs
2Author: Christian Perrier <bubulle@debian.org>,Buchan Milne
3Bug-Debian: http://bugs.debian.org/598313
4Forwarded: yes
5Bug: https://bugzilla.samba.org/show_bug.cgi?id=7499
6
7Index: samba/lib/util/debug.c
8===================================================================
9--- samba.orig/lib/util/debug.c
10+++ samba/lib/util/debug.c
11@@ -474,15 +474,17 @@
12
13 if (state.logtype == DEBUG_FILE) {
14 #ifdef WITH_SYSLOG
15- const char *p = strrchr_m( prog_name,'/' );
16- if (p)
17- prog_name = p + 1;
18+ if (prog_name) {
19+ const char *p = strrchr_m( prog_name,'/' );
20+ if (p)
21+ prog_name = p + 1;
22 #ifdef LOG_DAEMON
23- openlog( prog_name, LOG_PID, SYSLOG_FACILITY );
24+ openlog( prog_name, LOG_PID, SYSLOG_FACILITY );
25 #else
26- /* for old systems that have no facility codes. */
27- openlog( prog_name, LOG_PID );
28+ /* for old systems that have no facility codes. */
29+ openlog( prog_name, LOG_PID );
30 #endif
31+ }
32 #endif
33 }
34 }
35Index: samba/nsswitch/wins.c
36===================================================================
37--- samba.orig/nsswitch/wins.c
38+++ samba/nsswitch/wins.c
39@@ -52,7 +52,7 @@
40 lp_set_cmdline("log level", "0");
41
42 TimeInit();
43- setup_logging("nss_wins",False);
44+ setup_logging(NULL,False);
45 lp_load(get_dyn_CONFIGFILE(),True,False,False,True);
46 load_interfaces();
47 }
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_601406_fix-perl-path-in-example.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_601406_fix-perl-path-in-example.patch
new file mode 100644
index 000000000..ba8b1f425
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_601406_fix-perl-path-in-example.patch
@@ -0,0 +1,15 @@
1Description: Fix path to perl binary in example file
2Author: Christian Perrier <bubulle@debian.org>
3Bug-Debian: http://bugs.debian.org/601406
4Forwarded: not-needed
5
6Index: samba/examples/misc/wall.perl
7===================================================================
8--- samba.orig/examples/misc/wall.perl
9+++ samba/examples/misc/wall.perl
10@@ -1,4 +1,4 @@
11-#!/usr/local/bin/perl
12+#!/usr/bin/perl
13 #
14 #@(#) smb-wall.pl Description:
15 #@(#) A perl script which allows you to announce whatever you choose to
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_604768_upstream_7826_drop-using-samba-link.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_604768_upstream_7826_drop-using-samba-link.patch
new file mode 100644
index 000000000..0c54b6b0b
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_604768_upstream_7826_drop-using-samba-link.patch
@@ -0,0 +1,21 @@
1Description: Drop Using Samba link in HTML documentation summary
2Author: Christian Perrier <bubulle@debian.org>
3Bug-Debian: http://bugs.debian.org/604768
4Forwarded: yes
5Bug: https://bugzilla.samba.org/show_bug.cgi?id=7826
6
7Index: samba/docs/htmldocs/index.html
8===================================================================
9--- samba.orig/docs/htmldocs/index.html
10+++ samba/docs/htmldocs/index.html
11@@ -23,10 +23,6 @@
12 <td valign="top">This book provides example configurations, it documents key aspects of Microsoft Windows networking, provides in-depth insight into the important configuration of Samba-3, and helps to put all of these into a useful framework.</td>
13 </tr>
14 <tr>
15- <td valign="top"><a href="using_samba/toc.html">Using Samba</a>, 2nd Edition</td>
16- <td valign="top"><i>Using Samba</i>, Second Edition is a comprehensive guide to Samba administration. It covers all versions of Samba from 2.0 to 2.2, including selected features from an alpha version of 3.0, as well as the SWAT graphical configuration tool. Updated for Windows 2000, ME, and XP, the book also explores Samba's new role as a primary domain controller and domain member server, its support for the use of Windows NT/2000/XP authentication and filesystem security on the host Unix system, and accessing shared files and printers from Unix clients.</td>
17-</tr>
18-<tr>
19 <td valign="top"><a href="manpages/index.html">Man pages</a></td>
20 <td valign="top">The Samba man pages in HTML.</td>
21 </tr>
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_604768_upstream_7826_fix-WHATSNEW-link.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_604768_upstream_7826_fix-WHATSNEW-link.patch
new file mode 100644
index 000000000..c7dd043fb
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/bug_604768_upstream_7826_fix-WHATSNEW-link.patch
@@ -0,0 +1,18 @@
1Description: Fix WHATSNEW.txt link in HTML documentation summary to fit Debian files organization
2Author: Christian Perrier <bubulle@debian.org>
3Bug-Debian: http://bugs.debian.org/604768
4Forwarded: not-needed
5
6Index: samba/docs/htmldocs/index.html
7===================================================================
8--- samba.orig/docs/htmldocs/index.html
9+++ samba/docs/htmldocs/index.html
10@@ -27,7 +27,7 @@
11 <td valign="top">The Samba man pages in HTML.</td>
12 </tr>
13 <tr>
14- <td valign="top"><a href="../../WHATSNEW.txt">WHATSNEW</a></td>
15+ <td valign="top"><a href="../WHATSNEW.txt">WHATSNEW</a></td>
16 <td valign="top">Samba Release Notes.</td>
17 </tr>
18 </table></body></html>
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/configure-disable-core_pattern-cross-check.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/configure-disable-core_pattern-cross-check.patch
new file mode 100644
index 000000000..2d9618973
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/configure-disable-core_pattern-cross-check.patch
@@ -0,0 +1,20 @@
1--- samba-3.6.8/source3/configure.orig 2012-10-01 16:28:17.559074996 -0300
2+++ samba-3.6.8/source3/configure 2012-10-01 16:39:07.747700087 -0300
3@@ -16718,7 +16718,7 @@
4 #################################################
5 # Check to see if core dump directory is defined in linux
6 # with /proc/sys/kernel/core_pattern
7-
8+if false; then
9 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for /proc/sys/kernel/core_pattern" >&5
10 $as_echo_n "checking for /proc/sys/kernel/core_pattern... " >&6; }
11 if ${ac_cv_file__proc_sys_kernel_core_pattern+:} false; then :
12@@ -16739,7 +16739,7 @@
13 $as_echo "#define HAVE_SYS_KERNEL_PROC_CORE_PATTERN 1" >>confdefs.h
14
15 fi
16-
17+fi
18
19 #############################
20 # check if building with gpfs
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/configure-disable-getaddrinfo-cross.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/configure-disable-getaddrinfo-cross.patch
new file mode 100644
index 000000000..84ecd498f
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/configure-disable-getaddrinfo-cross.patch
@@ -0,0 +1,11 @@
1--- samba-3.6.6/source3/configure.orig 2012-10-01 15:50:15.371574883 -0300
2+++ samba-3.6.6/source3/configure 2012-10-01 15:50:35.563699659 -0300
3@@ -13302,7 +13302,7 @@
4 # getaddrinfo is broken on some AIX systems
5 # see bug 5910, use our replacements if we detect
6 # a broken system.
7- if test "$cross_compiling" = yes; then :
8+ if test "$cross_compiling" = foo; then :
9 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
10 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
11 as_fn_error "cannot run test program while cross compiling
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/configure-libunwind.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/configure-libunwind.patch
new file mode 100644
index 000000000..9a2cb00eb
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/configure-libunwind.patch
@@ -0,0 +1,82 @@
1samba: add --enable-libunwind option
2
3Upstream-Status: Pending
4
5This lets the end user explicitly enable/disable libunwind support.
6---
7 configure | 28 ++++++++++++++++++++++++++--
8 1 file changed, 26 insertions(+), 2 deletions(-)
9
10diff -urpN a/source3/configure b/source3/configure
11--- a/source3/configure
12+++ b/source3/configure
13@@ -1007,6 +1007,7 @@ with_included_iniparser
14 with_static_modules
15 with_shared_modules
16 enable_dmalloc
17+enable_libunwind
18 '
19 ac_precious_vars='build_alias
20 host_alias
21@@ -1670,6 +1671,7 @@ Optional Features:
22 --enable-avahi Enable Avahi support (default=auto)
23 --enable-pthreadpool Enable pthreads pool helper support (default=no)
24 --enable-dmalloc Enable heap debugging [default=no]
25+ --enable-libunwind Enable libunwind support if available (default=no)
26
27 Optional Packages:
28 --with-PACKAGE[=ARG] use PACKAGE [ARG=yes]
29@@ -16458,7 +16460,7 @@ done
30
31
32 # Find a method of generating a stack trace
33-for ac_header in execinfo.h libexc.h libunwind.h
34+for ac_header in execinfo.h libexc.h
35 do :
36 as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
37 ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default"
38@@ -16684,6 +16686,13 @@ fi
39
40 LIBS="$save_LIBS"
41
42+# Check whether --enable-libunwind was given.
43+if test "${enable_libunwind+set}" = set; then :
44+ enableval=$enable_libunwind;
45+fi
46+
47+if test "x$enable_libunwind" != xno
48+then
49 # Note that all the libunwind symbols in the API are defined to internal
50 # platform-specific version, so we must include libunwind.h before checking
51 # any of them.
52@@ -16691,6 +16700,21 @@ LIBS="$save_LIBS"
53 $as_echo_n "checking for libunwind... " >&6; }
54 save_LIBS=$LIBS
55
56+# Check for libunwind.h present
57+for ac_header in libunwind.h
58+do :
59+ as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
60+ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default"
61+eval as_val=\$$as_ac_Header
62+ if test "x$as_val" = x""yes; then :
63+ cat >>confdefs.h <<_ACEOF
64+#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
65+_ACEOF
66+
67+fi
68+
69+done
70+
71 UNWIND_ARCH="unknown"
72 if test x"$UNAME_I" != x"unknown"; then
73 UNWIND_ARCH="$UNAME_I"
74@@ -16877,7 +16901,7 @@ fi
75 rm -f core conftest.err conftest.$ac_objext \
76 conftest$ac_exeext conftest.$ac_ext
77 fi
78-
79+fi
80
81
82 for ac_func in _dup _dup2 _opendir _readdir _seekdir _telldir _closedir
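
A usage sketch for the switch introduced above (invocation only; behaviour as described in the patch header):

    cd source3
    ./configure --enable-libunwind     # probe for libunwind and link against it
    ./configure --disable-libunwind    # skip the probe, e.g. when the sysroot lacks libunwind.h

In a recipe this would normally be routed through EXTRA_OECONF or a PACKAGECONFIG entry rather than passed by hand.
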
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/documentation.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/documentation.patch
new file mode 100644
index 000000000..73111fed7
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/documentation.patch
@@ -0,0 +1,302 @@
1Description: Remove documentation parts that do not apply to Debian
2Author: Christian Perrier <bubulle@debian.org>
3Forwarded: not-needed
4
5Index: experimental/docs/manpages/swat.8
6===================================================================
7--- experimental.orig/docs/manpages/swat.8
8+++ experimental/docs/manpages/swat.8
9@@ -120,86 +120,6 @@
10 .RS 4
11 Print a summary of command line options\&.
12 .RE
13-.SH "INSTALLATION"
14-.PP
15-Swat is included as binary package with most distributions\&. The package manager in this case takes care of the installation and configuration\&. This section is only for those who have compiled swat from scratch\&.
16-.PP
17-After you compile SWAT you need to run
18-make install
19-to install the
20-swat
21-binary and the various help files and images\&. A default install would put these in:
22-.sp
23-.RS 4
24-.ie n \{\
25-\h'-04'\(bu\h'+03'\c
26-.\}
27-.el \{\
28-.sp -1
29-.IP \(bu 2.3
30-.\}
31-/usr/local/samba/sbin/swat
32-.RE
33-.sp
34-.RS 4
35-.ie n \{\
36-\h'-04'\(bu\h'+03'\c
37-.\}
38-.el \{\
39-.sp -1
40-.IP \(bu 2.3
41-.\}
42-/usr/local/samba/swat/images/*
43-.RE
44-.sp
45-.RS 4
46-.ie n \{\
47-\h'-04'\(bu\h'+03'\c
48-.\}
49-.el \{\
50-.sp -1
51-.IP \(bu 2.3
52-.\}
53-/usr/local/samba/swat/help/*
54-.RE
55-.sp
56-.RE
57-.SS "Inetd Installation"
58-.PP
59-You need to edit your
60-/etc/inetd\&.conf
61-and
62-/etc/services
63-to enable SWAT to be launched via
64-inetd\&.
65-.PP
66-In
67-/etc/services
68-you need to add a line like this:
69-.PP
70-swat 901/tcp
71-.PP
72-Note for NIS/YP and LDAP users \- you may need to rebuild the NIS service maps rather than alter your local
73-/etc/services
74-file\&.
75-.PP
76-the choice of port number isn\*(Aqt really important except that it should be less than 1024 and not currently used (using a number above 1024 presents an obscure security hole depending on the implementation details of your
77-inetd
78-daemon)\&.
79-.PP
80-In
81-/etc/inetd\&.conf
82-you should add a line like this:
83-.PP
84-swat stream tcp nowait\&.400 root /usr/local/samba/sbin/swat swat
85-.PP
86-Once you have edited
87-/etc/services
88-and
89-/etc/inetd\&.conf
90-you need to send a HUP signal to inetd\&. To do this use
91-kill \-1 PID
92-where PID is the process ID of the inetd daemon\&.
93 .SH "LAUNCHING"
94 .PP
95 To launch SWAT just run your favorite web browser and point it at "http://localhost:901/"\&.
96@@ -217,14 +137,11 @@
97 This file must contain a mapping of service name (e\&.g\&., swat) to service port (e\&.g\&., 901) and protocol type (e\&.g\&., tcp)\&.
98 .RE
99 .PP
100-/usr/local/samba/lib/smb\&.conf
101+/etc/samba/smb\&.conf
102 .RS 4
103 This is the default location of the
104 \fBsmb.conf\fR(5)
105-server configuration file that swat edits\&. Other common places that systems install this file are
106-/usr/samba/lib/smb\&.conf
107-and
108-/etc/smb\&.conf\&. This file describes all the services the server is to make available to clients\&.
109+server configuration file that swat edits\&. This file describes all the services the server is to make available to clients\&.
110 .RE
111 .SH "WARNINGS"
112 .PP
113Index: experimental/docs/manpages/nmbd.8
114===================================================================
115--- experimental.orig/docs/manpages/nmbd.8
116+++ experimental/docs/manpages/nmbd.8
117@@ -115,10 +115,7 @@
118 to answer any name queries\&. Adding a line to this file affects name NetBIOS resolution from this host
119 \fIONLY\fR\&.
120 .sp
121-The default path to this file is compiled into Samba as part of the build process\&. Common defaults are
122-/usr/local/samba/lib/lmhosts,
123-/usr/samba/lib/lmhosts
124-or
125+The default path to this file is
126 /etc/samba/lmhosts\&. See the
127 \fBlmhosts\fR(5)
128 man page for details on the contents of this file\&.
129@@ -187,14 +184,11 @@
130 inetd, this file must contain a mapping of service name (e\&.g\&., netbios\-ssn) to service port (e\&.g\&., 139) and protocol type (e\&.g\&., tcp)\&.
131 .RE
132 .PP
133-/usr/local/samba/lib/smb\&.conf
134+/etc/samba/smb\&.conf
135 .RS 4
136 This is the default location of the
137 \fBsmb.conf\fR(5)
138-server configuration file\&. Other common places that systems install this file are
139-/usr/samba/lib/smb\&.conf
140-and
141-/etc/samba/smb\&.conf\&.
142+server configuration file\&.
143 .sp
144 When run as a WINS server (see the
145 \m[blue]\fBwins support\fR\m[]
146@@ -238,10 +232,8 @@
147 will accept SIGHUP, which will cause it to dump out its namelists into the file
148 namelist\&.debug
149 in the
150-/usr/local/samba/var/locks
151-directory (or the
152-var/locks
153-directory configured under wherever Samba was configured to install itself)\&. This will also cause
154+/var/run/samba
155+directory\&. This will also cause
156 nmbd
157 to dump out its server database in the
158 log\&.nmb
159Index: experimental/docs/manpages/smbd.8
160===================================================================
161--- experimental.orig/docs/manpages/smbd.8
162+++ experimental/docs/manpages/smbd.8
163@@ -169,14 +169,11 @@
164 inetd, this file must contain a mapping of service name (e\&.g\&., netbios\-ssn) to service port (e\&.g\&., 139) and protocol type (e\&.g\&., tcp)\&.
165 .RE
166 .PP
167-/usr/local/samba/lib/smb\&.conf
168+/etc/samba/smb\&.conf
169 .RS 4
170 This is the default location of the
171 \fBsmb.conf\fR(5)
172-server configuration file\&. Other common places that systems install this file are
173-/usr/samba/lib/smb\&.conf
174-and
175-/etc/samba/smb\&.conf\&.
176+server configuration file\&.
177 .sp
178 This file describes all the services the server is to make available to clients\&. See
179 \fBsmb.conf\fR(5)
180Index: experimental/docs/manpages/lmhosts.5
181===================================================================
182--- experimental.orig/docs/manpages/lmhosts.5
183+++ experimental/docs/manpages/lmhosts.5
184@@ -96,10 +96,8 @@
185 file\&.
186 .SH "FILES"
187 .PP
188-lmhosts is loaded from the configuration directory\&. This is usually
189-/etc/samba
190-or
191-/usr/local/samba/lib\&.
192+lmhosts is loaded from the configuration directory\&. This is
193+/etc/samba\&.
194 .SH "VERSION"
195 .PP
196 This man page is correct for version 3 of the Samba suite\&.
197Index: experimental/docs/manpages/ntlm_auth.1
198===================================================================
199--- experimental.orig/docs/manpages/ntlm_auth.1
200+++ experimental/docs/manpages/ntlm_auth.1
201@@ -43,7 +43,7 @@
202 Some of these commands also require access to the directory
203 winbindd_privileged
204 in
205-$LOCKDIR\&. This should be done either by running this command as root or providing group access to the
206+/var/run/samba\F[]\&. This should be done either by running this command as root or providing group access to the
207 winbindd_privileged
208 directory\&. For security reasons, this directory should not be world\-accessable\&.
209 .SH "OPTIONS"
210@@ -69,7 +69,7 @@
211 Requires access to the directory
212 winbindd_privileged
213 in
214-$LOCKDIR\&. The protocol used is described here:
215+/var/run/samba\&. The protocol used is described here:
216 http://devel\&.squid\-cache\&.org/ntlm/squid_helper_protocol\&.html\&. This protocol has been extended to allow the NTLMSSP Negotiate packet to be included as an argument to the
217 YR
218 command\&. (Thus avoiding loss of information in the protocol exchange)\&.
219@@ -92,7 +92,7 @@
220 Requires access to the directory
221 winbindd_privileged
222 in
223-$LOCKDIR\&.
224+/var/run/samba\&.
225 .RE
226 .PP
227 gss\-spnego\-client
228Index: experimental/docs/manpages/tdbbackup.8
229===================================================================
230--- experimental.orig/docs/manpages/tdbbackup.8
231+++ experimental/docs/manpages/tdbbackup.8
232@@ -77,7 +77,7 @@
233 .\}
234
235 secrets\&.tdb
236-\- usual location is in the /usr/local/samba/private directory, or on some systems in /etc/samba\&.
237+\- usual location is in the /var/lib/samba directory\&.
238 .RE
239 .sp
240 .RS 4
241@@ -90,7 +90,7 @@
242 .\}
243
244 passdb\&.tdb
245-\- usual location is in the /usr/local/samba/private directory, or on some systems in /etc/samba\&.
246+\- usual location is in the /var/lib/samba directory\&.
247 .RE
248 .sp
249 .RS 4
250@@ -103,7 +103,7 @@
251 .\}
252
253 *\&.tdb
254-located in the /usr/local/samba/var directory or on some systems in the /var/cache or /var/lib/samba directories\&.
255+located in the /var/lib/samba and /var/run/samba directories\&.
256 .RE
257 .SH "VERSION"
258 .PP
259Index: experimental/docs/manpages/winbindd.8
260===================================================================
261--- experimental.orig/docs/manpages/winbindd.8
262+++ experimental/docs/manpages/winbindd.8
263@@ -539,16 +539,16 @@
264 file are owned by root\&.
265 .RE
266 .PP
267-$LOCKDIR/winbindd_privileged/pipe
268+/var/run/samba/winbindd_privileged/pipe
269 .RS 4
270 The UNIX pipe over which \*(Aqprivileged\*(Aq clients communicate with the
271 winbindd
272 program\&. For security reasons, access to some winbindd functions \- like those needed by the
273 ntlm_auth
274-utility \- is restricted\&. By default, only users in the \*(Aqroot\*(Aq group will get this access, however the administrator may change the group permissions on $LOCKDIR/winbindd_privileged to allow programs like \*(Aqsquid\*(Aq to use ntlm_auth\&. Note that the winbind client will only attempt to connect to the winbindd daemon if both the
275-$LOCKDIR/winbindd_privileged
276+utility \- is restricted\&. By default, only users in the \'root\' group will get this access, however the administrator may change the group permissions on /var/run/samba/winbindd_privileged to allow programs like \'squid\' to use ntlm_auth\&. Note that the winbind client will only attempt to connect to the winbindd daemon if both the
277+/var/run/samba/winbindd_privileged
278 directory and
279-$LOCKDIR/winbindd_privileged/pipe
280+/var/run/samba/winbindd_privileged/pipe
281 file are owned by root\&.
282 .RE
283 .PP
284@@ -557,15 +557,12 @@
285 Implementation of name service switch library\&.
286 .RE
287 .PP
288-$LOCKDIR/winbindd_idmap\&.tdb
289+/var/run/samba/winbindd_idmap\&.tdb
290 .RS 4
291-Storage for the Windows NT rid to UNIX user/group id mapping\&. The lock directory is specified when Samba is initially compiled using the
292-\fI\-\-with\-lockdir\fR
293-option\&. This directory is by default
294-/usr/local/samba/var/locks\&.
295+Storage for the Windows NT rid to UNIX user/group id mapping\&.
296 .RE
297 .PP
298-$LOCKDIR/winbindd_cache\&.tdb
299+/var/run/samba/winbindd_cache\&.tdb
300 .RS 4
301 Storage for cached user and group information\&.
302 .RE
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/documentation2.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/documentation2.patch
new file mode 100644
index 000000000..af8da32d5
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/documentation2.patch
@@ -0,0 +1,314 @@
1Description: Remove documentation parts that do not apply to Debian
2Author: Christian Perrier <bubulle@debian.org>
3Bug: https://bugzilla.samba.org/show_bug.cgi?id=8789
4Forwarded: yes
5
6Index: samba/docs-xml/manpages-3/nmbd.8.xml
7===================================================================
8--- samba.orig/docs-xml/manpages-3/nmbd.8.xml
9+++ samba/docs-xml/manpages-3/nmbd.8.xml
10@@ -266,7 +266,6 @@
11 <manvolnum>8</manvolnum></citerefentry>, <citerefentry><refentrytitle>smb.conf</refentrytitle>
12 <manvolnum>5</manvolnum></citerefentry>, <citerefentry><refentrytitle>smbclient</refentrytitle>
13 <manvolnum>1</manvolnum></citerefentry>, <citerefentry><refentrytitle>testparm</refentrytitle>
14- <manvolnum>1</manvolnum></citerefentry>, <citerefentry><refentrytitle>testprns</refentrytitle>
15 <manvolnum>1</manvolnum></citerefentry>, and the Internet
16 RFC's <filename>rfc1001.txt</filename>, <filename>rfc1002.txt</filename>.
17 In addition the CIFS (formerly SMB) specification is available
18Index: samba/docs-xml/manpages-3/samba.7.xml
19===================================================================
20--- samba.orig/docs-xml/manpages-3/samba.7.xml
21+++ samba/docs-xml/manpages-3/samba.7.xml
22@@ -76,16 +76,6 @@
23 </varlistentry>
24
25 <varlistentry>
26- <term><citerefentry><refentrytitle>testprns</refentrytitle>
27- <manvolnum>1</manvolnum></citerefentry></term>
28- <listitem><para>The <command>testprns</command>
29- utility supports testing printer names defined
30- in your <filename>printcap</filename> file used
31- by Samba.</para>
32- </listitem>
33- </varlistentry>
34-
35- <varlistentry>
36 <term><citerefentry><refentrytitle>smbstatus</refentrytitle>
37 <manvolnum>1</manvolnum></citerefentry></term>
38 <listitem><para>The <command>smbstatus</command>
39@@ -125,7 +115,8 @@
40 <manvolnum>1</manvolnum></citerefentry></term>
41 <listitem><para>The <command>smbsh</command> command is
42 a program that allows you to run a unix shell with
43- with an overloaded VFS.</para></listitem>
44+         with an overloaded VFS. Note that it is not installed by
45+ the current samba package.</para></listitem>
46 </varlistentry>
47
48 <varlistentry>
49Index: samba/docs-xml/manpages-3/smb.conf.5.xml
50===================================================================
51--- samba.orig/docs-xml/manpages-3/smb.conf.5.xml
52+++ samba/docs-xml/manpages-3/smb.conf.5.xml
53@@ -856,7 +856,6 @@
54 <manvolnum>8</manvolnum></citerefentry>, <citerefentry><refentrytitle>smbclient</refentrytitle>
55 <manvolnum>1</manvolnum></citerefentry>, <citerefentry><refentrytitle>nmblookup</refentrytitle>
56 <manvolnum>1</manvolnum></citerefentry>, <citerefentry><refentrytitle>testparm</refentrytitle>
57- <manvolnum>1</manvolnum></citerefentry>, <citerefentry><refentrytitle>testprns</refentrytitle>
58 <manvolnum>1</manvolnum></citerefentry>.</para>
59 </refsect1>
60
61Index: samba/docs-xml/manpages-3/smbd.8.xml
62===================================================================
63--- samba.orig/docs-xml/manpages-3/smbd.8.xml
64+++ samba/docs-xml/manpages-3/smbd.8.xml
65@@ -417,7 +417,6 @@
66 <manvolnum>8</manvolnum></citerefentry>, <citerefentry><refentrytitle>smb.conf</refentrytitle>
67 <manvolnum>5</manvolnum></citerefentry>, <citerefentry><refentrytitle>smbclient</refentrytitle>
68 <manvolnum>1</manvolnum></citerefentry>, <citerefentry><refentrytitle>testparm</refentrytitle>
69- <manvolnum>1</manvolnum></citerefentry>, <citerefentry><refentrytitle>testprns</refentrytitle>
70 <manvolnum>1</manvolnum></citerefentry>, and the
71 Internet RFC's <filename>rfc1001.txt</filename>, <filename>rfc1002.txt</filename>.
72 In addition the CIFS (formerly SMB) specification is available
73Index: samba/docs-xml/using_samba/appd.xml
74===================================================================
75--- samba.orig/docs-xml/using_samba/appd.xml
76+++ samba/docs-xml/using_samba/appd.xml
77@@ -296,7 +296,7 @@
78
79
80 <para>The <emphasis>smbsh</emphasis>
81-<indexterm id="appd-idx-993744-0"><primary>smbsh program</primary></indexterm> program lets you use a remote Windows share on your Samba server as if the share was a regular Unix directory. When it's run, it provides an extra directory tree under <filename>/smb</filename>. Subdirectories of <filename>/smb</filename> are servers, and subdirectories of the servers are their individual disk and printer shares. Commands run by <emphasis>smbsh</emphasis> treat the <filename>/smb</filename> filesystem as if it were local to Unix. This means that you don't need <emphasis>smbmount</emphasis> in your kernel to mount Windows filesystems the way you mount with NFS filesystems. However, you do need to configure Samba with the <literal>--with-smbwrappers</literal> option to enable <filename>smbsh</filename>.</para>
82+<indexterm id="appd-idx-993744-0"><primary>smbsh program</primary></indexterm> program (<emphasis>not available in this samba package</emphasis>) lets you use a remote Windows share on your Samba server as if the share was a regular Unix directory. When it's run, it provides an extra directory tree under <filename>/smb</filename>. Subdirectories of <filename>/smb</filename> are servers, and subdirectories of the servers are their individual disk and printer shares. Commands run by <emphasis>smbsh</emphasis> treat the <filename>/smb</filename> filesystem as if it were local to Unix. This means that you don't need <emphasis>smbmount</emphasis> in your kernel to mount Windows filesystems the way you mount with NFS filesystems. However, you do need to configure Samba with the <literal>--with-smbwrappers</literal> option to enable <filename>smbsh</filename>.</para>
83
84
85 <sect3 role="" label="D.1.4.1" id="appd-SECT-1.4.1">
86@@ -1320,24 +1320,6 @@
87 </sect2>
88
89
90-
91-
92-
93-<sect2 role="" label="D.1.11" id="appd-SECT-1.11">
94-<title>testprns</title>
95-
96-
97-<para>The<indexterm id="appd-idx-993761-0"><primary>testprns program</primary></indexterm>
98-<indexterm id="appd-idx-993761-1"><primary>printers</primary><secondary>names</secondary><tertiary>checking</tertiary></indexterm> <emphasis>testprns</emphasis> program checks a specified printer name against the system printer capabilities (<filename>printcap</filename>) file. Its command line is:</para>
99-
100-
101-<programlisting>testprns <replaceable>printername</replaceable> [<replaceable>printcapname</replaceable>]</programlisting>
102-
103-
104-<para>If the <literal>printcapname</literal> isn't specified, Samba attempts to use one located in the <filename>smb.conf</filename> file. If one isn't specified there, Samba will try <filename>/etc/printcap</filename>. If that fails, the program will generate an error.</para>
105-</sect2>
106-
107-
108
109
110
111Index: samba/docs-xml/using_samba/ch01.xml
112===================================================================
113--- samba.orig/docs-xml/using_samba/ch01.xml
114+++ samba/docs-xml/using_samba/ch01.xml
115@@ -1375,12 +1375,6 @@
116 </varlistentry>
117
118
119-<varlistentry><term>testprns</term>
120-<listitem><para>A program that tests whether various printers are recognized by the <filename>smbd</filename> daemon</para></listitem>
121-</varlistentry>
122-</variablelist>
123-
124-
125 <para>Each significant release of Samba goes through a significant exposure test before it's announced. In addition, it is quickly updated afterward if problems or unwanted side-effects are found. The latest stable distribution as of this writing is Samba 2.0.5, the long-awaited production version of Samba 2.0. This book focuses on the functionality supported in Samba 2.0, as opposed to the older 1.9.<emphasis>x</emphasis> versions of Samba, which are now obsolete.</para>
126 </sect1>
127
128Index: samba/docs-xml/using_samba/ch07.xml
129===================================================================
130--- samba.orig/docs-xml/using_samba/ch07.xml
131+++ samba/docs-xml/using_samba/ch07.xml
132@@ -306,7 +306,7 @@
133 public: true</programlisting>
134
135
136-<para>Second, try the command <literal>testprns</literal> <replaceable>printername</replaceable>. This is a simple program that verifies that the specified printer is available in your <emphasis>printcap</emphasis> file. If your <emphasis>printcap</emphasis> file is not in the usual place, you can specify its full pathname as the second argument to the <emphasis>testprns</emphasis> command:</para>
137+<para>Second, try the command <literal>testprns</literal> <replaceable>printername</replaceable>. <emphasis>Note</emphasis>: This command is not available in this package. This is a simple program that verifies that the specified printer is available in your <emphasis>printcap</emphasis> file. If your <emphasis>printcap</emphasis> file is not in the usual place, you can specify its full pathname as the second argument to the <emphasis>testprns</emphasis> command:</para>
138
139
140 <programlisting># testprns lp /etc/printcap
141Index: samba/docs/htmldocs/manpages/nmbd.8.html
142===================================================================
143--- samba.orig/docs/htmldocs/manpages/nmbd.8.html
144+++ samba/docs/htmldocs/manpages/nmbd.8.html
145@@ -131,7 +131,7 @@
146 transient problems to be diagnosed, whilst still running
147 at a normally low log level.</p></div><div class="refsect1" title="VERSION"><a name="id307511"></a><h2>VERSION</h2><p>This man page is correct for version 3 of
148 the Samba suite.</p></div><div class="refsect1" title="SEE ALSO"><a name="id307521"></a><h2>SEE ALSO</h2><p>
149- <a class="citerefentry" href="inetd.8.html"><span class="citerefentry"><span class="refentrytitle">inetd</span>(8)</span></a>, <a class="citerefentry" href="smbd.8.html"><span class="citerefentry"><span class="refentrytitle">smbd</span>(8)</span></a>, <a class="citerefentry" href="smb.conf.5.html"><span class="citerefentry"><span class="refentrytitle">smb.conf</span>(5)</span></a>, <a class="citerefentry" href="smbclient.1.html"><span class="citerefentry"><span class="refentrytitle">smbclient</span>(1)</span></a>, <a class="citerefentry" href="testparm.1.html"><span class="citerefentry"><span class="refentrytitle">testparm</span>(1)</span></a>, <a class="citerefentry" href="testprns.1.html"><span class="citerefentry"><span class="refentrytitle">testprns</span>(1)</span></a>, and the Internet
150+ <a class="citerefentry" href="inetd.8.html"><span class="citerefentry"><span class="refentrytitle">inetd</span>(8)</span></a>, <a class="citerefentry" href="smbd.8.html"><span class="citerefentry"><span class="refentrytitle">smbd</span>(8)</span></a>, <a class="citerefentry" href="smb.conf.5.html"><span class="citerefentry"><span class="refentrytitle">smb.conf</span>(5)</span></a>, <a class="citerefentry" href="smbclient.1.html"><span class="citerefentry"><span class="refentrytitle">smbclient</span>(1)</span></a>, <a class="citerefentry" href="testparm.1.html"><span class="citerefentry"><span class="refentrytitle">testparm</span>(1)</span></a>, and the Internet
151 RFC's <code class="filename">rfc1001.txt</code>, <code class="filename">rfc1002.txt</code>.
152 In addition the CIFS (formerly SMB) specification is available
153 as a link from the Web page <a class="ulink" href="http://samba.org/cifs/" target="_top">
154Index: samba/docs/htmldocs/manpages/samba.7.html
155===================================================================
156--- samba.orig/docs/htmldocs/manpages/samba.7.html
157+++ samba/docs/htmldocs/manpages/samba.7.html
158@@ -17,10 +17,7 @@
159 servers (such as Windows NT), and can also be used
160 to allow a UNIX box to print to a printer attached to
161 any SMB server (such as a PC running Windows NT).</p></dd><dt><span class="term"><a class="citerefentry" href="testparm.1.html"><span class="citerefentry"><span class="refentrytitle">testparm</span>(1)</span></a></span></dt><dd><p>The <code class="literal">testparm</code>
162- utility is a simple syntax checker for Samba's <a class="citerefentry" href="smb.conf.5.html"><span class="citerefentry"><span class="refentrytitle">smb.conf</span>(5)</span></a> configuration file.</p></dd><dt><span class="term"><a class="citerefentry" href="testprns.1.html"><span class="citerefentry"><span class="refentrytitle">testprns</span>(1)</span></a></span></dt><dd><p>The <code class="literal">testprns</code>
163- utility supports testing printer names defined
164- in your <code class="filename">printcap</code> file used
165- by Samba.</p></dd><dt><span class="term"><a class="citerefentry" href="smbstatus.1.html"><span class="citerefentry"><span class="refentrytitle">smbstatus</span>(1)</span></a></span></dt><dd><p>The <code class="literal">smbstatus</code>
166+ utility is a simple syntax checker for Samba's <a class="citerefentry" href="smb.conf.5.html"><span class="citerefentry"><span class="refentrytitle">smb.conf</span>(5)</span></a> configuration file.</p></dd><dt><span class="term"><a class="citerefentry" href="smbstatus.1.html"><span class="citerefentry"><span class="refentrytitle">smbstatus</span>(1)</span></a></span></dt><dd><p>The <code class="literal">smbstatus</code>
167 tool provides access to information about the
168 current connections to <code class="literal">smbd</code>.</p></dd><dt><span class="term"><a class="citerefentry" href="nmblookup.1.html"><span class="citerefentry"><span class="refentrytitle">nmblookup</span>(1)</span></a></span></dt><dd><p>The <code class="literal">nmblookup</code>
169 tools allows NetBIOS name queries to be made
170@@ -29,7 +26,8 @@
171 password hashes on Samba and Windows NT servers.</p></dd><dt><span class="term"><a class="citerefentry" href="smbcacls.1.html"><span class="citerefentry"><span class="refentrytitle">smbcacls</span>(1)</span></a></span></dt><dd><p>The <code class="literal">smbcacls</code> command is
172 a tool to set ACL's on remote CIFS servers. </p></dd><dt><span class="term"><a class="citerefentry" href="smbsh.1.html"><span class="citerefentry"><span class="refentrytitle">smbsh</span>(1)</span></a></span></dt><dd><p>The <code class="literal">smbsh</code> command is
173 a program that allows you to run a unix shell with
174- with an overloaded VFS.</p></dd><dt><span class="term"><a class="citerefentry" href="smbtree.1.html"><span class="citerefentry"><span class="refentrytitle">smbtree</span>(1)</span></a></span></dt><dd><p>The <code class="literal">smbtree</code> command
175+          with an overloaded VFS. Note that it is not installed by
176+ the current samba package.</p></dd><dt><span class="term"><a class="citerefentry" href="smbtree.1.html"><span class="citerefentry"><span class="refentrytitle">smbtree</span>(1)</span></a></span></dt><dd><p>The <code class="literal">smbtree</code> command
177 is a text-based network neighborhood tool.</p></dd><dt><span class="term"><a class="citerefentry" href="smbtar.1.html"><span class="citerefentry"><span class="refentrytitle">smbtar</span>(1)</span></a></span></dt><dd><p>The <code class="literal">smbtar</code> can make
178 backups of data on CIFS/SMB servers.</p></dd><dt><span class="term"><a class="citerefentry" href="smbspool.8.html"><span class="citerefentry"><span class="refentrytitle">smbspool</span>(8)</span></a></span></dt><dd><p><code class="literal">smbspool</code> is a
179 helper utility for printing on printers connected
180Index: samba/docs/htmldocs/manpages/smb.conf.5.html
181===================================================================
182--- samba.orig/docs/htmldocs/manpages/smb.conf.5.html
183+++ samba/docs/htmldocs/manpages/smb.conf.5.html
184@@ -6964,7 +6964,7 @@
185 care when designing these sections. In particular, ensure that the permissions on spool directories are
186 correct.
187 </p></div><div class="refsect1" title="VERSION"><a name="id340677"></a><h2>VERSION</h2><p>This man page is correct for version 3 of the Samba suite.</p></div><div class="refsect1" title="SEE ALSO"><a name="id340688"></a><h2>SEE ALSO</h2><p>
188- <a class="citerefentry" href="samba.7.html"><span class="citerefentry"><span class="refentrytitle">samba</span>(7)</span></a>, <a class="citerefentry" href="smbpasswd.8.html"><span class="citerefentry"><span class="refentrytitle">smbpasswd</span>(8)</span></a>, <a class="citerefentry" href="swat.8.html"><span class="citerefentry"><span class="refentrytitle">swat</span>(8)</span></a>, <a class="citerefentry" href="smbd.8.html"><span class="citerefentry"><span class="refentrytitle">smbd</span>(8)</span></a>, <a class="citerefentry" href="nmbd.8.html"><span class="citerefentry"><span class="refentrytitle">nmbd</span>(8)</span></a>, <a class="citerefentry" href="smbclient.1.html"><span class="citerefentry"><span class="refentrytitle">smbclient</span>(1)</span></a>, <a class="citerefentry" href="nmblookup.1.html"><span class="citerefentry"><span class="refentrytitle">nmblookup</span>(1)</span></a>, <a class="citerefentry" href="testparm.1.html"><span class="citerefentry"><span class="refentrytitle">testparm</span>(1)</span></a>, <a class="citerefentry" href="testprns.1.html"><span class="citerefentry"><span class="refentrytitle">testprns</span>(1)</span></a>.</p></div><div class="refsect1" title="AUTHOR"><a name="id340767"></a><h2>AUTHOR</h2><p>
189+ <a class="citerefentry" href="samba.7.html"><span class="citerefentry"><span class="refentrytitle">samba</span>(7)</span></a>, <a class="citerefentry" href="smbpasswd.8.html"><span class="citerefentry"><span class="refentrytitle">smbpasswd</span>(8)</span></a>, <a class="citerefentry" href="swat.8.html"><span class="citerefentry"><span class="refentrytitle">swat</span>(8)</span></a>, <a class="citerefentry" href="smbd.8.html"><span class="citerefentry"><span class="refentrytitle">smbd</span>(8)</span></a>, <a class="citerefentry" href="nmbd.8.html"><span class="citerefentry"><span class="refentrytitle">nmbd</span>(8)</span></a>, <a class="citerefentry" href="smbclient.1.html"><span class="citerefentry"><span class="refentrytitle">smbclient</span>(1)</span></a>, <a class="citerefentry" href="nmblookup.1.html"><span class="citerefentry"><span class="refentrytitle">nmblookup</span>(1)</span></a>, <a class="citerefentry" href="testparm.1.html"><span class="citerefentry"><span class="refentrytitle">testparm</span>(1)</span></a>.</p></div><div class="refsect1" title="AUTHOR"><a name="id340767"></a><h2>AUTHOR</h2><p>
190 The original Samba software and related utilities were created by Andrew Tridgell. Samba is now developed
191 by the Samba Team as an Open Source project similar to the way the Linux kernel is developed.
192 </p><p>
193Index: samba/docs/htmldocs/manpages/smbd.8.html
194===================================================================
195--- samba.orig/docs/htmldocs/manpages/smbd.8.html
196+++ samba/docs/htmldocs/manpages/smbd.8.html
197@@ -147,7 +147,7 @@
198 <code class="literal">smbd</code> is in a state of waiting for an incoming SMB before
199 issuing them. It is possible to make the signal handlers safe
200 by un-blocking the signals before the select call and re-blocking
201- them after, however this would affect performance.</p></div><div class="refsect1" title="SEE ALSO"><a name="id307739"></a><h2>SEE ALSO</h2><p><a class="citerefentry" href="hosts_access.5.html"><span class="citerefentry"><span class="refentrytitle">hosts_access</span>(5)</span></a>, <a class="citerefentry" href="inetd.8.html"><span class="citerefentry"><span class="refentrytitle">inetd</span>(8)</span></a>, <a class="citerefentry" href="nmbd.8.html"><span class="citerefentry"><span class="refentrytitle">nmbd</span>(8)</span></a>, <a class="citerefentry" href="smb.conf.5.html"><span class="citerefentry"><span class="refentrytitle">smb.conf</span>(5)</span></a>, <a class="citerefentry" href="smbclient.1.html"><span class="citerefentry"><span class="refentrytitle">smbclient</span>(1)</span></a>, <a class="citerefentry" href="testparm.1.html"><span class="citerefentry"><span class="refentrytitle">testparm</span>(1)</span></a>, <a class="citerefentry" href="testprns.1.html"><span class="citerefentry"><span class="refentrytitle">testprns</span>(1)</span></a>, and the
202+ them after, however this would affect performance.</p></div><div class="refsect1" title="SEE ALSO"><a name="id307739"></a><h2>SEE ALSO</h2><p><a class="citerefentry" href="hosts_access.5.html"><span class="citerefentry"><span class="refentrytitle">hosts_access</span>(5)</span></a>, <a class="citerefentry" href="inetd.8.html"><span class="citerefentry"><span class="refentrytitle">inetd</span>(8)</span></a>, <a class="citerefentry" href="nmbd.8.html"><span class="citerefentry"><span class="refentrytitle">nmbd</span>(8)</span></a>, <a class="citerefentry" href="smb.conf.5.html"><span class="citerefentry"><span class="refentrytitle">smb.conf</span>(5)</span></a>, <a class="citerefentry" href="smbclient.1.html"><span class="citerefentry"><span class="refentrytitle">smbclient</span>(1)</span></a>, <a class="citerefentry" href="testparm.1.html"><span class="citerefentry"><span class="refentrytitle">testparm</span>(1)</span></a>, and the
203 Internet RFC's <code class="filename">rfc1001.txt</code>, <code class="filename">rfc1002.txt</code>.
204 In addition the CIFS (formerly SMB) specification is available
205 as a link from the Web page <a class="ulink" href="http://samba.org/cifs/" target="_top">
206Index: samba/docs/manpages/nmbd.8
207===================================================================
208--- samba.orig/docs/manpages/nmbd.8
209+++ samba/docs/manpages/nmbd.8
210@@ -252,8 +252,7 @@
211 \fBsmbd\fR(8),
212 \fBsmb.conf\fR(5),
213 \fBsmbclient\fR(1),
214-\fBtestparm\fR(1),
215-\fBtestprns\fR(1), and the Internet RFC\*(Aqs
216+\fBtestparm\fR(1), and the Internet RFC\*(Aqs
217 rfc1001\&.txt,
218 rfc1002\&.txt\&. In addition the CIFS (formerly SMB) specification is available as a link from the Web page
219 http://samba\&.org/cifs/\&.
220Index: samba/docs/manpages/samba.7
221===================================================================
222--- samba.orig/docs/manpages/samba.7
223+++ samba/docs/manpages/samba.7
224@@ -60,15 +60,6 @@
225 configuration file\&.
226 .RE
227 .PP
228-\fBtestprns\fR(1)
229-.RS 4
230-The
231-testprns
232-utility supports testing printer names defined in your
233-printcap
234-file used by Samba\&.
235-.RE
236-.PP
237 \fBsmbstatus\fR(1)
238 .RS 4
239 The
240@@ -102,7 +93,7 @@
241 .RS 4
242 The
243 smbsh
244-command is a program that allows you to run a unix shell with with an overloaded VFS\&.
245+command is a program that allows you to run a unix shell with with an overloaded VFS\&. Note that it is not installed by the current samba package\&.
246 .RE
247 .PP
248 \fBsmbtree\fR(1)
249Index: samba/docs/manpages/smb.conf.5
250===================================================================
251--- samba.orig/docs/manpages/smb.conf.5
252+++ samba/docs/manpages/smb.conf.5
253@@ -11021,8 +11021,7 @@
254 \fBnmbd\fR(8),
255 \fBsmbclient\fR(1),
256 \fBnmblookup\fR(1),
257-\fBtestparm\fR(1),
258-\fBtestprns\fR(1)\&.
259+\fBtestparm\fR(1)\&.
260 .SH "AUTHOR"
261 .PP
262 The original Samba software and related utilities were created by Andrew Tridgell\&. Samba is now developed by the Samba Team as an Open Source project similar to the way the Linux kernel is developed\&.
263Index: samba/docs/manpages/smbd.8
264===================================================================
265--- samba.orig/docs/manpages/smbd.8
266+++ samba/docs/manpages/smbd.8
267@@ -370,8 +370,7 @@
268 \fBnmbd\fR(8),
269 \fBsmb.conf\fR(5),
270 \fBsmbclient\fR(1),
271-\fBtestparm\fR(1),
272-\fBtestprns\fR(1), and the Internet RFC\*(Aqs
273+\fBtestparm\fR(1), and the Internet RFC\*(Aqs
274 rfc1001\&.txt,
275 rfc1002\&.txt\&. In addition the CIFS (formerly SMB) specification is available as a link from the Web page
276 http://samba\&.org/cifs/\&.
277Index: samba/examples/tridge/smb.conf
278===================================================================
279--- samba.orig/examples/tridge/smb.conf
280+++ samba/examples/tridge/smb.conf
281@@ -31,14 +31,6 @@
282 print ok = yes
283 print command = xmenu -heading "%s" OK&
284
285-[testprn]
286- comment = Test printer
287- path = /tmp
288- user = susan
289- print ok = yes
290- print command = cp %s /tmp/smb.%U.prn
291- lpq command = cat /tmp/xxyz
292-
293 [amd]
294 comment = amd area
295 path = /mount
296Index: samba/swat/lang/tr/help/welcome.html
297===================================================================
298--- samba.orig/swat/lang/tr/help/welcome.html
299+++ samba/swat/lang/tr/help/welcome.html
300@@ -40,7 +40,6 @@
301 <ul>
302 <li><a href="/swat/help/smbstatus.1.html" target="docs">smbstatus</a> - Samba gözlemcisi
303 <li><a href="/swat/help/testparm.1.html" target="docs">testparm</a> - ayar dosyasını kontrol eder
304- <li><a href="/swat/help/testprns.1.html" target="docs">testprns</a> - yazıcı ayarlarını kontrol eder
305 <li><a href="/swat/help/nmblookup.1.html" target="docs">nmblookup</a> - NetBIOS isim sorgulama aracı
306 </ul>
307 <li><b>Kitaplar</b>
308@@ -66,4 +65,4 @@
309
310 Eğer SWAT'ın bu sürümü ile ilgili konuları tartışmak istiyorsanız, lütfen
311 <A HREF="http://lists.samba.org/">samba</A> eposta listesine üye olun.
312-
313\ No newline at end of file
314+
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/dont-build-VFS-examples.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/dont-build-VFS-examples.patch
new file mode 100644
index 000000000..beff7db67
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/dont-build-VFS-examples.patch
@@ -0,0 +1,31 @@
1Description: Do not build VFS examples
2Author: Christian Perrier <bubulle@debian.org>
3Forwarded: not-needed
4
5Index: samba/source3/Makefile.in
6===================================================================
7--- samba.orig/source3/Makefile.in
8+++ samba/source3/Makefile.in
9@@ -1616,8 +1616,7 @@
10
11
12 everything:: all libtalloc libsmbclient libnetapi debug2html smbfilter talloctort replacetort smbconftort modules torture \
13- $(EVERYTHING_PROGS) \
14- vfs_examples
15+ $(EVERYTHING_PROGS)
16
17 .SUFFIXES:
18 .SUFFIXES: .c .o .lo
19@@ -3552,12 +3551,3 @@
20 bin/ndrdump4: $(BINARY_PREREQS)
21 $(MAKE) -f Makefile-smbtorture4 bin/ndrdump4
22
23-.PHONY: vfs_examples
24-
25-vfs_examples:
26- ( \
27- cd ../examples/VFS && \
28- ./configure && \
29- make clean && \
30- make \
31- )
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/fhs-filespaths.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/fhs-filespaths.patch
new file mode 100644
index 000000000..e7c6b9995
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/fhs-filespaths.patch
@@ -0,0 +1,65 @@
1Description: Prepare the sources to better respect FHS
2 This patch was historically very long but most parts have
3 been integrated upstream.
4 .
5 The last remaining bit is the location of the "private files".
6 We historically have them in /var/lib/samba while upstream
7 has them in /etc/samba.
8 .
9 We need to provide a migration path and go back to the "normal"
10 file layout.
11Author: Eloy A. Paris <peloy@debian.org>
12Bug-Debian: http://bugs.debian.org/49011
13Forwarded: not-needed
14
15
16Index: samba/source3/passdb/pdb_tdb.c
17===================================================================
18--- samba.orig/source3/passdb/pdb_tdb.c
19+++ samba/source3/passdb/pdb_tdb.c
20@@ -1260,7 +1260,7 @@
21 /* save the path for later */
22
23 if (!location) {
24- if (asprintf(&tdbfile, "%s/%s", lp_private_dir(),
25+ if (asprintf(&tdbfile, "%s/%s", lp_statedir(),
26 PASSDB_FILE_NAME) < 0) {
27 return NT_STATUS_NO_MEMORY;
28 }
29Index: samba/source3/passdb/secrets.c
30===================================================================
31--- samba.orig/source3/passdb/secrets.c
32+++ samba/source3/passdb/secrets.c
33@@ -64,7 +64,7 @@
34 return True;
35
36 fname = talloc_asprintf(talloc_tos(), "%s/secrets.tdb",
37- lp_private_dir());
38+ lp_statedir());
39 if (fname == NULL) {
40 return false;
41 }
42Index: samba/docs/manpages/smb.conf.5
43===================================================================
44--- samba.orig/docs/manpages/smb.conf.5
45+++ samba/docs/manpages/smb.conf.5
46@@ -7167,7 +7167,7 @@
47 .\}
48 tdbsam
49 \- The TDB based password storage backend\&. Takes a path to the TDB as an optional argument (defaults to passdb\&.tdb in the
50-\m[blue]\fBprivate dir\fR\m[]
51+\m[blue]\fBstate directory\fR\m[]
52 directory\&.
53 .RE
54 .sp
55@@ -8038,9 +8038,7 @@
56 .PP
57 .RS 4
58 This parameters defines the directory smbd will use for storing such files as
59-smbpasswd
60-and
61-secrets\&.tdb\&.
62+smbpasswd\&. secrets\&.tdb is stored in the state directory on Debian systems\&.
63 .sp
64 Default:
65 \fI\fIprivate dir\fR\fR\fI = \fR\fI${prefix}/private\fR\fI \fR
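Editorial aside on the pdb_tdb.c hunk above: the patch builds the default passdb.tdb path from the state directory instead of the private directory. Below is a minimal standalone sketch of that construction; the literal string "/var/lib/samba" stands in for Samba's internal lp_statedir() accessor (an assumption for illustration only, the real code queries loadparm).

    /* Sketch only: "/var/lib/samba" stands in for lp_statedir(); the real
     * patch calls the Samba loadparm accessor rather than a fixed string. */
    #define _GNU_SOURCE
    #include <stdio.h>
    #include <stdlib.h>

    #define PASSDB_FILE_NAME "passdb.tdb"

    int main(void)
    {
        char *tdbfile = NULL;

        if (asprintf(&tdbfile, "%s/%s", "/var/lib/samba", PASSDB_FILE_NAME) < 0) {
            return 1;   /* corresponds to the NT_STATUS_NO_MEMORY path */
        }
        printf("default tdbsam backend: %s\n", tdbfile);
        free(tdbfile);
        return 0;
    }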
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/installswat.sh.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/installswat.sh.patch
new file mode 100644
index 000000000..3f08e493a
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/installswat.sh.patch
@@ -0,0 +1,23 @@
1Description: Do not install the Using Samba book when installing SWAT
2 Using Samba is packaged in samba-doc; however, upstream also
3 installs it in the SWAT install dirs.
4Author: Christian Perrier <bubulle@debian.org>
5Forwarded: not-needed
6
7Index: experimental/source3/script/installswat.sh
8===================================================================
9--- experimental.orig/source3/script/installswat.sh
10+++ experimental/source3/script/installswat.sh
11@@ -198,7 +198,11 @@
12
13 # Install/ remove Using Samba book (but only if it is there)
14
15-if [ "x$BOOKDIR" != "x" -a -f $SRCDIR../docs/htmldocs/using_samba/toc.html ]; then
16+# Under Debian we don't actually install the book. The book is part of
17+# the samba-doc package, so we just provide a symlink that points to
18+# where the book is actually installed. The symlink is created in
19+# debian/rules.
20+if /bin/false; then
21
22 # Create directories
23
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/libutil_drop_AI_ADDRCONFIG.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/libutil_drop_AI_ADDRCONFIG.patch
new file mode 100644
index 000000000..d3473ea40
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/libutil_drop_AI_ADDRCONFIG.patch
@@ -0,0 +1,54 @@
1From 185cd4c79492a7de5988f9407d764cdb3a0e2e10 Mon Sep 17 00:00:00 2001
2From: Simo Sorce <idra@samba.org>
3Date: Wed, 11 May 2011 17:50:07 -0400
4Subject: [PATCH] libutil: use AI_ADDRCONFIG only when AI_NUMERIC is not defined
5
6This flag prevents startup w/o ip addresses assigned to any interface.
7If AI_NUMERIC is passed it should be safe to avoid it.
8
9Signed-off-by: Andreas Schneider <asn@samba.org>
10---
11 lib/util/util_net.c | 16 +++++++++++-----
12 1 files changed, 11 insertions(+), 5 deletions(-)
13
14Index: samba/lib/util/util_net.c
15===================================================================
16--- samba.orig/lib/util/util_net.c
17+++ samba/lib/util/util_net.c
18@@ -64,10 +64,9 @@
19 ppres);
20
21 if (ret) {
22- DEBUG(3,("interpret_string_addr_internal: getaddrinfo failed "
23- "for name %s [%s]\n",
24- str,
25- gai_strerror(ret) ));
26+ DEBUG(3, ("interpret_string_addr_internal: "
27+ "getaddrinfo failed for name %s (flags %d) [%s]\n",
28+ str, flags, gai_strerror(ret)));
29 return false;
30 }
31 return true;
32@@ -88,6 +87,7 @@
33 #if defined(HAVE_IPV6)
34 char addr[INET6_ADDRSTRLEN];
35 unsigned int scope_id = 0;
36+ int int_flags;
37
38 if (strchr_m(str, ':')) {
39 char *p = strchr_m(str, '%');
40@@ -108,7 +108,13 @@
41
42 zero_sockaddr(pss);
43
44- if (!interpret_string_addr_internal(&res, str, flags|AI_ADDRCONFIG)) {
45+ if (flags & AI_NUMERICHOST) {
46+ int_flags = flags;
47+ } else {
48+ int_flags = flags|AI_ADDRCONFIG;
49+ }
50+
51+ if (!interpret_string_addr_internal(&res, str, int_flags)) {
52 return false;
53 }
54 if (!res) {
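Editorial note on the hunk above: AI_ADDRCONFIG is only added when the caller did not request a purely numeric lookup; the flag actually tested in the code is AI_NUMERICHOST, even though the commit subject abbreviates it as AI_NUMERIC. A standalone sketch of the same flag handling follows, using plain getaddrinfo() and made-up inputs instead of Samba's interpret_string_addr_internal().

    /* Illustration of the flag logic from util_net.c: only add
     * AI_ADDRCONFIG for name lookups, so numeric addresses still
     * resolve on hosts with no configured interface addresses. */
    #define _GNU_SOURCE
    #include <stdio.h>
    #include <string.h>
    #include <sys/types.h>
    #include <sys/socket.h>
    #include <netdb.h>

    static int lookup(const char *str, int flags)
    {
        struct addrinfo hints, *res = NULL;
        int int_flags, ret;

        if (flags & AI_NUMERICHOST) {
            int_flags = flags;                  /* numeric: leave flags alone */
        } else {
            int_flags = flags | AI_ADDRCONFIG;  /* names: configured families only */
        }

        memset(&hints, 0, sizeof(hints));
        hints.ai_family = AF_UNSPEC;
        hints.ai_socktype = SOCK_STREAM;
        hints.ai_flags = int_flags;

        ret = getaddrinfo(str, NULL, &hints, &res);
        if (ret != 0) {
            fprintf(stderr, "getaddrinfo failed for %s (flags %d) [%s]\n",
                    str, int_flags, gai_strerror(ret));
            return -1;
        }
        freeaddrinfo(res);
        return 0;
    }

    int main(void)
    {
        lookup("127.0.0.1", AI_NUMERICHOST);    /* hypothetical numeric input */
        lookup("localhost", 0);                 /* hypothetical name input */
        return 0;
    }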
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/only_export_public_symbols.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/only_export_public_symbols.patch
new file mode 100644
index 000000000..f4fbd56a1
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/only_export_public_symbols.patch
@@ -0,0 +1,21 @@
1Description: only export public symbols
2 Force usage of the symbols list when linking shared libraries. Otherwise,
3 private symbols get exported in libsmbclient and libwbclient.
4Forwarded: no
5Author: Ivo De Decker <ivo.dedecker@ugent.be>
6Last-Update: 2012-06-27
7
8--- samba-3.6.6.orig/source3/Makefile.in
9+++ samba-3.6.6/source3/Makefile.in
10@@ -28,8 +28,9 @@ SHLD=@SHLD@
11 LIB_PATH_VAR=@LIB_PATH_VAR@
12
13 ## Dynamic shared libraries build settings
14-DSO_EXPORTS_CMD=-Wl,--version-script,$(srcdir)/exports/`basename $@ | sed 's:\.@SHLIBEXT@[\.0-9]*$$:.@SYMSEXT@:'`
15-DSO_EXPORTS=@DSO_EXPORTS@
16+# force using syms file
17+DSO_EXPORTS=-Wl,--version-script,$(srcdir)/exports/`basename $@ | sed 's:\.@SHLIBEXT@[\.0-9]*$$:.@SYMSEXT@:'`
18+#DSO_EXPORTS=@DSO_EXPORTS@
19 SHLD_DSO = $(SHLD) $(LDSHFLAGS) $(DSO_EXPORTS) -o $@
20
21 # The MODULE_EXPORTS variable contains the platform-specific linker flags
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/pam-examples.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/pam-examples.patch
new file mode 100644
index 000000000..9b36e14e3
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/pam-examples.patch
@@ -0,0 +1,17 @@
1Description: Fix examples directory location in pam_smbpass README
2Author: Christian Perrier <bubulle@debian.org>
3Forwarded: not-needed
4
5Index: experimental/source3/pam_smbpass/README
6===================================================================
7--- experimental.orig/source3/pam_smbpass/README
8+++ experimental/source3/pam_smbpass/README
9@@ -37,7 +37,7 @@
10 smbconf=<file> - specify an alternate path to the smb.conf
11 file.
12
13-See the samples/ directory for example PAM configurations using this
14+See the examples/ directory for example PAM configurations using this
15 module.
16
17 Thanks go to the following people:
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/shadow_copy2_backport.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/shadow_copy2_backport.patch
new file mode 100644
index 000000000..dbd10489f
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/shadow_copy2_backport.patch
@@ -0,0 +1,2101 @@
1Description: Backport new shadow_copy2 implementation from master
2 The shadow_copy2 vfs module in samba 3.6 doesn't work if wide links is
3 disabled. This problem is fixed by a rewrite in the master branch.
4 This patch is a backport of this new version to samba 3.6.
5 It is based on these commits in the upstream samba git:
6 dc461cade5becec21f8d1f2bb74fcf1a977a5ec2
7 617b63658b02957422359a76fd8b8e4748d228ee
8Author: Ivo De Decker <ivo.dedecker@ugent.be>
9Origin: upstream
10Bug: https://bugzilla.samba.org/show_bug.cgi?id=7287
11Forwarded: not-needed
12Last-Update: 2012-05-27
13
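For orientation while reading the large rewrite below: snapshot components are named after the pattern @GMT-%Y.%m.%d-%H.%M.%S, and the new shadow_copy2_strip_snapshot() recovers a time_t from such a name with strptime() and mktime(). Here is a small standalone sketch of just that parsing step, using an invented snapshot name; the real function additionally strips the component out of the path.

    /* Sketch of the timestamp parsing done by the backported
     * shadow_copy2_strip_snapshot(); the snapshot name here is invented. */
    #define _XOPEN_SOURCE 700
    #include <stdio.h>
    #include <time.h>

    #define GMT_FORMAT "@GMT-%Y.%m.%d-%H.%M.%S"

    int main(void)
    {
        const char *name = "@GMT-2012.05.27-10.30.00";
        struct tm tm = {0};
        char *rest;
        time_t timestamp;

        rest = strptime(name, GMT_FORMAT, &tm);
        if (rest == NULL) {
            fprintf(stderr, "not a snapshot name\n");
            return 1;
        }
        tm.tm_isdst = -1;            /* let mktime() decide, as the patch does */
        timestamp = mktime(&tm);
        if (timestamp == (time_t)-1) {
            fprintf(stderr, "mktime failed\n");
            return 1;
        }
        printf("%s -> %ld (remainder: \"%s\")\n", name, (long)timestamp, rest);
        return 0;
    }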
14--- samba-3.6.5.orig/source3/modules/vfs_shadow_copy2.c
15+++ samba-3.6.5/source3/modules/vfs_shadow_copy2.c
16@@ -1,32 +1,29 @@
17-/*
18- * implementation of an Shadow Copy module - version 2
19+/*
20+ * Third attempt at a shadow copy module
21 *
22- * Copyright (C) Andrew Tridgell 2007
23- * Copyright (C) Ed Plese 2009
24+ * Copyright (C) Andrew Tridgell 2007 (portions taken from shadow_copy2)
25+ * Copyright (C) Ed Plese 2009
26+ * Copyright (C) Volker Lendecke 2011
27+ * Copyright (C) Christian Ambach 2011
28 *
29 * This program is free software; you can redistribute it and/or modify
30 * it under the terms of the GNU General Public License as published by
31 * the Free Software Foundation; either version 2 of the License, or
32 * (at your option) any later version.
33- *
34+ *
35 * This program is distributed in the hope that it will be useful,
36 * but WITHOUT ANY WARRANTY; without even the implied warranty of
37 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
38 * GNU General Public License for more details.
39- *
40+ *
41 * You should have received a copy of the GNU General Public License
42 * along with this program; if not, write to the Free Software
43 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
44 */
45
46-#include "includes.h"
47-#include "smbd/smbd.h"
48-#include "system/filesys.h"
49-#include "ntioctl.h"
50-
51 /*
52
53- This is a 2nd implemetation of a shadow copy module for exposing
54+ This is a 3rd implemetation of a shadow copy module for exposing
55 snapshots to windows clients as shadow copies. This version has the
56 following features:
57
58@@ -96,243 +93,169 @@
59 The following command would generate a correctly formatted directory name
60 for use with the default parameters:
61 date -u +@GMT-%Y.%m.%d-%H.%M.%S
62-
63 */
64
65-static int vfs_shadow_copy2_debug_level = DBGC_VFS;
66-
67-#undef DBGC_CLASS
68-#define DBGC_CLASS vfs_shadow_copy2_debug_level
69+#include "includes.h"
70+#include "system/filesys.h"
71+#include "include/ntioctl.h"
72+#include "smbd/proto.h"
73+#include <tdb.h>
74+#include "util_tdb.h"
75
76 #define GMT_NAME_LEN 24 /* length of a @GMT- name */
77-#define SHADOW_COPY2_GMT_FORMAT "@GMT-%Y.%m.%d-%H.%M.%S"
78-
79-#define SHADOW_COPY2_DEFAULT_SORT NULL
80-#define SHADOW_COPY2_DEFAULT_FORMAT "@GMT-%Y.%m.%d-%H.%M.%S"
81-#define SHADOW_COPY2_DEFAULT_LOCALTIME false
82+#define GMT_FORMAT "@GMT-%Y.%m.%d-%H.%M.%S"
83
84-/*
85- make very sure it is one of our special names
86- */
87-static inline bool shadow_copy2_match_name(const char *name, const char **gmt_start)
88+static bool shadow_copy2_find_slashes(TALLOC_CTX *mem_ctx, const char *str,
89+ size_t **poffsets,
90+ unsigned *pnum_offsets)
91 {
92- unsigned year, month, day, hr, min, sec;
93+ unsigned num_offsets;
94+ size_t *offsets;
95 const char *p;
96- if (gmt_start) {
97- (*gmt_start) = NULL;
98- }
99- p = strstr_m(name, "@GMT-");
100- if (p == NULL) return false;
101- if (p > name && p[-1] != '/') return False;
102- if (sscanf(p, "@GMT-%04u.%02u.%02u-%02u.%02u.%02u", &year, &month,
103- &day, &hr, &min, &sec) != 6) {
104- return False;
105- }
106- if (p[24] != 0 && p[24] != '/') {
107- return False;
108- }
109- if (gmt_start) {
110- (*gmt_start) = p;
111- }
112- return True;
113-}
114
115-static char *shadow_copy2_snapshot_to_gmt(TALLOC_CTX *mem_ctx,
116- vfs_handle_struct *handle, const char *name)
117-{
118- struct tm timestamp;
119- time_t timestamp_t;
120- char gmt[GMT_NAME_LEN + 1];
121- const char *fmt;
122+ num_offsets = 0;
123
124- fmt = lp_parm_const_string(SNUM(handle->conn), "shadow",
125- "format", SHADOW_COPY2_DEFAULT_FORMAT);
126+ p = str;
127+ while ((p = strchr(p, '/')) != NULL) {
128+ num_offsets += 1;
129+ p += 1;
130+ }
131
132- ZERO_STRUCT(timestamp);
133- if (strptime(name, fmt, &timestamp) == NULL) {
134- DEBUG(10, ("shadow_copy2_snapshot_to_gmt: no match %s: %s\n",
135- fmt, name));
136- return NULL;
137+ offsets = talloc_array(mem_ctx, size_t, num_offsets);
138+ if (offsets == NULL) {
139+ return false;
140 }
141
142- DEBUG(10, ("shadow_copy2_snapshot_to_gmt: match %s: %s\n", fmt, name));
143- if (lp_parm_bool(SNUM(handle->conn), "shadow", "localtime",
144- SHADOW_COPY2_DEFAULT_LOCALTIME))
145- {
146- timestamp.tm_isdst = -1;
147- timestamp_t = mktime(&timestamp);
148- gmtime_r(&timestamp_t, &timestamp);
149+ p = str;
150+ num_offsets = 0;
151+ while ((p = strchr(p, '/')) != NULL) {
152+ offsets[num_offsets] = p-str;
153+ num_offsets += 1;
154+ p += 1;
155 }
156- strftime(gmt, sizeof(gmt), SHADOW_COPY2_GMT_FORMAT, &timestamp);
157
158- return talloc_strdup(mem_ctx, gmt);
159+ *poffsets = offsets;
160+ *pnum_offsets = num_offsets;
161+ return true;
162 }
163
164-/*
165- shadow copy paths can also come into the server in this form:
166-
167- /foo/bar/@GMT-XXXXX/some/file
168-
169- This function normalises the filename to be of the form:
170-
171- @GMT-XXXX/foo/bar/some/file
172- */
173-static const char *shadow_copy2_normalise_path(TALLOC_CTX *mem_ctx, const char *path, const char *gmt_start)
174+static char *shadow_copy2_insert_string(TALLOC_CTX *mem_ctx,
175+ struct vfs_handle_struct *handle,
176+ time_t snapshot)
177 {
178- char *pcopy;
179- char buf[GMT_NAME_LEN];
180- size_t prefix_len;
181+ struct tm snap_tm;
182+ fstring gmt;
183+ size_t gmt_len;
184
185- if (path == gmt_start) {
186- return path;
187+ if (localtime_r(&snapshot, &snap_tm) == 0) {
188+ DEBUG(10, ("gmtime_r failed\n"));
189+ return NULL;
190 }
191-
192- prefix_len = gmt_start - path - 1;
193-
194- DEBUG(10, ("path=%s, gmt_start=%s, prefix_len=%d\n", path, gmt_start,
195- (int)prefix_len));
196-
197- /*
198- * We've got a/b/c/@GMT-YYYY.MM.DD-HH.MM.SS/d/e. convert to
199- * @GMT-YYYY.MM.DD-HH.MM.SS/a/b/c/d/e before further
200- * processing. As many VFS calls provide a const char *,
201- * unfortunately we have to make a copy.
202- */
203-
204- pcopy = talloc_strdup(talloc_tos(), path);
205- if (pcopy == NULL) {
206+ gmt_len = strftime(gmt, sizeof(gmt),
207+ lp_parm_const_string(SNUM(handle->conn), "shadow",
208+ "format", GMT_FORMAT),
209+ &snap_tm);
210+ if (gmt_len == 0) {
211+ DEBUG(10, ("strftime failed\n"));
212 return NULL;
213 }
214-
215- gmt_start = pcopy + prefix_len;
216-
217- /*
218- * Copy away "@GMT-YYYY.MM.DD-HH.MM.SS"
219- */
220- memcpy(buf, gmt_start+1, GMT_NAME_LEN);
221-
222- /*
223- * Make space for it including a trailing /
224- */
225- memmove(pcopy + GMT_NAME_LEN + 1, pcopy, prefix_len);
226-
227- /*
228- * Move in "@GMT-YYYY.MM.DD-HH.MM.SS/" at the beginning again
229- */
230- memcpy(pcopy, buf, GMT_NAME_LEN);
231- pcopy[GMT_NAME_LEN] = '/';
232-
233- DEBUG(10, ("shadow_copy2_normalise_path: %s -> %s\n", path, pcopy));
234-
235- return pcopy;
236+ return talloc_asprintf(talloc_tos(), "/%s/%s",
237+ lp_parm_const_string(
238+ SNUM(handle->conn), "shadow", "snapdir",
239+ ".snapshots"),
240+ gmt);
241 }
242
243-/*
244- convert a name to the shadow directory
245- */
246+static bool shadow_copy2_strip_snapshot(TALLOC_CTX *mem_ctx,
247+ struct vfs_handle_struct *handle,
248+ const char *name,
249+ time_t *ptimestamp,
250+ char **pstripped)
251+{
252+ struct tm tm;
253+ time_t timestamp;
254+ const char *p;
255+ char *q;
256+ char *stripped;
257+ size_t rest_len, dst_len;
258
259-#define _SHADOW2_NEXT(op, args, rtype, eret, extra) do { \
260- const char *name = fname; \
261- const char *gmt_start; \
262- if (shadow_copy2_match_name(fname, &gmt_start)) { \
263- char *name2; \
264- rtype ret; \
265- name2 = convert_shadow2_name(handle, fname, gmt_start); \
266- if (name2 == NULL) { \
267- errno = EINVAL; \
268- return eret; \
269- } \
270- name = name2; \
271- ret = SMB_VFS_NEXT_ ## op args; \
272- talloc_free(name2); \
273- if (ret != eret) extra; \
274- return ret; \
275- } else { \
276- return SMB_VFS_NEXT_ ## op args; \
277- } \
278-} while (0)
279-
280-#define _SHADOW2_NEXT_SMB_FNAME(op, args, rtype, eret, extra) do { \
281- const char *gmt_start; \
282- if (shadow_copy2_match_name(smb_fname->base_name, &gmt_start)) { \
283- char *name2; \
284- char *smb_base_name_tmp = NULL; \
285- rtype ret; \
286- name2 = convert_shadow2_name(handle, smb_fname->base_name, gmt_start); \
287- if (name2 == NULL) { \
288- errno = EINVAL; \
289- return eret; \
290- } \
291- smb_base_name_tmp = smb_fname->base_name; \
292- smb_fname->base_name = name2; \
293- ret = SMB_VFS_NEXT_ ## op args; \
294- smb_fname->base_name = smb_base_name_tmp; \
295- talloc_free(name2); \
296- if (ret != eret) extra; \
297- return ret; \
298- } else { \
299- return SMB_VFS_NEXT_ ## op args; \
300- } \
301-} while (0)
302+ p = strstr_m(name, "@GMT-");
303+ if (p == NULL) {
304+ goto no_snapshot;
305+ }
306+ if ((p > name) && (p[-1] != '/')) {
307+ goto no_snapshot;
308+ }
309+ q = strptime(p, GMT_FORMAT, &tm);
310+ if (q == NULL) {
311+ goto no_snapshot;
312+ }
313+ tm.tm_isdst = -1;
314+ timestamp = mktime(&tm);
315+ if (timestamp == (time_t)-1) {
316+ goto no_snapshot;
317+ }
318+ if ((p == name) && (q[0] == '\0')) {
319+ if (pstripped != NULL) {
320+ stripped = talloc_strdup(mem_ctx, "");
321+ if (stripped == NULL) {
322+ return false;
323+ }
324+ *pstripped = stripped;
325+ }
326+ *ptimestamp = timestamp;
327+ return true;
328+ }
329+ if (q[0] != '/') {
330+ goto no_snapshot;
331+ }
332+ q += 1;
333
334-/*
335- convert a name to the shadow directory: NTSTATUS-specific handling
336- */
337+ rest_len = strlen(q);
338+ dst_len = (p-name) + rest_len;
339+
340+ if (lp_parm_bool(SNUM(handle->conn), "shadow", "snapdirseverywhere",
341+ false)) {
342+ char *insert;
343+ bool have_insert;
344+ insert = shadow_copy2_insert_string(talloc_tos(), handle,
345+ timestamp);
346+ if (insert == NULL) {
347+ errno = ENOMEM;
348+ return false;
349+ }
350
351-#define _SHADOW2_NTSTATUS_NEXT(op, args, eret, extra) do { \
352- const char *name = fname; \
353- const char *gmt_start; \
354- if (shadow_copy2_match_name(fname, &gmt_start)) { \
355- char *name2; \
356- NTSTATUS ret; \
357- name2 = convert_shadow2_name(handle, fname, gmt_start); \
358- if (name2 == NULL) { \
359- errno = EINVAL; \
360- return eret; \
361- } \
362- name = name2; \
363- ret = SMB_VFS_NEXT_ ## op args; \
364- talloc_free(name2); \
365- if (!NT_STATUS_EQUAL(ret, eret)) extra; \
366- return ret; \
367- } else { \
368- return SMB_VFS_NEXT_ ## op args; \
369- } \
370-} while (0)
371-
372-#define SHADOW2_NTSTATUS_NEXT(op, args, eret) _SHADOW2_NTSTATUS_NEXT(op, args, eret, )
373-
374-#define SHADOW2_NEXT(op, args, rtype, eret) _SHADOW2_NEXT(op, args, rtype, eret, )
375-
376-#define SHADOW2_NEXT_SMB_FNAME(op, args, rtype, eret) _SHADOW2_NEXT_SMB_FNAME(op, args, rtype, eret, )
377-
378-#define SHADOW2_NEXT2(op, args) do { \
379- const char *gmt_start1, *gmt_start2; \
380- if (shadow_copy2_match_name(oldname, &gmt_start1) || \
381- shadow_copy2_match_name(newname, &gmt_start2)) { \
382- errno = EROFS; \
383- return -1; \
384- } else { \
385- return SMB_VFS_NEXT_ ## op args; \
386- } \
387-} while (0)
388-
389-#define SHADOW2_NEXT2_SMB_FNAME(op, args) do { \
390- const char *gmt_start1, *gmt_start2; \
391- if (shadow_copy2_match_name(smb_fname_src->base_name, &gmt_start1) || \
392- shadow_copy2_match_name(smb_fname_dst->base_name, &gmt_start2)) { \
393- errno = EROFS; \
394- return -1; \
395- } else { \
396- return SMB_VFS_NEXT_ ## op args; \
397- } \
398-} while (0)
399+ have_insert = (strstr(name, insert+1) != NULL);
400+ TALLOC_FREE(insert);
401+ if (have_insert) {
402+ goto no_snapshot;
403+ }
404+ }
405
406+ if (pstripped != NULL) {
407+ stripped = talloc_array(mem_ctx, char, dst_len+1);
408+ if (stripped == NULL) {
409+ errno = ENOMEM;
410+ return false;
411+ }
412+ if (p > name) {
413+ memcpy(stripped, name, p-name);
414+ }
415+ if (rest_len > 0) {
416+ memcpy(stripped + (p-name), q, rest_len);
417+ }
418+ stripped[dst_len] = '\0';
419+ *pstripped = stripped;
420+ }
421+ *ptimestamp = timestamp;
422+ return true;
423+no_snapshot:
424+ *ptimestamp = 0;
425+ return true;
426+}
427
428-/*
429- find the mount point of a filesystem
430- */
431-static char *find_mount_point(TALLOC_CTX *mem_ctx, vfs_handle_struct *handle)
432+static char *shadow_copy2_find_mount_point(TALLOC_CTX *mem_ctx,
433+ vfs_handle_struct *handle)
434 {
435 char *path = talloc_strdup(mem_ctx, handle->conn->connectpath);
436 dev_t dev;
437@@ -358,164 +281,152 @@ static char *find_mount_point(TALLOC_CTX
438 }
439 }
440
441- return path;
442+ return path;
443 }
444
445-/*
446- work out the location of the snapshot for this share
447- */
448-static const char *shadow_copy2_find_snapdir(TALLOC_CTX *mem_ctx, vfs_handle_struct *handle)
449-{
450- const char *snapdir;
451- char *mount_point;
452- const char *ret;
453-
454- snapdir = lp_parm_const_string(SNUM(handle->conn), "shadow", "snapdir", NULL);
455- if (snapdir == NULL) {
456- return NULL;
457- }
458- /* if its an absolute path, we're done */
459- if (*snapdir == '/') {
460- return snapdir;
461+static char *shadow_copy2_convert(TALLOC_CTX *mem_ctx,
462+ struct vfs_handle_struct *handle,
463+ const char *name, time_t timestamp)
464+{
465+ struct smb_filename converted_fname;
466+ char *result = NULL;
467+ size_t *slashes = NULL;
468+ unsigned num_slashes;
469+ char *path = NULL;
470+ size_t pathlen;
471+ char *insert = NULL;
472+ char *converted = NULL;
473+ size_t insertlen;
474+ int i, saved_errno;
475+ size_t min_offset;
476+
477+ path = talloc_asprintf(mem_ctx, "%s/%s", handle->conn->connectpath,
478+ name);
479+ if (path == NULL) {
480+ errno = ENOMEM;
481+ goto fail;
482+ }
483+ pathlen = talloc_get_size(path)-1;
484+
485+ DEBUG(10, ("converting %s\n", path));
486+
487+ if (!shadow_copy2_find_slashes(talloc_tos(), path,
488+ &slashes, &num_slashes)) {
489+ goto fail;
490+ }
491+ insert = shadow_copy2_insert_string(talloc_tos(), handle, timestamp);
492+ if (insert == NULL) {
493+ goto fail;
494+ }
495+ insertlen = talloc_get_size(insert)-1;
496+ converted = talloc_array(mem_ctx, char, pathlen + insertlen + 1);
497+ if (converted == NULL) {
498+ goto fail;
499+ }
500+
501+ if (path[pathlen-1] != '/') {
502+ /*
503+ * Append a fake slash to find the snapshot root
504+ */
505+ size_t *tmp;
506+ tmp = talloc_realloc(talloc_tos(), slashes,
507+ size_t, num_slashes+1);
508+ if (tmp == NULL) {
509+ goto fail;
510+ }
511+ slashes = tmp;
512+ slashes[num_slashes] = pathlen;
513+ num_slashes += 1;
514 }
515
516- /* other its relative to the filesystem mount point */
517- mount_point = find_mount_point(mem_ctx, handle);
518- if (mount_point == NULL) {
519- return NULL;
520- }
521+ min_offset = 0;
522
523- ret = talloc_asprintf(mem_ctx, "%s/%s", mount_point, snapdir);
524- talloc_free(mount_point);
525- return ret;
526-}
527-
528-/*
529- work out the location of the base directory for snapshots of this share
530- */
531-static const char *shadow_copy2_find_basedir(TALLOC_CTX *mem_ctx, vfs_handle_struct *handle)
532-{
533- const char *basedir = lp_parm_const_string(SNUM(handle->conn), "shadow", "basedir", NULL);
534+ if (!lp_parm_bool(SNUM(handle->conn), "shadow", "crossmountpoints",
535+ false)) {
536+ char *mount_point;
537
538- /* other its the filesystem mount point */
539- if (basedir == NULL) {
540- basedir = find_mount_point(mem_ctx, handle);
541+ mount_point = shadow_copy2_find_mount_point(talloc_tos(),
542+ handle);
543+ if (mount_point == NULL) {
544+ goto fail;
545+ }
546+ min_offset = strlen(mount_point);
547+ TALLOC_FREE(mount_point);
548 }
549
550- return basedir;
551-}
552+ memcpy(converted, path, pathlen+1);
553+ converted[pathlen+insertlen] = '\0';
554
555-/*
556- convert a filename from a share relative path, to a path in the
557- snapshot directory
558- */
559-static char *convert_shadow2_name(vfs_handle_struct *handle, const char *fname, const char *gmt_path)
560-{
561- TALLOC_CTX *tmp_ctx = talloc_new(handle->data);
562- const char *snapdir, *relpath, *baseoffset, *basedir;
563- size_t baselen;
564- char *ret, *prefix;
565+ ZERO_STRUCT(converted_fname);
566+ converted_fname.base_name = converted;
567
568- struct tm timestamp;
569- time_t timestamp_t;
570- char snapshot[MAXPATHLEN];
571- const char *fmt;
572+ for (i = num_slashes-1; i>=0; i--) {
573+ int ret;
574+ size_t offset;
575
576- fmt = lp_parm_const_string(SNUM(handle->conn), "shadow",
577- "format", SHADOW_COPY2_DEFAULT_FORMAT);
578+ offset = slashes[i];
579
580- snapdir = shadow_copy2_find_snapdir(tmp_ctx, handle);
581- if (snapdir == NULL) {
582- DEBUG(2,("no snapdir found for share at %s\n", handle->conn->connectpath));
583- talloc_free(tmp_ctx);
584- return NULL;
585- }
586-
587- basedir = shadow_copy2_find_basedir(tmp_ctx, handle);
588- if (basedir == NULL) {
589- DEBUG(2,("no basedir found for share at %s\n", handle->conn->connectpath));
590- talloc_free(tmp_ctx);
591- return NULL;
592- }
593-
594- prefix = talloc_asprintf(tmp_ctx, "%s/@GMT-", snapdir);
595- if (strncmp(fname, prefix, (talloc_get_size(prefix)-1)) == 0) {
596- /* this looks like as we have already normalized it, leave it untouched*/
597- talloc_free(tmp_ctx);
598- return talloc_strdup(handle->data, fname);
599- }
600-
601- if (strncmp(fname, "@GMT-", 5) != 0) {
602- fname = shadow_copy2_normalise_path(tmp_ctx, fname, gmt_path);
603- if (fname == NULL) {
604- talloc_free(tmp_ctx);
605- return NULL;
606+ if (offset < min_offset) {
607+ errno = ENOENT;
608+ goto fail;
609 }
610- }
611
612- ZERO_STRUCT(timestamp);
613- relpath = strptime(fname, SHADOW_COPY2_GMT_FORMAT, &timestamp);
614- if (relpath == NULL) {
615- talloc_free(tmp_ctx);
616- return NULL;
617- }
618+ memcpy(converted+offset, insert, insertlen);
619
620- /* relpath is the remaining portion of the path after the @GMT-xxx */
621-
622- if (lp_parm_bool(SNUM(handle->conn), "shadow", "localtime",
623- SHADOW_COPY2_DEFAULT_LOCALTIME))
624- {
625- timestamp_t = timegm(&timestamp);
626- localtime_r(&timestamp_t, &timestamp);
627+ offset += insertlen;
628+ memcpy(converted+offset, path + slashes[i],
629+ pathlen - slashes[i]);
630+
631+ ret = SMB_VFS_NEXT_LSTAT(handle, &converted_fname);
632+
633+ DEBUG(10, ("Trying %s: %d (%s)\n", converted,
634+ ret, ret == 0 ? "ok" : strerror(errno)));
635+ if (ret == 0) {
636+ /* success */
637+ break;
638+ }
639+ if (errno == ENOTDIR) {
640+ /*
641+ * This is a valid condition: We appended the
642+ * .snaphots/@GMT.. to a file name. Just try
643+ * with the upper levels.
644+ */
645+ continue;
646+ }
647+ if (errno != ENOENT) {
648+ /* Other problem than "not found" */
649+ goto fail;
650+ }
651 }
652
653- strftime(snapshot, MAXPATHLEN, fmt, &timestamp);
654-
655- baselen = strlen(basedir);
656- baseoffset = handle->conn->connectpath + baselen;
657-
658- /* some sanity checks */
659- if (strncmp(basedir, handle->conn->connectpath, baselen) != 0 ||
660- (handle->conn->connectpath[baselen] != 0 && handle->conn->connectpath[baselen] != '/')) {
661- DEBUG(0,("convert_shadow2_name: basedir %s is not a parent of %s\n",
662- basedir, handle->conn->connectpath));
663- talloc_free(tmp_ctx);
664- return NULL;
665+ if (i >= 0) {
666+ /*
667+ * Found something
668+ */
669+ DEBUG(10, ("Found %s\n", converted));
670+ result = converted;
671+ converted = NULL;
672+ } else {
673+ errno = ENOENT;
674 }
675-
676- if (*relpath == '/') relpath++;
677- if (*baseoffset == '/') baseoffset++;
678-
679- ret = talloc_asprintf(handle->data, "%s/%s/%s/%s",
680- snapdir,
681- snapshot,
682- baseoffset,
683- relpath);
684- DEBUG(6,("convert_shadow2_name: '%s' -> '%s'\n", fname, ret));
685- talloc_free(tmp_ctx);
686- return ret;
687-}
688-
689-
690-/*
691- simple string hash
692- */
693-static uint32 string_hash(const char *s)
694-{
695- uint32 n = 0;
696- while (*s) {
697- n = ((n << 5) + n) ^ (uint32)(*s++);
698- }
699- return n;
700+fail:
701+ saved_errno = errno;
702+ TALLOC_FREE(converted);
703+ TALLOC_FREE(insert);
704+ TALLOC_FREE(slashes);
705+ TALLOC_FREE(path);
706+ errno = saved_errno;
707+ return result;
708 }
709
710 /*
711 modify a sbuf return to ensure that inodes in the shadow directory
712 are different from those in the main directory
713 */
714-static void convert_sbuf(vfs_handle_struct *handle, const char *fname, SMB_STRUCT_STAT *sbuf)
715+static void convert_sbuf(vfs_handle_struct *handle, const char *fname,
716+ SMB_STRUCT_STAT *sbuf)
717 {
718- if (lp_parm_bool(SNUM(handle->conn), "shadow", "fixinodes", False)) {
719+ if (lp_parm_bool(SNUM(handle->conn), "shadow", "fixinodes", False)) {
720 /* some snapshot systems, like GPFS, return the name
721 device:inode for the snapshot files as the current
722 files. That breaks the 'restore' button in the shadow copy
723@@ -526,7 +437,10 @@ static void convert_sbuf(vfs_handle_stru
724 number collision, but I can't see a better approach
725 without significant VFS changes
726 */
727- uint32_t shash = string_hash(fname) & 0xFF000000;
728+ uint32_t shash;
729+ TDB_DATA data = string_tdb_data(fname);
730+
731+ shash = tdb_jenkins_hash(&data) & 0xFF000000;
732 if (shash == 0) {
733 shash = 1;
734 }
735@@ -534,303 +448,594 @@ static void convert_sbuf(vfs_handle_stru
736 }
737 }
738
739+static SMB_STRUCT_DIR *shadow_copy2_opendir(vfs_handle_struct *handle,
740+ const char *fname,
741+ const char *mask,
742+ uint32 attr)
743+{
744+ time_t timestamp;
745+ char *stripped;
746+ SMB_STRUCT_DIR *ret;
747+ int saved_errno;
748+ char *conv;
749+
750+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
751+ &timestamp, &stripped)) {
752+ return NULL;
753+ }
754+ if (timestamp == 0) {
755+ return SMB_VFS_NEXT_OPENDIR(handle, fname, mask, attr);
756+ }
757+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
758+ TALLOC_FREE(stripped);
759+ if (conv == NULL) {
760+ return NULL;
761+ }
762+ ret = SMB_VFS_NEXT_OPENDIR(handle, conv, mask, attr);
763+ saved_errno = errno;
764+ TALLOC_FREE(conv);
765+ errno = saved_errno;
766+ return ret;
767+}
768+
769 static int shadow_copy2_rename(vfs_handle_struct *handle,
770 const struct smb_filename *smb_fname_src,
771 const struct smb_filename *smb_fname_dst)
772 {
773- if (shadow_copy2_match_name(smb_fname_src->base_name, NULL)) {
774+ time_t timestamp_src, timestamp_dst;
775+
776+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle,
777+ smb_fname_src->base_name,
778+ &timestamp_src, NULL)) {
779+ return -1;
780+ }
781+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle,
782+ smb_fname_dst->base_name,
783+ &timestamp_dst, NULL)) {
784+ return -1;
785+ }
786+ if (timestamp_src != 0) {
787 errno = EXDEV;
788 return -1;
789 }
790- SHADOW2_NEXT2_SMB_FNAME(RENAME,
791- (handle, smb_fname_src, smb_fname_dst));
792+ if (timestamp_dst != 0) {
793+ errno = EROFS;
794+ return -1;
795+ }
796+ return SMB_VFS_NEXT_RENAME(handle, smb_fname_src, smb_fname_dst);
797 }
798
799 static int shadow_copy2_symlink(vfs_handle_struct *handle,
800 const char *oldname, const char *newname)
801 {
802- SHADOW2_NEXT2(SYMLINK, (handle, oldname, newname));
803-}
804+ time_t timestamp_old, timestamp_new;
805
806-static int shadow_copy2_link(vfs_handle_struct *handle,
807- const char *oldname, const char *newname)
808-{
809- SHADOW2_NEXT2(LINK, (handle, oldname, newname));
810+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, oldname,
811+ &timestamp_old, NULL)) {
812+ return -1;
813+ }
814+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, newname,
815+ &timestamp_new, NULL)) {
816+ return -1;
817+ }
818+ if ((timestamp_old != 0) || (timestamp_new != 0)) {
819+ errno = EROFS;
820+ return -1;
821+ }
822+ return SMB_VFS_NEXT_SYMLINK(handle, oldname, newname);
823 }
824
825-static int shadow_copy2_open(vfs_handle_struct *handle,
826- struct smb_filename *smb_fname, files_struct *fsp,
827- int flags, mode_t mode)
828+static int shadow_copy2_link(vfs_handle_struct *handle,
829+ const char *oldname, const char *newname)
830 {
831- SHADOW2_NEXT_SMB_FNAME(OPEN,
832- (handle, smb_fname, fsp, flags, mode),
833- int, -1);
834-}
835+ time_t timestamp_old, timestamp_new;
836
837-static SMB_STRUCT_DIR *shadow_copy2_opendir(vfs_handle_struct *handle,
838- const char *fname, const char *mask, uint32 attr)
839-{
840- SHADOW2_NEXT(OPENDIR, (handle, name, mask, attr), SMB_STRUCT_DIR *, NULL);
841+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, oldname,
842+ &timestamp_old, NULL)) {
843+ return -1;
844+ }
845+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, newname,
846+ &timestamp_new, NULL)) {
847+ return -1;
848+ }
849+ if ((timestamp_old != 0) || (timestamp_new != 0)) {
850+ errno = EROFS;
851+ return -1;
852+ }
853+ return SMB_VFS_NEXT_LINK(handle, oldname, newname);
854 }
855
856 static int shadow_copy2_stat(vfs_handle_struct *handle,
857 struct smb_filename *smb_fname)
858 {
859- _SHADOW2_NEXT_SMB_FNAME(STAT, (handle, smb_fname), int, -1,
860- convert_sbuf(handle, smb_fname->base_name,
861- &smb_fname->st));
862+ time_t timestamp;
863+ char *stripped, *tmp;
864+ int ret, saved_errno;
865+
866+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle,
867+ smb_fname->base_name,
868+ &timestamp, &stripped)) {
869+ return -1;
870+ }
871+ if (timestamp == 0) {
872+ return SMB_VFS_NEXT_STAT(handle, smb_fname);
873+ }
874+
875+ tmp = smb_fname->base_name;
876+ smb_fname->base_name = shadow_copy2_convert(
877+ talloc_tos(), handle, stripped, timestamp);
878+ TALLOC_FREE(stripped);
879+
880+ if (smb_fname->base_name == NULL) {
881+ smb_fname->base_name = tmp;
882+ return -1;
883+ }
884+
885+ ret = SMB_VFS_NEXT_STAT(handle, smb_fname);
886+ saved_errno = errno;
887+
888+ TALLOC_FREE(smb_fname->base_name);
889+ smb_fname->base_name = tmp;
890+
891+ if (ret == 0) {
892+ convert_sbuf(handle, smb_fname->base_name, &smb_fname->st);
893+ }
894+ errno = saved_errno;
895+ return ret;
896 }
897
898 static int shadow_copy2_lstat(vfs_handle_struct *handle,
899 struct smb_filename *smb_fname)
900 {
901- _SHADOW2_NEXT_SMB_FNAME(LSTAT, (handle, smb_fname), int, -1,
902- convert_sbuf(handle, smb_fname->base_name,
903- &smb_fname->st));
904+ time_t timestamp;
905+ char *stripped, *tmp;
906+ int ret, saved_errno;
907+
908+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle,
909+ smb_fname->base_name,
910+ &timestamp, &stripped)) {
911+ return -1;
912+ }
913+ if (timestamp == 0) {
914+ return SMB_VFS_NEXT_LSTAT(handle, smb_fname);
915+ }
916+
917+ tmp = smb_fname->base_name;
918+ smb_fname->base_name = shadow_copy2_convert(
919+ talloc_tos(), handle, stripped, timestamp);
920+ TALLOC_FREE(stripped);
921+
922+ if (smb_fname->base_name == NULL) {
923+ smb_fname->base_name = tmp;
924+ return -1;
925+ }
926+
927+ ret = SMB_VFS_NEXT_LSTAT(handle, smb_fname);
928+ saved_errno = errno;
929+
930+ TALLOC_FREE(smb_fname->base_name);
931+ smb_fname->base_name = tmp;
932+
933+ if (ret == 0) {
934+ convert_sbuf(handle, smb_fname->base_name, &smb_fname->st);
935+ }
936+ errno = saved_errno;
937+ return ret;
938 }
939
940-static int shadow_copy2_fstat(vfs_handle_struct *handle, files_struct *fsp, SMB_STRUCT_STAT *sbuf)
941+static int shadow_copy2_fstat(vfs_handle_struct *handle, files_struct *fsp,
942+ SMB_STRUCT_STAT *sbuf)
943 {
944- int ret = SMB_VFS_NEXT_FSTAT(handle, fsp, sbuf);
945- if (ret == 0 && shadow_copy2_match_name(fsp->fsp_name->base_name, NULL)) {
946+ time_t timestamp;
947+ int ret;
948+
949+ ret = SMB_VFS_NEXT_FSTAT(handle, fsp, sbuf);
950+ if (ret == -1) {
951+ return ret;
952+ }
953+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle,
954+ fsp->fsp_name->base_name,
955+ &timestamp, NULL)) {
956+ return 0;
957+ }
958+ if (timestamp != 0) {
959 convert_sbuf(handle, fsp->fsp_name->base_name, sbuf);
960 }
961+ return 0;
962+}
963+
964+static int shadow_copy2_open(vfs_handle_struct *handle,
965+ struct smb_filename *smb_fname, files_struct *fsp,
966+ int flags, mode_t mode)
967+{
968+ time_t timestamp;
969+ char *stripped, *tmp;
970+ int ret, saved_errno;
971+
972+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle,
973+ smb_fname->base_name,
974+ &timestamp, &stripped)) {
975+ return -1;
976+ }
977+ if (timestamp == 0) {
978+ return SMB_VFS_NEXT_OPEN(handle, smb_fname, fsp, flags, mode);
979+ }
980+
981+ tmp = smb_fname->base_name;
982+ smb_fname->base_name = shadow_copy2_convert(
983+ talloc_tos(), handle, stripped, timestamp);
984+ TALLOC_FREE(stripped);
985+
986+ if (smb_fname->base_name == NULL) {
987+ smb_fname->base_name = tmp;
988+ return -1;
989+ }
990+
991+ ret = SMB_VFS_NEXT_OPEN(handle, smb_fname, fsp, flags, mode);
992+ saved_errno = errno;
993+
994+ TALLOC_FREE(smb_fname->base_name);
995+ smb_fname->base_name = tmp;
996+
997+ errno = saved_errno;
998 return ret;
999 }
1000
1001 static int shadow_copy2_unlink(vfs_handle_struct *handle,
1002- const struct smb_filename *smb_fname_in)
1003+ const struct smb_filename *smb_fname)
1004 {
1005- struct smb_filename *smb_fname = NULL;
1006+ time_t timestamp;
1007+ char *stripped;
1008+ int ret, saved_errno;
1009+ struct smb_filename *conv;
1010 NTSTATUS status;
1011
1012- status = copy_smb_filename(talloc_tos(), smb_fname_in, &smb_fname);
1013+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle,
1014+ smb_fname->base_name,
1015+ &timestamp, &stripped)) {
1016+ return -1;
1017+ }
1018+ if (timestamp == 0) {
1019+ return SMB_VFS_NEXT_UNLINK(handle, smb_fname);
1020+ }
1021+ status = copy_smb_filename(talloc_tos(), smb_fname, &conv);
1022 if (!NT_STATUS_IS_OK(status)) {
1023- errno = map_errno_from_nt_status(status);
1024+ errno = ENOMEM;
1025 return -1;
1026 }
1027-
1028- SHADOW2_NEXT_SMB_FNAME(UNLINK, (handle, smb_fname), int, -1);
1029+ conv->base_name = shadow_copy2_convert(
1030+ conv, handle, stripped, timestamp);
1031+ TALLOC_FREE(stripped);
1032+ if (conv->base_name == NULL) {
1033+ return -1;
1034+ }
1035+ ret = SMB_VFS_NEXT_UNLINK(handle, conv);
1036+ saved_errno = errno;
1037+ TALLOC_FREE(conv);
1038+ errno = saved_errno;
1039+ return ret;
1040 }
1041
1042-static int shadow_copy2_chmod(vfs_handle_struct *handle,
1043- const char *fname, mode_t mode)
1044+static int shadow_copy2_chmod(vfs_handle_struct *handle, const char *fname,
1045+ mode_t mode)
1046 {
1047- SHADOW2_NEXT(CHMOD, (handle, name, mode), int, -1);
1048+ time_t timestamp;
1049+ char *stripped;
1050+ int ret, saved_errno;
1051+ char *conv;
1052+
1053+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1054+ &timestamp, &stripped)) {
1055+ return -1;
1056+ }
1057+ if (timestamp == 0) {
1058+ return SMB_VFS_NEXT_CHMOD(handle, fname, mode);
1059+ }
1060+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1061+ TALLOC_FREE(stripped);
1062+ if (conv == NULL) {
1063+ return -1;
1064+ }
1065+ ret = SMB_VFS_NEXT_CHMOD(handle, conv, mode);
1066+ saved_errno = errno;
1067+ TALLOC_FREE(conv);
1068+ errno = saved_errno;
1069+ return ret;
1070 }
1071
1072-static int shadow_copy2_chown(vfs_handle_struct *handle,
1073- const char *fname, uid_t uid, gid_t gid)
1074+static int shadow_copy2_chown(vfs_handle_struct *handle, const char *fname,
1075+ uid_t uid, gid_t gid)
1076 {
1077- SHADOW2_NEXT(CHOWN, (handle, name, uid, gid), int, -1);
1078+ time_t timestamp;
1079+ char *stripped;
1080+ int ret, saved_errno;
1081+ char *conv;
1082+
1083+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1084+ &timestamp, &stripped)) {
1085+ return -1;
1086+ }
1087+ if (timestamp == 0) {
1088+ return SMB_VFS_NEXT_CHOWN(handle, fname, uid, gid);
1089+ }
1090+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1091+ TALLOC_FREE(stripped);
1092+ if (conv == NULL) {
1093+ return -1;
1094+ }
1095+ ret = SMB_VFS_NEXT_CHOWN(handle, conv, uid, gid);
1096+ saved_errno = errno;
1097+ TALLOC_FREE(conv);
1098+ errno = saved_errno;
1099+ return ret;
1100 }
1101
1102 static int shadow_copy2_chdir(vfs_handle_struct *handle,
1103- const char *fname)
1104+ const char *fname)
1105 {
1106- SHADOW2_NEXT(CHDIR, (handle, name), int, -1);
1107+ time_t timestamp;
1108+ char *stripped;
1109+ int ret, saved_errno;
1110+ char *conv;
1111+
1112+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1113+ &timestamp, &stripped)) {
1114+ return -1;
1115+ }
1116+ if (timestamp == 0) {
1117+ return SMB_VFS_NEXT_CHDIR(handle, fname);
1118+ }
1119+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1120+ TALLOC_FREE(stripped);
1121+ if (conv == NULL) {
1122+ return -1;
1123+ }
1124+ ret = SMB_VFS_NEXT_CHDIR(handle, conv);
1125+ saved_errno = errno;
1126+ TALLOC_FREE(conv);
1127+ errno = saved_errno;
1128+ return ret;
1129 }
1130
1131 static int shadow_copy2_ntimes(vfs_handle_struct *handle,
1132- const struct smb_filename *smb_fname_in,
1133+ const struct smb_filename *smb_fname,
1134 struct smb_file_time *ft)
1135 {
1136- struct smb_filename *smb_fname = NULL;
1137+ time_t timestamp;
1138+ char *stripped;
1139+ int ret, saved_errno;
1140+ struct smb_filename *conv;
1141 NTSTATUS status;
1142
1143- status = copy_smb_filename(talloc_tos(), smb_fname_in, &smb_fname);
1144+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle,
1145+ smb_fname->base_name,
1146+ &timestamp, &stripped)) {
1147+ return -1;
1148+ }
1149+ if (timestamp == 0) {
1150+ return SMB_VFS_NEXT_NTIMES(handle, smb_fname, ft);
1151+ }
1152+ status = copy_smb_filename(talloc_tos(), smb_fname, &conv);
1153 if (!NT_STATUS_IS_OK(status)) {
1154- errno = map_errno_from_nt_status(status);
1155+ errno = ENOMEM;
1156 return -1;
1157 }
1158-
1159- SHADOW2_NEXT_SMB_FNAME(NTIMES, (handle, smb_fname, ft), int, -1);
1160+ conv->base_name = shadow_copy2_convert(
1161+ conv, handle, stripped, timestamp);
1162+ TALLOC_FREE(stripped);
1163+ if (conv->base_name == NULL) {
1164+ return -1;
1165+ }
1166+ ret = SMB_VFS_NEXT_NTIMES(handle, conv, ft);
1167+ saved_errno = errno;
1168+ TALLOC_FREE(conv);
1169+ errno = saved_errno;
1170+ return ret;
1171 }
1172
1173 static int shadow_copy2_readlink(vfs_handle_struct *handle,
1174 const char *fname, char *buf, size_t bufsiz)
1175 {
1176- SHADOW2_NEXT(READLINK, (handle, name, buf, bufsiz), int, -1);
1177-}
1178+ time_t timestamp;
1179+ char *stripped;
1180+ int ret, saved_errno;
1181+ char *conv;
1182
1183-static int shadow_copy2_mknod(vfs_handle_struct *handle,
1184- const char *fname, mode_t mode, SMB_DEV_T dev)
1185-{
1186- SHADOW2_NEXT(MKNOD, (handle, name, mode, dev), int, -1);
1187+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1188+ &timestamp, &stripped)) {
1189+ return -1;
1190+ }
1191+ if (timestamp == 0) {
1192+ return SMB_VFS_NEXT_READLINK(handle, fname, buf, bufsiz);
1193+ }
1194+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1195+ TALLOC_FREE(stripped);
1196+ if (conv == NULL) {
1197+ return -1;
1198+ }
1199+ ret = SMB_VFS_NEXT_READLINK(handle, conv, buf, bufsiz);
1200+ saved_errno = errno;
1201+ TALLOC_FREE(conv);
1202+ errno = saved_errno;
1203+ return ret;
1204 }
1205
1206-static char *shadow_copy2_realpath(vfs_handle_struct *handle,
1207- const char *fname)
1208+static int shadow_copy2_mknod(vfs_handle_struct *handle,
1209+ const char *fname, mode_t mode, SMB_DEV_T dev)
1210 {
1211- const char *gmt;
1212+ time_t timestamp;
1213+ char *stripped;
1214+ int ret, saved_errno;
1215+ char *conv;
1216
1217- if (shadow_copy2_match_name(fname, &gmt)
1218- && (gmt[GMT_NAME_LEN] == '\0')) {
1219- char *copy;
1220-
1221- copy = talloc_strdup(talloc_tos(), fname);
1222- if (copy == NULL) {
1223- errno = ENOMEM;
1224- return NULL;
1225- }
1226-
1227- copy[gmt - fname] = '.';
1228- copy[gmt - fname + 1] = '\0';
1229-
1230- DEBUG(10, ("calling NEXT_REALPATH with %s\n", copy));
1231- SHADOW2_NEXT(REALPATH, (handle, name), char *,
1232- NULL);
1233+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1234+ &timestamp, &stripped)) {
1235+ return -1;
1236+ }
1237+ if (timestamp == 0) {
1238+ return SMB_VFS_NEXT_MKNOD(handle, fname, mode, dev);
1239+ }
1240+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1241+ TALLOC_FREE(stripped);
1242+ if (conv == NULL) {
1243+ return -1;
1244 }
1245- SHADOW2_NEXT(REALPATH, (handle, name), char *, NULL);
1246+ ret = SMB_VFS_NEXT_MKNOD(handle, conv, mode, dev);
1247+ saved_errno = errno;
1248+ TALLOC_FREE(conv);
1249+ errno = saved_errno;
1250+ return ret;
1251 }
1252
1253-static const char *shadow_copy2_connectpath(struct vfs_handle_struct *handle,
1254- const char *fname)
1255+static char *shadow_copy2_realpath(vfs_handle_struct *handle,
1256+ const char *fname)
1257 {
1258- TALLOC_CTX *tmp_ctx;
1259- const char *snapdir, *baseoffset, *basedir, *gmt_start;
1260- size_t baselen;
1261- char *ret;
1262+ time_t timestamp;
1263+ char *stripped = NULL;
1264+ char *tmp = NULL;
1265+ char *result = NULL;
1266+ char *inserted = NULL;
1267+ char *inserted_to, *inserted_end;
1268+ int saved_errno;
1269
1270- DEBUG(10, ("shadow_copy2_connectpath called with %s\n", fname));
1271-
1272- if (!shadow_copy2_match_name(fname, &gmt_start)) {
1273- return handle->conn->connectpath;
1274+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1275+ &timestamp, &stripped)) {
1276+ goto done;
1277 }
1278-
1279- /*
1280- * We have to create a real temporary context because we have
1281- * to put our result on talloc_tos(). Thus we can't use a
1282- * talloc_stackframe() here.
1283- */
1284- tmp_ctx = talloc_new(talloc_tos());
1285-
1286- fname = shadow_copy2_normalise_path(tmp_ctx, fname, gmt_start);
1287- if (fname == NULL) {
1288- TALLOC_FREE(tmp_ctx);
1289- return NULL;
1290+ if (timestamp == 0) {
1291+ return SMB_VFS_NEXT_REALPATH(handle, fname);
1292 }
1293
1294- snapdir = shadow_copy2_find_snapdir(tmp_ctx, handle);
1295- if (snapdir == NULL) {
1296- DEBUG(2,("no snapdir found for share at %s\n",
1297- handle->conn->connectpath));
1298- TALLOC_FREE(tmp_ctx);
1299- return NULL;
1300+ tmp = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1301+ if (tmp == NULL) {
1302+ goto done;
1303 }
1304
1305- basedir = shadow_copy2_find_basedir(tmp_ctx, handle);
1306- if (basedir == NULL) {
1307- DEBUG(2,("no basedir found for share at %s\n",
1308- handle->conn->connectpath));
1309- TALLOC_FREE(tmp_ctx);
1310- return NULL;
1311+ result = SMB_VFS_NEXT_REALPATH(handle, tmp);
1312+ if (result == NULL) {
1313+ goto done;
1314 }
1315
1316- baselen = strlen(basedir);
1317- baseoffset = handle->conn->connectpath + baselen;
1318-
1319- /* some sanity checks */
1320- if (strncmp(basedir, handle->conn->connectpath, baselen) != 0 ||
1321- (handle->conn->connectpath[baselen] != 0
1322- && handle->conn->connectpath[baselen] != '/')) {
1323- DEBUG(0,("shadow_copy2_connectpath: basedir %s is not a "
1324- "parent of %s\n", basedir,
1325- handle->conn->connectpath));
1326- TALLOC_FREE(tmp_ctx);
1327+ /*
1328+ * Take away what we've inserted. This removes the @GMT-thingy
1329+ * completely, but will give a path under the share root.
1330+ */
1331+ inserted = shadow_copy2_insert_string(talloc_tos(), handle, timestamp);
1332+ if (inserted == NULL) {
1333+ goto done;
1334+ }
1335+ inserted_to = strstr_m(result, inserted);
1336+ if (inserted_to == NULL) {
1337+ DEBUG(2, ("SMB_VFS_NEXT_REALPATH removed %s\n", inserted));
1338+ goto done;
1339+ }
1340+ inserted_end = inserted_to + talloc_get_size(inserted) - 1;
1341+ memmove(inserted_to, inserted_end, strlen(inserted_end)+1);
1342+
1343+done:
1344+ saved_errno = errno;
1345+ TALLOC_FREE(inserted);
1346+ TALLOC_FREE(tmp);
1347+ TALLOC_FREE(stripped);
1348+ errno = saved_errno;
1349+ return result;
1350+}
1351+
1352+static char *have_snapdir(struct vfs_handle_struct *handle,
1353+ const char *path)
1354+{
1355+ struct smb_filename smb_fname;
1356+ int ret;
1357+
1358+ ZERO_STRUCT(smb_fname);
1359+ smb_fname.base_name = talloc_asprintf(
1360+ talloc_tos(), "%s/%s", path,
1361+ lp_parm_const_string(SNUM(handle->conn), "shadow", "snapdir",
1362+ ".snapshots"));
1363+ if (smb_fname.base_name == NULL) {
1364 return NULL;
1365 }
1366
1367- if (*baseoffset == '/') baseoffset++;
1368-
1369- ret = talloc_asprintf(talloc_tos(), "%s/%.*s/%s",
1370- snapdir,
1371- GMT_NAME_LEN, fname,
1372- baseoffset);
1373- DEBUG(6,("shadow_copy2_connectpath: '%s' -> '%s'\n", fname, ret));
1374- TALLOC_FREE(tmp_ctx);
1375- return ret;
1376-}
1377-
1378-static NTSTATUS shadow_copy2_get_nt_acl(vfs_handle_struct *handle,
1379- const char *fname, uint32 security_info,
1380- struct security_descriptor **ppdesc)
1381-{
1382- SHADOW2_NTSTATUS_NEXT(GET_NT_ACL, (handle, name, security_info, ppdesc), NT_STATUS_ACCESS_DENIED);
1383+ ret = SMB_VFS_NEXT_STAT(handle, &smb_fname);
1384+ if ((ret == 0) && (S_ISDIR(smb_fname.st.st_ex_mode))) {
1385+ return smb_fname.base_name;
1386+ }
1387+ TALLOC_FREE(smb_fname.base_name);
1388+ return NULL;
1389 }
1390
1391-static int shadow_copy2_mkdir(vfs_handle_struct *handle, const char *fname, mode_t mode)
1392+static char *shadow_copy2_find_snapdir(TALLOC_CTX *mem_ctx,
1393+ struct vfs_handle_struct *handle,
1394+ struct smb_filename *smb_fname)
1395 {
1396- SHADOW2_NEXT(MKDIR, (handle, name, mode), int, -1);
1397-}
1398+ char *path, *p;
1399+ char *snapdir;
1400
1401-static int shadow_copy2_rmdir(vfs_handle_struct *handle, const char *fname)
1402-{
1403- SHADOW2_NEXT(RMDIR, (handle, name), int, -1);
1404-}
1405+ path = talloc_asprintf(mem_ctx, "%s/%s",
1406+ handle->conn->connectpath,
1407+ smb_fname->base_name);
1408+ if (path == NULL) {
1409+ return NULL;
1410+ }
1411
1412-static int shadow_copy2_chflags(vfs_handle_struct *handle, const char *fname,
1413- unsigned int flags)
1414-{
1415- SHADOW2_NEXT(CHFLAGS, (handle, name, flags), int, -1);
1416-}
1417+ snapdir = have_snapdir(handle, path);
1418+ if (snapdir != NULL) {
1419+ TALLOC_FREE(path);
1420+ return snapdir;
1421+ }
1422
1423-static ssize_t shadow_copy2_getxattr(vfs_handle_struct *handle,
1424- const char *fname, const char *aname, void *value, size_t size)
1425-{
1426- SHADOW2_NEXT(GETXATTR, (handle, name, aname, value, size), ssize_t, -1);
1427-}
1428+ while ((p = strrchr(path, '/')) && (p > path)) {
1429
1430-static ssize_t shadow_copy2_lgetxattr(vfs_handle_struct *handle,
1431- const char *fname, const char *aname, void *value, size_t size)
1432-{
1433- SHADOW2_NEXT(LGETXATTR, (handle, name, aname, value, size), ssize_t, -1);
1434-}
1435+ p[0] = '\0';
1436
1437-static ssize_t shadow_copy2_listxattr(struct vfs_handle_struct *handle, const char *fname,
1438- char *list, size_t size)
1439-{
1440- SHADOW2_NEXT(LISTXATTR, (handle, name, list, size), ssize_t, -1);
1441+ snapdir = have_snapdir(handle, path);
1442+ if (snapdir != NULL) {
1443+ TALLOC_FREE(path);
1444+ return snapdir;
1445+ }
1446+ }
1447+ TALLOC_FREE(path);
1448+ return NULL;
1449 }
1450
1451-static int shadow_copy2_removexattr(struct vfs_handle_struct *handle, const char *fname,
1452- const char *aname)
1453+static bool shadow_copy2_snapshot_to_gmt(TALLOC_CTX *mem_ctx,
1454+ vfs_handle_struct *handle,
1455+ const char *name,
1456+ char *gmt, size_t gmt_len)
1457 {
1458- SHADOW2_NEXT(REMOVEXATTR, (handle, name, aname), int, -1);
1459-}
1460+ struct tm timestamp;
1461+ time_t timestamp_t;
1462+ const char *fmt;
1463
1464-static int shadow_copy2_lremovexattr(struct vfs_handle_struct *handle, const char *fname,
1465- const char *aname)
1466-{
1467- SHADOW2_NEXT(LREMOVEXATTR, (handle, name, aname), int, -1);
1468-}
1469+ fmt = lp_parm_const_string(SNUM(handle->conn), "shadow",
1470+ "format", GMT_FORMAT);
1471
1472-static int shadow_copy2_setxattr(struct vfs_handle_struct *handle, const char *fname,
1473- const char *aname, const void *value, size_t size, int flags)
1474-{
1475- SHADOW2_NEXT(SETXATTR, (handle, name, aname, value, size, flags), int, -1);
1476-}
1477+ ZERO_STRUCT(timestamp);
1478+ if (strptime(name, fmt, &timestamp) == NULL) {
1479+ DEBUG(10, ("shadow_copy2_snapshot_to_gmt: no match %s: %s\n",
1480+ fmt, name));
1481+ return false;
1482+ }
1483
1484-static int shadow_copy2_lsetxattr(struct vfs_handle_struct *handle, const char *fname,
1485- const char *aname, const void *value, size_t size, int flags)
1486-{
1487- SHADOW2_NEXT(LSETXATTR, (handle, name, aname, value, size, flags), int, -1);
1488-}
1489+ DEBUG(10, ("shadow_copy2_snapshot_to_gmt: match %s: %s\n", fmt, name));
1490
1491-static int shadow_copy2_chmod_acl(vfs_handle_struct *handle,
1492- const char *fname, mode_t mode)
1493-{
1494- SHADOW2_NEXT(CHMOD_ACL, (handle, name, mode), int, -1);
1495+ if (lp_parm_bool(SNUM(handle->conn), "shadow", "localtime", false)) {
1496+ timestamp.tm_isdst = -1;
1497+ timestamp_t = mktime(&timestamp);
1498+ gmtime_r(&timestamp_t, &timestamp);
1499+ }
1500+ strftime(gmt, gmt_len, GMT_FORMAT, &timestamp);
1501+ return true;
1502 }
1503
1504 static int shadow_copy2_label_cmp_asc(const void *x, const void *y)
1505 {
1506- return strncmp((char *)x, (char *)y, sizeof(SHADOW_COPY_LABEL));
1507+ return strncmp((const char *)x, (const char *)y, sizeof(SHADOW_COPY_LABEL));
1508 }
1509
1510 static int shadow_copy2_label_cmp_desc(const void *x, const void *y)
1511 {
1512- return -strncmp((char *)x, (char *)y, sizeof(SHADOW_COPY_LABEL));
1513+ return -strncmp((const char *)x, (const char *)y, sizeof(SHADOW_COPY_LABEL));
1514 }
1515
1516 /*
1517@@ -843,7 +1048,7 @@ static void shadow_copy2_sort_data(vfs_h
1518 const char *sort;
1519
1520 sort = lp_parm_const_string(SNUM(handle->conn), "shadow",
1521- "sort", SHADOW_COPY2_DEFAULT_SORT);
1522+ "sort", "desc");
1523 if (sort == NULL) {
1524 return;
1525 }
1526@@ -867,18 +1072,17 @@ static void shadow_copy2_sort_data(vfs_h
1527 return;
1528 }
1529
1530-static int shadow_copy2_get_shadow_copy2_data(vfs_handle_struct *handle,
1531- files_struct *fsp,
1532- struct shadow_copy_data *shadow_copy2_data,
1533- bool labels)
1534+static int shadow_copy2_get_shadow_copy_data(
1535+ vfs_handle_struct *handle, files_struct *fsp,
1536+ struct shadow_copy_data *shadow_copy2_data,
1537+ bool labels)
1538 {
1539 SMB_STRUCT_DIR *p;
1540 const char *snapdir;
1541 SMB_STRUCT_DIRENT *d;
1542- TALLOC_CTX *tmp_ctx = talloc_new(handle->data);
1543- char *snapshot;
1544+ TALLOC_CTX *tmp_ctx = talloc_stackframe();
1545
1546- snapdir = shadow_copy2_find_snapdir(tmp_ctx, handle);
1547+ snapdir = shadow_copy2_find_snapdir(tmp_ctx, handle, fsp->fsp_name);
1548 if (snapdir == NULL) {
1549 DEBUG(0,("shadow:snapdir not found for %s in get_shadow_copy_data\n",
1550 handle->conn->connectpath));
1551@@ -901,16 +1105,23 @@ static int shadow_copy2_get_shadow_copy2
1552 shadow_copy2_data->labels = NULL;
1553
1554 while ((d = SMB_VFS_NEXT_READDIR(handle, p, NULL))) {
1555+ char snapshot[GMT_NAME_LEN+1];
1556 SHADOW_COPY_LABEL *tlabels;
1557
1558- /* ignore names not of the right form in the snapshot directory */
1559- snapshot = shadow_copy2_snapshot_to_gmt(tmp_ctx, handle,
1560- d->d_name);
1561- DEBUG(6,("shadow_copy2_get_shadow_copy2_data: %s -> %s\n",
1562- d->d_name, snapshot));
1563- if (!snapshot) {
1564+ /*
1565+ * ignore names not of the right form in the snapshot
1566+ * directory
1567+ */
1568+ if (!shadow_copy2_snapshot_to_gmt(
1569+ tmp_ctx, handle, d->d_name,
1570+ snapshot, sizeof(snapshot))) {
1571+
1572+ DEBUG(6, ("shadow_copy2_get_shadow_copy_data: "
1573+ "ignoring %s\n", d->d_name));
1574 continue;
1575 }
1576+ DEBUG(6,("shadow_copy2_get_shadow_copy_data: %s -> %s\n",
1577+ d->d_name, snapshot));
1578
1579 if (!labels) {
1580 /* the caller doesn't want the labels */
1581@@ -920,7 +1131,8 @@ static int shadow_copy2_get_shadow_copy2
1582
1583 tlabels = talloc_realloc(shadow_copy2_data,
1584 shadow_copy2_data->labels,
1585- SHADOW_COPY_LABEL, shadow_copy2_data->num_volumes+1);
1586+ SHADOW_COPY_LABEL,
1587+ shadow_copy2_data->num_volumes+1);
1588 if (tlabels == NULL) {
1589 DEBUG(0,("shadow_copy2: out of memory\n"));
1590 SMB_VFS_NEXT_CLOSEDIR(handle, p);
1591@@ -930,7 +1142,6 @@ static int shadow_copy2_get_shadow_copy2
1592
1593 strlcpy(tlabels[shadow_copy2_data->num_volumes], snapshot,
1594 sizeof(*tlabels));
1595- talloc_free(snapshot);
1596
1597 shadow_copy2_data->num_volumes++;
1598 shadow_copy2_data->labels = tlabels;
1599@@ -944,59 +1155,455 @@ static int shadow_copy2_get_shadow_copy2
1600 return 0;
1601 }
1602
1603-static struct vfs_fn_pointers vfs_shadow_copy2_fns = {
1604- .opendir = shadow_copy2_opendir,
1605- .mkdir = shadow_copy2_mkdir,
1606- .rmdir = shadow_copy2_rmdir,
1607- .chflags = shadow_copy2_chflags,
1608- .getxattr = shadow_copy2_getxattr,
1609- .lgetxattr = shadow_copy2_lgetxattr,
1610- .listxattr = shadow_copy2_listxattr,
1611- .removexattr = shadow_copy2_removexattr,
1612- .lremovexattr = shadow_copy2_lremovexattr,
1613- .setxattr = shadow_copy2_setxattr,
1614- .lsetxattr = shadow_copy2_lsetxattr,
1615- .open_fn = shadow_copy2_open,
1616- .rename = shadow_copy2_rename,
1617- .stat = shadow_copy2_stat,
1618- .lstat = shadow_copy2_lstat,
1619- .fstat = shadow_copy2_fstat,
1620- .unlink = shadow_copy2_unlink,
1621- .chmod = shadow_copy2_chmod,
1622- .chown = shadow_copy2_chown,
1623- .chdir = shadow_copy2_chdir,
1624- .ntimes = shadow_copy2_ntimes,
1625- .symlink = shadow_copy2_symlink,
1626- .vfs_readlink = shadow_copy2_readlink,
1627- .link = shadow_copy2_link,
1628- .mknod = shadow_copy2_mknod,
1629- .realpath = shadow_copy2_realpath,
1630- .connectpath = shadow_copy2_connectpath,
1631- .get_nt_acl = shadow_copy2_get_nt_acl,
1632- .chmod_acl = shadow_copy2_chmod_acl,
1633- .get_shadow_copy_data = shadow_copy2_get_shadow_copy2_data,
1634-};
1635+static NTSTATUS shadow_copy2_fget_nt_acl(vfs_handle_struct *handle,
1636+ struct files_struct *fsp,
1637+ uint32 security_info,
1638+ struct security_descriptor **ppdesc)
1639+{
1640+ time_t timestamp;
1641+ char *stripped;
1642+ NTSTATUS status;
1643+ char *conv;
1644
1645-NTSTATUS vfs_shadow_copy2_init(void);
1646-NTSTATUS vfs_shadow_copy2_init(void)
1647+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle,
1648+ fsp->fsp_name->base_name,
1649+ &timestamp, &stripped)) {
1650+ return map_nt_error_from_unix(errno);
1651+ }
1652+ if (timestamp == 0) {
1653+ return SMB_VFS_NEXT_FGET_NT_ACL(handle, fsp, security_info,
1654+ ppdesc);
1655+ }
1656+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1657+ TALLOC_FREE(stripped);
1658+ if (conv == NULL) {
1659+ return map_nt_error_from_unix(errno);
1660+ }
1661+ status = SMB_VFS_NEXT_GET_NT_ACL(handle, conv, security_info, ppdesc);
1662+ TALLOC_FREE(conv);
1663+ return status;
1664+}
1665+
1666+static NTSTATUS shadow_copy2_get_nt_acl(vfs_handle_struct *handle,
1667+ const char *fname,
1668+ uint32 security_info,
1669+ struct security_descriptor **ppdesc)
1670 {
1671- NTSTATUS ret;
1672+ time_t timestamp;
1673+ char *stripped;
1674+ NTSTATUS status;
1675+ char *conv;
1676
1677- ret = smb_register_vfs(SMB_VFS_INTERFACE_VERSION, "shadow_copy2",
1678- &vfs_shadow_copy2_fns);
1679+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1680+ &timestamp, &stripped)) {
1681+ return map_nt_error_from_unix(errno);
1682+ }
1683+ if (timestamp == 0) {
1684+ return SMB_VFS_NEXT_GET_NT_ACL(handle, fname, security_info,
1685+ ppdesc);
1686+ }
1687+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1688+ TALLOC_FREE(stripped);
1689+ if (conv == NULL) {
1690+ return map_nt_error_from_unix(errno);
1691+ }
1692+ status = SMB_VFS_NEXT_GET_NT_ACL(handle, conv, security_info, ppdesc);
1693+ TALLOC_FREE(conv);
1694+ return status;
1695+}
1696
1697- if (!NT_STATUS_IS_OK(ret))
1698- return ret;
1699+static int shadow_copy2_mkdir(vfs_handle_struct *handle,
1700+ const char *fname, mode_t mode)
1701+{
1702+ time_t timestamp;
1703+ char *stripped;
1704+ int ret, saved_errno;
1705+ char *conv;
1706
1707- vfs_shadow_copy2_debug_level = debug_add_class("shadow_copy2");
1708- if (vfs_shadow_copy2_debug_level == -1) {
1709- vfs_shadow_copy2_debug_level = DBGC_VFS;
1710- DEBUG(0, ("%s: Couldn't register custom debugging class!\n",
1711- "vfs_shadow_copy2_init"));
1712- } else {
1713- DEBUG(10, ("%s: Debug class number of '%s': %d\n",
1714- "vfs_shadow_copy2_init","shadow_copy2",vfs_shadow_copy2_debug_level));
1715+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1716+ &timestamp, &stripped)) {
1717+ return -1;
1718+ }
1719+ if (timestamp == 0) {
1720+ return SMB_VFS_NEXT_MKDIR(handle, fname, mode);
1721+ }
1722+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1723+ TALLOC_FREE(stripped);
1724+ if (conv == NULL) {
1725+ return -1;
1726+ }
1727+ ret = SMB_VFS_NEXT_MKDIR(handle, conv, mode);
1728+ saved_errno = errno;
1729+ TALLOC_FREE(conv);
1730+ errno = saved_errno;
1731+ return ret;
1732+}
1733+
1734+static int shadow_copy2_rmdir(vfs_handle_struct *handle, const char *fname)
1735+{
1736+ time_t timestamp;
1737+ char *stripped;
1738+ int ret, saved_errno;
1739+ char *conv;
1740+
1741+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1742+ &timestamp, &stripped)) {
1743+ return -1;
1744 }
1745+ if (timestamp == 0) {
1746+ return SMB_VFS_NEXT_RMDIR(handle, fname);
1747+ }
1748+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1749+ TALLOC_FREE(stripped);
1750+ if (conv == NULL) {
1751+ return -1;
1752+ }
1753+ ret = SMB_VFS_NEXT_RMDIR(handle, conv);
1754+ saved_errno = errno;
1755+ TALLOC_FREE(conv);
1756+ errno = saved_errno;
1757+ return ret;
1758+}
1759+
1760+static int shadow_copy2_chflags(vfs_handle_struct *handle, const char *fname,
1761+ unsigned int flags)
1762+{
1763+ time_t timestamp;
1764+ char *stripped;
1765+ int ret, saved_errno;
1766+ char *conv;
1767
1768+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1769+ &timestamp, &stripped)) {
1770+ return -1;
1771+ }
1772+ if (timestamp == 0) {
1773+ return SMB_VFS_NEXT_CHFLAGS(handle, fname, flags);
1774+ }
1775+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1776+ TALLOC_FREE(stripped);
1777+ if (conv == NULL) {
1778+ return -1;
1779+ }
1780+ ret = SMB_VFS_NEXT_CHFLAGS(handle, conv, flags);
1781+ saved_errno = errno;
1782+ TALLOC_FREE(conv);
1783+ errno = saved_errno;
1784 return ret;
1785 }
1786+
1787+static ssize_t shadow_copy2_getxattr(vfs_handle_struct *handle,
1788+ const char *fname, const char *aname,
1789+ void *value, size_t size)
1790+{
1791+ time_t timestamp;
1792+ char *stripped;
1793+ ssize_t ret;
1794+ int saved_errno;
1795+ char *conv;
1796+
1797+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1798+ &timestamp, &stripped)) {
1799+ return -1;
1800+ }
1801+ if (timestamp == 0) {
1802+ return SMB_VFS_NEXT_GETXATTR(handle, fname, aname, value,
1803+ size);
1804+ }
1805+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1806+ TALLOC_FREE(stripped);
1807+ if (conv == NULL) {
1808+ return -1;
1809+ }
1810+ ret = SMB_VFS_NEXT_GETXATTR(handle, conv, aname, value, size);
1811+ saved_errno = errno;
1812+ TALLOC_FREE(conv);
1813+ errno = saved_errno;
1814+ return ret;
1815+}
1816+
1817+static ssize_t shadow_copy2_lgetxattr(vfs_handle_struct *handle,
1818+ const char *fname, const char *aname,
1819+ void *value, size_t size)
1820+{
1821+ time_t timestamp;
1822+ char *stripped;
1823+ ssize_t ret;
1824+ int saved_errno;
1825+ char *conv;
1826+
1827+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1828+ &timestamp, &stripped)) {
1829+ return -1;
1830+ }
1831+ if (timestamp == 0) {
1832+ return SMB_VFS_NEXT_LGETXATTR(handle, fname, aname, value,
1833+ size);
1834+ }
1835+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1836+ TALLOC_FREE(stripped);
1837+ if (conv == NULL) {
1838+ return -1;
1839+ }
1840+ ret = SMB_VFS_NEXT_LGETXATTR(handle, conv, aname, value, size);
1841+ saved_errno = errno;
1842+ TALLOC_FREE(conv);
1843+ errno = saved_errno;
1844+ return ret;
1845+}
1846+
1847+static ssize_t shadow_copy2_listxattr(struct vfs_handle_struct *handle,
1848+ const char *fname,
1849+ char *list, size_t size)
1850+{
1851+ time_t timestamp;
1852+ char *stripped;
1853+ ssize_t ret;
1854+ int saved_errno;
1855+ char *conv;
1856+
1857+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1858+ &timestamp, &stripped)) {
1859+ return -1;
1860+ }
1861+ if (timestamp == 0) {
1862+ return SMB_VFS_NEXT_LISTXATTR(handle, fname, list, size);
1863+ }
1864+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1865+ TALLOC_FREE(stripped);
1866+ if (conv == NULL) {
1867+ return -1;
1868+ }
1869+ ret = SMB_VFS_NEXT_LISTXATTR(handle, conv, list, size);
1870+ saved_errno = errno;
1871+ TALLOC_FREE(conv);
1872+ errno = saved_errno;
1873+ return ret;
1874+}
1875+
1876+static int shadow_copy2_removexattr(vfs_handle_struct *handle,
1877+ const char *fname, const char *aname)
1878+{
1879+ time_t timestamp;
1880+ char *stripped;
1881+ int ret, saved_errno;
1882+ char *conv;
1883+
1884+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1885+ &timestamp, &stripped)) {
1886+ return -1;
1887+ }
1888+ if (timestamp == 0) {
1889+ return SMB_VFS_NEXT_REMOVEXATTR(handle, fname, aname);
1890+ }
1891+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1892+ TALLOC_FREE(stripped);
1893+ if (conv == NULL) {
1894+ return -1;
1895+ }
1896+ ret = SMB_VFS_NEXT_REMOVEXATTR(handle, conv, aname);
1897+ saved_errno = errno;
1898+ TALLOC_FREE(conv);
1899+ errno = saved_errno;
1900+ return ret;
1901+}
1902+
1903+static int shadow_copy2_lremovexattr(vfs_handle_struct *handle,
1904+ const char *fname, const char *aname)
1905+{
1906+ time_t timestamp;
1907+ char *stripped;
1908+ int ret, saved_errno;
1909+ char *conv;
1910+
1911+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1912+ &timestamp, &stripped)) {
1913+ return -1;
1914+ }
1915+ if (timestamp == 0) {
1916+ return SMB_VFS_NEXT_LREMOVEXATTR(handle, fname, aname);
1917+ }
1918+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1919+ TALLOC_FREE(stripped);
1920+ if (conv == NULL) {
1921+ return -1;
1922+ }
1923+ ret = SMB_VFS_NEXT_LREMOVEXATTR(handle, conv, aname);
1924+ saved_errno = errno;
1925+ TALLOC_FREE(conv);
1926+ errno = saved_errno;
1927+ return ret;
1928+}
1929+
1930+static int shadow_copy2_setxattr(struct vfs_handle_struct *handle,
1931+ const char *fname,
1932+ const char *aname, const void *value,
1933+ size_t size, int flags)
1934+{
1935+ time_t timestamp;
1936+ char *stripped;
1937+ ssize_t ret;
1938+ int saved_errno;
1939+ char *conv;
1940+
1941+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1942+ &timestamp, &stripped)) {
1943+ return -1;
1944+ }
1945+ if (timestamp == 0) {
1946+ return SMB_VFS_NEXT_SETXATTR(handle, fname, aname, value, size,
1947+ flags);
1948+ }
1949+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1950+ TALLOC_FREE(stripped);
1951+ if (conv == NULL) {
1952+ return -1;
1953+ }
1954+ ret = SMB_VFS_NEXT_SETXATTR(handle, conv, aname, value, size, flags);
1955+ saved_errno = errno;
1956+ TALLOC_FREE(conv);
1957+ errno = saved_errno;
1958+ return ret;
1959+}
1960+
1961+static int shadow_copy2_lsetxattr(struct vfs_handle_struct *handle,
1962+ const char *fname,
1963+ const char *aname, const void *value,
1964+ size_t size, int flags)
1965+{
1966+ time_t timestamp;
1967+ char *stripped;
1968+ ssize_t ret;
1969+ int saved_errno;
1970+ char *conv;
1971+
1972+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
1973+ &timestamp, &stripped)) {
1974+ return -1;
1975+ }
1976+ if (timestamp == 0) {
1977+ return SMB_VFS_NEXT_LSETXATTR(handle, fname, aname, value,
1978+ size, flags);
1979+ }
1980+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
1981+ TALLOC_FREE(stripped);
1982+ if (conv == NULL) {
1983+ return -1;
1984+ }
1985+ ret = SMB_VFS_NEXT_LSETXATTR(handle, conv, aname, value, size, flags);
1986+ saved_errno = errno;
1987+ TALLOC_FREE(conv);
1988+ errno = saved_errno;
1989+ return ret;
1990+}
1991+
1992+static int shadow_copy2_chmod_acl(vfs_handle_struct *handle,
1993+ const char *fname, mode_t mode)
1994+{
1995+ time_t timestamp;
1996+ char *stripped;
1997+ ssize_t ret;
1998+ int saved_errno;
1999+ char *conv;
2000+
2001+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, fname,
2002+ &timestamp, &stripped)) {
2003+ return -1;
2004+ }
2005+ if (timestamp == 0) {
2006+ return SMB_VFS_NEXT_CHMOD_ACL(handle, fname, mode);
2007+ }
2008+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
2009+ TALLOC_FREE(stripped);
2010+ if (conv == NULL) {
2011+ return -1;
2012+ }
2013+ ret = SMB_VFS_NEXT_CHMOD_ACL(handle, conv, mode);
2014+ saved_errno = errno;
2015+ TALLOC_FREE(conv);
2016+ errno = saved_errno;
2017+ return ret;
2018+}
2019+
2020+static int shadow_copy2_get_real_filename(struct vfs_handle_struct *handle,
2021+ const char *path,
2022+ const char *name,
2023+ TALLOC_CTX *mem_ctx,
2024+ char **found_name)
2025+{
2026+ time_t timestamp;
2027+ char *stripped;
2028+ ssize_t ret;
2029+ int saved_errno;
2030+ char *conv;
2031+
2032+ if (!shadow_copy2_strip_snapshot(talloc_tos(), handle, path,
2033+ &timestamp, &stripped)) {
2034+ return -1;
2035+ }
2036+ if (timestamp == 0) {
2037+ return SMB_VFS_NEXT_GET_REAL_FILENAME(handle, path, name,
2038+ mem_ctx, found_name);
2039+ }
2040+ if (stripped[0] == '\0') {
2041+ *found_name = talloc_strdup(mem_ctx, name);
2042+ if (*found_name == NULL) {
2043+ errno = ENOMEM;
2044+ return -1;
2045+ }
2046+ return 0;
2047+ }
2048+ conv = shadow_copy2_convert(talloc_tos(), handle, stripped, timestamp);
2049+ TALLOC_FREE(stripped);
2050+ if (conv == NULL) {
2051+ return -1;
2052+ }
2053+ ret = SMB_VFS_NEXT_GET_REAL_FILENAME(handle, conv, name,
2054+ mem_ctx, found_name);
2055+ saved_errno = errno;
2056+ TALLOC_FREE(conv);
2057+ errno = saved_errno;
2058+ return ret;
2059+}
2060+
2061+
2062+static struct vfs_fn_pointers vfs_shadow_copy2_fns = {
2063+ .opendir = shadow_copy2_opendir,
2064+ .rename = shadow_copy2_rename,
2065+ .link = shadow_copy2_link,
2066+ .symlink = shadow_copy2_symlink,
2067+ .stat = shadow_copy2_stat,
2068+ .lstat = shadow_copy2_lstat,
2069+ .fstat = shadow_copy2_fstat,
2070+ .open_fn = shadow_copy2_open,
2071+ .unlink = shadow_copy2_unlink,
2072+ .chmod = shadow_copy2_chmod,
2073+ .chown = shadow_copy2_chown,
2074+ .chdir = shadow_copy2_chdir,
2075+ .ntimes = shadow_copy2_ntimes,
2076+ .vfs_readlink = shadow_copy2_readlink,
2077+ .mknod = shadow_copy2_mknod,
2078+ .realpath = shadow_copy2_realpath,
2079+ .get_nt_acl = shadow_copy2_get_nt_acl,
2080+ .fget_nt_acl = shadow_copy2_fget_nt_acl,
2081+ .get_shadow_copy_data = shadow_copy2_get_shadow_copy_data,
2082+ .mkdir = shadow_copy2_mkdir,
2083+ .rmdir = shadow_copy2_rmdir,
2084+ .getxattr = shadow_copy2_getxattr,
2085+ .lgetxattr = shadow_copy2_lgetxattr,
2086+ .listxattr = shadow_copy2_listxattr,
2087+ .removexattr = shadow_copy2_removexattr,
2088+ .lremovexattr = shadow_copy2_lremovexattr,
2089+ .setxattr = shadow_copy2_setxattr,
2090+ .lsetxattr = shadow_copy2_lsetxattr,
2091+ .chmod_acl = shadow_copy2_chmod_acl,
2092+ .chflags = shadow_copy2_chflags,
2093+ .get_real_filename = shadow_copy2_get_real_filename,
2094+};
2095+
2096+NTSTATUS vfs_shadow_copy2_init(void);
2097+NTSTATUS vfs_shadow_copy2_init(void)
2098+{
2099+ return smb_register_vfs(SMB_VFS_INTERFACE_VERSION,
2100+ "shadow_copy2", &vfs_shadow_copy2_fns);
2101+}
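
Almost every VFS hook this rework introduces follows the same shape: strip a possible @GMT token from the incoming name, rewrite the stripped path into the snapshot tree when a timestamp was found, call the next VFS layer on the converted path, and free the temporary string while preserving errno. The standalone C sketch below illustrates only that wrapper pattern; it is not Samba code: plain open(2) stands in for SMB_VFS_NEXT_OPEN(), and rewrite_path() is a hypothetical stub for shadow_copy2_convert().

/*
 * Minimal sketch of the wrapper pattern used by the reworked
 * shadow_copy2 hooks above.  NOT Samba code: open(2) stands in for
 * SMB_VFS_NEXT_OPEN() and rewrite_path() is a hypothetical stub for
 * shadow_copy2_convert().
 */
#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <unistd.h>

/* Hypothetical stand-in: just duplicates the name.  The real helper
 * maps the stripped path into the snapshot directory for the given
 * timestamp. */
static char *rewrite_path(const char *stripped, time_t timestamp)
{
	(void)timestamp;
	return strdup(stripped);
}

static int wrapped_open(const char *fname, time_t timestamp, int flags)
{
	char *conv;
	int fd, saved_errno;

	if (timestamp == 0) {
		/* No @GMT token: pass straight through. */
		return open(fname, flags);
	}

	conv = rewrite_path(fname, timestamp);
	if (conv == NULL) {
		return -1;
	}

	fd = open(conv, flags);

	/* Mirror the patch: save errno across the cleanup so the caller
	 * sees the open() error, not anything the cleanup might set. */
	saved_errno = errno;
	free(conv);
	errno = saved_errno;

	return fd;
}

int main(void)
{
	/* A nonzero timestamp exercises the convert-and-restore path;
	 * the path is only a demo value. */
	int fd = wrapped_open("/etc/hostname", time(NULL), O_RDONLY);

	if (fd == -1) {
		perror("wrapped_open");
		return 1;
	}
	printf("opened fd %d\n", fd);
	close(fd);
	return 0;
}

Saving errno around the cleanup call mirrors the saved_errno idiom used throughout the hunks above, so the caller still sees the error from the wrapped operation rather than from the cleanup.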
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/smbclient-pager.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/smbclient-pager.patch
new file mode 100644
index 000000000..429f2cec4
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/smbclient-pager.patch
@@ -0,0 +1,18 @@
1Description: Use the pager alternative as pager if PAGER is undefined
2Author: Steve Langasek <vorlon@debian.org>
3Bug-Debian: http://bugs.debian.org/135603
4Forwarded: not-needed
5
6Index: experimental/source3/include/local.h
7===================================================================
8--- experimental.orig/source3/include/local.h
9+++ experimental/source3/include/local.h
10@@ -127,7 +127,7 @@
11 /* the default pager to use for the client "more" command. Users can
12 override this with the PAGER environment variable */
13 #ifndef PAGER
14-#define PAGER "more"
15+#define PAGER "/usr/bin/pager"
16 #endif
17
18 /* the size of the uid cache used to reduce valid user checks */
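
The hunk above only changes the compiled-in fallback; at run time the client still consults the PAGER environment variable first and falls back to the built-in default only when it is unset. The short, self-contained C sketch below shows that lookup order; it is not the smbclient source, and /usr/bin/pager is simply the new default carried by the patch.

/*
 * Sketch of the PAGER fallback the patch above tunes.  Not smbclient
 * code: it only shows the lookup order, environment variable first,
 * compiled-in default second.
 */
#include <stdio.h>
#include <stdlib.h>

#ifndef PAGER
#define PAGER "/usr/bin/pager"	/* the new default from the patch */
#endif

int main(void)
{
	const char *pager = getenv("PAGER");

	if (pager == NULL || *pager == '\0') {
		pager = PAGER;	/* fall back to the build-time default */
	}
	printf("would pipe output through: %s\n", pager);
	return 0;
}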
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/smbtar-bashism.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/smbtar-bashism.patch
new file mode 100644
index 000000000..27a47cb51
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/smbtar-bashism.patch
@@ -0,0 +1,19 @@
1Description: Avoid using bashism in smbtar
2Author: Jelmer Vernooij <jelmer@samba.org>
3Bug-Debian: http://bugs.debian.org/486056
4Forwarded: yes
5Bug: https://bugzilla.samba.org/show_bug.cgi?id=8924
6
7Index: experimental/source3/script/smbtar
8===================================================================
9--- experimental.orig/source3/script/smbtar
10+++ experimental/source3/script/smbtar
11@@ -151,7 +151,7 @@
12 if [ -z "$verbose" ]; then
13 echo "server is $server"
14 # echo "share is $service"
15- echo "share is $service\\$cdcmd"
16+ printf "share is %s\\%s\n" "$service" "$cdcmd"
17 echo "tar args is $tarargs"
18 # echo "password is $password" # passwords should never be sent to screen
19 echo "tape is $tapefile"
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/smbtorture-manpage.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/smbtorture-manpage.patch
new file mode 100644
index 000000000..59930b5e5
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/smbtorture-manpage.patch
@@ -0,0 +1,94 @@
1Description: Provide a manpage for smbtorture
2Author: Christian Perrier <bubulle@debian.org>
3Bug: https://bugzilla.samba.org/show_bug.cgi?id=8930
4Forwarded: yes
5Bug-Debian: http://bugs.debian.org/528735
6
7Index: samba/docs/manpages/smbtorture.1
8===================================================================
9--- /dev/null
10+++ samba/docs/manpages/smbtorture.1
11@@ -0,0 +1,83 @@
12+.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.40.9.
13+.TH smbtorture "1" "May 2012" "samba test suite" "User Commands"
14+.SH NAME
15+smbtorture \- samba test suite.
16+.SH SYNOPSIS
17+.B smbtorture
18+\fI//server/share <options> TEST1 TEST2 \fR...
19+.SH DESCRIPTION
20+smbtorture is the original samba3 test suite. It is nowadays recommended to use samba4 torture.
21+.HP
22+\fB\-d\fR debuglevel
23+.HP
24+\fB\-U\fR user%pass
25+.TP
26+\fB\-k\fR
27+use kerberos
28+.HP
29+\fB\-N\fR numprocs
30+.HP
31+\fB\-n\fR my_netbios_name
32+.HP
33+\fB\-W\fR workgroup
34+.HP
35+\fB\-o\fR num_operations
36+.HP
37+\fB\-O\fR socket_options
38+.HP
39+\fB\-m\fR maximum protocol
40+.HP
41+\fB\-L\fR use oplocks
42+.TP
43+\fB\-c\fR CLIENT.TXT
44+specify client load file for NBENCH
45+.HP
46+\fB\-A\fR showall
47+.HP
48+\fB\-p\fR port
49+.HP
50+\fB\-s\fR seed
51+.TP
52+\fB\-b\fR unclist_filename
53+specify multiple shares for multiple connections
54+.PP
55+tests are: FDPASS LOCK1 LOCK2 LOCK3 LOCK4 LOCK5 LOCK6 LOCK7 LOCK8 LOCK9 UNLINK BROWSE ATTR TRANS2 MAXFID TORTURE RANDOMIPC NEGNOWAIT NBENCH NBENCH2 OPLOCK1 OPLOCK2 OPLOCK3 OPLOCK4 DIR DIR1 DIR\-CREATETIME DENY1 DENY2 TCON TCONDEV RW1 RW2 RW3 RW\-SIGNING OPEN POSIX POSIX\-APPEND ASYNC\-ECHO UID\-REGRESSION\-TEST SHORTNAME\-TEST ADDRCHANGE OPENATTR XCOPY RENAME DELETE DELETE\-LN PROPERTIES MANGLE MANGLE1 W2K TRANS2SCAN NTTRANSSCAN UTABLE CASETABLE ERRMAPEXTRACT PIPE_NUMBER TCON2 IOCTL CHKPATH FDSESS EATEST SESSSETUP_BENCH CHAIN1 CHAIN2 WINDOWS\-WRITE CLI_ECHO GETADDRINFO TLDAP STREAMERROR NOTIFY\-BENCH BAD\-NBT\-SESSION SMB\-ANY\-CONNECT LOCAL\-SUBSTITUTE LOCAL\-GENCACHE LOCAL\-TALLOC\-DICT LOCAL\-BASE64 LOCAL\-RBTREE LOCAL\-MEMCACHE LOCAL\-STREAM\-NAME LOCAL\-WBCLIENT LOCAL\-string_to_sid LOCAL\-binary_to_sid LOCAL\-DBTRANS LOCAL\-TEVENT\-SELECT
56+default test is ALL
57+.PP
58+Usage: smbtorture //server/share <options> TEST1 TEST2 ...
59+.HP
60+\fB\-d\fR debuglevel
61+.HP
62+\fB\-U\fR user%pass
63+.TP
64+\fB\-k\fR
65+use kerberos
66+.HP
67+\fB\-N\fR numprocs
68+.HP
69+\fB\-n\fR my_netbios_name
70+.HP
71+\fB\-W\fR workgroup
72+.HP
73+\fB\-o\fR num_operations
74+.HP
75+\fB\-O\fR socket_options
76+.HP
77+\fB\-m\fR maximum protocol
78+.HP
79+\fB\-L\fR use oplocks
80+.TP
81+\fB\-c\fR CLIENT.TXT
82+specify client load file for NBENCH
83+.HP
84+\fB\-A\fR showall
85+.HP
86+\fB\-p\fR port
87+.HP
88+\fB\-s\fR seed
89+.TP
90+\fB\-b\fR unclist_filename
91+specify multiple shares for multiple connections
92+.PP
93+tests are: FDPASS LOCK1 LOCK2 LOCK3 LOCK4 LOCK5 LOCK6 LOCK7 LOCK8 LOCK9 UNLINK BROWSE ATTR TRANS2 MAXFID TORTURE RANDOMIPC NEGNOWAIT NBENCH NBENCH2 OPLOCK1 OPLOCK2 OPLOCK3 OPLOCK4 DIR DIR1 DIR\-CREATETIME DENY1 DENY2 TCON TCONDEV RW1 RW2 RW3 RW\-SIGNING OPEN POSIX POSIX\-APPEND ASYNC\-ECHO UID\-REGRESSION\-TEST SHORTNAME\-TEST ADDRCHANGE OPENATTR XCOPY RENAME DELETE DELETE\-LN PROPERTIES MANGLE MANGLE1 W2K TRANS2SCAN NTTRANSSCAN UTABLE CASETABLE ERRMAPEXTRACT PIPE_NUMBER TCON2 IOCTL CHKPATH FDSESS EATEST SESSSETUP_BENCH CHAIN1 CHAIN2 WINDOWS\-WRITE CLI_ECHO GETADDRINFO TLDAP STREAMERROR NOTIFY\-BENCH BAD\-NBT\-SESSION SMB\-ANY\-CONNECT LOCAL\-SUBSTITUTE LOCAL\-GENCACHE LOCAL\-TALLOC\-DICT LOCAL\-BASE64 LOCAL\-RBTREE LOCAL\-MEMCACHE LOCAL\-STREAM\-NAME LOCAL\-WBCLIENT LOCAL\-string_to_sid LOCAL\-binary_to_sid LOCAL\-DBTRANS LOCAL\-TEVENT\-SELECT
94+default test is ALL
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/undefined-symbols.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/undefined-symbols.patch
new file mode 100644
index 000000000..5babc1e38
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/undefined-symbols.patch
@@ -0,0 +1,24 @@
1Description: Fix missing symbols
2 Fix missing symbols in libsmbclient (and libnss_wins), and add
3 -Wl,-z,defs to the libsmbclient link options to prevent future
4 instances of undefined symbols.
5 .
6 This should be forwarded upstream once there's a configure test
7 for it.
8Author: Steve Langasek <vorlon@debian.org>
9Bug-Debian: http://bugs.debian.org/281181
10Forwarded: no
11
12Index: experimental/source3/Makefile.in
13===================================================================
14--- experimental.orig/source3/Makefile.in
15+++ experimental/source3/Makefile.in
16@@ -2594,7 +2594,7 @@
17
18 $(LIBSMBCLIENT_SHARED_TARGET_SONAME): $(BINARY_PREREQS) $(LIBSMBCLIENT_OBJ) $(LIBSMBCLIENT_THREAD_OBJ) $(LIBSMBCLIENT_SYMS) $(LIBTALLOC) $(LIBTEVENT) $(LIBTDB) $(LIBWBCLIENT)
19 @echo Linking shared library $@
20- @$(SHLD_DSO) $(LIBSMBCLIENT_OBJ) $(LIBSMBCLIENT_THREAD_OBJ) \
21+ @$(SHLD_DSO) -Wl,-z,defs $(LIBSMBCLIENT_OBJ) $(LIBSMBCLIENT_THREAD_OBJ) \
22 $(LIBTALLOC_LIBS) $(LIBTEVENT_LIBS) $(LIBTDB_LIBS) $(LIBWBCLIENT_LIBS) $(LIBS) \
23 $(KRB5LIBS) $(LDAP_LIBS) $(NSCD_LIBS) $(ZLIB_LIBS) $(PTHREAD_LDFLAGS) \
24 @SONAMEFLAG@`basename $@`
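
Passing -Wl,-z,defs (equivalent to --no-undefined) makes the linker reject a shared library that still contains unresolved symbols, so the breakage surfaces at build time instead of when the library is first loaded. The fragment below is a hypothetical stand-alone illustration, not part of Samba's build: helper() is declared but deliberately never defined, and the commands in the comment show how the flag changes the outcome.

/*
 * libdemo.c - what -Wl,-z,defs catches.  helper() is declared but
 * never defined anywhere.
 *
 *   $ gcc -shared -fPIC libdemo.c -o libdemo.so
 *       # links fine, fails later at load/run time
 *   $ gcc -shared -fPIC -Wl,-z,defs libdemo.c -o libdemo.so
 *       # link error: undefined reference to `helper'
 *
 * The patch above adds the same flag to the libsmbclient link line so
 * missing symbols are caught when the library itself is built.
 */
extern int helper(void);

int demo_entry(void)
{
	return helper();
}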
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/usershare.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/usershare.patch
new file mode 100644
index 000000000..3673db751
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/usershare.patch
@@ -0,0 +1,38 @@
1Description: Enable net usershares by default at build time
2 Enable net usershares by default at build time, with a limit of
3 100, and update the corresponding documentation.
4Author: Mathias Gug <mathiaz@ubuntu.com>,
5Author: Steve Langasek <vorlon@debian.org>
6Bug-Debian: http://bugs.debian.org/443230
7Bug-Ubuntu: https://bugs.launchpad.net/ubuntu/+source/samba/+bug/128548
8Forwarded: not-needed
9
10Index: experimental/docs/manpages/net.8
11===================================================================
12--- experimental.orig/docs/manpages/net.8
13+++ experimental/docs/manpages/net.8
14@@ -800,9 +800,9 @@
15 .RE
16 .SS "USERSHARE"
17 .PP
18-Starting with version 3\&.0\&.23, a Samba server now supports the ability for non\-root users to add user defined shares to be exported using the "net usershare" commands\&.
19+Starting with version 3\&.0\&.23, a Samba server now supports the ability for non\-root users to add user-defined shares to be exported using the "net usershare" commands\&.
20 .PP
21-To set this up, first set up your smb\&.conf by adding to the [global] section: usershare path = /usr/local/samba/lib/usershares Next create the directory /usr/local/samba/lib/usershares, change the owner to root and set the group owner to the UNIX group who should have the ability to create usershares, for example a group called "serverops"\&. Set the permissions on /usr/local/samba/lib/usershares to 01770\&. (Owner and group all access, no access for others, plus the sticky bit, which means that a file in that directory can be renamed or deleted only by the owner of the file)\&. Finally, tell smbd how many usershares you will allow by adding to the [global] section of smb\&.conf a line such as : usershare max shares = 100\&. To allow 100 usershare definitions\&. Now, members of the UNIX group "serverops" can create user defined shares on demand using the commands below\&.
22+Members of the UNIX group "sambashare" can create user-defined shares on demand using the commands below\&.
23 .PP
24 The usershare commands are:
25 .RS 4
26Index: experimental/source3/param/loadparm.c
27===================================================================
28--- experimental.orig/source3/param/loadparm.c
29+++ experimental/source3/param/loadparm.c
30@@ -5461,7 +5461,7 @@
31 string_set(&Globals.szUsersharePath, s);
32 SAFE_FREE(s);
33 string_set(&Globals.szUsershareTemplateShare, "");
34- Globals.iUsershareMaxShares = 0;
35+ Globals.iUsershareMaxShares = 100;
36 /* By default disallow sharing of directories not owned by the sharer. */
37 Globals.bUsershareOwnerOnly = True;
38 /* By default disallow guest access to usershares. */
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/waf-as-source.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/waf-as-source.patch
new file mode 100644
index 000000000..985ed5af1
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/waf-as-source.patch
@@ -0,0 +1,18534 @@
1Description: Include waf as an extracted source directory, rather than as a one-in-a-file script.
2Author: Jelmer Vernooij <jelmer@samba.org>
3Bug-Debian: http://bugs.debian.org/654499
4Forwarded: yes
5Bug: https://bugzilla.samba.org/show_bug.cgi?id=8923
6
7diff --git a/buildtools/README b/buildtools/README
8new file mode 100644
9index 0000000..eab0382
10--- /dev/null
11+++ b/buildtools/README
12@@ -0,0 +1,12 @@
13+See http://code.google.com/p/waf/ for more information on waf
14+
15+You can get a svn copy of the upstream source with:
16+
17+ svn checkout http://waf.googlecode.com/svn/trunk/ waf-read-only
18+
19+Samba currently uses waf 1.5, which can be found at:
20+
21+ http://waf.googlecode.com/svn/branches/waf-1.5
22+
23+To update the current copy of waf, use the update-waf.sh script in this
24+directory.
25diff --git a/buildtools/bin/README b/buildtools/bin/README
26deleted file mode 100644
27index 9ef8a1f..0000000
28--- a/buildtools/bin/README
29+++ /dev/null
30@@ -1,16 +0,0 @@
31-This copy of waf-svn is taken from the git mirror of waf
32-at:
33-
34- git://git.samba.org/tridge/waf-svn.git
35-
36-using the waf-samba branch
37-
38-It was built using the command:
39-
40- ./waf-light --zip-type=gz --make-waf
41-
42-See http://code.google.com/p/waf/ for more information on waf
43-
44-You can get a svn copy of the upstream source with:
45-
46- svn checkout http://waf.googlecode.com/svn/trunk/ waf-read-only
47diff --git a/buildtools/update-waf.sh b/buildtools/update-waf.sh
48new file mode 100755
49index 0000000..bb3a4bf
50--- /dev/null
51+++ b/buildtools/update-waf.sh
52@@ -0,0 +1,13 @@
53+#!/bin/sh
54+# Update our copy of waf
55+
56+TARGETDIR="`dirname $0`"
57+WORKDIR="`mktemp -d`"
58+
59+mkdir -p "$WORKDIR"
60+
61+svn checkout http://waf.googlecode.com/svn/branches/waf-1.5/wafadmin "$WORKDIR/wafadmin"
62+
63+rsync -C -avz --delete "$WORKDIR/wafadmin/" "$TARGETDIR/wafadmin/"
64+
65+rm -rf "$WORKDIR"
66diff --git a/buildtools/wafadmin/3rdparty/ParallelDebug.py b/buildtools/wafadmin/3rdparty/ParallelDebug.py
67new file mode 100644
68index 0000000..9d0493e
69--- /dev/null
70+++ b/buildtools/wafadmin/3rdparty/ParallelDebug.py
71@@ -0,0 +1,299 @@
72+#! /usr/bin/env python
73+# encoding: utf-8
74+# Thomas Nagy, 2007-2010 (ita)
75+
76+"""
77+debugging helpers for parallel compilation, outputs
78+a svg file in the build directory
79+"""
80+
81+import os, time, sys, threading
82+try: from Queue import Queue
83+except: from queue import Queue
84+import Runner, Options, Utils, Task, Logs
85+from Constants import *
86+
87+#import random
88+#random.seed(100)
89+
90+def set_options(opt):
91+ opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
92+ help='title for the svg diagram', dest='dtitle')
93+ opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=1000, dest='dwidth')
94+ opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
95+ opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
96+ opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
97+
98+# red #ff4d4d
99+# green #4da74d
100+# lila #a751ff
101+
102+color2code = {
103+ 'GREEN' : '#4da74d',
104+ 'YELLOW' : '#fefe44',
105+ 'PINK' : '#a751ff',
106+ 'RED' : '#cc1d1d',
107+ 'BLUE' : '#6687bb',
108+ 'CYAN' : '#34e2e2',
109+
110+}
111+
112+mp = {}
113+info = [] # list of (text,color)
114+
115+def map_to_color(name):
116+ if name in mp:
117+ return mp[name]
118+ try:
119+ cls = Task.TaskBase.classes[name]
120+ except KeyError:
121+ return color2code['RED']
122+ if cls.color in mp:
123+ return mp[cls.color]
124+ if cls.color in color2code:
125+ return color2code[cls.color]
126+ return color2code['RED']
127+
128+def loop(self):
129+ while 1:
130+ tsk=Runner.TaskConsumer.ready.get()
131+ tsk.master.set_running(1, id(threading.currentThread()), tsk)
132+ Runner.process_task(tsk)
133+ tsk.master.set_running(-1, id(threading.currentThread()), tsk)
134+Runner.TaskConsumer.loop = loop
135+
136+
137+old_start = Runner.Parallel.start
138+def do_start(self):
139+ print Options.options
140+ try:
141+ Options.options.dband
142+ except AttributeError:
143+ raise ValueError('use def options(opt): opt.load("parallel_debug")!')
144+
145+ self.taskinfo = Queue()
146+ old_start(self)
147+ process_colors(self)
148+Runner.Parallel.start = do_start
149+
150+def set_running(self, by, i, tsk):
151+ self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by) )
152+Runner.Parallel.set_running = set_running
153+
154+def name2class(name):
155+ return name.replace(' ', '_').replace('.', '_')
156+
157+def process_colors(producer):
158+ # first, cast the parameters
159+ tmp = []
160+ try:
161+ while True:
162+ tup = producer.taskinfo.get(False)
163+ tmp.append(list(tup))
164+ except:
165+ pass
166+
167+ try:
168+ ini = float(tmp[0][2])
169+ except:
170+ return
171+
172+ if not info:
173+ seen = []
174+ for x in tmp:
175+ name = x[3]
176+ if not name in seen:
177+ seen.append(name)
178+ else:
179+ continue
180+
181+ info.append((name, map_to_color(name)))
182+ info.sort(key=lambda x: x[0])
183+
184+ thread_count = 0
185+ acc = []
186+ for x in tmp:
187+ thread_count += x[6]
188+ acc.append("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count))
189+ f = open('pdebug.dat', 'w')
190+ #Utils.write('\n'.join(acc))
191+ f.write('\n'.join(acc))
192+
193+ tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]
194+
195+ st = {}
196+ for l in tmp:
197+ if not l[0] in st:
198+ st[l[0]] = len(st.keys())
199+ tmp = [ [st[lst[0]]] + lst[1:] for lst in tmp ]
200+ THREAD_AMOUNT = len(st.keys())
201+
202+ st = {}
203+ for l in tmp:
204+ if not l[1] in st:
205+ st[l[1]] = len(st.keys())
206+ tmp = [ [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ]
207+
208+
209+ BAND = Options.options.dband
210+
211+ seen = {}
212+ acc = []
213+ for x in range(len(tmp)):
214+ line = tmp[x]
215+ id = line[1]
216+
217+ if id in seen:
218+ continue
219+ seen[id] = True
220+
221+ begin = line[2]
222+ thread_id = line[0]
223+ for y in range(x + 1, len(tmp)):
224+ line = tmp[y]
225+ if line[1] == id:
226+ end = line[2]
227+ #print id, thread_id, begin, end
228+ #acc.append( ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
229+ acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3]) )
230+ break
231+
232+ if Options.options.dmaxtime < 0.1:
233+ gwidth = 1
234+ for x in tmp:
235+ m = BAND * x[2]
236+ if m > gwidth:
237+ gwidth = m
238+ else:
239+ gwidth = BAND * Options.options.dmaxtime
240+
241+ ratio = float(Options.options.dwidth) / gwidth
242+ gwidth = Options.options.dwidth
243+
244+ gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)
245+
246+ out = []
247+
248+ out.append("""<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>
249+<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.0//EN\"
250+\"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd\">
251+<svg xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" version=\"1.0\"
252+ x=\"%r\" y=\"%r\" width=\"%r\" height=\"%r\"
253+ id=\"svg602\" xml:space=\"preserve\">
254+
255+<style type='text/css' media='screen'>
256+ g.over rect { stroke:#FF0000; fill-opacity:0.4 }
257+</style>
258+
259+<script type='text/javascript'><![CDATA[
260+ var svg = document.getElementsByTagName('svg')[0];
261+ var svgNS = svg.getAttribute('xmlns');
262+ svg.addEventListener('mouseover',function(e){
263+ var g = e.target.parentNode;
264+ var x = document.getElementById('r_'+g.id);
265+ if (x) {
266+ g.setAttribute('class', g.getAttribute('class')+' over');
267+ x.setAttribute('class', x.getAttribute('class')+' over');
268+ showInfo(e, g.id);
269+ }
270+ },false);
271+ svg.addEventListener('mouseout',function(e){
272+ var g = e.target.parentNode;
273+ var x = document.getElementById('r_'+g.id);
274+ if (x) {
275+ g.setAttribute('class',g.getAttribute('class').replace(' over',''));
276+ x.setAttribute('class',x.getAttribute('class').replace(' over',''));
277+ hideInfo(e);
278+ }
279+ },false);
280+
281+function showInfo(evt, txt) {
282+ tooltip = document.getElementById('tooltip');
283+
284+ var t = document.getElementById('tooltiptext');
285+ t.firstChild.data = txt;
286+
287+ var x = evt.clientX+10;
288+ if (x > 200) { x -= t.getComputedTextLength() + 16; }
289+ var y = evt.clientY+30;
290+ tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
291+ tooltip.setAttributeNS(null,"visibility","visible");
292+
293+ var r = document.getElementById('tooltiprect');
294+ r.setAttribute('width', t.getComputedTextLength()+6)
295+}
296+
297+
298+function hideInfo(evt) {
299+ tooltip = document.getElementById('tooltip');
300+ tooltip.setAttributeNS(null,"visibility","hidden");
301+}
302+
303+]]></script>
304+
305+<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
306+<rect
307+ x='%r' y='%r'
308+ width='%r' height='%r' z-index='10'
309+ style=\"font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;\"
310+ />\n
311+
312+""" % (0, 0, gwidth + 4, gheight + 4, 0, 0, gwidth + 4, gheight + 4))
313+
314+ # main title
315+ if Options.options.dtitle:
316+ out.append("""<text x="%d" y="%d" style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">%s</text>
317+""" % (gwidth/2, gheight - 5, Options.options.dtitle))
318+
319+ # the rectangles
320+ groups = {}
321+ for (x, y, w, h, clsname) in acc:
322+ try:
323+ groups[clsname].append((x, y, w, h))
324+ except:
325+ groups[clsname] = [(x, y, w, h)]
326+
327+ for cls in groups:
328+
329+ out.append("<g id='%s'>\n" % name2class(cls))
330+
331+ for (x, y, w, h) in groups[cls]:
332+ out.append(""" <rect
333+ x='%r' y='%r'
334+ width='%r' height='%r' z-index='11'
335+ style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
336+ />\n""" % (2 + x*ratio, 2 + y, w*ratio, h, map_to_color(cls)))
337+
338+ out.append("</g>\n")
339+
340+ # output the caption
341+ cnt = THREAD_AMOUNT
342+
343+ for (text, color) in info:
344+ # caption box
345+ b = BAND/2
346+ out.append("""<g id='r_%s'><rect
347+ x='%r' y='%r'
348+ width='%r' height='%r'
349+ style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
350+ />\n""" % (name2class(text), 2 + BAND, 5 + (cnt + 0.5) * BAND, b, b, color))
351+
352+ # caption text
353+ out.append("""<text
354+ style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
355+ x="%r" y="%d">%s</text></g>\n""" % (2 + 2 * BAND, 5 + (cnt + 0.5) * BAND + 10, text))
356+ cnt += 1
357+
358+ out.append("""
359+<g transform="translate(0,0)" visibility="hidden" id="tooltip">
360+ <rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
361+ <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;"> </text>
362+</g>""")
363+
364+ out.append("\n</svg>")
365+
366+ #node = producer.bld.path.make_node('pdebug.svg')
367+ f = open('pdebug.svg', 'w')
368+ f.write("".join(out))
369+
370+
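A rough sketch of how the helper above could be wired into a project's wscript; nothing below is part of this patch, and the direct import assumes wafadmin/3rdparty is importable:

import ParallelDebug  # importing installs the hooks on Runner.Parallel / Runner.TaskConsumer

def set_options(opt):
	ParallelDebug.set_options(opt)  # registers --dtitle, --dwidth, --dtime, --dband, --dmaxtime

def build(bld):
	# after the build, pdebug.dat and pdebug.svg are written to the build directory
	bld(features='cc cprogram', source='main.c', target='demo')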
371diff --git a/buildtools/wafadmin/3rdparty/batched_cc.py b/buildtools/wafadmin/3rdparty/batched_cc.py
372new file mode 100644
373index 0000000..8e31074
374--- /dev/null
375+++ b/buildtools/wafadmin/3rdparty/batched_cc.py
376@@ -0,0 +1,183 @@
377+#!/usr/bin/env python
378+# encoding: utf-8
379+# Thomas Nagy, 2006 (ita)
380+
381+"""
382+Batched builds - compile faster
383+instead of compiling object files one by one, c/c++ compilers are often able to compile at once:
384+cc -c ../file1.c ../file2.c ../file3.c
385+
386+Files are output in the directory where the compiler is called, and dependencies are more difficult
387+to track (do not run the command on all source files if only one file changes)
388+
389+As such, we act as if the files were compiled one by one, but no command is actually run:
390+replace each cc/cpp Task by a TaskSlave
391+A new task called TaskMaster collects the signatures from each slave and finds out the command-line
392+to run.
393+
394+To set this up, the method ccroot::create_task is replaced by a new version; to enable batched builds
395+it is only necessary to import this module in the configuration (no other change is required)
396+"""
397+
398+MAX_BATCH = 50
399+MAXPARALLEL = False
400+
401+EXT_C = ['.c', '.cc', '.cpp', '.cxx']
402+
403+import os, threading
404+import TaskGen, Task, ccroot, Build, Logs
405+from TaskGen import extension, feature, before
406+from Constants import *
407+
408+cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} -c ${SRCLST}'
409+cc_fun = Task.compile_fun_noshell('batched_cc', cc_str)[0]
410+
411+cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} -c ${SRCLST}'
412+cxx_fun = Task.compile_fun_noshell('batched_cxx', cxx_str)[0]
413+
414+count = 70000
415+class batch_task(Task.Task):
416+ color = 'RED'
417+
418+ after = 'cc cxx'
419+ before = 'cc_link cxx_link static_link'
420+
421+ def __str__(self):
422+ return '(batch compilation for %d slaves)\n' % len(self.slaves)
423+
424+ def __init__(self, *k, **kw):
425+ Task.Task.__init__(self, *k, **kw)
426+ self.slaves = []
427+ self.inputs = []
428+ self.hasrun = 0
429+
430+ global count
431+ count += 1
432+ self.idx = count
433+
434+ def add_slave(self, slave):
435+ self.slaves.append(slave)
436+ self.set_run_after(slave)
437+
438+ def runnable_status(self):
439+ for t in self.run_after:
440+ if not t.hasrun:
441+ return ASK_LATER
442+
443+ for t in self.slaves:
444+ #if t.executed:
445+ if t.hasrun != SKIPPED:
446+ return RUN_ME
447+
448+ return SKIP_ME
449+
450+ def run(self):
451+ outputs = []
452+ self.outputs = []
453+
454+ srclst = []
455+ slaves = []
456+ for t in self.slaves:
457+ if t.hasrun != SKIPPED:
458+ slaves.append(t)
459+ srclst.append(t.inputs[0].abspath(self.env))
460+
461+ self.env.SRCLST = srclst
462+ self.cwd = slaves[0].inputs[0].parent.abspath(self.env)
463+
464+ env = self.env
465+ app = env.append_unique
466+ cpppath_st = env['CPPPATH_ST']
467+	env._CCINCFLAGS = env._CXXINCFLAGS = []
468+
469+ # local flags come first
470+ # set the user-defined includes paths
471+ for i in env['INC_PATHS']:
472+ app('_CCINCFLAGS', cpppath_st % i.abspath())
473+ app('_CXXINCFLAGS', cpppath_st % i.abspath())
474+ app('_CCINCFLAGS', cpppath_st % i.abspath(env))
475+ app('_CXXINCFLAGS', cpppath_st % i.abspath(env))
476+
477+ # set the library include paths
478+ for i in env['CPPPATH']:
479+ app('_CCINCFLAGS', cpppath_st % i)
480+ app('_CXXINCFLAGS', cpppath_st % i)
481+
482+ if self.slaves[0].__class__.__name__ == 'cc':
483+ ret = cc_fun(self)
484+ else:
485+ ret = cxx_fun(self)
486+
487+ if ret:
488+ return ret
489+
490+ for t in slaves:
491+ t.old_post_run()
492+
493+from TaskGen import extension, feature, after
494+
495+import cc, cxx
496+def wrap(fun):
497+ def foo(self, node):
498+ # we cannot control the extension, this sucks
499+ self.obj_ext = '.o'
500+
501+ task = fun(self, node)
502+ if not getattr(self, 'masters', None):
503+ self.masters = {}
504+ self.allmasters = []
505+
506+ if not node.parent.id in self.masters:
507+ m = self.masters[node.parent.id] = self.master = self.create_task('batch')
508+ self.allmasters.append(m)
509+ else:
510+ m = self.masters[node.parent.id]
511+ if len(m.slaves) > MAX_BATCH:
512+ m = self.masters[node.parent.id] = self.master = self.create_task('batch')
513+ self.allmasters.append(m)
514+
515+ m.add_slave(task)
516+ return task
517+ return foo
518+
519+c_hook = wrap(cc.c_hook)
520+extension(cc.EXT_CC)(c_hook)
521+
522+cxx_hook = wrap(cxx.cxx_hook)
523+extension(cxx.EXT_CXX)(cxx_hook)
524+
525+
526+@feature('cprogram', 'cshlib', 'cstaticlib')
527+@after('apply_link')
528+def link_after_masters(self):
529+ if getattr(self, 'allmasters', None):
530+ for m in self.allmasters:
531+ self.link_task.set_run_after(m)
532+
533+for c in ['cc', 'cxx']:
534+ t = Task.TaskBase.classes[c]
535+ def run(self):
536+ pass
537+
538+ def post_run(self):
539+ #self.executed=1
540+ pass
541+
542+ def can_retrieve_cache(self):
543+ if self.old_can_retrieve_cache():
544+ for m in self.generator.allmasters:
545+ try:
546+ m.slaves.remove(self)
547+ except ValueError:
548+ pass #this task wasn't included in that master
549+ return 1
550+ else:
551+ return None
552+
553+ setattr(t, 'oldrun', t.__dict__['run'])
554+ setattr(t, 'run', run)
555+ setattr(t, 'old_post_run', t.post_run)
556+ setattr(t, 'post_run', post_run)
557+ setattr(t, 'old_can_retrieve_cache', t.can_retrieve_cache)
558+ setattr(t, 'can_retrieve_cache', can_retrieve_cache)
559+
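Per the docstring above, switching to batched compilation only takes an import at configuration time; a minimal sketch (the import path and tool name are assumptions, not part of this patch):

def configure(conf):
	conf.check_tool('gcc')
	import batched_cc  # assumes wafadmin/3rdparty is importable; wraps the cc/cxx hooks so up to MAX_BATCH (50) sources go into one compiler call

def build(bld):
	bld(features='cc cprogram', source='a.c b.c c.c', target='app')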
560diff --git a/buildtools/wafadmin/3rdparty/boost.py b/buildtools/wafadmin/3rdparty/boost.py
561new file mode 100644
562index 0000000..e690a4e
563--- /dev/null
564+++ b/buildtools/wafadmin/3rdparty/boost.py
565@@ -0,0 +1,343 @@
566+#!/usr/bin/env python
567+# encoding: utf-8
568+#
569+# partially based on boost.py written by Gernot Vormayr
570+# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
571+# modified by Bjoern Michaelsen, 2008
572+# modified by Luca Fossati, 2008
573+# rewritten for waf 1.5.1, Thomas Nagy, 2008
574+#
575+#def set_options(opt):
576+# opt.tool_options('boost')
577+# # ...
578+#
579+#def configure(conf):
580+# # ... (e.g. conf.check_tool('g++'))
581+# conf.check_tool('boost')
582+# conf.check_boost(lib='signals filesystem', static='onlystatic', score_version=(-1000, 1000), tag_minscore=1000)
583+#
584+#def build(bld):
585+# bld(source='main.c', target='bar', uselib="BOOST BOOST_SYSTEM")
586+#
587+#ISSUES:
588+# * find_includes should be called only once!
589+# * support mandatory
590+
591+######## boost update ###########
592+## ITA: * the method get_boost_version_number does work
593+## * the rest of the code has not really been tried
594+# * make certain a demo is provided (in demos/adv for example)
595+
596+# TODO: bad and underdocumented code -> boost.py will be removed in waf 1.6 to be rewritten later
597+
598+import os.path, glob, types, re, sys
599+import Configure, config_c, Options, Utils, Logs
600+from Logs import warn, debug
601+from Configure import conf
602+
603+boost_code = '''
604+#include <iostream>
605+#include <boost/version.hpp>
606+int main() { std::cout << BOOST_VERSION << std::endl; }
607+'''
608+
609+boost_libpath = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
610+boost_cpppath = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
611+
612+STATIC_NOSTATIC = 'nostatic'
613+STATIC_BOTH = 'both'
614+STATIC_ONLYSTATIC = 'onlystatic'
615+
616+is_versiontag = re.compile('^\d+_\d+_?\d*$')
617+is_threadingtag = re.compile('^mt$')
618+is_abitag = re.compile('^[sgydpn]+$')
619+is_toolsettag = re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|vc|mgw|qcc|sun|vacpp)\d*$')
620+is_pythontag=re.compile('^py[0-9]{2}$')
621+
622+def set_options(opt):
623+ opt.add_option('--boost-includes', type='string', default='', dest='boostincludes', help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35')
624+ opt.add_option('--boost-libs', type='string', default='', dest='boostlibs', help='path to the directory where the boost libs are e.g. /usr/local/lib')
625+
626+def string_to_version(s):
627+ version = s.split('.')
628+ if len(version) < 3: return 0
629+ return int(version[0])*100000 + int(version[1])*100 + int(version[2])
630+
631+def version_string(version):
632+ major = version / 100000
633+ minor = version / 100 % 1000
634+ minor_minor = version % 100
635+ if minor_minor == 0:
636+ return "%d_%d" % (major, minor)
637+ else:
638+ return "%d_%d_%d" % (major, minor, minor_minor)
639+
640+def libfiles(lib, pattern, lib_paths):
641+ result = []
642+ for lib_path in lib_paths:
643+ libname = pattern % ('boost_%s[!_]*' % lib)
644+ result += glob.glob(os.path.join(lib_path, libname))
645+ return result
646+
647+@conf
648+def get_boost_version_number(self, dir):
649+ """silently retrieve the boost version number"""
650+ try:
651+ return self.run_c_code(compiler='cxx', code=boost_code, includes=dir, execute=1, env=self.env.copy(), type='cprogram', compile_mode='cxx', compile_filename='test.cpp')
652+ except Configure.ConfigurationError, e:
653+ return -1
654+
655+def set_default(kw, var, val):
656+ if not var in kw:
657+ kw[var] = val
658+
659+def tags_score(tags, kw):
660+ """
661+ checks library tags
662+
663+ see http://www.boost.org/doc/libs/1_35_0/more/getting_started/unix-variants.html 6.1
664+ """
665+ score = 0
666+ needed_tags = {
667+ 'threading': kw['tag_threading'],
668+ 'abi': kw['tag_abi'],
669+ 'toolset': kw['tag_toolset'],
670+ 'version': kw['tag_version'],
671+ 'python': kw['tag_python']
672+ }
673+
674+ if kw['tag_toolset'] is None:
675+ v = kw['env']
676+ toolset = v['CXX_NAME']
677+ if v['CXX_VERSION']:
678+ version_no = v['CXX_VERSION'].split('.')
679+ toolset += version_no[0]
680+ if len(version_no) > 1:
681+ toolset += version_no[1]
682+ needed_tags['toolset'] = toolset
683+
684+ found_tags = {}
685+ for tag in tags:
686+ if is_versiontag.match(tag): found_tags['version'] = tag
687+ if is_threadingtag.match(tag): found_tags['threading'] = tag
688+ if is_abitag.match(tag): found_tags['abi'] = tag
689+ if is_toolsettag.match(tag): found_tags['toolset'] = tag
690+ if is_pythontag.match(tag): found_tags['python'] = tag
691+
692+ for tagname in needed_tags.iterkeys():
693+ if needed_tags[tagname] is not None and tagname in found_tags:
694+ if re.compile(needed_tags[tagname]).match(found_tags[tagname]):
695+ score += kw['score_' + tagname][0]
696+ else:
697+ score += kw['score_' + tagname][1]
698+ return score
699+
700+@conf
701+def validate_boost(self, kw):
702+ ver = kw.get('version', '')
703+
704+ for x in 'min_version max_version version'.split():
705+ set_default(kw, x, ver)
706+
707+ set_default(kw, 'lib', '')
708+ kw['lib'] = Utils.to_list(kw['lib'])
709+
710+ set_default(kw, 'env', self.env)
711+
712+ set_default(kw, 'libpath', boost_libpath)
713+ set_default(kw, 'cpppath', boost_cpppath)
714+
715+ for x in 'tag_threading tag_version tag_toolset'.split():
716+ set_default(kw, x, None)
717+ set_default(kw, 'tag_abi', '^[^d]*$')
718+
719+ set_default(kw, 'python', str(sys.version_info[0]) + str(sys.version_info[1]) )
720+ set_default(kw, 'tag_python', '^py' + kw['python'] + '$')
721+
722+ set_default(kw, 'score_threading', (10, -10))
723+ set_default(kw, 'score_abi', (10, -10))
724+ set_default(kw, 'score_python', (10,-10))
725+ set_default(kw, 'score_toolset', (1, -1))
726+ set_default(kw, 'score_version', (100, -100))
727+
728+ set_default(kw, 'score_min', 0)
729+ set_default(kw, 'static', STATIC_NOSTATIC)
730+ set_default(kw, 'found_includes', False)
731+ set_default(kw, 'min_score', 0)
732+
733+ set_default(kw, 'errmsg', 'not found')
734+ set_default(kw, 'okmsg', 'ok')
735+
736+@conf
737+def find_boost_includes(self, kw):
738+ """
739+	check every path in kw['cpppath'] for a subdir
740+	that either starts with boost- or is named boost.
741+
742+	Then the version is checked and selected according to
743+	min_version/max_version. The highest possible version number is
744+	selected!
745+
746+	If no versiontag is set, the versiontag is set according to the
747+	selected library and CPPPATH_BOOST is set.
748+ """
749+ boostPath = getattr(Options.options, 'boostincludes', '')
750+ if boostPath:
751+ boostPath = [os.path.normpath(os.path.expandvars(os.path.expanduser(boostPath)))]
752+ else:
753+ boostPath = Utils.to_list(kw['cpppath'])
754+
755+ min_version = string_to_version(kw.get('min_version', ''))
756+ max_version = string_to_version(kw.get('max_version', '')) or (sys.maxint - 1)
757+
758+ version = 0
759+ for include_path in boostPath:
760+ boost_paths = [p for p in glob.glob(os.path.join(include_path, 'boost*')) if os.path.isdir(p)]
761+ debug('BOOST Paths: %r' % boost_paths)
762+ for path in boost_paths:
763+ pathname = os.path.split(path)[-1]
764+ ret = -1
765+ if pathname == 'boost':
766+ path = include_path
767+ ret = self.get_boost_version_number(path)
768+ elif pathname.startswith('boost-'):
769+ ret = self.get_boost_version_number(path)
770+ ret = int(ret)
771+
772+ if ret != -1 and ret >= min_version and ret <= max_version and ret > version:
773+ boost_path = path
774+ version = ret
775+ if not version:
776+ self.fatal('boost headers not found! (required version min: %s max: %s)'
777+ % (kw['min_version'], kw['max_version']))
778+ return False
779+
780+ found_version = version_string(version)
781+ versiontag = '^' + found_version + '$'
782+ if kw['tag_version'] is None:
783+ kw['tag_version'] = versiontag
784+ elif kw['tag_version'] != versiontag:
785+ warn('boost header version %r and tag_version %r do not match!' % (versiontag, kw['tag_version']))
786+ env = self.env
787+ env['CPPPATH_BOOST'] = boost_path
788+ env['BOOST_VERSION'] = found_version
789+ self.found_includes = 1
790+ ret = 'Version %s (%s)' % (found_version, boost_path)
791+ return ret
792+
793+@conf
794+def find_boost_library(self, lib, kw):
795+
796+ def find_library_from_list(lib, files):
797+ lib_pattern = re.compile('.*boost_(.*?)\..*')
798+ result = (None, None)
799+ resultscore = kw['min_score'] - 1
800+ for file in files:
801+ m = lib_pattern.search(file, 1)
802+ if m:
803+ libname = m.group(1)
804+ libtags = libname.split('-')[1:]
805+ currentscore = tags_score(libtags, kw)
806+ if currentscore > resultscore:
807+ result = (libname, file)
808+ resultscore = currentscore
809+ return result
810+
811+ lib_paths = getattr(Options.options, 'boostlibs', '')
812+ if lib_paths:
813+ lib_paths = [os.path.normpath(os.path.expandvars(os.path.expanduser(lib_paths)))]
814+ else:
815+ lib_paths = Utils.to_list(kw['libpath'])
816+
817+ v = kw.get('env', self.env)
818+
819+ (libname, file) = (None, None)
820+ if kw['static'] in [STATIC_NOSTATIC, STATIC_BOTH]:
821+ st_env_prefix = 'LIB'
822+ files = libfiles(lib, v['shlib_PATTERN'], lib_paths)
823+ (libname, file) = find_library_from_list(lib, files)
824+ if libname is None and kw['static'] in [STATIC_ONLYSTATIC, STATIC_BOTH]:
825+ st_env_prefix = 'STATICLIB'
826+ staticLibPattern = v['staticlib_PATTERN']
827+ if self.env['CC_NAME'] == 'msvc':
828+ staticLibPattern = 'lib' + staticLibPattern
829+ files = libfiles(lib, staticLibPattern, lib_paths)
830+ (libname, file) = find_library_from_list(lib, files)
831+ if libname is not None:
832+ v['LIBPATH_BOOST_' + lib.upper()] = [os.path.split(file)[0]]
833+ if self.env['CC_NAME'] == 'msvc' and os.path.splitext(file)[1] == '.lib':
834+ v[st_env_prefix + '_BOOST_' + lib.upper()] = ['libboost_'+libname]
835+ else:
836+ v[st_env_prefix + '_BOOST_' + lib.upper()] = ['boost_'+libname]
837+ return
838+ self.fatal('lib boost_' + lib + ' not found!')
839+
840+@conf
841+def check_boost(self, *k, **kw):
842+ """
843+ This should be the main entry point
844+
845+- min_version
846+- max_version
847+- version
848+- include_path
849+- lib_path
850+- lib
851+- toolsettag - None or a regexp
852+- threadingtag - None or a regexp
853+- abitag - None or a regexp
854+- versiontag - WARNING: you should rather use version or min_version/max_version
855+- static - look for static libs (values:
856+ 'nostatic' or STATIC_NOSTATIC - ignore static libs (default)
857+ 'both' or STATIC_BOTH - find static libs, too
858+ 'onlystatic' or STATIC_ONLYSTATIC - find only static libs
859+- score_version
860+- score_abi
861+- score_threading
862+- score_toolset
863+ * the scores are tuples (match_score, nomatch_score)
864+   match_score is added to the score if the tag is matched
865+ nomatch_score is added when a tag is found and does not match
866+- min_score
867+ """
868+
869+ if not self.env['CXX']:
870+ self.fatal('load a c++ compiler tool first, for example conf.check_tool("g++")')
871+ self.validate_boost(kw)
872+ ret = None
873+ try:
874+ if not kw.get('found_includes', None):
875+ self.check_message_1(kw.get('msg_includes', 'boost headers'))
876+ ret = self.find_boost_includes(kw)
877+
878+ except Configure.ConfigurationError, e:
879+ if 'errmsg' in kw:
880+ self.check_message_2(kw['errmsg'], 'YELLOW')
881+ if 'mandatory' in kw:
882+ if Logs.verbose > 1:
883+ raise
884+ else:
885+ self.fatal('the configuration failed (see %r)' % self.log.name)
886+ else:
887+ if 'okmsg' in kw:
888+ self.check_message_2(kw.get('okmsg_includes', ret))
889+
890+ for lib in kw['lib']:
891+ self.check_message_1('library boost_'+lib)
892+ try:
893+ self.find_boost_library(lib, kw)
894+ except Configure.ConfigurationError, e:
895+ ret = False
896+ if 'errmsg' in kw:
897+ self.check_message_2(kw['errmsg'], 'YELLOW')
898+ if 'mandatory' in kw:
899+ if Logs.verbose > 1:
900+ raise
901+ else:
902+ self.fatal('the configuration failed (see %r)' % self.log.name)
903+ else:
904+ if 'okmsg' in kw:
905+ self.check_message_2(kw['okmsg'])
906+
907+ return ret
908+
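The intended usage is sketched in the comments at the top of boost.py itself; restated here as a wscript fragment (the tooldir and the BOOST_* uselib names are assumptions, and note the header comment mentions tag_minscore while validate_boost() only knows min_score):

def set_options(opt):
	opt.tool_options('boost')

def configure(conf):
	conf.check_tool('g++')
	conf.check_tool('boost', tooldir='buildtools/wafadmin/3rdparty')
	conf.check_boost(lib='signals filesystem', static='onlystatic', score_version=(-1000, 1000), min_score=1000)

def build(bld):
	# find_boost_library() fills LIB_BOOST_<NAME>/LIBPATH_BOOST_<NAME>, find_boost_includes() fills CPPPATH_BOOST
	bld(features='cxx cprogram', source='main.cpp', target='bar', uselib='BOOST BOOST_SIGNALS BOOST_FILESYSTEM')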
909diff --git a/buildtools/wafadmin/3rdparty/fluid.py b/buildtools/wafadmin/3rdparty/fluid.py
910new file mode 100644
911index 0000000..117edef
912--- /dev/null
913+++ b/buildtools/wafadmin/3rdparty/fluid.py
914@@ -0,0 +1,27 @@
915+#!/usr/bin/python
916+# encoding: utf-8
917+# Grygoriy Fuchedzhy 2009
918+
919+"""
920+Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjunction with the 'cxx' feature.
921+"""
922+
923+import Task
924+from TaskGen import extension
925+
926+Task.simple_task_type('fluid', '${FLUID} -c -o ${TGT[0].abspath(env)} -h ${TGT[1].abspath(env)} ${SRC}', 'BLUE', shell=False, ext_out='.cxx')
927+
928+@extension('.fl')
929+def fluid(self, node):
930+ """add the .fl to the source list; the cxx file generated will be compiled when possible"""
931+ cpp = node.change_ext('.cpp')
932+ hpp = node.change_ext('.hpp')
933+ self.create_task('fluid', node, [cpp, hpp])
934+
935+ if 'cxx' in self.features:
936+ self.allnodes.append(cpp)
937+
938+def detect(conf):
939+ fluid = conf.find_program('fluid', var='FLUID', mandatory=True)
940+ conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True)
941+
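A hedged sketch of a wscript that feeds a .fl file through the extension handler above; file names and the tooldir are assumptions:

def configure(conf):
	conf.check_tool('g++')
	conf.check_tool('fluid', tooldir='buildtools/wafadmin/3rdparty')  # finds the fluid binary and queries fltk-config into uselib FLTK

def build(bld):
	# ui.fl is converted to ui.cpp/ui.hpp; ui.cpp is compiled because 'cxx' is among the features
	bld(features='cxx cprogram', source='main.cpp ui.fl', target='gui', uselib='FLTK')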
942diff --git a/buildtools/wafadmin/3rdparty/gccdeps.py b/buildtools/wafadmin/3rdparty/gccdeps.py
943new file mode 100644
944index 0000000..6600c9c
945--- /dev/null
946+++ b/buildtools/wafadmin/3rdparty/gccdeps.py
947@@ -0,0 +1,128 @@
948+#!/usr/bin/env python
949+# encoding: utf-8
950+# Thomas Nagy, 2008-2010 (ita)
951+
952+"""
953+Execute the tasks with gcc -MD, read the dependencies from the .d file
954+and prepare the dependency calculation for the next run
955+"""
956+
957+import os, re, threading
958+import Task, Logs, Utils, preproc
959+from TaskGen import before, after, feature
960+
961+lock = threading.Lock()
962+
963+preprocessor_flag = '-MD'
964+
965+@feature('cc')
966+@before('apply_core')
967+def add_mmd_cc(self):
968+ if self.env.get_flat('CCFLAGS').find(preprocessor_flag) < 0:
969+ self.env.append_value('CCFLAGS', preprocessor_flag)
970+
971+@feature('cxx')
972+@before('apply_core')
973+def add_mmd_cxx(self):
974+ if self.env.get_flat('CXXFLAGS').find(preprocessor_flag) < 0:
975+ self.env.append_value('CXXFLAGS', preprocessor_flag)
976+
977+def scan(self):
978+ "the scanner does not do anything initially"
979+ nodes = self.generator.bld.node_deps.get(self.unique_id(), [])
980+ names = []
981+ return (nodes, names)
982+
983+re_o = re.compile("\.o$")
984+re_src = re.compile("^(\.\.)[\\/](.*)$")
985+
986+def post_run(self):
987+ # The following code is executed by threads, it is not safe, so a lock is needed...
988+
989+ if getattr(self, 'cached', None):
990+ return Task.Task.post_run(self)
991+
992+ name = self.outputs[0].abspath(self.env)
993+ name = re_o.sub('.d', name)
994+ txt = Utils.readf(name)
995+ #os.unlink(name)
996+
997+ txt = txt.replace('\\\n', '')
998+
999+ lst = txt.strip().split(':')
1000+ val = ":".join(lst[1:])
1001+ val = val.split()
1002+
1003+ nodes = []
1004+ bld = self.generator.bld
1005+
1006+ f = re.compile("^("+self.env.variant()+"|\.\.)[\\/](.*)$")
1007+ for x in val:
1008+ if os.path.isabs(x):
1009+
1010+ if not preproc.go_absolute:
1011+ continue
1012+
1013+ lock.acquire()
1014+ try:
1015+ node = bld.root.find_resource(x)
1016+ finally:
1017+ lock.release()
1018+ else:
1019+ g = re.search(re_src, x)
1020+ if g:
1021+ x = g.group(2)
1022+ lock.acquire()
1023+ try:
1024+ node = bld.bldnode.parent.find_resource(x)
1025+ finally:
1026+ lock.release()
1027+ else:
1028+ g = re.search(f, x)
1029+ if g:
1030+ x = g.group(2)
1031+ lock.acquire()
1032+ try:
1033+ node = bld.srcnode.find_resource(x)
1034+ finally:
1035+ lock.release()
1036+
1037+ if id(node) == id(self.inputs[0]):
1038+ # ignore the source file, it is already in the dependencies
1039+ # this way, successful config tests may be retrieved from the cache
1040+ continue
1041+
1042+ if not node:
1043+ raise ValueError('could not find %r for %r' % (x, self))
1044+ else:
1045+ nodes.append(node)
1046+
1047+ Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
1048+
1049+ bld.node_deps[self.unique_id()] = nodes
1050+ bld.raw_deps[self.unique_id()] = []
1051+
1052+ try:
1053+ del self.cache_sig
1054+ except:
1055+ pass
1056+
1057+ Task.Task.post_run(self)
1058+
1059+import Constants, Utils
1060+def sig_implicit_deps(self):
1061+ try:
1062+ return Task.Task.sig_implicit_deps(self)
1063+ except Utils.WafError:
1064+ return Constants.SIG_NIL
1065+
1066+for name in 'cc cxx'.split():
1067+ try:
1068+ cls = Task.TaskBase.classes[name]
1069+ except KeyError:
1070+ pass
1071+ else:
1072+ cls.post_run = post_run
1073+ cls.scan = scan
1074+ cls.sig_implicit_deps = sig_implicit_deps
1075+
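A minimal sketch of enabling the -MD based dependency tracking above from a wscript; the tooldir is an assumption:

def configure(conf):
	conf.check_tool('gcc')
	conf.check_tool('gccdeps', tooldir='buildtools/wafadmin/3rdparty')  # appends -MD to CCFLAGS/CXXFLAGS and swaps scan/post_run on the cc and cxx task classes

def build(bld):
	bld(features='cc cprogram', source='main.c', target='demo')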
1076diff --git a/buildtools/wafadmin/3rdparty/go.py b/buildtools/wafadmin/3rdparty/go.py
1077new file mode 100644
1078index 0000000..2d8df0d
1079--- /dev/null
1080+++ b/buildtools/wafadmin/3rdparty/go.py
1081@@ -0,0 +1,111 @@
1082+#!/usr/bin/env python
1083+# encoding: utf-8
1084+# go.py - Waf tool for the Go programming language
1085+# By: Tom Wambold <tom5760@gmail.com>
1086+
1087+import platform, os
1088+
1089+import Task
1090+import Utils
1091+from TaskGen import feature, extension, after
1092+
1093+Task.simple_task_type('gocompile', '${GOC} ${GOCFLAGS} -o ${TGT} ${SRC}', shell=False)
1094+Task.simple_task_type('gopack', '${GOP} grc ${TGT} ${SRC}', shell=False)
1095+Task.simple_task_type('golink', '${GOL} ${GOLFLAGS} -o ${TGT} ${SRC}', shell=False)
1096+
1097+def detect(conf):
1098+
1099+ def set_def(var, val):
1100+ if not conf.env[var]:
1101+ conf.env[var] = val
1102+
1103+ goarch = os.getenv("GOARCH")
1104+
1105+ if goarch == '386':
1106+ set_def('GO_PLATFORM', 'i386')
1107+ elif goarch == 'amd64':
1108+ set_def('GO_PLATFORM', 'x86_64')
1109+ elif goarch == 'arm':
1110+ set_def('GO_PLATFORM', 'arm')
1111+ else:
1112+ set_def('GO_PLATFORM', platform.machine())
1113+
1114+ if conf.env.GO_PLATFORM == 'x86_64':
1115+ set_def('GO_COMPILER', '6g')
1116+ set_def('GO_LINKER', '6l')
1117+ set_def('GO_EXTENSION', '.6')
1118+ elif conf.env.GO_PLATFORM in ['i386', 'i486', 'i586', 'i686']:
1119+ set_def('GO_COMPILER', '8g')
1120+ set_def('GO_LINKER', '8l')
1121+ set_def('GO_EXTENSION', '.8')
1122+ elif conf.env.GO_PLATFORM == 'arm':
1123+ set_def('GO_COMPILER', '5g')
1124+ set_def('GO_LINKER', '5l')
1125+ set_def('GO_EXTENSION', '.5')
1126+
1127+ if not (conf.env.GO_COMPILER or conf.env.GO_LINKER or conf.env.GO_EXTENSION):
1128+ raise conf.fatal('Unsupported platform ' + platform.machine())
1129+
1130+ set_def('GO_PACK', 'gopack')
1131+ set_def('GO_PACK_EXTENSION', '.a')
1132+
1133+ conf.find_program(conf.env.GO_COMPILER, var='GOC', mandatory=True)
1134+ conf.find_program(conf.env.GO_LINKER, var='GOL', mandatory=True)
1135+ conf.find_program(conf.env.GO_PACK, var='GOP', mandatory=True)
1136+ conf.find_program('cgo', var='CGO', mandatory=True)
1137+
1138+@extension('.go')
1139+def compile_go(self, node):
1140+ try:
1141+ self.go_nodes.append(node)
1142+ except AttributeError:
1143+ self.go_nodes = [node]
1144+
1145+@feature('go')
1146+@after('apply_core')
1147+def apply_compile_go(self):
1148+ try:
1149+ nodes = self.go_nodes
1150+ except AttributeError:
1151+ self.go_compile_task = None
1152+ else:
1153+ self.go_compile_task = self.create_task('gocompile',
1154+ nodes,
1155+ [self.path.find_or_declare(self.target + self.env.GO_EXTENSION)])
1156+
1157+@feature('gopackage', 'goprogram')
1158+@after('apply_compile_go')
1159+def apply_goinc(self):
1160+ if not getattr(self, 'go_compile_task', None):
1161+ return
1162+
1163+ names = self.to_list(getattr(self, 'uselib_local', []))
1164+ for name in names:
1165+ obj = self.name_to_obj(name)
1166+ if not obj:
1167+ raise Utils.WafError('object %r was not found in uselib_local '
1168+                    '(required by %r)' % (name, self.name))
1169+ obj.post()
1170+ self.go_compile_task.set_run_after(obj.go_package_task)
1171+ self.go_compile_task.dep_nodes.extend(obj.go_package_task.outputs)
1172+ self.env.append_unique('GOCFLAGS', '-I' + obj.path.abspath(obj.env))
1173+ self.env.append_unique('GOLFLAGS', '-L' + obj.path.abspath(obj.env))
1174+
1175+@feature('gopackage')
1176+@after('apply_goinc')
1177+def apply_gopackage(self):
1178+ self.go_package_task = self.create_task('gopack',
1179+ self.go_compile_task.outputs[0],
1180+ self.path.find_or_declare(self.target + self.env.GO_PACK_EXTENSION))
1181+ self.go_package_task.set_run_after(self.go_compile_task)
1182+ self.go_package_task.dep_nodes.extend(self.go_compile_task.outputs)
1183+
1184+@feature('goprogram')
1185+@after('apply_goinc')
1186+def apply_golink(self):
1187+ self.go_link_task = self.create_task('golink',
1188+ self.go_compile_task.outputs[0],
1189+ self.path.find_or_declare(self.target))
1190+ self.go_link_task.set_run_after(self.go_compile_task)
1191+ self.go_link_task.dep_nodes.extend(self.go_compile_task.outputs)
1192+
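A hedged wscript sketch for the Go tool above; the feature names come from the decorators in the file, while the file names, tooldir and uselib_local linkage are assumptions:

def configure(conf):
	conf.check_tool('go', tooldir='buildtools/wafadmin/3rdparty')  # selects 6g/8g/5g and the matching linker from GOARCH or platform.machine()

def build(bld):
	bld(features='go gopackage', source='util.go', target='util')
	bld(features='go goprogram', source='main.go', target='hello', uselib_local='util')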
1193diff --git a/buildtools/wafadmin/3rdparty/lru_cache.py b/buildtools/wafadmin/3rdparty/lru_cache.py
1194new file mode 100644
1195index 0000000..5b00abc
1196--- /dev/null
1197+++ b/buildtools/wafadmin/3rdparty/lru_cache.py
1198@@ -0,0 +1,97 @@
1199+#! /usr/bin/env python
1200+# encoding: utf-8
1201+# Thomas Nagy 2011
1202+
1203+import os, shutil, re
1204+import Options, Build, Logs
1205+
1206+"""
1207+Apply a least recently used policy to the Waf cache.
1208+
1209+For performance reasons, it is called after the build is complete.
1210+
1211+We assume that the folders are written atomically
1212+
1213+Do export WAFCACHE=/tmp/foo-xyz where xyz represents the cache size in megabytes
1214+If missing, the default cache size will be set to 10GB
1215+"""
1216+
1217+re_num = re.compile('[a-zA-Z_]+(\d+)')
1218+
1219+CACHESIZE = 10*1024*1024*1024 # in bytes
1220+CLEANRATIO = 0.8
1221+DIRSIZE = 4096
1222+
1223+def compile(self):
1224+ if Options.cache_global and not Options.options.nocache:
1225+ try:
1226+ os.makedirs(Options.cache_global)
1227+ except:
1228+ pass
1229+
1230+ try:
1231+ self.raw_compile()
1232+ finally:
1233+ if Options.cache_global and not Options.options.nocache:
1234+ self.sweep()
1235+
1236+def sweep(self):
1237+ global CACHESIZE
1238+ CACHEDIR = Options.cache_global
1239+
1240+ # get the cache max size from the WAFCACHE filename
1241+ re_num = re.compile('[a-zA-Z_]+(\d+)')
1242+ val = re_num.sub('\\1', os.path.basename(Options.cache_global))
1243+ try:
1244+ CACHESIZE = int(val)
1245+ except:
1246+ pass
1247+
1248+ # map folder names to timestamps
1249+ flist = {}
1250+ for x in os.listdir(CACHEDIR):
1251+ j = os.path.join(CACHEDIR, x)
1252+ if os.path.isdir(j) and len(x) == 32: # dir names are md5 hexdigests
1253+ flist[x] = [os.stat(j).st_mtime, 0]
1254+
1255+ for (x, v) in flist.items():
1256+ cnt = DIRSIZE # each entry takes 4kB
1257+ d = os.path.join(CACHEDIR, x)
1258+ for k in os.listdir(d):
1259+ cnt += os.stat(os.path.join(d, k)).st_size
1260+ flist[x][1] = cnt
1261+
1262+ total = sum([x[1] for x in flist.values()])
1263+ Logs.debug('lru: Cache size is %r' % total)
1264+
1265+ if total >= CACHESIZE:
1266+ Logs.debug('lru: Trimming the cache since %r > %r' % (total, CACHESIZE))
1267+
1268+ # make a list to sort the folders by timestamp
1269+ lst = [(p, v[0], v[1]) for (p, v) in flist.items()]
1270+ lst.sort(key=lambda x: x[1]) # sort by timestamp
1271+ lst.reverse()
1272+
1273+ while total >= CACHESIZE * CLEANRATIO:
1274+ (k, t, s) = lst.pop()
1275+ p = os.path.join(CACHEDIR, k)
1276+ v = p + '.del'
1277+ try:
1278+ os.rename(p, v)
1279+ except:
1280+ # someone already did it
1281+ pass
1282+ else:
1283+ try:
1284+ shutil.rmtree(v)
1285+ except:
1286+ # this should not happen, but who knows?
1287+ Logs.warn('If you ever see this message, report it (%r)' % v)
1288+ total -= s
1289+ del flist[k]
1290+ Logs.debug('lru: Total at the end %r' % total)
1291+
1292+Build.BuildContext.raw_compile = Build.BuildContext.compile
1293+Build.BuildContext.compile = compile
1294+Build.BuildContext.sweep = sweep
1295+
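A hedged sketch of using the sweeper above; the trailing digits of the WAFCACHE directory name are what sweep() parses as the size limit (compared against byte totals from os.stat), and the path below is only an example:

# shell: export WAFCACHE=/tmp/wafcache_5000000000
def build(bld):
	import lru_cache  # assumes wafadmin/3rdparty is importable; wraps BuildContext.compile so the sweep runs after every build
	bld(features='cc cprogram', source='main.c', target='demo')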
1296diff --git a/buildtools/wafadmin/3rdparty/paranoid.py b/buildtools/wafadmin/3rdparty/paranoid.py
1297new file mode 100644
1298index 0000000..ead64ea
1299--- /dev/null
1300+++ b/buildtools/wafadmin/3rdparty/paranoid.py
1301@@ -0,0 +1,35 @@
1302+#!/usr/bin/env python
1303+# encoding: utf-8
1304+# ita 2010
1305+
1306+import Logs, Utils, Build, Task
1307+
1308+def say(txt):
1309+ Logs.warn("^o^: %s" % txt)
1310+
1311+try:
1312+ ret = Utils.cmd_output('which cowsay 2> /dev/null').strip()
1313+except Exception, e:
1314+ pass
1315+else:
1316+ def say(txt):
1317+ f = Utils.cmd_output([ret, txt])
1318+ Utils.pprint('PINK', f)
1319+
1320+say('you make the errors, we detect them')
1321+
1322+def check_task_classes(self):
1323+	for name, cls in Task.TaskBase.classes.items():
1324+		if issubclass(cls, Task.Task):
1325+			if not (getattr(cls, 'ext_in', None) or getattr(cls, 'before', None)):
1326+				say('class %s has no precedence constraints (ext_in/before)' % name)
1327+			if not (getattr(cls, 'ext_out', None) or getattr(cls, 'after', None)):
1328+				say('class %s has no precedence constraints (ext_out/after)' % name)
1329+
1330+comp = Build.BuildContext.compile
1331+def compile(self):
1332+ if not getattr(self, 'magic', None):
1333+ check_task_classes(self)
1334+ return comp(self)
1335+Build.BuildContext.compile = compile
1336+
1337diff --git a/buildtools/wafadmin/3rdparty/swig.py b/buildtools/wafadmin/3rdparty/swig.py
1338new file mode 100644
1339index 0000000..c0a4108
1340--- /dev/null
1341+++ b/buildtools/wafadmin/3rdparty/swig.py
1342@@ -0,0 +1,190 @@
1343+#! /usr/bin/env python
1344+# encoding: UTF-8
1345+# Petar Forai
1346+# Thomas Nagy 2008
1347+
1348+import re
1349+import Task, Utils, Logs, Constants
1350+from TaskGen import extension
1351+from Configure import conf
1352+import preproc
1353+
1354+"""
1355+Welcome to the hell of adding tasks dynamically
1356+
1357+swig interface files may be created at runtime; the module name may be unknown in advance
1358+
1359+rev 5859 is much simpler
1360+"""
1361+
1362+SWIG_EXTS = ['.swig', '.i']
1363+
1364+swig_str = '${SWIG} ${SWIGFLAGS} ${_CCINCFLAGS} ${_CXXINCFLAGS} ${_CCDEFFLAGS} ${_CXXDEFFLAGS} ${SRC}'
1365+cls = Task.simple_task_type('swig', swig_str, color='BLUE', ext_in='.i .h', ext_out='.o .c .cxx', shell=False)
1366+
1367+def runnable_status(self):
1368+ for t in self.run_after:
1369+ if not t.hasrun:
1370+			return Constants.ASK_LATER
1371+
1372+ if not getattr(self, 'init_outputs', None):
1373+ self.init_outputs = True
1374+ if not getattr(self, 'module', None):
1375+ # search the module name
1376+ txt = self.inputs[0].read(self.env)
1377+ m = re_module.search(txt)
1378+ if not m:
1379+ raise ValueError("could not find the swig module name")
1380+ self.module = m.group(1)
1381+
1382+ swig_c(self)
1383+
1384+ # add the language-specific output files as nodes
1385+ # call funs in the dict swig_langs
1386+ for x in self.env['SWIGFLAGS']:
1387+ # obtain the language
1388+ x = x[1:]
1389+ try:
1390+ fun = swig_langs[x]
1391+ except KeyError:
1392+ pass
1393+ else:
1394+ fun(self)
1395+
1396+ return Task.Task.runnable_status(self)
1397+setattr(cls, 'runnable_status', runnable_status)
1398+
1399+re_module = re.compile('%module(?:\s*\(.*\))?\s+(.+)', re.M)
1400+
1401+re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
1402+re_2 = re.compile('%include "(.*)"', re.M)
1403+re_3 = re.compile('#include "(.*)"', re.M)
1404+
1405+def scan(self):
1406+ "scan for swig dependencies, climb the .i files"
1407+ env = self.env
1408+
1409+ lst_src = []
1410+
1411+ seen = []
1412+ to_see = [self.inputs[0]]
1413+
1414+ while to_see:
1415+ node = to_see.pop(0)
1416+ if node.id in seen:
1417+ continue
1418+ seen.append(node.id)
1419+ lst_src.append(node)
1420+
1421+ # read the file
1422+ code = node.read(env)
1423+ code = preproc.re_nl.sub('', code)
1424+ code = preproc.re_cpp.sub(preproc.repl, code)
1425+
1426+ # find .i files and project headers
1427+ names = re_2.findall(code) + re_3.findall(code)
1428+ for n in names:
1429+ for d in self.generator.env.INC_PATHS + [node.parent]:
1430+ u = d.find_resource(n)
1431+ if u:
1432+ to_see.append(u)
1433+ break
1434+ else:
1435+ Logs.warn('could not find %r' % n)
1436+
1437+ # list of nodes this one depends on, and module name if present
1438+ if Logs.verbose:
1439+ Logs.debug('deps: deps for %s: %s' % (str(self), str(lst_src)))
1440+ return (lst_src, [])
1441+cls.scan = scan
1442+
1443+# provide additional language processing
1444+swig_langs = {}
1445+def swig(fun):
1446+ swig_langs[fun.__name__.replace('swig_', '')] = fun
1447+
1448+def swig_c(self):
1449+ ext = '.swigwrap_%d.c' % self.generator.idx
1450+ flags = self.env['SWIGFLAGS']
1451+ if '-c++' in flags:
1452+ ext += 'xx'
1453+ out_node = self.inputs[0].parent.find_or_declare(self.module + ext)
1454+
1455+ try:
1456+ if '-c++' in flags:
1457+ fun = self.generator.cxx_hook
1458+ else:
1459+ fun = self.generator.c_hook
1460+ except AttributeError:
1461+ raise Utils.WafError('No c%s compiler was found to process swig files' % ('-c++' in flags and '++' or ''))
1462+
1463+ task = fun(out_node)
1464+ task.set_run_after(self)
1465+
1466+ ge = self.generator.bld.generator
1467+ ge.outstanding.insert(0, task)
1468+ ge.total += 1
1469+
1470+ try:
1471+ ltask = self.generator.link_task
1472+ except AttributeError:
1473+ pass
1474+ else:
1475+ ltask.inputs.append(task.outputs[0])
1476+
1477+ self.outputs.append(out_node)
1478+
1479+ if not '-o' in self.env['SWIGFLAGS']:
1480+ self.env.append_value('SWIGFLAGS', '-o')
1481+ self.env.append_value('SWIGFLAGS', self.outputs[0].abspath(self.env))
1482+
1483+@swig
1484+def swig_python(tsk):
1485+ tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.py'))
1486+
1487+@swig
1488+def swig_ocaml(tsk):
1489+ tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.ml'))
1490+ tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.mli'))
1491+
1492+@extension(SWIG_EXTS)
1493+def i_file(self, node):
1494+ # the task instance
1495+ tsk = self.create_task('swig')
1496+ tsk.set_inputs(node)
1497+ tsk.module = getattr(self, 'swig_module', None)
1498+
1499+ flags = self.to_list(getattr(self, 'swig_flags', []))
1500+ self.env.append_value('SWIGFLAGS', flags)
1501+
1502+ if not '-outdir' in flags:
1503+ flags.append('-outdir')
1504+ flags.append(node.parent.abspath(self.env))
1505+
1506+@conf
1507+def check_swig_version(conf, minver=None):
1508+ """Check for a minimum swig version like conf.check_swig_version('1.3.28')
1509+ or conf.check_swig_version((1,3,28)) """
1510+ reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
1511+
1512+ swig_out = Utils.cmd_output('%s -version' % conf.env['SWIG'])
1513+
1514+ swigver = [int(s) for s in reg_swig.findall(swig_out)[0].split('.')]
1515+ if isinstance(minver, basestring):
1516+ minver = [int(s) for s in minver.split(".")]
1517+ if isinstance(minver, tuple):
1518+ minver = [int(s) for s in minver]
1519+ result = (minver is None) or (minver[:3] <= swigver[:3])
1520+ swigver_full = '.'.join(map(str, swigver))
1521+ if result:
1522+ conf.env['SWIG_VERSION'] = swigver_full
1523+	if minver is None:
1524+		conf.check_message_custom('swig version', '', swigver_full)
1525+	else:
1526+		minver_str = '.'.join(map(str, minver))
1527+		conf.check_message('swig version', '>= %s' % (minver_str,), result, option=swigver_full)
1528+ return result
1529+
1530+def detect(conf):
1531+ swig = conf.find_program('swig', var='SWIG', mandatory=True)
1532+
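A hedged wscript sketch around the swig support above; swig_flags (and the optional swig_module attribute) are what i_file() reads, and runnable_status() extracts the %module name from the interface file, while the tool names, tooldir and file names are assumptions:

def configure(conf):
	conf.check_tool('g++ python')
	conf.check_tool('swig', tooldir='buildtools/wafadmin/3rdparty')
	conf.check_swig_version('1.3.28')

def build(bld):
	# api.i declares '%module api'; the generated wrapper is compiled and linked into the python extension
	bld(features='cxx cshlib pyext', source='api.i helpers.cpp', swig_flags='-c++ -python', target='_api')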
1533diff --git a/buildtools/wafadmin/3rdparty/valadoc.py b/buildtools/wafadmin/3rdparty/valadoc.py
1534new file mode 100644
1535index 0000000..d0a9fe8
1536--- /dev/null
1537+++ b/buildtools/wafadmin/3rdparty/valadoc.py
1538@@ -0,0 +1,113 @@
1539+#! /usr/bin/env python
1540+# encoding: UTF-8
1541+# Nicolas Joseph 2009
1542+
1543+from fnmatch import fnmatchcase
1544+import os, os.path, re, stat
1545+import Task, Utils, Node, Constants
1546+from TaskGen import feature, extension, after
1547+from Logs import debug, warn, error
1548+
1549+VALADOC_STR = '${VALADOC}'
1550+
1551+class valadoc_task(Task.Task):
1552+
1553+ vars = ['VALADOC', 'VALADOCFLAGS']
1554+ color = 'BLUE'
1555+ after = 'cxx_link cc_link'
1556+ quiet = True
1557+
1558+ output_dir = ''
1559+ doclet = ''
1560+ package_name = ''
1561+ package_version = ''
1562+ files = []
1563+ protected = True
1564+ private = False
1565+ inherit = False
1566+ deps = False
1567+ enable_non_null_experimental = False
1568+ force = False
1569+
1570+ def runnable_status(self):
1571+ return True
1572+
1573+ def run(self):
1574+ if self.env['VALADOC']:
1575+ if not self.env['VALADOCFLAGS']:
1576+ self.env['VALADOCFLAGS'] = ''
1577+ cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
1578+ cmd.append ('-o %s' % self.output_dir)
1579+ if getattr(self, 'doclet', None):
1580+ cmd.append ('--doclet %s' % self.doclet)
1581+ cmd.append ('--package-name %s' % self.package_name)
1582+ if getattr(self, 'version', None):
1583+ cmd.append ('--package-version %s' % self.package_version)
1584+ if getattr(self, 'packages', None):
1585+ for package in self.packages:
1586+ cmd.append ('--pkg %s' % package)
1587+ if getattr(self, 'vapi_dirs', None):
1588+ for vapi_dir in self.vapi_dirs:
1589+ cmd.append ('--vapidir %s' % vapi_dir)
1590+ if not getattr(self, 'protected', None):
1591+ cmd.append ('--no-protected')
1592+ if getattr(self, 'private', None):
1593+ cmd.append ('--private')
1594+ if getattr(self, 'inherit', None):
1595+ cmd.append ('--inherit')
1596+ if getattr(self, 'deps', None):
1597+ cmd.append ('--deps')
1598+ if getattr(self, 'enable_non_null_experimental', None):
1599+ cmd.append ('--enable-non-null-experimental')
1600+ if getattr(self, 'force', None):
1601+ cmd.append ('--force')
1602+ cmd.append (' '.join ([x.relpath_gen (self.generator.bld.bldnode) for x in self.files]))
1603+ return self.generator.bld.exec_command(' '.join(cmd))
1604+ else:
1605+      error ('You must install valadoc <http://live.gnome.org/Valadoc> to generate the API documentation')
1606+ return -1
1607+
1608+@feature('valadoc')
1609+def process_valadoc(self):
1610+ task = getattr(self, 'task', None)
1611+ if not task:
1612+ task = self.create_task('valadoc')
1613+ self.task = task
1614+ if getattr(self, 'output_dir', None):
1615+ task.output_dir = self.output_dir
1616+ else:
1617+    raise Utils.WafError('no output directory')
1618+ if getattr(self, 'doclet', None):
1619+ task.doclet = self.doclet
1620+ else:
1621+    raise Utils.WafError('no doclet directory')
1622+ if getattr(self, 'package_name', None):
1623+ task.package_name = self.package_name
1624+ else:
1625+    raise Utils.WafError('no package name')
1626+ if getattr(self, 'package_version', None):
1627+ task.package_version = self.package_version
1628+ if getattr(self, 'packages', None):
1629+ task.packages = Utils.to_list(self.packages)
1630+ if getattr(self, 'vapi_dirs', None):
1631+ task.vapi_dirs = Utils.to_list(self.vapi_dirs)
1632+ if getattr(self, 'files', None):
1633+ task.files = self.files
1634+ else:
1635+    raise Utils.WafError('no input file')
1636+ if getattr(self, 'protected', None):
1637+ task.protected = self.protected
1638+ if getattr(self, 'private', None):
1639+ task.private = self.private
1640+ if getattr(self, 'inherit', None):
1641+ task.inherit = self.inherit
1642+ if getattr(self, 'deps', None):
1643+ task.deps = self.deps
1644+ if getattr(self, 'enable_non_null_experimental', None):
1645+ task.enable_non_null_experimental = self.enable_non_null_experimental
1646+ if getattr(self, 'force', None):
1647+ task.force = self.force
1648+
1649+def detect(conf):
1650+ conf.find_program('valadoc', var='VALADOC', mandatory=False)
1651+
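A hedged sketch of driving the valadoc task above; the attribute names mirror those read by process_valadoc(), and all values are placeholders:

def configure(conf):
	conf.check_tool('valadoc', tooldir='buildtools/wafadmin/3rdparty')

def build(bld):
	bld(features='valadoc',
		output_dir='apidocs',
		doclet='html',
		package_name='mylib',
		package_version='1.0',
		packages='glib-2.0',
		files=[bld.path.find_resource('mylib.vala')])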
1652diff --git a/buildtools/wafadmin/Build.py b/buildtools/wafadmin/Build.py
1653new file mode 100644
1654index 0000000..8e7c72c
1655--- /dev/null
1656+++ b/buildtools/wafadmin/Build.py
1657@@ -0,0 +1,1033 @@
1658+#!/usr/bin/env python
1659+# encoding: utf-8
1660+# Thomas Nagy, 2005 (ita)
1661+
1662+"""
1663+Dependency tree holder
1664+
1665+The class Build holds all the info related to a build:
1666+* file system representation (tree of Node instances)
1667+* various cached objects (task signatures, file scan results, ..)
1668+
1669+There is only one Build object at a time (bld singleton)
1670+"""
1671+
1672+import os, sys, errno, re, glob, gc, datetime, shutil
1673+try: import cPickle
1674+except: import pickle as cPickle
1675+import Runner, TaskGen, Node, Scripting, Utils, Environment, Task, Logs, Options
1676+from Logs import debug, error, info
1677+from Constants import *
1678+
1679+SAVED_ATTRS = 'root srcnode bldnode node_sigs node_deps raw_deps task_sigs id_nodes'.split()
1680+"Build class members to save"
1681+
1682+bld = None
1683+"singleton - safe to use when Waf is not used as a library"
1684+
1685+class BuildError(Utils.WafError):
1686+ def __init__(self, b=None, t=[]):
1687+ self.bld = b
1688+ self.tasks = t
1689+ self.ret = 1
1690+ Utils.WafError.__init__(self, self.format_error())
1691+
1692+ def format_error(self):
1693+ lst = ['Build failed:']
1694+ for tsk in self.tasks:
1695+ txt = tsk.format_error()
1696+ if txt: lst.append(txt)
1697+ sep = ' '
1698+ if len(lst) > 2:
1699+ sep = '\n'
1700+ return sep.join(lst)
1701+
1702+def group_method(fun):
1703+ """
1704+ sets a build context method to execute after the current group has finished executing
1705+ this is useful for installing build files:
1706+ * calling install_files/install_as will fail if called too early
1707+ * people do not want to define install method in their task classes
1708+
1709+ TODO: try it
1710+ """
1711+ def f(*k, **kw):
1712+ if not k[0].is_install:
1713+ return False
1714+
1715+ postpone = True
1716+ if 'postpone' in kw:
1717+ postpone = kw['postpone']
1718+ del kw['postpone']
1719+
1720+ # TODO waf 1.6 in theory there should be no reference to the TaskManager internals here
1721+ if postpone:
1722+ m = k[0].task_manager
1723+ if not m.groups: m.add_group()
1724+ m.groups[m.current_group].post_funs.append((fun, k, kw))
1725+ if not 'cwd' in kw:
1726+ kw['cwd'] = k[0].path
1727+ else:
1728+ fun(*k, **kw)
1729+ return f
1730+
1731+class BuildContext(Utils.Context):
1732+ "holds the dependency tree"
1733+ def __init__(self):
1734+
1735+ # not a singleton, but provided for compatibility
1736+ global bld
1737+ bld = self
1738+
1739+ self.task_manager = Task.TaskManager()
1740+
1741+ # instead of hashing the nodes, we assign them a unique id when they are created
1742+ self.id_nodes = 0
1743+ self.idx = {}
1744+
1745+ # map names to environments, the 'default' must be defined
1746+ self.all_envs = {}
1747+
1748+ # ======================================= #
1749+ # code for reading the scripts
1750+
1751+ # project build directory - do not reset() from load_dirs()
1752+ self.bdir = ''
1753+
1754+ # the current directory from which the code is run
1755+		# the folder changes every time a wscript is read
1756+ self.path = None
1757+
1758+ # Manual dependencies.
1759+ self.deps_man = Utils.DefaultDict(list)
1760+
1761+ # ======================================= #
1762+ # cache variables
1763+
1764+ # local cache for absolute paths - cache_node_abspath[variant][node]
1765+ self.cache_node_abspath = {}
1766+
1767+ # list of folders that are already scanned
1768+ # so that we do not need to stat them one more time
1769+ self.cache_scanned_folders = {}
1770+
1771+ # list of targets to uninstall for removing the empty folders after uninstalling
1772+ self.uninstall = []
1773+
1774+ # ======================================= #
1775+ # tasks and objects
1776+
1777+ # build dir variants (release, debug, ..)
1778+ for v in 'cache_node_abspath task_sigs node_deps raw_deps node_sigs'.split():
1779+ var = {}
1780+ setattr(self, v, var)
1781+
1782+ self.cache_dir_contents = {}
1783+
1784+ self.all_task_gen = []
1785+ self.task_gen_cache_names = {}
1786+ self.cache_sig_vars = {}
1787+ self.log = None
1788+
1789+ self.root = None
1790+ self.srcnode = None
1791+ self.bldnode = None
1792+
1793+ # bind the build context to the nodes in use
1794+ # this means better encapsulation and no build context singleton
1795+ class node_class(Node.Node):
1796+ pass
1797+ self.node_class = node_class
1798+ self.node_class.__module__ = "Node"
1799+ self.node_class.__name__ = "Nodu"
1800+ self.node_class.bld = self
1801+
1802+ self.is_install = None
1803+
1804+ def __copy__(self):
1805+ "nodes are not supposed to be copied"
1806+ raise Utils.WafError('build contexts are not supposed to be cloned')
1807+
1808+ def load(self):
1809+ "load the cache from the disk"
1810+ try:
1811+ env = Environment.Environment(os.path.join(self.cachedir, 'build.config.py'))
1812+ except (IOError, OSError):
1813+ pass
1814+ else:
1815+ if env['version'] < HEXVERSION:
1816+ raise Utils.WafError('Version mismatch! reconfigure the project')
1817+ for t in env['tools']:
1818+ self.setup(**t)
1819+
1820+ try:
1821+ gc.disable()
1822+ f = data = None
1823+
1824+ Node.Nodu = self.node_class
1825+
1826+ try:
1827+ f = open(os.path.join(self.bdir, DBFILE), 'rb')
1828+ except (IOError, EOFError):
1829+ # handle missing file/empty file
1830+ pass
1831+
1832+ try:
1833+ if f: data = cPickle.load(f)
1834+ except AttributeError:
1835+ # handle file of an old Waf version
1836+ # that has an attribute which no longer exist
1837+ # (e.g. AttributeError: 'module' object has no attribute 'BuildDTO')
1838+ if Logs.verbose > 1: raise
1839+
1840+ if data:
1841+ for x in SAVED_ATTRS: setattr(self, x, data[x])
1842+ else:
1843+ debug('build: Build cache loading failed')
1844+
1845+ finally:
1846+ if f: f.close()
1847+ gc.enable()
1848+
1849+ def save(self):
1850+ "store the cache on disk, see self.load"
1851+ gc.disable()
1852+ self.root.__class__.bld = None
1853+
1854+ # some people are very nervous with ctrl+c so we have to make a temporary file
1855+ Node.Nodu = self.node_class
1856+ db = os.path.join(self.bdir, DBFILE)
1857+ file = open(db + '.tmp', 'wb')
1858+ data = {}
1859+ for x in SAVED_ATTRS: data[x] = getattr(self, x)
1860+ cPickle.dump(data, file, -1)
1861+ file.close()
1862+
1863+ # do not use shutil.move
1864+ try: os.unlink(db)
1865+ except OSError: pass
1866+ os.rename(db + '.tmp', db)
1867+ self.root.__class__.bld = self
1868+ gc.enable()
1869+
1870+ # ======================================= #
1871+
1872+ def clean(self):
1873+ debug('build: clean called')
1874+
1875+ # does not clean files created during the configuration
1876+ precious = set([])
1877+ for env in self.all_envs.values():
1878+ for x in env[CFG_FILES]:
1879+ node = self.srcnode.find_resource(x)
1880+ if node:
1881+ precious.add(node.id)
1882+
1883+ def clean_rec(node):
1884+ for x in list(node.childs.keys()):
1885+ nd = node.childs[x]
1886+
1887+ tp = nd.id & 3
1888+ if tp == Node.DIR:
1889+ clean_rec(nd)
1890+ elif tp == Node.BUILD:
1891+ if nd.id in precious: continue
1892+ for env in self.all_envs.values():
1893+ try: os.remove(nd.abspath(env))
1894+ except OSError: pass
1895+ node.childs.__delitem__(x)
1896+
1897+ clean_rec(self.srcnode)
1898+
1899+ for v in 'node_sigs node_deps task_sigs raw_deps cache_node_abspath'.split():
1900+ setattr(self, v, {})
1901+
1902+ def compile(self):
1903+		"""The cache file is not written if nothing was built at all (build is up to date)"""
1904+ debug('build: compile called')
1905+
1906+ """
1907+ import cProfile, pstats
1908+ cProfile.run("import Build\nBuild.bld.flush()", 'profi.txt')
1909+ p = pstats.Stats('profi.txt')
1910+ p.sort_stats('cumulative').print_stats(80)
1911+ """
1912+ self.flush()
1913+ #"""
1914+
1915+ self.generator = Runner.Parallel(self, Options.options.jobs)
1916+
1917+ def dw(on=True):
1918+ if Options.options.progress_bar:
1919+ if on: sys.stderr.write(Logs.colors.cursor_on)
1920+ else: sys.stderr.write(Logs.colors.cursor_off)
1921+
1922+ debug('build: executor starting')
1923+
1924+ back = os.getcwd()
1925+ os.chdir(self.bldnode.abspath())
1926+
1927+ try:
1928+ try:
1929+ dw(on=False)
1930+ self.generator.start()
1931+ except KeyboardInterrupt:
1932+ dw()
1933+ # if self.generator.processed != 1: TODO
1934+ self.save()
1935+ raise
1936+ except Exception:
1937+ dw()
1938+ # do not store anything, for something bad happened
1939+ raise
1940+ else:
1941+ dw()
1942+ #if self.generator.processed != 1: TODO
1943+ self.save()
1944+
1945+ if self.generator.error:
1946+ raise BuildError(self, self.task_manager.tasks_done)
1947+
1948+ finally:
1949+ os.chdir(back)
1950+
1951+ def install(self):
1952+ "this function is called for both install and uninstall"
1953+ debug('build: install called')
1954+
1955+ self.flush()
1956+
1957+ # remove empty folders after uninstalling
1958+ if self.is_install < 0:
1959+ lst = []
1960+ for x in self.uninstall:
1961+ dir = os.path.dirname(x)
1962+ if not dir in lst: lst.append(dir)
1963+ lst.sort()
1964+ lst.reverse()
1965+
1966+ nlst = []
1967+ for y in lst:
1968+ x = y
1969+ while len(x) > 4:
1970+ if not x in nlst: nlst.append(x)
1971+ x = os.path.dirname(x)
1972+
1973+ nlst.sort()
1974+ nlst.reverse()
1975+ for x in nlst:
1976+ try: os.rmdir(x)
1977+ except OSError: pass
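			# illustrative worked example (added note, not from the upstream waf sources):
			# uninstalling /usr/share/app/doc/README puts '/usr/share/app/doc' into lst;
			# the while loop above then adds '/usr/share/app/doc', '/usr/share/app' and
			# '/usr/share' to nlst, and the reversed sort removes the deepest (now
			# empty) folders first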
1978+
1979+ def new_task_gen(self, *k, **kw):
1980+ if self.task_gen_cache_names:
1981+ self.task_gen_cache_names = {}
1982+
1983+ kw['bld'] = self
1984+ if len(k) == 0:
1985+ ret = TaskGen.task_gen(*k, **kw)
1986+ else:
1987+ cls_name = k[0]
1988+
1989+ try: cls = TaskGen.task_gen.classes[cls_name]
1990+ except KeyError: raise Utils.WscriptError('%s is not a valid task generator -> %s' %
1991+ (cls_name, [x for x in TaskGen.task_gen.classes]))
1992+ ret = cls(*k, **kw)
1993+ return ret
1994+
1995+ def __call__(self, *k, **kw):
1996+ if self.task_gen_cache_names:
1997+ self.task_gen_cache_names = {}
1998+
1999+ kw['bld'] = self
2000+ return TaskGen.task_gen(*k, **kw)
2001+
2002+ def load_envs(self):
2003+ try:
2004+ lst = Utils.listdir(self.cachedir)
2005+ except OSError, e:
2006+ if e.errno == errno.ENOENT:
2007+ raise Utils.WafError('The project was not configured: run "waf configure" first!')
2008+ else:
2009+ raise
2010+
2011+ if not lst:
2012+ raise Utils.WafError('The cache directory is empty: reconfigure the project')
2013+
2014+ for file in lst:
2015+ if file.endswith(CACHE_SUFFIX):
2016+ env = Environment.Environment(os.path.join(self.cachedir, file))
2017+ name = file[:-len(CACHE_SUFFIX)]
2018+
2019+ self.all_envs[name] = env
2020+
2021+ self.init_variants()
2022+
2023+ for env in self.all_envs.values():
2024+ for f in env[CFG_FILES]:
2025+ newnode = self.path.find_or_declare(f)
2026+ try:
2027+ hash = Utils.h_file(newnode.abspath(env))
2028+ except (IOError, AttributeError):
2029+ error("cannot find "+f)
2030+ hash = SIG_NIL
2031+ self.node_sigs[env.variant()][newnode.id] = hash
2032+
2033+ # TODO: hmmm, these nodes are removed from the tree when calling rescan()
2034+ self.bldnode = self.root.find_dir(self.bldnode.abspath())
2035+ self.path = self.srcnode = self.root.find_dir(self.srcnode.abspath())
2036+ self.cwd = self.bldnode.abspath()
2037+
2038+ def setup(self, tool, tooldir=None, funs=None):
2039+ "setup tools for build process"
2040+ if isinstance(tool, list):
2041+ for i in tool: self.setup(i, tooldir)
2042+ return
2043+
2044+ if not tooldir: tooldir = Options.tooldir
2045+
2046+ module = Utils.load_tool(tool, tooldir)
2047+ if hasattr(module, "setup"): module.setup(self)
2048+
2049+ def init_variants(self):
2050+ debug('build: init variants')
2051+
2052+ lstvariants = []
2053+ for env in self.all_envs.values():
2054+ if not env.variant() in lstvariants:
2055+ lstvariants.append(env.variant())
2056+ self.lst_variants = lstvariants
2057+
2058+ debug('build: list of variants is %r', lstvariants)
2059+
2060+ for name in lstvariants+[0]:
2061+ for v in 'node_sigs cache_node_abspath'.split():
2062+ var = getattr(self, v)
2063+ if not name in var:
2064+ var[name] = {}
2065+
2066+ # ======================================= #
2067+ # node and folder handling
2068+
2069+ # this should be the main entry point
2070+ def load_dirs(self, srcdir, blddir, load_cache=1):
2071+		"this function should be the start of everything"
2072+
2073+ assert(os.path.isabs(srcdir))
2074+ assert(os.path.isabs(blddir))
2075+
2076+ self.cachedir = os.path.join(blddir, CACHE_DIR)
2077+
2078+ if srcdir == blddir:
2079+ raise Utils.WafError("build dir must be different from srcdir: %s <-> %s " % (srcdir, blddir))
2080+
2081+ self.bdir = blddir
2082+
2083+ # try to load the cache file, if it does not exist, nothing happens
2084+ self.load()
2085+
2086+ if not self.root:
2087+ Node.Nodu = self.node_class
2088+ self.root = Node.Nodu('', None, Node.DIR)
2089+
2090+ if not self.srcnode:
2091+ self.srcnode = self.root.ensure_dir_node_from_path(srcdir)
2092+ debug('build: srcnode is %s and srcdir %s', self.srcnode.name, srcdir)
2093+
2094+ self.path = self.srcnode
2095+
2096+ # create this build dir if necessary
2097+ try: os.makedirs(blddir)
2098+ except OSError: pass
2099+
2100+ if not self.bldnode:
2101+ self.bldnode = self.root.ensure_dir_node_from_path(blddir)
2102+
2103+ self.init_variants()
2104+
2105+ def rescan(self, src_dir_node):
2106+ """
2107+		look at the contents of a (folder) node and update its list of childs
2108+
2109+ The intent is to perform the following steps
2110+ * remove the nodes for the files that have disappeared
2111+ * remove the signatures for the build files that have disappeared
2112+ * cache the results of os.listdir
2113+ * create the build folder equivalent (mkdir) for each variant
2114+ src/bar -> build/default/src/bar, build/release/src/bar
2115+
2116+ when a folder in the source directory is removed, we do not check recursively
2117+ to remove the unused nodes. To do that, call 'waf clean' and build again.
2118+ """
2119+
2120+ # do not rescan over and over again
2121+ # TODO use a single variable in waf 1.6
2122+ if self.cache_scanned_folders.get(src_dir_node.id, None): return
2123+ self.cache_scanned_folders[src_dir_node.id] = True
2124+
2125+ # TODO remove in waf 1.6
2126+ if hasattr(self, 'repository'): self.repository(src_dir_node)
2127+
2128+ if not src_dir_node.name and sys.platform == 'win32':
2129+ # the root has no name, contains drive letters, and cannot be listed
2130+ return
2131+
2132+
2133+ # first, take the case of the source directory
2134+ parent_path = src_dir_node.abspath()
2135+ try:
2136+ lst = set(Utils.listdir(parent_path))
2137+ except OSError:
2138+ lst = set([])
2139+
2140+ # TODO move this at the bottom
2141+ self.cache_dir_contents[src_dir_node.id] = lst
2142+
2143+ # hash the existing source files, remove the others
2144+ cache = self.node_sigs[0]
2145+ for x in src_dir_node.childs.values():
2146+ if x.id & 3 != Node.FILE: continue
2147+ if x.name in lst:
2148+ try:
2149+ cache[x.id] = Utils.h_file(x.abspath())
2150+ except IOError:
2151+ raise Utils.WafError('The file %s is not readable or has become a dir' % x.abspath())
2152+ else:
2153+ try: del cache[x.id]
2154+ except KeyError: pass
2155+
2156+ del src_dir_node.childs[x.name]
2157+
2158+
2159+ # first obtain the differences between srcnode and src_dir_node
2160+ h1 = self.srcnode.height()
2161+ h2 = src_dir_node.height()
2162+
2163+ lst = []
2164+ child = src_dir_node
2165+ while h2 > h1:
2166+ lst.append(child.name)
2167+ child = child.parent
2168+ h2 -= 1
2169+ lst.reverse()
2170+
2171+ # list the files in the build dirs
2172+ try:
2173+ for variant in self.lst_variants:
2174+ sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
2175+ self.listdir_bld(src_dir_node, sub_path, variant)
2176+ except OSError:
2177+
2178+ # listdir failed, remove the build node signatures for all variants
2179+ for node in src_dir_node.childs.values():
2180+ if node.id & 3 != Node.BUILD:
2181+ continue
2182+
2183+ for dct in self.node_sigs.values():
2184+ if node.id in dct:
2185+ dct.__delitem__(node.id)
2186+
2187+ # the policy is to avoid removing nodes representing directories
2188+ src_dir_node.childs.__delitem__(node.name)
2189+
2190+ for variant in self.lst_variants:
2191+ sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
2192+ try:
2193+ os.makedirs(sub_path)
2194+ except OSError:
2195+ pass
2196+
2197+ # ======================================= #
2198+ def listdir_src(self, parent_node):
2199+ """do not use, kept for compatibility"""
2200+ pass
2201+
2202+ def remove_node(self, node):
2203+ """do not use, kept for compatibility"""
2204+ pass
2205+
2206+ def listdir_bld(self, parent_node, path, variant):
2207+ """in this method we do not add timestamps but we remove them
2208+ when the files no longer exist (file removed in the build dir)"""
2209+
2210+ i_existing_nodes = [x for x in parent_node.childs.values() if x.id & 3 == Node.BUILD]
2211+
2212+ lst = set(Utils.listdir(path))
2213+ node_names = set([x.name for x in i_existing_nodes])
2214+ remove_names = node_names - lst
2215+
2216+ # remove the stamps of the build nodes that no longer exist on the filesystem
2217+ ids_to_remove = [x.id for x in i_existing_nodes if x.name in remove_names]
2218+ cache = self.node_sigs[variant]
2219+ for nid in ids_to_remove:
2220+ if nid in cache:
2221+ cache.__delitem__(nid)
2222+
2223+ def get_env(self):
2224+ return self.env_of_name('default')
2225+ def set_env(self, name, val):
2226+ self.all_envs[name] = val
2227+
2228+ env = property(get_env, set_env)
2229+
2230+ def add_manual_dependency(self, path, value):
2231+ if isinstance(path, Node.Node):
2232+ node = path
2233+ elif os.path.isabs(path):
2234+ node = self.root.find_resource(path)
2235+ else:
2236+ node = self.path.find_resource(path)
2237+ self.deps_man[node.id].append(value)
2238+
2239+ def launch_node(self):
2240+ """return the launch directory as a node"""
2241+		# p_ln is kind of private, but left public just in case
2242+ try:
2243+ return self.p_ln
2244+ except AttributeError:
2245+ self.p_ln = self.root.find_dir(Options.launch_dir)
2246+ return self.p_ln
2247+
2248+ def glob(self, pattern, relative=True):
2249+ "files matching the pattern, seen from the current folder"
2250+ path = self.path.abspath()
2251+ files = [self.root.find_resource(x) for x in glob.glob(path+os.sep+pattern)]
2252+ if relative:
2253+ files = [x.path_to_parent(self.path) for x in files if x]
2254+ else:
2255+ files = [x.abspath() for x in files if x]
2256+ return files
2257+
2258+ ## the following methods are candidates for the stable apis ##
2259+
2260+ def add_group(self, *k):
2261+ self.task_manager.add_group(*k)
2262+
2263+ def set_group(self, *k, **kw):
2264+ self.task_manager.set_group(*k, **kw)
2265+
2266+ def hash_env_vars(self, env, vars_lst):
2267+ """hash environment variables
2268+ ['CXX', ..] -> [env['CXX'], ..] -> md5()"""
2269+
2270+ # ccroot objects use the same environment for building the .o at once
2271+ # the same environment and the same variables are used
2272+
2273+ idx = str(id(env)) + str(vars_lst)
2274+ try: return self.cache_sig_vars[idx]
2275+ except KeyError: pass
2276+
2277+ lst = [str(env[a]) for a in vars_lst]
2278+ ret = Utils.h_list(lst)
2279+ debug('envhash: %r %r', ret, lst)
2280+
2281+ # next time
2282+ self.cache_sig_vars[idx] = ret
2283+ return ret
2284+
2285+ def name_to_obj(self, name, env):
2286+ """retrieve a task generator from its name or its target name
2287+ remember that names must be unique"""
2288+ cache = self.task_gen_cache_names
2289+ if not cache:
2290+ # create the index lazily
2291+ for x in self.all_task_gen:
2292+ vt = x.env.variant() + '_'
2293+ if x.name:
2294+ cache[vt + x.name] = x
2295+ else:
2296+ if isinstance(x.target, str):
2297+ target = x.target
2298+ else:
2299+ target = ' '.join(x.target)
2300+ v = vt + target
2301+ if not cache.get(v, None):
2302+ cache[v] = x
2303+ return cache.get(env.variant() + '_' + name, None)
2304+
2305+ def flush(self, all=1):
2306+ """tell the task generators to create the tasks"""
2307+
2308+ self.ini = datetime.datetime.now()
2309+ # force the initialization of the mapping name->object in flush
2310+ # name_to_obj can be used in userland scripts, in that case beware of incomplete mapping
2311+ self.task_gen_cache_names = {}
2312+ self.name_to_obj('', self.env)
2313+
2314+ debug('build: delayed operation TaskGen.flush() called')
2315+
2316+ if Options.options.compile_targets:
2317+ debug('task_gen: posting objects %r listed in compile_targets', Options.options.compile_targets)
2318+
2319+ mana = self.task_manager
2320+ to_post = []
2321+ min_grp = 0
2322+
2323+ # ensure the target names exist, fail before any post()
2324+ target_objects = Utils.DefaultDict(list)
2325+ for target_name in Options.options.compile_targets.split(','):
2326+ # trim target_name (handle cases when the user added spaces to targets)
2327+ target_name = target_name.strip()
2328+ for env in self.all_envs.values():
2329+ tg = self.name_to_obj(target_name, env)
2330+ if tg:
2331+ target_objects[target_name].append(tg)
2332+
2333+ m = mana.group_idx(tg)
2334+ if m > min_grp:
2335+ min_grp = m
2336+ to_post = [tg]
2337+ elif m == min_grp:
2338+ to_post.append(tg)
2339+
2340+ if not target_name in target_objects and all:
2341+ raise Utils.WafError("target '%s' does not exist" % target_name)
2342+
2343+ debug('group: Forcing up to group %s for target %s', mana.group_name(min_grp), Options.options.compile_targets)
2344+
2345+ # post all the task generators in previous groups
2346+ for i in xrange(len(mana.groups)):
2347+ mana.current_group = i
2348+ if i == min_grp:
2349+ break
2350+ g = mana.groups[i]
2351+ debug('group: Forcing group %s', mana.group_name(g))
2352+ for t in g.tasks_gen:
2353+ debug('group: Posting %s', t.name or t.target)
2354+ t.post()
2355+
2356+ # then post the task generators listed in compile_targets in the last group
2357+ for t in to_post:
2358+ t.post()
2359+
2360+ else:
2361+ debug('task_gen: posting objects (normal)')
2362+ ln = self.launch_node()
2363+			# if the build is started from the build directory, behave as if it was started from the top-level
2364+ # for the pretty-printing (Node.py), the two lines below cannot be moved to Build::launch_node
2365+ if ln.is_child_of(self.bldnode) or not ln.is_child_of(self.srcnode):
2366+ ln = self.srcnode
2367+
2368+ # if the project file is located under the source directory, build all targets by default
2369+ # else 'waf configure build' does nothing
2370+ proj_node = self.root.find_dir(os.path.split(Utils.g_module.root_path)[0])
2371+ if proj_node.id != self.srcnode.id:
2372+ ln = self.srcnode
2373+
2374+ for i in xrange(len(self.task_manager.groups)):
2375+ g = self.task_manager.groups[i]
2376+ self.task_manager.current_group = i
2377+ if Logs.verbose:
2378+ groups = [x for x in self.task_manager.groups_names if id(self.task_manager.groups_names[x]) == id(g)]
2379+ name = groups and groups[0] or 'unnamed'
2380+					Logs.debug('group: group %s', name)
2381+ for tg in g.tasks_gen:
2382+ if not tg.path.is_child_of(ln):
2383+ continue
2384+ if Logs.verbose:
2385+ Logs.debug('group: %s' % tg)
2386+ tg.post()
2387+
2388+ def env_of_name(self, name):
2389+ try:
2390+ return self.all_envs[name]
2391+ except KeyError:
2392+ error('no such environment: '+name)
2393+ return None
2394+
2395+ def progress_line(self, state, total, col1, col2):
2396+ n = len(str(total))
2397+
2398+ Utils.rot_idx += 1
2399+ ind = Utils.rot_chr[Utils.rot_idx % 4]
2400+
2401+ ini = self.ini
2402+
2403+ pc = (100.*state)/total
2404+ eta = Utils.get_elapsed_time(ini)
2405+ fs = "[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s][" % (n, n, ind)
2406+ left = fs % (state, total, col1, pc, col2)
2407+ right = '][%s%s%s]' % (col1, eta, col2)
2408+
2409+ cols = Utils.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2)
2410+ if cols < 7: cols = 7
2411+
2412+ ratio = int((cols*state)/total) - 1
2413+
2414+ bar = ('='*ratio+'>').ljust(cols)
2415+ msg = Utils.indicator % (left, bar, right)
2416+
2417+ return msg
2418+
2419+
2420+	# do_install is called by install_files/install_as below
2421+ def do_install(self, src, tgt, chmod=O644):
2422+ """returns true if the file was effectively installed or uninstalled, false otherwise"""
2423+ if self.is_install > 0:
2424+ if not Options.options.force:
2425+ # check if the file is already there to avoid a copy
2426+ try:
2427+ st1 = os.stat(tgt)
2428+ st2 = os.stat(src)
2429+ except OSError:
2430+ pass
2431+ else:
2432+ # same size and identical timestamps -> make no copy
2433+ if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
2434+ return False
2435+
2436+ srclbl = src.replace(self.srcnode.abspath(None)+os.sep, '')
2437+ info("* installing %s as %s" % (srclbl, tgt))
2438+
2439+ # following is for shared libs and stale inodes (-_-)
2440+ try: os.remove(tgt)
2441+ except OSError: pass
2442+
2443+ try:
2444+ shutil.copy2(src, tgt)
2445+ os.chmod(tgt, chmod)
2446+ except IOError:
2447+ try:
2448+ os.stat(src)
2449+ except (OSError, IOError):
2450+ error('File %r does not exist' % src)
2451+ raise Utils.WafError('Could not install the file %r' % tgt)
2452+ return True
2453+
2454+ elif self.is_install < 0:
2455+ info("* uninstalling %s" % tgt)
2456+
2457+ self.uninstall.append(tgt)
2458+
2459+ try:
2460+ os.remove(tgt)
2461+ except OSError, e:
2462+ if e.errno != errno.ENOENT:
2463+ if not getattr(self, 'uninstall_error', None):
2464+ self.uninstall_error = True
2465+ Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
2466+ if Logs.verbose > 1:
2467+ Logs.warn('could not remove %s (error code %r)' % (e.filename, e.errno))
2468+ return True
2469+
2470+ red = re.compile(r"^([A-Za-z]:)?[/\\\\]*")
2471+ def get_install_path(self, path, env=None):
2472+		"installation path prefixed by the destdir; variables such as '${PREFIX}' in '${PREFIX}/bin' are substituted"
2473+ if not env: env = self.env
2474+ destdir = env.get_destdir()
2475+ path = path.replace('/', os.sep)
2476+ destpath = Utils.subst_vars(path, env)
2477+ if destdir:
2478+ destpath = os.path.join(destdir, self.red.sub('', destpath))
2479+ return destpath
2480+
2481+ def install_dir(self, path, env=None):
2482+ """
2483+ create empty folders for the installation (very rarely used)
2484+ """
2485+ if env:
2486+ assert isinstance(env, Environment.Environment), "invalid parameter"
2487+ else:
2488+ env = self.env
2489+
2490+ if not path:
2491+ return []
2492+
2493+ destpath = self.get_install_path(path, env)
2494+
2495+ if self.is_install > 0:
2496+ info('* creating %s' % destpath)
2497+ Utils.check_dir(destpath)
2498+ elif self.is_install < 0:
2499+ info('* removing %s' % destpath)
2500+ self.uninstall.append(destpath + '/xxx') # yes, ugly
2501+
2502+ def install_files(self, path, files, env=None, chmod=O644, relative_trick=False, cwd=None):
2503+ """To install files only after they have been built, put the calls in a method named
2504+ post_build on the top-level wscript
2505+
2506+ The files must be a list and contain paths as strings or as Nodes
2507+
2508+		The relative_trick flag can be set to install folders; use bld.path.ant_glob() with it
2509+ """
2510+ if env:
2511+ assert isinstance(env, Environment.Environment), "invalid parameter"
2512+ else:
2513+ env = self.env
2514+
2515+ if not path: return []
2516+
2517+ if not cwd:
2518+ cwd = self.path
2519+
2520+ if isinstance(files, str) and '*' in files:
2521+ gl = cwd.abspath() + os.sep + files
2522+ lst = glob.glob(gl)
2523+ else:
2524+ lst = Utils.to_list(files)
2525+
2526+ if not getattr(lst, '__iter__', False):
2527+ lst = [lst]
2528+
2529+ destpath = self.get_install_path(path, env)
2530+
2531+ Utils.check_dir(destpath)
2532+
2533+ installed_files = []
2534+ for filename in lst:
2535+ if isinstance(filename, str) and os.path.isabs(filename):
2536+ alst = Utils.split_path(filename)
2537+ destfile = os.path.join(destpath, alst[-1])
2538+ else:
2539+ if isinstance(filename, Node.Node):
2540+ nd = filename
2541+ else:
2542+ nd = cwd.find_resource(filename)
2543+ if not nd:
2544+ raise Utils.WafError("Unable to install the file %r (not found in %s)" % (filename, cwd))
2545+
2546+ if relative_trick:
2547+ destfile = os.path.join(destpath, filename)
2548+ Utils.check_dir(os.path.dirname(destfile))
2549+ else:
2550+ destfile = os.path.join(destpath, nd.name)
2551+
2552+ filename = nd.abspath(env)
2553+
2554+ if self.do_install(filename, destfile, chmod):
2555+ installed_files.append(destfile)
2556+ return installed_files
2557+
2558+ def install_as(self, path, srcfile, env=None, chmod=O644, cwd=None):
2559+ """
2560+ srcfile may be a string or a Node representing the file to install
2561+
2562+ returns True if the file was effectively installed, False otherwise
2563+ """
2564+ if env:
2565+ assert isinstance(env, Environment.Environment), "invalid parameter"
2566+ else:
2567+ env = self.env
2568+
2569+ if not path:
2570+ raise Utils.WafError("where do you want to install %r? (%r?)" % (srcfile, path))
2571+
2572+ if not cwd:
2573+ cwd = self.path
2574+
2575+ destpath = self.get_install_path(path, env)
2576+
2577+ dir, name = os.path.split(destpath)
2578+ Utils.check_dir(dir)
2579+
2580+ # the source path
2581+ if isinstance(srcfile, Node.Node):
2582+ src = srcfile.abspath(env)
2583+ else:
2584+ src = srcfile
2585+ if not os.path.isabs(srcfile):
2586+ node = cwd.find_resource(srcfile)
2587+ if not node:
2588+ raise Utils.WafError("Unable to install the file %r (not found in %s)" % (srcfile, cwd))
2589+ src = node.abspath(env)
2590+
2591+ return self.do_install(src, destpath, chmod)
2592+
2593+ def symlink_as(self, path, src, env=None, cwd=None):
2594+ """example: bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3') """
2595+
2596+ if sys.platform == 'win32':
2597+ # well, this *cannot* work
2598+ return
2599+
2600+ if not path:
2601+ raise Utils.WafError("where do you want to install %r? (%r?)" % (src, path))
2602+
2603+ tgt = self.get_install_path(path, env)
2604+
2605+ dir, name = os.path.split(tgt)
2606+ Utils.check_dir(dir)
2607+
2608+ if self.is_install > 0:
2609+ link = False
2610+ if not os.path.islink(tgt):
2611+ link = True
2612+ elif os.readlink(tgt) != src:
2613+ link = True
2614+
2615+ if link:
2616+ try: os.remove(tgt)
2617+ except OSError: pass
2618+
2619+ info('* symlink %s (-> %s)' % (tgt, src))
2620+ os.symlink(src, tgt)
2621+ return 0
2622+
2623+ else: # UNINSTALL
2624+ try:
2625+ info('* removing %s' % (tgt))
2626+ os.remove(tgt)
2627+ return 0
2628+ except OSError:
2629+ return 1
2630+
2631+ def exec_command(self, cmd, **kw):
2632+ # 'runner' zone is printed out for waf -v, see wafadmin/Options.py
2633+ debug('runner: system command -> %s', cmd)
2634+ if self.log:
2635+ self.log.write('%s\n' % cmd)
2636+ kw['log'] = self.log
2637+ try:
2638+ if not kw.get('cwd', None):
2639+ kw['cwd'] = self.cwd
2640+ except AttributeError:
2641+ self.cwd = kw['cwd'] = self.bldnode.abspath()
2642+ return Utils.exec_command(cmd, **kw)
2643+
2644+ def printout(self, s):
2645+ f = self.log or sys.stderr
2646+ f.write(s)
2647+ f.flush()
2648+
2649+ def add_subdirs(self, dirs):
2650+ self.recurse(dirs, 'build')
2651+
2652+ def pre_recurse(self, name_or_mod, path, nexdir):
2653+ if not hasattr(self, 'oldpath'):
2654+ self.oldpath = []
2655+ self.oldpath.append(self.path)
2656+ self.path = self.root.find_dir(nexdir)
2657+ return {'bld': self, 'ctx': self}
2658+
2659+ def post_recurse(self, name_or_mod, path, nexdir):
2660+ self.path = self.oldpath.pop()
2661+
2662+ ###### user-defined behaviour
2663+
2664+ def pre_build(self):
2665+ if hasattr(self, 'pre_funs'):
2666+ for m in self.pre_funs:
2667+ m(self)
2668+
2669+ def post_build(self):
2670+ if hasattr(self, 'post_funs'):
2671+ for m in self.post_funs:
2672+ m(self)
2673+
2674+ def add_pre_fun(self, meth):
2675+ try: self.pre_funs.append(meth)
2676+ except AttributeError: self.pre_funs = [meth]
2677+
2678+ def add_post_fun(self, meth):
2679+ try: self.post_funs.append(meth)
2680+ except AttributeError: self.post_funs = [meth]
2681+
2682+ def use_the_magic(self):
2683+ Task.algotype = Task.MAXPARALLEL
2684+ Task.file_deps = Task.extract_deps
2685+ self.magic = True
2686+
2687+ install_as = group_method(install_as)
2688+ install_files = group_method(install_files)
2689+ symlink_as = group_method(symlink_as)
2690+
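The methods above are the build-time half of the API that a project's wscript drives. A minimal, purely illustrative sketch (target and file names are invented, not taken from this patch):

def build(bld):
	# goes through BuildContext.__call__ and creates a task generator
	bld(features='cc cprogram', source='main.c', target='demo')
	# '${PREFIX}' is substituted by get_install_path(); O644 is the default chmod
	bld.install_files('${PREFIX}/share/demo', 'README')
	bld.symlink_as('${PREFIX}/lib/libdemo.so', 'libdemo.so.1.0.0')
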
2691diff --git a/buildtools/wafadmin/Configure.py b/buildtools/wafadmin/Configure.py
2692new file mode 100644
2693index 0000000..35b4e51
2694--- /dev/null
2695+++ b/buildtools/wafadmin/Configure.py
2696@@ -0,0 +1,444 @@
2697+#!/usr/bin/env python
2698+# encoding: utf-8
2699+# Thomas Nagy, 2005-2008 (ita)
2700+
2701+"""
2702+Configuration system
2703+
2704+A configuration instance is created when "waf configure" is called; it is used to:
2705+* create data dictionaries (Environment instances)
2706+* store the list of modules to import
2707+
2708+The old model (copied from Scons) was to store logic (mapping file extensions to functions)
2709+along with the data. In Waf a way was found to separate that logic by adding an indirection
2710+layer (storing the names in the Environment instances)
2711+
2712+In the new model, the logic is more object-oriented, and the user scripts provide the
2713+logic. The data files (Environments) must contain configuration data only (flags, ..).
2714+
2715+Note: the c/c++ related code is in the module config_c
2716+"""
2717+
2718+import os, shlex, sys, time
2719+try: import cPickle
2720+except ImportError: import pickle as cPickle
2721+import Environment, Utils, Options, Logs
2722+from Logs import warn
2723+from Constants import *
2724+
2725+try:
2726+ from urllib import request
2727+except:
2728+ from urllib import urlopen
2729+else:
2730+ urlopen = request.urlopen
2731+
2732+conf_template = '''# project %(app)s configured on %(now)s by
2733+# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
2734+# using %(args)s
2735+#
2736+'''
2737+
2738+class ConfigurationError(Utils.WscriptError):
2739+ pass
2740+
2741+autoconfig = False
2742+"reconfigure the project automatically"
2743+
2744+def find_file(filename, path_list):
2745+ """find a file in a list of paths
2746+ @param filename: name of the file to search for
2747+ @param path_list: list of directories to search
2748+	@return: the first directory containing filename, or '' if it could not be found
2749+"""
2750+ for directory in Utils.to_list(path_list):
2751+ if os.path.exists(os.path.join(directory, filename)):
2752+ return directory
2753+ return ''
2754+
2755+def find_program_impl(env, filename, path_list=[], var=None, environ=None):
2756+	"""find a program in the folders of path_list, and set env[var]
2757+ @param env: environment
2758+ @param filename: name of the program to search for
2759+ @param path_list: list of directories to search for filename
2760+ @param var: environment value to be checked for in env or os.environ
2761+ @return: either the value that is referenced with [var] in env or os.environ
2762+	or the full path of the first occurrence of filename, or '' if it could not be found
2763+"""
2764+
2765+ if not environ:
2766+ environ = os.environ
2767+
2768+ try: path_list = path_list.split()
2769+ except AttributeError: pass
2770+
2771+ if var:
2772+ if env[var]: return env[var]
2773+ if var in environ: env[var] = environ[var]
2774+
2775+ if not path_list: path_list = environ.get('PATH', '').split(os.pathsep)
2776+
2777+ ext = (Options.platform == 'win32') and '.exe,.com,.bat,.cmd' or ''
2778+ for y in [filename+x for x in ext.split(',')]:
2779+ for directory in path_list:
2780+ x = os.path.join(directory, y)
2781+ if os.path.isfile(x):
2782+ if var: env[var] = x
2783+ return x
2784+ return ''
2785+
2786+class ConfigurationContext(Utils.Context):
2787+ tests = {}
2788+ error_handlers = []
2789+ def __init__(self, env=None, blddir='', srcdir=''):
2790+ self.env = None
2791+ self.envname = ''
2792+
2793+ self.environ = dict(os.environ)
2794+
2795+ self.line_just = 40
2796+
2797+ self.blddir = blddir
2798+ self.srcdir = srcdir
2799+ self.all_envs = {}
2800+
2801+ # curdir: necessary for recursion
2802+ self.cwd = self.curdir = os.getcwd()
2803+
2804+ self.tools = [] # tools loaded in the configuration, and that will be loaded when building
2805+
2806+ self.setenv(DEFAULT)
2807+
2808+ self.lastprog = ''
2809+
2810+ self.hash = 0
2811+ self.files = []
2812+
2813+ self.tool_cache = []
2814+
2815+ if self.blddir:
2816+ self.post_init()
2817+
2818+ def post_init(self):
2819+
2820+ self.cachedir = os.path.join(self.blddir, CACHE_DIR)
2821+
2822+ path = os.path.join(self.blddir, WAF_CONFIG_LOG)
2823+ try: os.unlink(path)
2824+ except (OSError, IOError): pass
2825+
2826+ try:
2827+ self.log = open(path, 'w')
2828+ except (OSError, IOError):
2829+ self.fatal('could not open %r for writing' % path)
2830+
2831+ app = Utils.g_module.APPNAME
2832+ if app:
2833+ ver = getattr(Utils.g_module, 'VERSION', '')
2834+ if ver:
2835+ app = "%s (%s)" % (app, ver)
2836+
2837+ now = time.ctime()
2838+ pyver = sys.hexversion
2839+ systype = sys.platform
2840+ args = " ".join(sys.argv)
2841+ wafver = WAFVERSION
2842+ abi = ABI
2843+ self.log.write(conf_template % vars())
2844+
2845+ def __del__(self):
2846+ """cleanup function: close config.log"""
2847+
2848+		# may be run by the gc, not always after initialization
2849+ if hasattr(self, 'log') and self.log:
2850+ self.log.close()
2851+
2852+ def fatal(self, msg):
2853+ raise ConfigurationError(msg)
2854+
2855+ def check_tool(self, input, tooldir=None, funs=None):
2856+ "load a waf tool"
2857+
2858+ tools = Utils.to_list(input)
2859+ if tooldir: tooldir = Utils.to_list(tooldir)
2860+ for tool in tools:
2861+ tool = tool.replace('++', 'xx')
2862+ if tool == 'java': tool = 'javaw'
2863+ if tool.lower() == 'unittest': tool = 'unittestw'
2864+ # avoid loading the same tool more than once with the same functions
2865+ # used by composite projects
2866+
2867+ mag = (tool, id(self.env), funs)
2868+ if mag in self.tool_cache:
2869+ continue
2870+ self.tool_cache.append(mag)
2871+
2872+ module = None
2873+ try:
2874+ module = Utils.load_tool(tool, tooldir)
2875+ except Exception, e:
2876+ ex = e
2877+ if Options.options.download:
2878+ _3rdparty = os.path.normpath(Options.tooldir[0] + os.sep + '..' + os.sep + '3rdparty')
2879+
2880+					# then try to download the tool from the remote repository
2881+					# (the --download option is disabled by default)
2882+ for x in Utils.to_list(Options.remote_repo):
2883+ for sub in ['branches/waf-%s/wafadmin/3rdparty' % WAFVERSION, 'trunk/wafadmin/3rdparty']:
2884+ url = '/'.join((x, sub, tool + '.py'))
2885+ try:
2886+ web = urlopen(url)
2887+ if web.getcode() != 200:
2888+ continue
2889+ except Exception, e:
2890+ # on python3 urlopen throws an exception
2891+ continue
2892+ else:
2893+ loc = None
2894+ try:
2895+ loc = open(_3rdparty + os.sep + tool + '.py', 'wb')
2896+ loc.write(web.read())
2897+ web.close()
2898+ finally:
2899+ if loc:
2900+ loc.close()
2901+ Logs.warn('downloaded %s from %s' % (tool, url))
2902+ try:
2903+ module = Utils.load_tool(tool, tooldir)
2904+ except:
2905+ Logs.warn('module %s from %s is unusable' % (tool, url))
2906+ try:
2907+ os.unlink(_3rdparty + os.sep + tool + '.py')
2908+ except:
2909+ pass
2910+ continue
2911+ else:
2912+ break
2913+
2914+ if not module:
2915+ Logs.error('Could not load the tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
2916+ raise ex
2917+ else:
2918+ Logs.error('Could not load the tool %r in %r (try the --download option?):\n%s' % (tool, sys.path, e))
2919+ raise ex
2920+
2921+ if funs is not None:
2922+ self.eval_rules(funs)
2923+ else:
2924+ func = getattr(module, 'detect', None)
2925+ if func:
2926+ if type(func) is type(find_file): func(self)
2927+ else: self.eval_rules(func)
2928+
2929+ self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
2930+
2931+ def sub_config(self, k):
2932+ "executes the configure function of a wscript module"
2933+ self.recurse(k, name='configure')
2934+
2935+ def pre_recurse(self, name_or_mod, path, nexdir):
2936+ return {'conf': self, 'ctx': self}
2937+
2938+ def post_recurse(self, name_or_mod, path, nexdir):
2939+ if not autoconfig:
2940+ return
2941+ self.hash = hash((self.hash, getattr(name_or_mod, 'waf_hash_val', name_or_mod)))
2942+ self.files.append(path)
2943+
2944+ def store(self, file=''):
2945+ "save the config results into the cache file"
2946+ if not os.path.isdir(self.cachedir):
2947+ os.makedirs(self.cachedir)
2948+
2949+ if not file:
2950+ file = open(os.path.join(self.cachedir, 'build.config.py'), 'w')
2951+ file.write('version = 0x%x\n' % HEXVERSION)
2952+ file.write('tools = %r\n' % self.tools)
2953+ file.close()
2954+
2955+ if not self.all_envs:
2956+ self.fatal('nothing to store in the configuration context!')
2957+ for key in self.all_envs:
2958+ tmpenv = self.all_envs[key]
2959+ tmpenv.store(os.path.join(self.cachedir, key + CACHE_SUFFIX))
2960+
2961+ def set_env_name(self, name, env):
2962+ "add a new environment called name"
2963+ self.all_envs[name] = env
2964+ return env
2965+
2966+ def retrieve(self, name, fromenv=None):
2967+ "retrieve an environment called name"
2968+ try:
2969+ env = self.all_envs[name]
2970+ except KeyError:
2971+ env = Environment.Environment()
2972+ env['PREFIX'] = os.path.abspath(os.path.expanduser(Options.options.prefix))
2973+ self.all_envs[name] = env
2974+ else:
2975+ if fromenv: warn("The environment %s may have been configured already" % name)
2976+ return env
2977+
2978+ def setenv(self, name):
2979+ "enable the environment called name"
2980+ self.env = self.retrieve(name)
2981+ self.envname = name
2982+
2983+ def add_os_flags(self, var, dest=None):
2984+		# do not use 'get' so that a KeyError is raised when the variable is not defined
2985+ try: self.env.append_value(dest or var, Utils.to_list(self.environ[var]))
2986+ except KeyError: pass
2987+
2988+ def check_message_1(self, sr):
2989+ self.line_just = max(self.line_just, len(sr))
2990+ for x in ('\n', self.line_just * '-', '\n', sr, '\n'):
2991+ self.log.write(x)
2992+ Utils.pprint('NORMAL', "%s :" % sr.ljust(self.line_just), sep='')
2993+
2994+ def check_message_2(self, sr, color='GREEN'):
2995+ self.log.write(sr)
2996+ self.log.write('\n')
2997+ Utils.pprint(color, sr)
2998+
2999+ def check_message(self, th, msg, state, option=''):
3000+ sr = 'Checking for %s %s' % (th, msg)
3001+ self.check_message_1(sr)
3002+ p = self.check_message_2
3003+ if state: p('ok ' + str(option))
3004+ else: p('not found', 'YELLOW')
3005+
3006+ # FIXME remove in waf 1.6
3007+ # the parameter 'option' is not used (kept for compatibility)
3008+ def check_message_custom(self, th, msg, custom, option='', color='PINK'):
3009+ sr = 'Checking for %s %s' % (th, msg)
3010+ self.check_message_1(sr)
3011+ self.check_message_2(custom, color)
3012+
3013+ def msg(self, msg, result, color=None):
3014+ """Prints a configuration message 'Checking for xxx: ok'"""
3015+ self.start_msg('Checking for ' + msg)
3016+
3017+ if not isinstance(color, str):
3018+ color = result and 'GREEN' or 'YELLOW'
3019+
3020+ self.end_msg(result, color)
3021+
3022+ def start_msg(self, msg):
3023+ try:
3024+ if self.in_msg:
3025+ return
3026+ except:
3027+ self.in_msg = 0
3028+ self.in_msg += 1
3029+
3030+ self.line_just = max(self.line_just, len(msg))
3031+ for x in ('\n', self.line_just * '-', '\n', msg, '\n'):
3032+ self.log.write(x)
3033+ Utils.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='')
3034+
3035+ def end_msg(self, result, color):
3036+ self.in_msg -= 1
3037+ if self.in_msg:
3038+ return
3039+
3040+ if not color:
3041+ color = 'GREEN'
3042+ if result == True:
3043+ msg = 'ok'
3044+ elif result == False:
3045+ msg = 'not found'
3046+ color = 'YELLOW'
3047+ else:
3048+ msg = str(result)
3049+
3050+ self.log.write(msg)
3051+ self.log.write('\n')
3052+ Utils.pprint(color, msg)
3053+
3054+ def find_program(self, filename, path_list=[], var=None, mandatory=False):
3055+ "wrapper that adds a configuration message"
3056+
3057+ ret = None
3058+ if var:
3059+ if self.env[var]:
3060+ ret = self.env[var]
3061+ elif var in os.environ:
3062+ ret = os.environ[var]
3063+
3064+ if not isinstance(filename, list): filename = [filename]
3065+ if not ret:
3066+ for x in filename:
3067+ ret = find_program_impl(self.env, x, path_list, var, environ=self.environ)
3068+ if ret: break
3069+
3070+ self.check_message_1('Checking for program %s' % ' or '.join(filename))
3071+ self.log.write(' find program=%r paths=%r var=%r\n -> %r\n' % (filename, path_list, var, ret))
3072+ if ret:
3073+ Utils.pprint('GREEN', str(ret))
3074+ else:
3075+ Utils.pprint('YELLOW', 'not found')
3076+ if mandatory:
3077+ self.fatal('The program %r is required' % filename)
3078+
3079+ if var:
3080+ self.env[var] = ret
3081+ return ret
3082+
3083+ def cmd_to_list(self, cmd):
3084+ "commands may be written in pseudo shell like 'ccache g++'"
3085+ if isinstance(cmd, str) and cmd.find(' '):
3086+ try:
3087+ os.stat(cmd)
3088+ except OSError:
3089+ return shlex.split(cmd)
3090+ else:
3091+ return [cmd]
3092+ return cmd
3093+
3094+ def __getattr__(self, name):
3095+ r = self.__class__.__dict__.get(name, None)
3096+ if r: return r
3097+ if name and name.startswith('require_'):
3098+
3099+ for k in ['check_', 'find_']:
3100+ n = name.replace('require_', k)
3101+ ret = self.__class__.__dict__.get(n, None)
3102+ if ret:
3103+ def run(*k, **kw):
3104+ r = ret(self, *k, **kw)
3105+ if not r:
3106+ self.fatal('requirement failure')
3107+ return r
3108+ return run
3109+ self.fatal('No such method %r' % name)
3110+
3111+ def eval_rules(self, rules):
3112+ self.rules = Utils.to_list(rules)
3113+ for x in self.rules:
3114+ f = getattr(self, x)
3115+ if not f: self.fatal("No such method '%s'." % x)
3116+ try:
3117+ f()
3118+ except Exception, e:
3119+ ret = self.err_handler(x, e)
3120+ if ret == BREAK:
3121+ break
3122+ elif ret == CONTINUE:
3123+ continue
3124+ else:
3125+ self.fatal(e)
3126+
3127+ def err_handler(self, fun, error):
3128+ pass
3129+
3130+def conf(f):
3131+ "decorator: attach new configuration functions"
3132+ setattr(ConfigurationContext, f.__name__, f)
3133+ return f
3134+
3135+def conftest(f):
3136+ "decorator: attach new configuration tests (registered as strings)"
3137+ ConfigurationContext.tests[f.__name__] = f
3138+ return conf(f)
3139+
3140+
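As with Build.py, the class above is driven from a wscript. A hedged, illustrative sketch of a configure function and of the conf decorator (tool and program names are examples only, not from this patch):

from Configure import conf

def configure(conf_ctx):
	conf_ctx.check_tool('gcc')                          # loads the tool and runs its detect()
	conf_ctx.find_program('sed', var='SED', mandatory=True)
	conf_ctx.env.append_value('CCFLAGS', '-O2')

@conf
def check_example(self):
	# a custom check attached to ConfigurationContext by the decorator above
	self.check_message('example', '', True)
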
3141diff --git a/buildtools/wafadmin/Constants.py b/buildtools/wafadmin/Constants.py
3142new file mode 100644
3143index 0000000..e67dda6
3144--- /dev/null
3145+++ b/buildtools/wafadmin/Constants.py
3146@@ -0,0 +1,76 @@
3147+#!/usr/bin/env python
3148+# encoding: utf-8
3149+# Yinon dot me gmail 2008
3150+
3151+"""
3152+these constants are somewhat public, try not to mess with them
3153+
3154+maintainer: the version number is updated from the top-level wscript file
3155+"""
3156+
3157+# do not touch these three lines, they are updated automatically
3158+HEXVERSION=0x105019
3159+WAFVERSION="1.5.19"
3160+WAFREVISION = "9709M"
3161+ABI = 7
3162+
3163+# permissions
3164+O644 = 420
3165+O755 = 493
3166+
3167+MAXJOBS = 99999999
3168+
3169+CACHE_DIR = 'c4che'
3170+CACHE_SUFFIX = '.cache.py'
3171+DBFILE = '.wafpickle-%d' % ABI
3172+WSCRIPT_FILE = 'wscript'
3173+WSCRIPT_BUILD_FILE = 'wscript_build'
3174+WAF_CONFIG_LOG = 'config.log'
3175+WAF_CONFIG_H = 'config.h'
3176+
3177+SIG_NIL = 'iluvcuteoverload'
3178+
3179+VARIANT = '_VARIANT_'
3180+DEFAULT = 'default'
3181+
3182+SRCDIR = 'srcdir'
3183+BLDDIR = 'blddir'
3184+APPNAME = 'APPNAME'
3185+VERSION = 'VERSION'
3186+
3187+DEFINES = 'defines'
3188+UNDEFINED = ()
3189+
3190+BREAK = "break"
3191+CONTINUE = "continue"
3192+
3193+# task scheduler options
3194+JOBCONTROL = "JOBCONTROL"
3195+MAXPARALLEL = "MAXPARALLEL"
3196+NORMAL = "NORMAL"
3197+
3198+# task state
3199+NOT_RUN = 0
3200+MISSING = 1
3201+CRASHED = 2
3202+EXCEPTION = 3
3203+SKIPPED = 8
3204+SUCCESS = 9
3205+
3206+ASK_LATER = -1
3207+SKIP_ME = -2
3208+RUN_ME = -3
3209+
3210+
3211+LOG_FORMAT = "%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
3212+HOUR_FORMAT = "%H:%M:%S"
3213+
3214+TEST_OK = True
3215+
3216+CFG_FILES = 'cfg_files'
3217+
3218+# positive '->' install
3219+# negative '<-' uninstall
3220+INSTALL = 1337
3221+UNINSTALL = -1337
3222+
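A small sanity note on the constants above (illustrative, not part of the patch): the permission values are octal file modes written as decimal integers, and DBFILE embeds the pickle ABI so caches written by an incompatible waf version are simply not picked up.

import Constants
assert Constants.O644 == int('644', 8)        # 420
assert Constants.O755 == int('755', 8)        # 493
assert Constants.DBFILE == '.wafpickle-7'     # since ABI = 7
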
3223diff --git a/buildtools/wafadmin/Environment.py b/buildtools/wafadmin/Environment.py
3224new file mode 100644
3225index 0000000..52c83b4
3226--- /dev/null
3227+++ b/buildtools/wafadmin/Environment.py
3228@@ -0,0 +1,210 @@
3229+#!/usr/bin/env python
3230+# encoding: utf-8
3231+# Thomas Nagy, 2005 (ita)
3232+
3233+"""Environment representation
3234+
3235+There is one gotcha: getitem returns [] if the contents evaluate to False
3236+This means env['foo'] = {}; print env['foo'] will print [] not {}
3237+"""
3238+
3239+import os, copy, re
3240+import Logs, Options, Utils
3241+from Constants import *
3242+re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
3243+
3244+class Environment(object):
3245+	"""A safe-to-use dictionary, but please do not attach functions to it (that breaks cPickle)
3246+ An environment instance can be stored into a file and loaded easily
3247+ """
3248+ __slots__ = ("table", "parent")
3249+ def __init__(self, filename=None):
3250+ self.table = {}
3251+ #self.parent = None
3252+
3253+ if filename:
3254+ self.load(filename)
3255+
3256+ def __contains__(self, key):
3257+ if key in self.table: return True
3258+ try: return self.parent.__contains__(key)
3259+ except AttributeError: return False # parent may not exist
3260+
3261+ def __str__(self):
3262+ keys = set()
3263+ cur = self
3264+ while cur:
3265+ keys.update(cur.table.keys())
3266+ cur = getattr(cur, 'parent', None)
3267+ keys = list(keys)
3268+ keys.sort()
3269+ return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in keys])
3270+
3271+ def __getitem__(self, key):
3272+ try:
3273+ while 1:
3274+ x = self.table.get(key, None)
3275+ if not x is None:
3276+ return x
3277+ self = self.parent
3278+ except AttributeError:
3279+ return []
3280+
3281+ def __setitem__(self, key, value):
3282+ self.table[key] = value
3283+
3284+ def __delitem__(self, key):
3285+ del self.table[key]
3286+
3287+ def pop(self, key, *args):
3288+ if len(args):
3289+ return self.table.pop(key, *args)
3290+ return self.table.pop(key)
3291+
3292+ def set_variant(self, name):
3293+ self.table[VARIANT] = name
3294+
3295+ def variant(self):
3296+ try:
3297+ while 1:
3298+ x = self.table.get(VARIANT, None)
3299+ if not x is None:
3300+ return x
3301+ self = self.parent
3302+ except AttributeError:
3303+ return DEFAULT
3304+
3305+ def copy(self):
3306+ # TODO waf 1.6 rename this method derive, #368
3307+ newenv = Environment()
3308+ newenv.parent = self
3309+ return newenv
3310+
3311+ def detach(self):
3312+ """TODO try it
3313+ modifying the original env will not change the copy"""
3314+ tbl = self.get_merged_dict()
3315+ try:
3316+ delattr(self, 'parent')
3317+ except AttributeError:
3318+ pass
3319+ else:
3320+ keys = tbl.keys()
3321+ for x in keys:
3322+ tbl[x] = copy.deepcopy(tbl[x])
3323+ self.table = tbl
3324+
3325+ def get_flat(self, key):
3326+ s = self[key]
3327+ if isinstance(s, str): return s
3328+ return ' '.join(s)
3329+
3330+ def _get_list_value_for_modification(self, key):
3331+ """Gets a value that must be a list for further modification. The
3332+		list may be modified in place and there is no need to do
3333+ "self.table[var] = value" afterwards.
3334+ """
3335+ try:
3336+ value = self.table[key]
3337+ except KeyError:
3338+ try: value = self.parent[key]
3339+ except AttributeError: value = []
3340+ if isinstance(value, list):
3341+ value = value[:]
3342+ else:
3343+ value = [value]
3344+ else:
3345+ if not isinstance(value, list):
3346+ value = [value]
3347+ self.table[key] = value
3348+ return value
3349+
3350+ def append_value(self, var, value):
3351+ current_value = self._get_list_value_for_modification(var)
3352+
3353+ if isinstance(value, list):
3354+ current_value.extend(value)
3355+ else:
3356+ current_value.append(value)
3357+
3358+ def prepend_value(self, var, value):
3359+ current_value = self._get_list_value_for_modification(var)
3360+
3361+ if isinstance(value, list):
3362+ current_value = value + current_value
3363+ # a new list: update the dictionary entry
3364+ self.table[var] = current_value
3365+ else:
3366+ current_value.insert(0, value)
3367+
3368+ # prepend unique would be ambiguous
3369+ def append_unique(self, var, value):
3370+ current_value = self._get_list_value_for_modification(var)
3371+
3372+ if isinstance(value, list):
3373+ for value_item in value:
3374+ if value_item not in current_value:
3375+ current_value.append(value_item)
3376+ else:
3377+ if value not in current_value:
3378+ current_value.append(value)
3379+
3380+ def get_merged_dict(self):
3381+ """compute a merged table"""
3382+ table_list = []
3383+ env = self
3384+ while 1:
3385+ table_list.insert(0, env.table)
3386+ try: env = env.parent
3387+ except AttributeError: break
3388+ merged_table = {}
3389+ for table in table_list:
3390+ merged_table.update(table)
3391+ return merged_table
3392+
3393+ def store(self, filename):
3394+ "Write the variables into a file"
3395+ file = open(filename, 'w')
3396+ merged_table = self.get_merged_dict()
3397+ keys = list(merged_table.keys())
3398+ keys.sort()
3399+ for k in keys: file.write('%s = %r\n' % (k, merged_table[k]))
3400+ file.close()
3401+
3402+ def load(self, filename):
3403+ "Retrieve the variables from a file"
3404+ tbl = self.table
3405+ code = Utils.readf(filename)
3406+ for m in re_imp.finditer(code):
3407+ g = m.group
3408+ tbl[g(2)] = eval(g(3))
3409+ Logs.debug('env: %s', self.table)
3410+
3411+ def get_destdir(self):
3412+ "return the destdir, useful for installing"
3413+ if self.__getitem__('NOINSTALL'): return ''
3414+ return Options.options.destdir
3415+
3416+ def update(self, d):
3417+ for k, v in d.iteritems():
3418+ self[k] = v
3419+
3420+
3421+ def __getattr__(self, name):
3422+ if name in self.__slots__:
3423+ return object.__getattr__(self, name)
3424+ else:
3425+ return self[name]
3426+
3427+ def __setattr__(self, name, value):
3428+ if name in self.__slots__:
3429+ object.__setattr__(self, name, value)
3430+ else:
3431+ self[name] = value
3432+
3433+ def __delattr__(self, name):
3434+ if name in self.__slots__:
3435+ object.__delattr__(self, name)
3436+ else:
3437+ del self[name]
3438+
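A short usage sketch of the semantics documented above (illustrative only, not from the upstream sources): derived environments read through their parent, and unset keys come back as [].

env = Environment()
env['CXX'] = 'g++'
child = env.copy()                    # child.parent is env
child.append_value('CXXFLAGS', '-g')
print child['CXX']                    # 'g++', found through the parent
print child['CXXFLAGS']               # ['-g'], stored in the child only
print env['CXXFLAGS']                 # [], missing keys evaluate to an empty list
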
3439diff --git a/buildtools/wafadmin/Logs.py b/buildtools/wafadmin/Logs.py
3440new file mode 100644
3441index 0000000..c160b37
3442--- /dev/null
3443+++ b/buildtools/wafadmin/Logs.py
3444@@ -0,0 +1,134 @@
3445+#!/usr/bin/env python
3446+# encoding: utf-8
3447+# Thomas Nagy, 2005 (ita)
3448+
3449+import ansiterm
3450+import os, re, logging, traceback, sys
3451+from Constants import *
3452+
3453+zones = ''
3454+verbose = 0
3455+
3456+colors_lst = {
3457+'USE' : True,
3458+'BOLD' :'\x1b[01;1m',
3459+'RED' :'\x1b[01;31m',
3460+'GREEN' :'\x1b[32m',
3461+'YELLOW':'\x1b[33m',
3462+'PINK' :'\x1b[35m',
3463+'BLUE' :'\x1b[01;34m',
3464+'CYAN' :'\x1b[36m',
3465+'NORMAL':'\x1b[0m',
3466+'cursor_on' :'\x1b[?25h',
3467+'cursor_off' :'\x1b[?25l',
3468+}
3469+
3470+got_tty = False
3471+term = os.environ.get('TERM', 'dumb')
3472+if not term in ['dumb', 'emacs']:
3473+ try:
3474+ got_tty = sys.stderr.isatty() or (sys.platform == 'win32' and term in ['xterm', 'msys'])
3475+ except AttributeError:
3476+ pass
3477+
3478+import Utils
3479+
3480+if not got_tty or 'NOCOLOR' in os.environ:
3481+ colors_lst['USE'] = False
3482+
3483+# test
3484+#if sys.platform == 'win32':
3485+# colors_lst['USE'] = True
3486+
3487+def get_color(cl):
3488+ if not colors_lst['USE']: return ''
3489+ return colors_lst.get(cl, '')
3490+
3491+class foo(object):
3492+ def __getattr__(self, a):
3493+ return get_color(a)
3494+ def __call__(self, a):
3495+ return get_color(a)
3496+
3497+colors = foo()
3498+
3499+re_log = re.compile(r'(\w+): (.*)', re.M)
3500+class log_filter(logging.Filter):
3501+ def __init__(self, name=None):
3502+ pass
3503+
3504+ def filter(self, rec):
3505+ rec.c1 = colors.PINK
3506+ rec.c2 = colors.NORMAL
3507+ rec.zone = rec.module
3508+ if rec.levelno >= logging.INFO:
3509+ if rec.levelno >= logging.ERROR:
3510+ rec.c1 = colors.RED
3511+ elif rec.levelno >= logging.WARNING:
3512+ rec.c1 = colors.YELLOW
3513+ else:
3514+ rec.c1 = colors.GREEN
3515+ return True
3516+
3517+ zone = ''
3518+ m = re_log.match(rec.msg)
3519+ if m:
3520+ zone = rec.zone = m.group(1)
3521+ rec.msg = m.group(2)
3522+
3523+ if zones:
3524+ return getattr(rec, 'zone', '') in zones or '*' in zones
3525+ elif not verbose > 2:
3526+ return False
3527+ return True
3528+
3529+class formatter(logging.Formatter):
3530+ def __init__(self):
3531+ logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)
3532+
3533+ def format(self, rec):
3534+ if rec.levelno >= logging.WARNING or rec.levelno == logging.INFO:
3535+ try:
3536+ return '%s%s%s' % (rec.c1, rec.msg.decode('utf-8'), rec.c2)
3537+ except:
3538+ return rec.c1+rec.msg+rec.c2
3539+ return logging.Formatter.format(self, rec)
3540+
3541+def debug(*k, **kw):
3542+ if verbose:
3543+ k = list(k)
3544+ k[0] = k[0].replace('\n', ' ')
3545+ logging.debug(*k, **kw)
3546+
3547+def error(*k, **kw):
3548+ logging.error(*k, **kw)
3549+ if verbose > 1:
3550+ if isinstance(k[0], Utils.WafError):
3551+ st = k[0].stack
3552+ else:
3553+ st = traceback.extract_stack()
3554+ if st:
3555+ st = st[:-1]
3556+ buf = []
3557+ for filename, lineno, name, line in st:
3558+ buf.append(' File "%s", line %d, in %s' % (filename, lineno, name))
3559+ if line:
3560+ buf.append(' %s' % line.strip())
3561+ if buf: logging.error("\n".join(buf))
3562+
3563+warn = logging.warn
3564+info = logging.info
3565+
3566+def init_log():
3567+ log = logging.getLogger()
3568+ log.handlers = []
3569+ log.filters = []
3570+ hdlr = logging.StreamHandler()
3571+ hdlr.setFormatter(formatter())
3572+ log.addHandler(hdlr)
3573+ log.addFilter(log_filter())
3574+ log.setLevel(logging.DEBUG)
3575+
3576+# may be initialized more than once
3577+init_log()
3578+
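Debug output in the modules above is routed by "zone", i.e. the 'word:' prefix that log_filter parses; the 'build: ...' and 'task_gen: ...' strings in Build.py rely on this. A rough, illustrative sketch (not part of the patch):

import Logs
Logs.verbose = 1
Logs.zones = ['build']                         # normally filled in from the command line (--zones)
Logs.debug('build: this message is kept')
Logs.debug('task_gen: this one is filtered out')
Logs.warn(Logs.colors.YELLOW + 'a warning, coloured when the terminal supports it' + Logs.colors.NORMAL)
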
3579diff --git a/buildtools/wafadmin/Node.py b/buildtools/wafadmin/Node.py
3580new file mode 100644
3581index 0000000..236dd0d
3582--- /dev/null
3583+++ b/buildtools/wafadmin/Node.py
3584@@ -0,0 +1,695 @@
3585+#!/usr/bin/env python
3586+# encoding: utf-8
3587+# Thomas Nagy, 2005 (ita)
3588+
3589+"""
3590+Node: filesystem structure, contains lists of nodes
3591+
3592+IMPORTANT:
3593+1. Each file/folder is represented by exactly one node.
3594+
3595+2. Most would-be class properties are stored in Build: nodes to depend on, signature, flags, ..
3596+unused class members would increase the .wafpickle file size considerably when there are many objects.
3597+
3598+3. The build is launched from the top of the build dir (for example, in _build_/).
3599+
3600+4. Node should not be instantiated directly.
3601+Each instance of Build.BuildContext has a Node subclass.
3602+(aka: 'Nodu', see BuildContext initializer)
3603+The BuildContext is referenced here as self.__class__.bld
3604+Its Node class is referenced here as self.__class__
3605+
3606+The public and advertised apis are the following:
3607+${TGT} -> dir/to/file.ext
3608+${TGT[0].base()} -> dir/to/file
3609+${TGT[0].dir(env)} -> dir/to
3610+${TGT[0].file()} -> file.ext
3611+${TGT[0].file_base()} -> file
3612+${TGT[0].suffix()} -> .ext
3613+${TGT[0].abspath(env)} -> /path/to/dir/to/file.ext
3614+
3615+"""
3616+
3617+import os, sys, fnmatch, re, stat
3618+import Utils, Constants
3619+
3620+UNDEFINED = 0
3621+DIR = 1
3622+FILE = 2
3623+BUILD = 3
3624+
3625+type_to_string = {UNDEFINED: "unk", DIR: "dir", FILE: "src", BUILD: "bld"}
3626+
3627+# These fnmatch expressions are used by default to prune the directory tree
3628+# while doing the recursive traversal in the find_iter method of the Node class.
3629+prune_pats = '.git .bzr .hg .svn _MTN _darcs CVS SCCS'.split()
3630+
3631+# These fnmatch expressions are used by default to exclude files and dirs
3632+# while doing the recursive traversal in the find_iter method of the Node class.
3633+exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
3634+
3635+# These Utils.jar_regexp expressions are used by default to exclude files and dirs and also prune the directory tree
3636+# while doing the recursive traversal in the ant_glob method of the Node class.
3637+exclude_regs = '''
3638+**/*~
3639+**/#*#
3640+**/.#*
3641+**/%*%
3642+**/._*
3643+**/CVS
3644+**/CVS/**
3645+**/.cvsignore
3646+**/SCCS
3647+**/SCCS/**
3648+**/vssver.scc
3649+**/.svn
3650+**/.svn/**
3651+**/.git
3652+**/.git/**
3653+**/.gitignore
3654+**/.bzr
3655+**/.bzr/**
3656+**/.hg
3657+**/.hg/**
3658+**/_MTN
3659+**/_MTN/**
3660+**/_darcs
3661+**/_darcs/**
3662+**/.DS_Store'''
3663+
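# illustrative note (not from the upstream sources): the node type is packed into
# the two low bits of the id (see get_type/set_type below), so "nd.id & 3" recovers
# UNDEFINED/DIR/FILE/BUILD; e.g. assuming the id counter starts at 0, the first DIR
# node created gets id = 4 + DIR = 5, and 5 & 3 == DIR
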
3664+class Node(object):
3665+ __slots__ = ("name", "parent", "id", "childs")
3666+ def __init__(self, name, parent, node_type = UNDEFINED):
3667+ self.name = name
3668+ self.parent = parent
3669+
3670+ # assumption: one build object at a time
3671+ self.__class__.bld.id_nodes += 4
3672+ self.id = self.__class__.bld.id_nodes + node_type
3673+
3674+ if node_type == DIR: self.childs = {}
3675+
3676+ # We do not want to add another type attribute (memory)
3677+ # use the id to find out: type = id & 3
3678+ # for setting: new type = type + x - type & 3
3679+
3680+ if parent and name in parent.childs:
3681+ raise Utils.WafError('node %s exists in the parent files %r already' % (name, parent))
3682+
3683+ if parent: parent.childs[name] = self
3684+
3685+ def __setstate__(self, data):
3686+ if len(data) == 4:
3687+ (self.parent, self.name, self.id, self.childs) = data
3688+ else:
3689+ (self.parent, self.name, self.id) = data
3690+
3691+ def __getstate__(self):
3692+ if getattr(self, 'childs', None) is None:
3693+ return (self.parent, self.name, self.id)
3694+ else:
3695+ return (self.parent, self.name, self.id, self.childs)
3696+
3697+ def __str__(self):
3698+ if not self.parent: return ''
3699+ return "%s://%s" % (type_to_string[self.id & 3], self.abspath())
3700+
3701+ def __repr__(self):
3702+ return self.__str__()
3703+
3704+ def __hash__(self):
3705+ "expensive, make certain it is not used"
3706+ raise Utils.WafError('nodes, you are doing it wrong')
3707+
3708+ def __copy__(self):
3709+ "nodes are not supposed to be copied"
3710+ raise Utils.WafError('nodes are not supposed to be cloned')
3711+
3712+ def get_type(self):
3713+ return self.id & 3
3714+
3715+ def set_type(self, t):
3716+ "dangerous, you are not supposed to use this"
3717+ self.id = self.id + t - self.id & 3
3718+
3719+ def dirs(self):
3720+ return [x for x in self.childs.values() if x.id & 3 == DIR]
3721+
3722+ def files(self):
3723+ return [x for x in self.childs.values() if x.id & 3 == FILE]
3724+
3725+ def get_dir(self, name, default=None):
3726+ node = self.childs.get(name, None)
3727+ if not node or node.id & 3 != DIR: return default
3728+ return node
3729+
3730+ def get_file(self, name, default=None):
3731+ node = self.childs.get(name, None)
3732+ if not node or node.id & 3 != FILE: return default
3733+ return node
3734+
3735+ def get_build(self, name, default=None):
3736+ node = self.childs.get(name, None)
3737+ if not node or node.id & 3 != BUILD: return default
3738+ return node
3739+
3740+ def find_resource(self, lst):
3741+ "Find an existing input file: either a build node declared previously or a source node"
3742+ if isinstance(lst, str):
3743+ lst = Utils.split_path(lst)
3744+
3745+ if len(lst) == 1:
3746+ parent = self
3747+ else:
3748+ parent = self.find_dir(lst[:-1])
3749+ if not parent: return None
3750+ self.__class__.bld.rescan(parent)
3751+
3752+ name = lst[-1]
3753+ node = parent.childs.get(name, None)
3754+ if node:
3755+ tp = node.id & 3
3756+ if tp == FILE or tp == BUILD:
3757+ return node
3758+ else:
3759+ return None
3760+
3761+ tree = self.__class__.bld
3762+ if not name in tree.cache_dir_contents[parent.id]:
3763+ return None
3764+
3765+ path = parent.abspath() + os.sep + name
3766+ try:
3767+ st = Utils.h_file(path)
3768+ except IOError:
3769+ return None
3770+
3771+ child = self.__class__(name, parent, FILE)
3772+ tree.node_sigs[0][child.id] = st
3773+ return child
3774+
3775+ def find_or_declare(self, lst):
3776+ "Used for declaring a build node representing a file being built"
3777+ if isinstance(lst, str):
3778+ lst = Utils.split_path(lst)
3779+
3780+ if len(lst) == 1:
3781+ parent = self
3782+ else:
3783+ parent = self.find_dir(lst[:-1])
3784+ if not parent: return None
3785+ self.__class__.bld.rescan(parent)
3786+
3787+ name = lst[-1]
3788+ node = parent.childs.get(name, None)
3789+ if node:
3790+ tp = node.id & 3
3791+ if tp != BUILD:
3792+ raise Utils.WafError('find_or_declare found a source file where a build file was expected %r' % '/'.join(lst))
3793+ return node
3794+ node = self.__class__(name, parent, BUILD)
3795+ return node
3796+
3797+ def find_dir(self, lst):
3798+ "search a folder in the filesystem"
3799+
3800+ if isinstance(lst, str):
3801+ lst = Utils.split_path(lst)
3802+
3803+ current = self
3804+ for name in lst:
3805+ self.__class__.bld.rescan(current)
3806+ prev = current
3807+
3808+ if not current.parent and name == current.name:
3809+ continue
3810+ elif not name:
3811+ continue
3812+ elif name == '.':
3813+ continue
3814+ elif name == '..':
3815+ current = current.parent or current
3816+ else:
3817+ current = prev.childs.get(name, None)
3818+ if current is None:
3819+ dir_cont = self.__class__.bld.cache_dir_contents
3820+ if prev.id in dir_cont and name in dir_cont[prev.id]:
3821+ if not prev.name:
3822+ if os.sep == '/':
3823+ # cygwin //machine/share
3824+ dirname = os.sep + name
3825+ else:
3826+ # windows c:
3827+ dirname = name
3828+ else:
3829+ # regular path
3830+ dirname = prev.abspath() + os.sep + name
3831+ if not os.path.isdir(dirname):
3832+ return None
3833+ current = self.__class__(name, prev, DIR)
3834+ elif (not prev.name and len(name) == 2 and name[1] == ':') or name.startswith('\\\\'):
3835+ # drive letter or \\ path for windows
3836+ current = self.__class__(name, prev, DIR)
3837+ else:
3838+ return None
3839+ else:
3840+ if current.id & 3 != DIR:
3841+ return None
3842+ return current
3843+
3844+ def ensure_dir_node_from_path(self, lst):
3845+		"used very rarely, force the construction of a branch of node instances representing folders"
3846+
3847+ if isinstance(lst, str):
3848+ lst = Utils.split_path(lst)
3849+
3850+ current = self
3851+ for name in lst:
3852+ if not name:
3853+ continue
3854+ elif name == '.':
3855+ continue
3856+ elif name == '..':
3857+ current = current.parent or current
3858+ else:
3859+ prev = current
3860+ current = prev.childs.get(name, None)
3861+ if current is None:
3862+ current = self.__class__(name, prev, DIR)
3863+ return current
3864+
3865+ def exclusive_build_node(self, path):
3866+ """
3867+ create a hierarchy in the build dir (no source folders) for ill-behaving compilers
3868+ the node is not hashed, so you must do it manually
3869+
3870+ after declaring such a node, find_dir and find_resource should work as expected
3871+ """
3872+ lst = Utils.split_path(path)
3873+ name = lst[-1]
3874+ if len(lst) > 1:
3875+ parent = None
3876+ try:
3877+ parent = self.find_dir(lst[:-1])
3878+ except OSError:
3879+ pass
3880+ if not parent:
3881+ parent = self.ensure_dir_node_from_path(lst[:-1])
3882+ self.__class__.bld.rescan(parent)
3883+ else:
3884+ try:
3885+ self.__class__.bld.rescan(parent)
3886+ except OSError:
3887+ pass
3888+ else:
3889+ parent = self
3890+
3891+ node = parent.childs.get(name, None)
3892+ if not node:
3893+ node = self.__class__(name, parent, BUILD)
3894+
3895+ return node
3896+
3897+ def path_to_parent(self, parent):
3898+ "path relative to a direct ancestor, as string"
3899+ lst = []
3900+ p = self
3901+ h1 = parent.height()
3902+ h2 = p.height()
3903+ while h2 > h1:
3904+ h2 -= 1
3905+ lst.append(p.name)
3906+ p = p.parent
3907+ if lst:
3908+ lst.reverse()
3909+ ret = os.path.join(*lst)
3910+ else:
3911+ ret = ''
3912+ return ret
3913+
3914+ def find_ancestor(self, node):
3915+ "find a common ancestor for two nodes - for the shortest path in hierarchy"
3916+ dist = self.height() - node.height()
3917+ if dist < 0: return node.find_ancestor(self)
3918+ # now the real code
3919+ cand = self
3920+ while dist > 0:
3921+ cand = cand.parent
3922+ dist -= 1
3923+ if cand == node: return cand
3924+ cursor = node
3925+ while cand.parent:
3926+ cand = cand.parent
3927+ cursor = cursor.parent
3928+ if cand == cursor: return cand
3929+
3930+ def relpath_gen(self, from_node):
3931+ "string representing a relative path between self to another node"
3932+
3933+ if self == from_node: return '.'
3934+ if from_node.parent == self: return '..'
3935+
3936+ # up_path is '../../../' and down_path is 'dir/subdir/subdir/file'
3937+ ancestor = self.find_ancestor(from_node)
3938+ lst = []
3939+ cand = self
3940+ while not cand.id == ancestor.id:
3941+ lst.append(cand.name)
3942+ cand = cand.parent
3943+ cand = from_node
3944+ while not cand.id == ancestor.id:
3945+ lst.append('..')
3946+ cand = cand.parent
3947+ lst.reverse()
3948+ return os.sep.join(lst)
3949+
3950+ def nice_path(self, env=None):
3951+ "printed in the console, open files easily from the launch directory"
3952+ tree = self.__class__.bld
3953+ ln = tree.launch_node()
3954+
3955+ if self.id & 3 == FILE: return self.relpath_gen(ln)
3956+ else: return os.path.join(tree.bldnode.relpath_gen(ln), env.variant(), self.relpath_gen(tree.srcnode))
3957+
3958+ def is_child_of(self, node):
3959+		"does this node belong to the subtree of the given node"
3960+ p = self
3961+ diff = self.height() - node.height()
3962+ while diff > 0:
3963+ diff -= 1
3964+ p = p.parent
3965+ return p.id == node.id
3966+
3967+ def variant(self, env):
3968+		"variant (output directory) for this node; a source node has variant 0"
3969+ if not env: return 0
3970+ elif self.id & 3 == FILE: return 0
3971+ else: return env.variant()
3972+
3973+ def height(self):
3974+		"number of parent nodes"
3975+ # README a cache can be added here if necessary
3976+ d = self
3977+ val = -1
3978+ while d:
3979+ d = d.parent
3980+ val += 1
3981+ return val
3982+
3983+ # helpers for building things
3984+
3985+ def abspath(self, env=None):
3986+ """
3987+ absolute path
3988+ @param env [Environment]:
3989+ * obligatory for build nodes: build/variant/src/dir/bar.o
3990+ * optional for dirs: get either src/dir or build/variant/src/dir
3991+ * excluded for source nodes: src/dir/bar.c
3992+
3993+ Instead of computing the absolute path each time again,
3994+ store the already-computed absolute paths in one of (variants+1) dictionaries:
3995+ bld.cache_node_abspath[0] holds absolute paths for source nodes.
3996+ bld.cache_node_abspath[variant] holds the absolute path for the build nodes
3997+ which reside in the variant given by env.
3998+ """
3999+ ## absolute path - hot zone, so do not touch
4000+
4001+ # less expensive
4002+ variant = (env and (self.id & 3 != FILE) and env.variant()) or 0
4003+
4004+ ret = self.__class__.bld.cache_node_abspath[variant].get(self.id, None)
4005+ if ret: return ret
4006+
4007+ if not variant:
4008+ # source directory
4009+ if not self.parent:
4010+ val = os.sep == '/' and os.sep or ''
4011+ elif not self.parent.name: # root
4012+ val = (os.sep == '/' and os.sep or '') + self.name
4013+ else:
4014+ val = self.parent.abspath() + os.sep + self.name
4015+ else:
4016+ # build directory
4017+ val = os.sep.join((self.__class__.bld.bldnode.abspath(), variant, self.path_to_parent(self.__class__.bld.srcnode)))
4018+ self.__class__.bld.cache_node_abspath[variant][self.id] = val
4019+ return val
4020+
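+	# Illustrative sketch (not from the original file; node and env names are
+	# hypothetical) of how abspath() is meant to be called for the node kinds
+	# described in the docstring above:
+	#   src = bld.path.find_resource('dir/bar.c')
+	#   src.abspath()              # source node: no env -> .../src/dir/bar.c
+	#   obj = src.change_ext('.o')
+	#   obj.abspath(bld.env)       # build node: env required -> .../build/default/dir/bar.o
+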
4021+ def change_ext(self, ext):
4022+ "node of the same path, but with a different extension - hot zone so do not touch"
4023+ name = self.name
4024+ k = name.rfind('.')
4025+ if k >= 0:
4026+ name = name[:k] + ext
4027+ else:
4028+ name = name + ext
4029+
4030+ return self.parent.find_or_declare([name])
4031+
4032+ def src_dir(self, env):
4033+ "src path without the file name"
4034+ return self.parent.srcpath(env)
4035+
4036+ def bld_dir(self, env):
4037+ "build path without the file name"
4038+ return self.parent.bldpath(env)
4039+
4040+ def bld_base(self, env):
4041+ "build path without the extension: src/dir/foo(.cpp)"
4042+ s = os.path.splitext(self.name)[0]
4043+ return os.path.join(self.bld_dir(env), s)
4044+
4045+ def bldpath(self, env=None):
4046+ "path seen from the build dir default/src/foo.cpp"
4047+ if self.id & 3 == FILE:
4048+ return self.relpath_gen(self.__class__.bld.bldnode)
4049+ p = self.path_to_parent(self.__class__.bld.srcnode)
4050+		if p != '':
4051+ return env.variant() + os.sep + p
4052+ return env.variant()
4053+
4054+ def srcpath(self, env=None):
4055+ "path in the srcdir from the build dir ../src/foo.cpp"
4056+ if self.id & 3 == BUILD:
4057+ return self.bldpath(env)
4058+ return self.relpath_gen(self.__class__.bld.bldnode)
4059+
4060+ def read(self, env):
4061+		"get the contents of a file; not used anywhere for the moment"
4062+ return Utils.readf(self.abspath(env))
4063+
4064+ def dir(self, env):
4065+ "scons-like"
4066+ return self.parent.abspath(env)
4067+
4068+ def file(self):
4069+ "scons-like"
4070+ return self.name
4071+
4072+ def file_base(self):
4073+ "scons-like"
4074+ return os.path.splitext(self.name)[0]
4075+
4076+ def suffix(self):
4077+ "scons-like - hot zone so do not touch"
4078+ k = max(0, self.name.rfind('.'))
4079+ return self.name[k:]
4080+
4081+ def find_iter_impl(self, src=True, bld=True, dir=True, accept_name=None, is_prune=None, maxdepth=25):
4082+		"""find nodes in the filesystem hierarchy, try to instantiate the nodes passively; same gotcha as ant_glob"""
4083+ bld_ctx = self.__class__.bld
4084+ bld_ctx.rescan(self)
4085+ for name in bld_ctx.cache_dir_contents[self.id]:
4086+ if accept_name(self, name):
4087+ node = self.find_resource(name)
4088+ if node:
4089+ if src and node.id & 3 == FILE:
4090+ yield node
4091+ else:
4092+ node = self.find_dir(name)
4093+ if node and node.id != bld_ctx.bldnode.id:
4094+ if dir:
4095+ yield node
4096+ if not is_prune(self, name):
4097+ if maxdepth:
4098+ for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
4099+ yield k
4100+ else:
4101+ if not is_prune(self, name):
4102+ node = self.find_resource(name)
4103+ if not node:
4104+ # not a file, it is a dir
4105+ node = self.find_dir(name)
4106+ if node and node.id != bld_ctx.bldnode.id:
4107+ if maxdepth:
4108+ for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
4109+ yield k
4110+
4111+ if bld:
4112+ for node in self.childs.values():
4113+ if node.id == bld_ctx.bldnode.id:
4114+ continue
4115+ if node.id & 3 == BUILD:
4116+ if accept_name(self, node.name):
4117+ yield node
4118+ raise StopIteration
4119+
4120+ def find_iter(self, in_pat=['*'], ex_pat=exclude_pats, prune_pat=prune_pats, src=True, bld=True, dir=False, maxdepth=25, flat=False):
4121+ """find nodes recursively, this returns everything but folders by default; same gotcha as ant_glob"""
4122+
4123+ if not (src or bld or dir):
4124+ raise StopIteration
4125+
4126+ if self.id & 3 != DIR:
4127+ raise StopIteration
4128+
4129+ in_pat = Utils.to_list(in_pat)
4130+ ex_pat = Utils.to_list(ex_pat)
4131+ prune_pat = Utils.to_list(prune_pat)
4132+
4133+ def accept_name(node, name):
4134+ for pat in ex_pat:
4135+ if fnmatch.fnmatchcase(name, pat):
4136+ return False
4137+ for pat in in_pat:
4138+ if fnmatch.fnmatchcase(name, pat):
4139+ return True
4140+ return False
4141+
4142+ def is_prune(node, name):
4143+ for pat in prune_pat:
4144+ if fnmatch.fnmatchcase(name, pat):
4145+ return True
4146+ return False
4147+
4148+ ret = self.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth)
4149+ if flat:
4150+ return " ".join([x.relpath_gen(self) for x in ret])
4151+
4152+ return ret
4153+
4154+ def ant_glob(self, *k, **kw):
4155+ """
4156+ known gotcha: will enumerate the files, but only if the folder exists in the source directory
4157+ """
4158+
4159+ src=kw.get('src', 1)
4160+ bld=kw.get('bld', 0)
4161+ dir=kw.get('dir', 0)
4162+ excl = kw.get('excl', exclude_regs)
4163+ incl = k and k[0] or kw.get('incl', '**')
4164+
4165+ def to_pat(s):
4166+ lst = Utils.to_list(s)
4167+ ret = []
4168+ for x in lst:
4169+ x = x.replace('//', '/')
4170+ if x.endswith('/'):
4171+ x += '**'
4172+ lst2 = x.split('/')
4173+ accu = []
4174+ for k in lst2:
4175+ if k == '**':
4176+ accu.append(k)
4177+ else:
4178+ k = k.replace('.', '[.]').replace('*', '.*').replace('?', '.')
4179+ k = '^%s$' % k
4180+ #print "pattern", k
4181+ accu.append(re.compile(k))
4182+ ret.append(accu)
4183+ return ret
4184+
4185+ def filtre(name, nn):
4186+ ret = []
4187+ for lst in nn:
4188+ if not lst:
4189+ pass
4190+ elif lst[0] == '**':
4191+ ret.append(lst)
4192+ if len(lst) > 1:
4193+ if lst[1].match(name):
4194+ ret.append(lst[2:])
4195+ else:
4196+ ret.append([])
4197+ elif lst[0].match(name):
4198+ ret.append(lst[1:])
4199+ return ret
4200+
4201+ def accept(name, pats):
4202+ nacc = filtre(name, pats[0])
4203+ nrej = filtre(name, pats[1])
4204+ if [] in nrej:
4205+ nacc = []
4206+ return [nacc, nrej]
4207+
4208+ def ant_iter(nodi, maxdepth=25, pats=[]):
4209+ nodi.__class__.bld.rescan(nodi)
4210+ tmp = list(nodi.__class__.bld.cache_dir_contents[nodi.id])
4211+ tmp.sort()
4212+ for name in tmp:
4213+ npats = accept(name, pats)
4214+ if npats and npats[0]:
4215+ accepted = [] in npats[0]
4216+ #print accepted, nodi, name
4217+
4218+ node = nodi.find_resource(name)
4219+ if node and accepted:
4220+ if src and node.id & 3 == FILE:
4221+ yield node
4222+ else:
4223+ node = nodi.find_dir(name)
4224+ if node and node.id != nodi.__class__.bld.bldnode.id:
4225+ if accepted and dir:
4226+ yield node
4227+ if maxdepth:
4228+ for k in ant_iter(node, maxdepth=maxdepth - 1, pats=npats):
4229+ yield k
4230+ if bld:
4231+ for node in nodi.childs.values():
4232+ if node.id == nodi.__class__.bld.bldnode.id:
4233+ continue
4234+ if node.id & 3 == BUILD:
4235+ npats = accept(node.name, pats)
4236+ if npats and npats[0] and [] in npats[0]:
4237+ yield node
4238+ raise StopIteration
4239+
4240+ ret = [x for x in ant_iter(self, pats=[to_pat(incl), to_pat(excl)])]
4241+
4242+ if kw.get('flat', True):
4243+ return " ".join([x.relpath_gen(self) for x in ret])
4244+
4245+ return ret
4246+
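+	# Illustrative sketch (not from the original file; the pattern and target are
+	# examples only) of an ant_glob() call from a wscript build() function. Note
+	# the gotcha above: only folders that exist in the source directory are
+	# enumerated, and flat=True (the default) returns a space-separated string
+	# rather than node objects.
+	#   def build(bld):
+	#       sources = bld.path.ant_glob('src/**/*.c')
+	#       bld(features='cc cprogram', source=sources, target='app')
+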
4247+ def update_build_dir(self, env=None):
4248+
4249+ if not env:
4250+			for env in self.__class__.bld.all_envs.values():
4251+ self.update_build_dir(env)
4252+ return
4253+
4254+ path = self.abspath(env)
4255+
4256+ lst = Utils.listdir(path)
4257+ try:
4258+ self.__class__.bld.cache_dir_contents[self.id].update(lst)
4259+ except KeyError:
4260+ self.__class__.bld.cache_dir_contents[self.id] = set(lst)
4261+ self.__class__.bld.cache_scanned_folders[self.id] = True
4262+
4263+ for k in lst:
4264+ npath = path + os.sep + k
4265+ st = os.stat(npath)
4266+ if stat.S_ISREG(st[stat.ST_MODE]):
4267+ ick = self.find_or_declare(k)
4268+ if not (ick.id in self.__class__.bld.node_sigs[env.variant()]):
4269+ self.__class__.bld.node_sigs[env.variant()][ick.id] = Constants.SIG_NIL
4270+ elif stat.S_ISDIR(st[stat.ST_MODE]):
4271+ child = self.find_dir(k)
4272+ if not child:
4273+ child = self.ensure_dir_node_from_path(k)
4274+ child.update_build_dir(env)
4275+
4276+
4277+class Nodu(Node):
4278+ pass
4279+
4280diff --git a/buildtools/wafadmin/Options.py b/buildtools/wafadmin/Options.py
4281new file mode 100644
4282index 0000000..c9ddcfe
4283--- /dev/null
4284+++ b/buildtools/wafadmin/Options.py
4285@@ -0,0 +1,288 @@
4286+#!/usr/bin/env python
4287+# encoding: utf-8
4288+# Scott Newton, 2005 (scottn)
4289+# Thomas Nagy, 2006 (ita)
4290+
4291+"Custom command-line options"
4292+
4293+import os, sys, imp, types, tempfile, optparse
4294+import Logs, Utils
4295+from Constants import *
4296+
4297+cmds = 'distclean configure build install clean uninstall check dist distcheck'.split()
4298+
4299+# TODO remove in waf 1.6 the following two
4300+commands = {}
4301+is_install = False
4302+
4303+options = {}
4304+arg_line = []
4305+launch_dir = ''
4306+tooldir = ''
4307+lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
4308+try: cache_global = os.path.abspath(os.environ['WAFCACHE'])
4309+except KeyError: cache_global = ''
4310+platform = Utils.unversioned_sys_platform()
4311+conf_file = 'conf-runs-%s-%d.pickle' % (platform, ABI)
4312+
4313+remote_repo = ['http://waf.googlecode.com/svn/']
4314+"""remote directory for the plugins"""
4315+
4316+
4317+# Such a command-line should work: JOBS=4 PREFIX=/opt/ DESTDIR=/tmp/ahoj/ waf configure
4318+default_prefix = os.environ.get('PREFIX')
4319+if not default_prefix:
4320+ if platform == 'win32':
4321+ d = tempfile.gettempdir()
4322+ default_prefix = d[0].upper() + d[1:]
4323+ # win32 preserves the case, but gettempdir does not
4324+ else: default_prefix = '/usr/local/'
4325+
4326+default_jobs = int(os.environ.get('JOBS', -1))
4327+if default_jobs < 1:
4328+ try:
4329+ if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
4330+ default_jobs = os.sysconf('SC_NPROCESSORS_ONLN')
4331+ else:
4332+ default_jobs = int(Utils.cmd_output(['sysctl', '-n', 'hw.ncpu']))
4333+ except:
4334+ if os.name == 'java': # platform.system() == 'Java'
4335+ from java.lang import Runtime
4336+ default_jobs = Runtime.getRuntime().availableProcessors()
4337+ else:
4338+ # environment var defined on win32
4339+ default_jobs = int(os.environ.get('NUMBER_OF_PROCESSORS', 1))
4340+
4341+default_destdir = os.environ.get('DESTDIR', '')
4342+
4343+def get_usage(self):
4344+ cmds_str = []
4345+ module = Utils.g_module
4346+ if module:
4347+ # create the help messages for commands
4348+ tbl = module.__dict__
4349+ keys = list(tbl.keys())
4350+ keys.sort()
4351+
4352+ if 'build' in tbl:
4353+ if not module.build.__doc__:
4354+ module.build.__doc__ = 'builds the project'
4355+ if 'configure' in tbl:
4356+ if not module.configure.__doc__:
4357+ module.configure.__doc__ = 'configures the project'
4358+
4359+ ban = ['set_options', 'init', 'shutdown']
4360+
4361+ optlst = [x for x in keys if not x in ban
4362+ and type(tbl[x]) is type(parse_args_impl)
4363+ and tbl[x].__doc__
4364+ and not x.startswith('_')]
4365+
4366+ just = max([len(x) for x in optlst])
4367+
4368+ for x in optlst:
4369+ cmds_str.append(' %s: %s' % (x.ljust(just), tbl[x].__doc__))
4370+ ret = '\n'.join(cmds_str)
4371+ else:
4372+ ret = ' '.join(cmds)
4373+ return '''waf [command] [options]
4374+
4375+Main commands (example: ./waf build -j4)
4376+%s
4377+''' % ret
4378+
4379+
4380+setattr(optparse.OptionParser, 'get_usage', get_usage)
4381+
4382+def create_parser(module=None):
4383+ Logs.debug('options: create_parser is called')
4384+ parser = optparse.OptionParser(conflict_handler="resolve", version = 'waf %s (%s)' % (WAFVERSION, WAFREVISION))
4385+
4386+ parser.formatter.width = Utils.get_term_cols()
4387+ p = parser.add_option
4388+
4389+ p('-j', '--jobs',
4390+ type = 'int',
4391+ default = default_jobs,
4392+		help = 'number of parallel jobs (%r)' % default_jobs,
4393+ dest = 'jobs')
4394+
4395+ p('-k', '--keep',
4396+ action = 'store_true',
4397+ default = False,
4398+ help = 'keep running happily on independent task groups',
4399+ dest = 'keep')
4400+
4401+ p('-v', '--verbose',
4402+ action = 'count',
4403+ default = 0,
4404+ help = 'verbosity level -v -vv or -vvv [default: 0]',
4405+ dest = 'verbose')
4406+
4407+ p('--nocache',
4408+ action = 'store_true',
4409+ default = False,
4410+ help = 'ignore the WAFCACHE (if set)',
4411+ dest = 'nocache')
4412+
4413+ p('--zones',
4414+ action = 'store',
4415+ default = '',
4416+ help = 'debugging zones (task_gen, deps, tasks, etc)',
4417+ dest = 'zones')
4418+
4419+ p('-p', '--progress',
4420+ action = 'count',
4421+ default = 0,
4422+ help = '-p: progress bar; -pp: ide output',
4423+ dest = 'progress_bar')
4424+
4425+ p('--targets',
4426+ action = 'store',
4427+ default = '',
4428+ help = 'build given task generators, e.g. "target1,target2"',
4429+ dest = 'compile_targets')
4430+
4431+ gr = optparse.OptionGroup(parser, 'configuration options')
4432+ parser.add_option_group(gr)
4433+ gr.add_option('-b', '--blddir',
4434+ action = 'store',
4435+ default = '',
4436+ help = 'out dir for the project (configuration)',
4437+ dest = 'blddir')
4438+ gr.add_option('-s', '--srcdir',
4439+ action = 'store',
4440+ default = '',
4441+ help = 'top dir for the project (configuration)',
4442+ dest = 'srcdir')
4443+ gr.add_option('--prefix',
4444+ help = 'installation prefix (configuration) [default: %r]' % default_prefix,
4445+ default = default_prefix,
4446+ dest = 'prefix')
4447+
4448+ gr.add_option('--download',
4449+ action = 'store_true',
4450+ default = False,
4451+ help = 'try to download the tools if missing',
4452+ dest = 'download')
4453+
4454+ gr = optparse.OptionGroup(parser, 'installation options')
4455+ parser.add_option_group(gr)
4456+ gr.add_option('--destdir',
4457+ help = 'installation root [default: %r]' % default_destdir,
4458+ default = default_destdir,
4459+ dest = 'destdir')
4460+ gr.add_option('-f', '--force',
4461+ action = 'store_true',
4462+ default = False,
4463+ help = 'force file installation',
4464+ dest = 'force')
4465+
4466+ return parser
4467+
4468+def parse_args_impl(parser, _args=None):
4469+ global options, commands, arg_line
4470+ (options, args) = parser.parse_args(args=_args)
4471+
4472+ arg_line = args
4473+ #arg_line = args[:] # copy
4474+
4475+ # By default, 'waf' is equivalent to 'waf build'
4476+ commands = {}
4477+ for var in cmds: commands[var] = 0
4478+ if not args:
4479+ commands['build'] = 1
4480+ args.append('build')
4481+
4482+ # Parse the command arguments
4483+ for arg in args:
4484+ commands[arg] = True
4485+
4486+ # the check thing depends on the build
4487+ if 'check' in args:
4488+ idx = args.index('check')
4489+ try:
4490+ bidx = args.index('build')
4491+ if bidx > idx:
4492+ raise ValueError('build before check')
4493+ except ValueError, e:
4494+ args.insert(idx, 'build')
4495+
4496+ if args[0] != 'init':
4497+ args.insert(0, 'init')
4498+
4499+ # TODO -k => -j0
4500+ if options.keep: options.jobs = 1
4501+ if options.jobs < 1: options.jobs = 1
4502+
4503+ if 'install' in sys.argv or 'uninstall' in sys.argv:
4504+ # absolute path only if set
4505+ options.destdir = options.destdir and os.path.abspath(os.path.expanduser(options.destdir))
4506+
4507+ Logs.verbose = options.verbose
4508+ Logs.init_log()
4509+
4510+ if options.zones:
4511+ Logs.zones = options.zones.split(',')
4512+ if not Logs.verbose: Logs.verbose = 1
4513+ elif Logs.verbose > 0:
4514+ Logs.zones = ['runner']
4515+ if Logs.verbose > 2:
4516+ Logs.zones = ['*']
4517+
4518+# TODO waf 1.6
4519+# 1. rename the class to OptionsContext
4520+# 2. instead of a class attribute, use a module (static 'parser')
4521+# 3. parse_args_impl was made in times when we did not know about binding new methods to classes
4522+
4523+class Handler(Utils.Context):
4524+ """loads wscript modules in folders for adding options
4525+ This class should be named 'OptionsContext'
4526+ A method named 'recurse' is bound when used by the module Scripting"""
4527+
4528+ parser = None
4529+ # make it possible to access the reference, like Build.bld
4530+
4531+ def __init__(self, module=None):
4532+ self.parser = create_parser(module)
4533+ self.cwd = os.getcwd()
4534+ Handler.parser = self
4535+
4536+ def add_option(self, *k, **kw):
4537+ self.parser.add_option(*k, **kw)
4538+
4539+ def add_option_group(self, *k, **kw):
4540+ return self.parser.add_option_group(*k, **kw)
4541+
4542+ def get_option_group(self, opt_str):
4543+ return self.parser.get_option_group(opt_str)
4544+
4545+ def sub_options(self, *k, **kw):
4546+ if not k: raise Utils.WscriptError('folder expected')
4547+ self.recurse(k[0], name='set_options')
4548+
4549+ def tool_options(self, *k, **kw):
4550+ Utils.python_24_guard()
4551+
4552+ if not k[0]:
4553+ raise Utils.WscriptError('invalid tool_options call %r %r' % (k, kw))
4554+ tools = Utils.to_list(k[0])
4555+
4556+ # TODO waf 1.6 remove the global variable tooldir
4557+ path = Utils.to_list(kw.get('tdir', kw.get('tooldir', tooldir)))
4558+
4559+ for tool in tools:
4560+ tool = tool.replace('++', 'xx')
4561+ if tool == 'java': tool = 'javaw'
4562+ if tool.lower() == 'unittest': tool = 'unittestw'
4563+ module = Utils.load_tool(tool, path)
4564+ try:
4565+ fun = module.set_options
4566+ except AttributeError:
4567+ pass
4568+ else:
4569+ fun(kw.get('option_group', self))
4570+
4571+ def parse_args(self, args=None):
4572+ parse_args_impl(self.parser, args)
4573+
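+# Illustrative sketch (not from the original file; option and tool names are
+# examples only) of the project-side hook that sub_options()/tool_options()
+# recurse into, i.e. a set_options function in a wscript:
+#
+#   def set_options(opt):
+#       opt.tool_options('compiler_cc')
+#       opt.add_option('--with-foo', action='store_true', default=False,
+#                      dest='foo', help='enable the hypothetical foo feature')
+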
4574diff --git a/buildtools/wafadmin/Runner.py b/buildtools/wafadmin/Runner.py
4575new file mode 100644
4576index 0000000..94db0fb
4577--- /dev/null
4578+++ b/buildtools/wafadmin/Runner.py
4579@@ -0,0 +1,236 @@
4580+#!/usr/bin/env python
4581+# encoding: utf-8
4582+# Thomas Nagy, 2005-2008 (ita)
4583+
4584+"Execute the tasks"
4585+
4586+import os, sys, random, time, threading, traceback
4587+try: from Queue import Queue
4588+except ImportError: from queue import Queue
4589+import Build, Utils, Logs, Options
4590+from Logs import debug, error
4591+from Constants import *
4592+
4593+GAP = 15
4594+
4595+run_old = threading.Thread.run
4596+def run(*args, **kwargs):
4597+ try:
4598+ run_old(*args, **kwargs)
4599+ except (KeyboardInterrupt, SystemExit):
4600+ raise
4601+ except:
4602+ sys.excepthook(*sys.exc_info())
4603+threading.Thread.run = run
4604+
4605+def process_task(tsk):
4606+
4607+ m = tsk.master
4608+ if m.stop:
4609+ m.out.put(tsk)
4610+ return
4611+
4612+ try:
4613+ tsk.generator.bld.printout(tsk.display())
4614+ if tsk.__class__.stat: ret = tsk.__class__.stat(tsk)
4615+ # actual call to task's run() function
4616+ else: ret = tsk.call_run()
4617+ except Exception, e:
4618+ tsk.err_msg = Utils.ex_stack()
4619+ tsk.hasrun = EXCEPTION
4620+
4621+ # TODO cleanup
4622+ m.error_handler(tsk)
4623+ m.out.put(tsk)
4624+ return
4625+
4626+ if ret:
4627+ tsk.err_code = ret
4628+ tsk.hasrun = CRASHED
4629+ else:
4630+ try:
4631+ tsk.post_run()
4632+ except Utils.WafError:
4633+ pass
4634+ except Exception:
4635+ tsk.err_msg = Utils.ex_stack()
4636+ tsk.hasrun = EXCEPTION
4637+ else:
4638+ tsk.hasrun = SUCCESS
4639+ if tsk.hasrun != SUCCESS:
4640+ m.error_handler(tsk)
4641+
4642+ m.out.put(tsk)
4643+
4644+class TaskConsumer(threading.Thread):
4645+ ready = Queue(0)
4646+ consumers = []
4647+
4648+ def __init__(self):
4649+ threading.Thread.__init__(self)
4650+ self.setDaemon(1)
4651+ self.start()
4652+
4653+ def run(self):
4654+ try:
4655+ self.loop()
4656+ except:
4657+ pass
4658+
4659+ def loop(self):
4660+ while 1:
4661+ tsk = TaskConsumer.ready.get()
4662+ process_task(tsk)
4663+
4664+class Parallel(object):
4665+ """
4666+ keep the consumer threads busy, and avoid consuming cpu cycles
4667+ when no more tasks can be added (end of the build, etc)
4668+ """
4669+ def __init__(self, bld, j=2):
4670+
4671+ # number of consumers
4672+ self.numjobs = j
4673+
4674+ self.manager = bld.task_manager
4675+ self.manager.current_group = 0
4676+
4677+ self.total = self.manager.total()
4678+
4679+ # tasks waiting to be processed - IMPORTANT
4680+ self.outstanding = []
4681+ self.maxjobs = MAXJOBS
4682+
4683+		# tasks that are waiting for another task to complete
4684+ self.frozen = []
4685+
4686+ # tasks returned by the consumers
4687+ self.out = Queue(0)
4688+
4689+ self.count = 0 # tasks not in the producer area
4690+
4691+ self.processed = 1 # progress indicator
4692+
4693+ self.stop = False # error condition to stop the build
4694+ self.error = False # error flag
4695+
4696+ def get_next(self):
4697+ "override this method to schedule the tasks in a particular order"
4698+ if not self.outstanding:
4699+ return None
4700+ return self.outstanding.pop(0)
4701+
4702+ def postpone(self, tsk):
4703+ "override this method to schedule the tasks in a particular order"
4704+ # TODO consider using a deque instead
4705+ if random.randint(0, 1):
4706+ self.frozen.insert(0, tsk)
4707+ else:
4708+ self.frozen.append(tsk)
4709+
4710+ def refill_task_list(self):
4711+ "called to set the next group of tasks"
4712+
4713+ while self.count > self.numjobs + GAP or self.count >= self.maxjobs:
4714+ self.get_out()
4715+
4716+ while not self.outstanding:
4717+ if self.count:
4718+ self.get_out()
4719+
4720+ if self.frozen:
4721+ self.outstanding += self.frozen
4722+ self.frozen = []
4723+ elif not self.count:
4724+ (jobs, tmp) = self.manager.get_next_set()
4725+ if jobs != None: self.maxjobs = jobs
4726+ if tmp: self.outstanding += tmp
4727+ break
4728+
4729+ def get_out(self):
4730+		"tasks handed out for execution are all collected back using get_out"
4731+ ret = self.out.get()
4732+ self.manager.add_finished(ret)
4733+ if not self.stop and getattr(ret, 'more_tasks', None):
4734+ self.outstanding += ret.more_tasks
4735+ self.total += len(ret.more_tasks)
4736+ self.count -= 1
4737+
4738+ def error_handler(self, tsk):
4739+ "by default, errors make the build stop (not thread safe so be careful)"
4740+ if not Options.options.keep:
4741+ self.stop = True
4742+ self.error = True
4743+
4744+ def start(self):
4745+ "execute the tasks"
4746+
4747+ if TaskConsumer.consumers:
4748+ # the worker pool is usually loaded lazily (see below)
4749+ # in case it is re-used with a different value of numjobs:
4750+ while len(TaskConsumer.consumers) < self.numjobs:
4751+ TaskConsumer.consumers.append(TaskConsumer())
4752+
4753+ while not self.stop:
4754+
4755+ self.refill_task_list()
4756+
4757+ # consider the next task
4758+ tsk = self.get_next()
4759+ if not tsk:
4760+ if self.count:
4761+ # tasks may add new ones after they are run
4762+ continue
4763+ else:
4764+ # no tasks to run, no tasks running, time to exit
4765+ break
4766+
4767+ if tsk.hasrun:
4768+ # if the task is marked as "run", just skip it
4769+ self.processed += 1
4770+ self.manager.add_finished(tsk)
4771+ continue
4772+
4773+ try:
4774+ st = tsk.runnable_status()
4775+ except Exception, e:
4776+ self.processed += 1
4777+ if self.stop and not Options.options.keep:
4778+ tsk.hasrun = SKIPPED
4779+ self.manager.add_finished(tsk)
4780+ continue
4781+ self.error_handler(tsk)
4782+ self.manager.add_finished(tsk)
4783+ tsk.hasrun = EXCEPTION
4784+ tsk.err_msg = Utils.ex_stack()
4785+ continue
4786+
4787+ if st == ASK_LATER:
4788+ self.postpone(tsk)
4789+ elif st == SKIP_ME:
4790+ self.processed += 1
4791+ tsk.hasrun = SKIPPED
4792+ self.manager.add_finished(tsk)
4793+ else:
4794+ # run me: put the task in ready queue
4795+ tsk.position = (self.processed, self.total)
4796+ self.count += 1
4797+ tsk.master = self
4798+ self.processed += 1
4799+
4800+ if self.numjobs == 1:
4801+ process_task(tsk)
4802+ else:
4803+ TaskConsumer.ready.put(tsk)
4804+ # create the consumer threads only if there is something to consume
4805+ if not TaskConsumer.consumers:
4806+ TaskConsumer.consumers = [TaskConsumer() for i in xrange(self.numjobs)]
4807+
4808+ # self.count represents the tasks that have been made available to the consumer threads
4809+ # collect all the tasks after an error else the message may be incomplete
4810+ while self.error and self.count:
4811+ self.get_out()
4812+
4813+ #print loop
4814+ assert (self.count == 0 or self.stop)
4815+
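+# Minimal standalone sketch (not waf code; assumes Python 2's Queue module, as
+# imported above) of the producer/consumer scheme that Parallel and TaskConsumer
+# implement: daemon threads block on a shared queue, the producer feeds work in
+# and collects the finished items from a second queue.
+#
+#   import threading, Queue
+#   ready, out = Queue.Queue(0), Queue.Queue(0)
+#   def consumer():
+#       while 1:
+#           job = ready.get()     # block until work is available
+#           out.put(job())        # run it and hand the result back
+#   for _ in range(4):
+#       t = threading.Thread(target=consumer); t.setDaemon(1); t.start()
+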
4816diff --git a/buildtools/wafadmin/Scripting.py b/buildtools/wafadmin/Scripting.py
4817new file mode 100644
4818index 0000000..d975bd9
4819--- /dev/null
4820+++ b/buildtools/wafadmin/Scripting.py
4821@@ -0,0 +1,586 @@
4822+#!/usr/bin/env python
4823+# encoding: utf-8
4824+# Thomas Nagy, 2005 (ita)
4825+
4826+"Module called for configuring, compiling and installing targets"
4827+
4828+import os, sys, shutil, traceback, datetime, inspect, errno
4829+
4830+import Utils, Configure, Build, Logs, Options, Environment, Task
4831+from Logs import error, warn, info
4832+from Constants import *
4833+
4834+g_gz = 'bz2'
4835+commands = []
4836+
4837+def prepare_impl(t, cwd, ver, wafdir):
4838+ Options.tooldir = [t]
4839+ Options.launch_dir = cwd
4840+
4841+ # some command-line options can be processed immediately
4842+ if '--version' in sys.argv:
4843+ opt_obj = Options.Handler()
4844+ opt_obj.curdir = cwd
4845+ opt_obj.parse_args()
4846+ sys.exit(0)
4847+
4848+ # now find the wscript file
4849+ msg1 = 'Waf: Please run waf from a directory containing a file named "%s" or run distclean' % WSCRIPT_FILE
4850+
4851+ # in theory projects can be configured in an autotool-like manner:
4852+ # mkdir build && cd build && ../waf configure && ../waf
4853+ build_dir_override = None
4854+ candidate = None
4855+
4856+ lst = os.listdir(cwd)
4857+
4858+ search_for_candidate = True
4859+ if WSCRIPT_FILE in lst:
4860+ candidate = cwd
4861+
4862+ elif 'configure' in sys.argv and not WSCRIPT_BUILD_FILE in lst:
4863+ # autotool-like configuration
4864+ calldir = os.path.abspath(os.path.dirname(sys.argv[0]))
4865+ if WSCRIPT_FILE in os.listdir(calldir):
4866+ candidate = calldir
4867+ search_for_candidate = False
4868+ else:
4869+ error('arg[0] directory does not contain a wscript file')
4870+ sys.exit(1)
4871+ build_dir_override = cwd
4872+
4873+ # climb up to find a script if it is not found
4874+ while search_for_candidate:
4875+ if len(cwd) <= 3:
4876+ break # stop at / or c:
4877+ dirlst = os.listdir(cwd)
4878+ if WSCRIPT_FILE in dirlst:
4879+ candidate = cwd
4880+ if 'configure' in sys.argv and candidate:
4881+ break
4882+ if Options.lockfile in dirlst:
4883+ env = Environment.Environment()
4884+ try:
4885+ env.load(os.path.join(cwd, Options.lockfile))
4886+ except:
4887+ error('could not load %r' % Options.lockfile)
4888+ try:
4889+ os.stat(env['cwd'])
4890+ except:
4891+ candidate = cwd
4892+ else:
4893+ candidate = env['cwd']
4894+ break
4895+ cwd = os.path.dirname(cwd) # climb up
4896+
4897+ if not candidate:
4898+ # check if the user only wanted to display the help
4899+ if '-h' in sys.argv or '--help' in sys.argv:
4900+ warn('No wscript file found: the help message may be incomplete')
4901+ opt_obj = Options.Handler()
4902+ opt_obj.curdir = cwd
4903+ opt_obj.parse_args()
4904+ else:
4905+ error(msg1)
4906+ sys.exit(0)
4907+
4908+ # We have found wscript, but there is no guarantee that it is valid
4909+ try:
4910+ os.chdir(candidate)
4911+ except OSError:
4912+ raise Utils.WafError("the folder %r is unreadable" % candidate)
4913+
4914+ # define the main module containing the functions init, shutdown, ..
4915+ Utils.set_main_module(os.path.join(candidate, WSCRIPT_FILE))
4916+
4917+ if build_dir_override:
4918+ d = getattr(Utils.g_module, BLDDIR, None)
4919+ if d:
4920+ # test if user has set the blddir in wscript.
4921+ msg = ' Overriding build directory %s with %s' % (d, build_dir_override)
4922+ warn(msg)
4923+ Utils.g_module.blddir = build_dir_override
4924+
4925+ # bind a few methods and classes by default
4926+
4927+ def set_def(obj, name=''):
4928+ n = name or obj.__name__
4929+ if not n in Utils.g_module.__dict__:
4930+ setattr(Utils.g_module, n, obj)
4931+
4932+ for k in [dist, distclean, distcheck, clean, install, uninstall]:
4933+ set_def(k)
4934+
4935+ set_def(Configure.ConfigurationContext, 'configure_context')
4936+
4937+ for k in ['build', 'clean', 'install', 'uninstall']:
4938+ set_def(Build.BuildContext, k + '_context')
4939+
4940+ # now parse the options from the user wscript file
4941+ opt_obj = Options.Handler(Utils.g_module)
4942+ opt_obj.curdir = candidate
4943+ try:
4944+ f = Utils.g_module.set_options
4945+ except AttributeError:
4946+ pass
4947+ else:
4948+ opt_obj.sub_options([''])
4949+ opt_obj.parse_args()
4950+
4951+ if not 'init' in Utils.g_module.__dict__:
4952+ Utils.g_module.init = Utils.nada
4953+ if not 'shutdown' in Utils.g_module.__dict__:
4954+ Utils.g_module.shutdown = Utils.nada
4955+
4956+ main()
4957+
4958+def prepare(t, cwd, ver, wafdir):
4959+ if WAFVERSION != ver:
4960+ msg = 'Version mismatch: waf %s <> wafadmin %s (wafdir %s)' % (ver, WAFVERSION, wafdir)
4961+ print('\033[91mError: %s\033[0m' % msg)
4962+ sys.exit(1)
4963+
4964+ #"""
4965+ try:
4966+ prepare_impl(t, cwd, ver, wafdir)
4967+ except Utils.WafError, e:
4968+ error(str(e))
4969+ sys.exit(1)
4970+ except KeyboardInterrupt:
4971+ Utils.pprint('RED', 'Interrupted')
4972+ sys.exit(68)
4973+ """
4974+ import cProfile, pstats
4975+ cProfile.runctx("import Scripting; Scripting.prepare_impl(t, cwd, ver, wafdir)", {},
4976+ {'t': t, 'cwd':cwd, 'ver':ver, 'wafdir':wafdir},
4977+ 'profi.txt')
4978+ p = pstats.Stats('profi.txt')
4979+ p.sort_stats('time').print_stats(45)
4980+ #"""
4981+
4982+def main():
4983+ global commands
4984+ commands = Options.arg_line[:]
4985+
4986+ while commands:
4987+ x = commands.pop(0)
4988+
4989+ ini = datetime.datetime.now()
4990+ if x == 'configure':
4991+ fun = configure
4992+ elif x == 'build':
4993+ fun = build
4994+ else:
4995+ fun = getattr(Utils.g_module, x, None)
4996+
4997+ if not fun:
4998+ raise Utils.WscriptError('No such command %r' % x)
4999+
5000+ ctx = getattr(Utils.g_module, x + '_context', Utils.Context)()
5001+
5002+ if x in ['init', 'shutdown', 'dist', 'distclean', 'distcheck']:
5003+ # compatibility TODO remove in waf 1.6
5004+ try:
5005+ fun(ctx)
5006+ except TypeError:
5007+ fun()
5008+ else:
5009+ fun(ctx)
5010+
5011+ ela = ''
5012+ if not Options.options.progress_bar:
5013+ ela = ' (%s)' % Utils.get_elapsed_time(ini)
5014+
5015+ if x != 'init' and x != 'shutdown':
5016+ info('%r finished successfully%s' % (x, ela))
5017+
5018+ if not commands and x != 'shutdown':
5019+ commands.append('shutdown')
5020+
5021+def configure(conf):
5022+
5023+ src = getattr(Options.options, SRCDIR, None)
5024+ if not src: src = getattr(Utils.g_module, SRCDIR, None)
5025+ if not src: src = getattr(Utils.g_module, 'top', None)
5026+ if not src:
5027+ src = '.'
5028+ incomplete_src = 1
5029+ src = os.path.abspath(src)
5030+
5031+ bld = getattr(Options.options, BLDDIR, None)
5032+ if not bld: bld = getattr(Utils.g_module, BLDDIR, None)
5033+ if not bld: bld = getattr(Utils.g_module, 'out', None)
5034+ if not bld:
5035+ bld = 'build'
5036+ incomplete_bld = 1
5037+ if bld == '.':
5038+ raise Utils.WafError('Setting blddir="." may cause distclean problems')
5039+ bld = os.path.abspath(bld)
5040+
5041+ try: os.makedirs(bld)
5042+ except OSError: pass
5043+
5044+ # It is not possible to compile specific targets in the configuration
5045+ # this may cause configuration errors if autoconfig is set
5046+ targets = Options.options.compile_targets
5047+ Options.options.compile_targets = None
5048+ Options.is_install = False
5049+
5050+ conf.srcdir = src
5051+ conf.blddir = bld
5052+ conf.post_init()
5053+
5054+ if 'incomplete_src' in vars():
5055+ conf.check_message_1('Setting srcdir to')
5056+ conf.check_message_2(src)
5057+ if 'incomplete_bld' in vars():
5058+ conf.check_message_1('Setting blddir to')
5059+ conf.check_message_2(bld)
5060+
5061+ # calling to main wscript's configure()
5062+ conf.sub_config([''])
5063+
5064+ conf.store()
5065+
5066+ # this will write a configure lock so that subsequent builds will
5067+ # consider the current path as the root directory (see prepare_impl).
5068+ # to remove: use 'waf distclean'
5069+ env = Environment.Environment()
5070+ env[BLDDIR] = bld
5071+ env[SRCDIR] = src
5072+ env['argv'] = sys.argv
5073+ env['commands'] = Options.commands
5074+ env['options'] = Options.options.__dict__
5075+
5076+ # conf.hash & conf.files hold wscript files paths and hash
5077+ # (used only by Configure.autoconfig)
5078+ env['hash'] = conf.hash
5079+ env['files'] = conf.files
5080+ env['environ'] = dict(conf.environ)
5081+ env['cwd'] = os.path.split(Utils.g_module.root_path)[0]
5082+
5083+ if Utils.g_module.root_path != src:
5084+ # in case the source dir is somewhere else
5085+ env.store(os.path.join(src, Options.lockfile))
5086+
5087+ env.store(Options.lockfile)
5088+
5089+ Options.options.compile_targets = targets
5090+
5091+def clean(bld):
5092+ '''removes the build files'''
5093+ try:
5094+ proj = Environment.Environment(Options.lockfile)
5095+ except IOError:
5096+ raise Utils.WafError('Nothing to clean (project not configured)')
5097+
5098+ bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
5099+ bld.load_envs()
5100+
5101+ bld.is_install = 0 # False
5102+
5103+ # read the scripts - and set the path to the wscript path (useful for srcdir='/foo/bar')
5104+ bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
5105+
5106+ try:
5107+ bld.clean()
5108+ finally:
5109+ bld.save()
5110+
5111+def check_configured(bld):
5112+ if not Configure.autoconfig:
5113+ return bld
5114+
5115+ conf_cls = getattr(Utils.g_module, 'configure_context', Utils.Context)
5116+ bld_cls = getattr(Utils.g_module, 'build_context', Utils.Context)
5117+
5118+ def reconf(proj):
5119+ back = (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose)
5120+
5121+ Options.commands = proj['commands']
5122+ Options.options.__dict__ = proj['options']
5123+ conf = conf_cls()
5124+ conf.environ = proj['environ']
5125+ configure(conf)
5126+
5127+ (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose) = back
5128+
5129+ try:
5130+ proj = Environment.Environment(Options.lockfile)
5131+ except IOError:
5132+ conf = conf_cls()
5133+ configure(conf)
5134+ else:
5135+ try:
5136+ bld = bld_cls()
5137+ bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
5138+ bld.load_envs()
5139+ except Utils.WafError:
5140+ reconf(proj)
5141+ return bld_cls()
5142+
5143+ try:
5144+ proj = Environment.Environment(Options.lockfile)
5145+ except IOError:
5146+ raise Utils.WafError('Auto-config: project does not configure (bug)')
5147+
5148+ h = 0
5149+ try:
5150+ for file in proj['files']:
5151+ if file.endswith('configure'):
5152+ h = hash((h, Utils.readf(file)))
5153+ else:
5154+ mod = Utils.load_module(file)
5155+ h = hash((h, mod.waf_hash_val))
5156+ except (OSError, IOError):
5157+ warn('Reconfiguring the project: a file is unavailable')
5158+ reconf(proj)
5159+ else:
5160+ if (h != proj['hash']):
5161+ warn('Reconfiguring the project: the configuration has changed')
5162+ reconf(proj)
5163+
5164+ return bld_cls()
5165+
5166+def install(bld):
5167+ '''installs the build files'''
5168+ bld = check_configured(bld)
5169+
5170+ Options.commands['install'] = True
5171+ Options.commands['uninstall'] = False
5172+ Options.is_install = True
5173+
5174+ bld.is_install = INSTALL
5175+
5176+ build_impl(bld)
5177+ bld.install()
5178+
5179+def uninstall(bld):
5180+ '''removes the installed files'''
5181+ Options.commands['install'] = False
5182+ Options.commands['uninstall'] = True
5183+ Options.is_install = True
5184+
5185+ bld.is_install = UNINSTALL
5186+
5187+ try:
5188+ def runnable_status(self):
5189+ return SKIP_ME
5190+ setattr(Task.Task, 'runnable_status_back', Task.Task.runnable_status)
5191+ setattr(Task.Task, 'runnable_status', runnable_status)
5192+
5193+ build_impl(bld)
5194+ bld.install()
5195+ finally:
5196+ setattr(Task.Task, 'runnable_status', Task.Task.runnable_status_back)
5197+
5198+def build(bld):
5199+ bld = check_configured(bld)
5200+
5201+ Options.commands['install'] = False
5202+ Options.commands['uninstall'] = False
5203+ Options.is_install = False
5204+
5205+ bld.is_install = 0 # False
5206+
5207+ return build_impl(bld)
5208+
5209+def build_impl(bld):
5210+ # compile the project and/or install the files
5211+ try:
5212+ proj = Environment.Environment(Options.lockfile)
5213+ except IOError:
5214+ raise Utils.WafError("Project not configured (run 'waf configure' first)")
5215+
5216+ bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
5217+ bld.load_envs()
5218+
5219+ info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
5220+ bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
5221+
5222+ # execute something immediately before the build starts
5223+ bld.pre_build()
5224+
5225+ try:
5226+ bld.compile()
5227+ finally:
5228+ if Options.options.progress_bar: print('')
5229+ info("Waf: Leaving directory `%s'" % bld.bldnode.abspath())
5230+
5231+ # execute something immediately after a successful build
5232+ bld.post_build()
5233+
5234+ bld.install()
5235+
5236+excludes = '.bzr .bzrignore .git .gitignore .svn CVS .cvsignore .arch-ids {arch} SCCS BitKeeper .hg _MTN _darcs Makefile Makefile.in config.log .gitattributes .hgignore .hgtags'.split()
5237+dist_exts = '~ .rej .orig .pyc .pyo .bak .tar.bz2 .tar.gz .zip .swp'.split()
5238+def dont_dist(name, src, build_dir):
5239+ global excludes, dist_exts
5240+
5241+ if (name.startswith(',,')
5242+ or name.startswith('++')
5243+ or name.startswith('.waf')
5244+ or (src == '.' and name == Options.lockfile)
5245+ or name in excludes
5246+ or name == build_dir
5247+ ):
5248+ return True
5249+
5250+ for ext in dist_exts:
5251+ if name.endswith(ext):
5252+ return True
5253+
5254+ return False
5255+
5256+# like shutil.copytree, except that it
5257+# excludes some files and raises exceptions immediately
5258+def copytree(src, dst, build_dir):
5259+ names = os.listdir(src)
5260+ os.makedirs(dst)
5261+ for name in names:
5262+ srcname = os.path.join(src, name)
5263+ dstname = os.path.join(dst, name)
5264+
5265+ if dont_dist(name, src, build_dir):
5266+ continue
5267+
5268+ if os.path.isdir(srcname):
5269+ copytree(srcname, dstname, build_dir)
5270+ else:
5271+ shutil.copy2(srcname, dstname)
5272+
5273+# TODO in waf 1.6, change this method if "srcdir == blddir" is allowed
5274+def distclean(ctx=None):
5275+ '''removes the build directory'''
5276+ global commands
5277+ lst = os.listdir('.')
5278+ for f in lst:
5279+ if f == Options.lockfile:
5280+ try:
5281+ proj = Environment.Environment(f)
5282+ except:
5283+ Logs.warn('could not read %r' % f)
5284+ continue
5285+
5286+ try:
5287+ shutil.rmtree(proj[BLDDIR])
5288+ except IOError:
5289+ pass
5290+ except OSError, e:
5291+ if e.errno != errno.ENOENT:
5292+ Logs.warn('project %r cannot be removed' % proj[BLDDIR])
5293+
5294+ try:
5295+ os.remove(f)
5296+ except OSError, e:
5297+ if e.errno != errno.ENOENT:
5298+ Logs.warn('file %r cannot be removed' % f)
5299+
5300+ # remove the local waf cache
5301+ if not commands and f.startswith('.waf'):
5302+ shutil.rmtree(f, ignore_errors=True)
5303+
5304+# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
5305+def dist(appname='', version=''):
5306+ '''makes a tarball for redistributing the sources'''
5307+	# returns the archive file name
5308+ import tarfile
5309+
5310+ if not appname: appname = Utils.g_module.APPNAME
5311+ if not version: version = Utils.g_module.VERSION
5312+
5313+ tmp_folder = appname + '-' + version
5314+ if g_gz in ['gz', 'bz2']:
5315+ arch_name = tmp_folder + '.tar.' + g_gz
5316+ else:
5317+ arch_name = tmp_folder + '.' + 'zip'
5318+
5319+ # remove the previous dir
5320+ try:
5321+ shutil.rmtree(tmp_folder)
5322+ except (OSError, IOError):
5323+ pass
5324+
5325+ # remove the previous archive
5326+ try:
5327+ os.remove(arch_name)
5328+ except (OSError, IOError):
5329+ pass
5330+
5331+ # copy the files into the temporary folder
5332+ blddir = getattr(Utils.g_module, BLDDIR, None)
5333+ if not blddir:
5334+ blddir = getattr(Utils.g_module, 'out', None)
5335+ copytree('.', tmp_folder, blddir)
5336+
5337+ # undocumented hook for additional cleanup
5338+ dist_hook = getattr(Utils.g_module, 'dist_hook', None)
5339+ if dist_hook:
5340+ back = os.getcwd()
5341+ os.chdir(tmp_folder)
5342+ try:
5343+ dist_hook()
5344+ finally:
5345+ # go back to the root directory
5346+ os.chdir(back)
5347+
5348+ if g_gz in ['gz', 'bz2']:
5349+ tar = tarfile.open(arch_name, 'w:' + g_gz)
5350+ tar.add(tmp_folder)
5351+ tar.close()
5352+ else:
5353+ Utils.zip_folder(tmp_folder, arch_name, tmp_folder)
5354+
5355+ try: from hashlib import sha1 as sha
5356+ except ImportError: from sha import sha
5357+ try:
5358+ digest = " (sha=%r)" % sha(Utils.readf(arch_name)).hexdigest()
5359+ except:
5360+ digest = ''
5361+
5362+ info('New archive created: %s%s' % (arch_name, digest))
5363+
5364+ if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
5365+ return arch_name
5366+
5367+# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
5368+def distcheck(appname='', version='', subdir=''):
5369+ '''checks if the sources compile (tarball from 'dist')'''
5370+ import tempfile, tarfile
5371+
5372+ if not appname: appname = Utils.g_module.APPNAME
5373+ if not version: version = Utils.g_module.VERSION
5374+
5375+ waf = os.path.abspath(sys.argv[0])
5376+ tarball = dist(appname, version)
5377+
5378+ path = appname + '-' + version
5379+
5380+ # remove any previous instance
5381+ if os.path.exists(path):
5382+ shutil.rmtree(path)
5383+
5384+ t = tarfile.open(tarball)
5385+ for x in t: t.extract(x)
5386+ t.close()
5387+
5388+ # build_path is the directory for the waf invocation
5389+ if subdir:
5390+ build_path = os.path.join(path, subdir)
5391+ else:
5392+ build_path = path
5393+
5394+ instdir = tempfile.mkdtemp('.inst', '%s-%s' % (appname, version))
5395+ ret = Utils.pproc.Popen([waf, 'configure', 'build', 'install', 'uninstall', '--destdir=' + instdir], cwd=build_path).wait()
5396+ if ret:
5397+ raise Utils.WafError('distcheck failed with code %i' % ret)
5398+
5399+ if os.path.exists(instdir):
5400+ raise Utils.WafError('distcheck succeeded, but files were left in %s' % instdir)
5401+
5402+ shutil.rmtree(path)
5403+
5404+# FIXME remove in Waf 1.6 (kept for compatibility)
5405+def add_subdir(dir, bld):
5406+ bld.recurse(dir, 'build')
5407+
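+# Illustrative usage sketch (assumes a project wscript that defines APPNAME and
+# VERSION, which dist()/distcheck() above read from Utils.g_module):
+#
+#   ./waf configure build            # normal build
+#   ./waf dist                       # creates APPNAME-VERSION.tar.bz2 (g_gz = 'bz2')
+#   ./waf distcheck                  # unpacks the tarball, then configure/build/install/uninstall
+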
5408diff --git a/buildtools/wafadmin/Task.py b/buildtools/wafadmin/Task.py
5409new file mode 100644
5410index 0000000..5cda2ec
5411--- /dev/null
5412+++ b/buildtools/wafadmin/Task.py
5413@@ -0,0 +1,1200 @@
5414+#!/usr/bin/env python
5415+# encoding: utf-8
5416+# Thomas Nagy, 2005-2008 (ita)
5417+
5418+"""
5419+Running tasks in parallel is a simple problem, but in practice it is more complicated:
5420+* dependencies discovered during the build (dynamic task creation)
5421+* dependencies discovered after files are compiled
5422+* the amount of tasks and dependencies (graph size) can be huge
5423+
5424+This is why the dependency management is split into three different levels:
5425+1. groups of tasks that all run after another group of tasks
5426+2. groups of tasks that can be run in parallel
5427+3. tasks that can run in parallel, but with possible unknown ad-hoc dependencies
5428+
5429+The point #1 represents a strict sequential order between groups of tasks, for example a compiler is produced
5430+and used to compile the rest, whereas #2 and #3 represent partial order constraints where #2 applies to the kind of task
5431+and #3 applies to the task instances.
5432+
5433+#1 is held by the task manager: ordered list of TaskGroups (see bld.add_group)
5434+#2 is held by the task groups and the task types: precedence after/before (topological sort),
5435+ and the constraints extracted from file extensions
5436+#3 is held by the tasks individually (attribute run_after),
5437+ and the scheduler (Runner.py) uses Task::runnable_status to reorder the tasks
5438+
5439+--
5440+
5441+To try, use something like this in your code:
5442+import Constants, Task
5443+Task.algotype = Constants.MAXPARALLEL
5444+
5445+--
5446+
5447+There are two concepts with the tasks (individual units of change):
5448+* dependency (if 1 is recompiled, recompile 2)
5449+* order (run 2 after 1)
5450+
5451+example 1: if t1 depends on t2 and t2 depends on t3 it is not necessary to make t1 depend on t3 (dependency is transitive)
5452+example 2: if t1 depends on a node produced by t2, it is not immediately obvious that t1 must run after t2 (order is not obvious)
5453+
5454+The role of the Task Manager is to give the tasks in order (groups of tasks that may be run in parallel one after the other)
5455+
5456+"""
5457+
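+# Illustrative sketch (not from the original file; task and generator names are
+# hypothetical) of the two concepts above, using APIs defined later in this
+# module and in Build.py:
+#
+#   link_task.set_run_after(compile_task)     # order only: run link after compile
+#   bld.add_group()                            # level 1: tasks added from now on
+#                                              # wait for the previous group
+#   Task.algotype = Constants.MAXPARALLEL      # switch the level 2/3 scheduling
+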
5458+import os, shutil, sys, re, random, datetime, tempfile, shlex
5459+from Utils import md5
5460+import Build, Runner, Utils, Node, Logs, Options
5461+from Logs import debug, warn, error
5462+from Constants import *
5463+
5464+algotype = NORMAL
5465+#algotype = JOBCONTROL
5466+#algotype = MAXPARALLEL
5467+
5468+COMPILE_TEMPLATE_SHELL = '''
5469+def f(task):
5470+ env = task.env
5471+ wd = getattr(task, 'cwd', None)
5472+ p = env.get_flat
5473+ cmd = \'\'\' %s \'\'\' % s
5474+ return task.exec_command(cmd, cwd=wd)
5475+'''
5476+
5477+COMPILE_TEMPLATE_NOSHELL = '''
5478+def f(task):
5479+ env = task.env
5480+ wd = getattr(task, 'cwd', None)
5481+ def to_list(xx):
5482+ if isinstance(xx, str): return [xx]
5483+ return xx
5484+ lst = []
5485+ %s
5486+ lst = [x for x in lst if x]
5487+ return task.exec_command(lst, cwd=wd)
5488+'''
5489+
5490+
5491+"""
5492+Enable different kinds of dependency algorithms:
5493+1 make groups: first compile all cpps and then compile all links (NORMAL)
5494+2 parallelize all (each link task run after its dependencies) (MAXPARALLEL)
5495+3 like 1 but provide additional constraints for the parallelization (MAXJOBS)
5496+
5497+In theory 1. will be faster than 2 for waf, but might be slower for builds
5498+The scheme 2 will not allow for running tasks one by one so it can cause disk thrashing on huge builds
5499+"""
5500+
5501+file_deps = Utils.nada
5502+"""
5503+Additional dependency pre-check may be added by replacing the function file_deps.
5504+e.g. extract_outputs, extract_deps below.
5505+"""
5506+
5507+class TaskManager(object):
5508+ """The manager is attached to the build object, it holds a list of TaskGroup"""
5509+ def __init__(self):
5510+ self.groups = []
5511+ self.tasks_done = []
5512+ self.current_group = 0
5513+ self.groups_names = {}
5514+
5515+ def group_name(self, g):
5516+ """name for the group g (utility)"""
5517+ if not isinstance(g, TaskGroup):
5518+ g = self.groups[g]
5519+ for x in self.groups_names:
5520+ if id(self.groups_names[x]) == id(g):
5521+ return x
5522+ return ''
5523+
5524+ def group_idx(self, tg):
5525+ """group the task generator tg is in"""
5526+ se = id(tg)
5527+ for i in range(len(self.groups)):
5528+ g = self.groups[i]
5529+ for t in g.tasks_gen:
5530+ if id(t) == se:
5531+ return i
5532+ return None
5533+
5534+ def get_next_set(self):
5535+ """return the next set of tasks to execute
5536+ the first parameter is the maximum amount of parallelization that may occur"""
5537+ ret = None
5538+ while not ret and self.current_group < len(self.groups):
5539+ ret = self.groups[self.current_group].get_next_set()
5540+ if ret: return ret
5541+ else:
5542+ self.groups[self.current_group].process_install()
5543+ self.current_group += 1
5544+ return (None, None)
5545+
5546+ def add_group(self, name=None, set=True):
5547+ #if self.groups and not self.groups[0].tasks:
5548+ # error('add_group: an empty group is already present')
5549+ g = TaskGroup()
5550+
5551+ if name and name in self.groups_names:
5552+ error('add_group: name %s already present' % name)
5553+ self.groups_names[name] = g
5554+ self.groups.append(g)
5555+ if set:
5556+ self.current_group = len(self.groups) - 1
5557+
5558+ def set_group(self, idx):
5559+ if isinstance(idx, str):
5560+ g = self.groups_names[idx]
5561+ for x in xrange(len(self.groups)):
5562+ if id(g) == id(self.groups[x]):
5563+ self.current_group = x
5564+ else:
5565+ self.current_group = idx
5566+
5567+ def add_task_gen(self, tgen):
5568+ if not self.groups: self.add_group()
5569+ self.groups[self.current_group].tasks_gen.append(tgen)
5570+
5571+ def add_task(self, task):
5572+ if not self.groups: self.add_group()
5573+ self.groups[self.current_group].tasks.append(task)
5574+
5575+ def total(self):
5576+ total = 0
5577+ if not self.groups: return 0
5578+ for group in self.groups:
5579+ total += len(group.tasks)
5580+ return total
5581+
5582+ def add_finished(self, tsk):
5583+ self.tasks_done.append(tsk)
5584+ bld = tsk.generator.bld
5585+ if bld.is_install:
5586+ f = None
5587+ if 'install' in tsk.__dict__:
5588+ f = tsk.__dict__['install']
5589+ # install=0 to prevent installation
5590+ if f: f(tsk)
5591+ else:
5592+ tsk.install()
5593+
5594+class TaskGroup(object):
5595+ "the compilation of one group does not begin until the previous group has finished (in the manager)"
5596+ def __init__(self):
5597+ self.tasks = [] # this list will be consumed
5598+ self.tasks_gen = []
5599+
5600+ self.cstr_groups = Utils.DefaultDict(list) # tasks having equivalent constraints
5601+ self.cstr_order = Utils.DefaultDict(set) # partial order between the cstr groups
5602+ self.temp_tasks = [] # tasks put on hold
5603+ self.ready = 0
5604+ self.post_funs = []
5605+
5606+ def reset(self):
5607+ "clears the state of the object (put back the tasks into self.tasks)"
5608+ for x in self.cstr_groups:
5609+ self.tasks += self.cstr_groups[x]
5610+ self.tasks = self.temp_tasks + self.tasks
5611+ self.temp_tasks = []
5612+ self.cstr_groups = Utils.DefaultDict(list)
5613+ self.cstr_order = Utils.DefaultDict(set)
5614+ self.ready = 0
5615+
5616+ def process_install(self):
5617+ for (f, k, kw) in self.post_funs:
5618+ f(*k, **kw)
5619+
5620+ def prepare(self):
5621+ "prepare the scheduling"
5622+ self.ready = 1
5623+ file_deps(self.tasks)
5624+ self.make_cstr_groups()
5625+ self.extract_constraints()
5626+
5627+ def get_next_set(self):
5628+ "next list of tasks to execute using max job settings, returns (maxjobs, task_list)"
5629+ global algotype
5630+ if algotype == NORMAL:
5631+ tasks = self.tasks_in_parallel()
5632+ maxj = MAXJOBS
5633+ elif algotype == JOBCONTROL:
5634+ (maxj, tasks) = self.tasks_by_max_jobs()
5635+ elif algotype == MAXPARALLEL:
5636+ tasks = self.tasks_with_inner_constraints()
5637+ maxj = MAXJOBS
5638+ else:
5639+ raise Utils.WafError("unknown algorithm type %s" % (algotype))
5640+
5641+ if not tasks: return ()
5642+ return (maxj, tasks)
5643+
5644+ def make_cstr_groups(self):
5645+ "unite the tasks that have similar constraints"
5646+ self.cstr_groups = Utils.DefaultDict(list)
5647+ for x in self.tasks:
5648+ h = x.hash_constraints()
5649+ self.cstr_groups[h].append(x)
5650+
5651+ def set_order(self, a, b):
5652+ self.cstr_order[a].add(b)
5653+
5654+ def compare_exts(self, t1, t2):
5655+ "extension production"
5656+ x = "ext_in"
5657+ y = "ext_out"
5658+ in_ = t1.attr(x, ())
5659+ out_ = t2.attr(y, ())
5660+ for k in in_:
5661+ if k in out_:
5662+ return -1
5663+ in_ = t2.attr(x, ())
5664+ out_ = t1.attr(y, ())
5665+ for k in in_:
5666+ if k in out_:
5667+ return 1
5668+ return 0
5669+
5670+ def compare_partial(self, t1, t2):
5671+ "partial relations after/before"
5672+ m = "after"
5673+ n = "before"
5674+ name = t2.__class__.__name__
5675+ if name in Utils.to_list(t1.attr(m, ())): return -1
5676+ elif name in Utils.to_list(t1.attr(n, ())): return 1
5677+ name = t1.__class__.__name__
5678+ if name in Utils.to_list(t2.attr(m, ())): return 1
5679+ elif name in Utils.to_list(t2.attr(n, ())): return -1
5680+ return 0
5681+
5682+ def extract_constraints(self):
5683+ "extract the parallelization constraints from the tasks with different constraints"
5684+ keys = self.cstr_groups.keys()
5685+ max = len(keys)
5686+ # hopefully the length of this list is short
5687+ for i in xrange(max):
5688+ t1 = self.cstr_groups[keys[i]][0]
5689+ for j in xrange(i + 1, max):
5690+ t2 = self.cstr_groups[keys[j]][0]
5691+
5692+ # add the constraints based on the comparisons
5693+ val = (self.compare_exts(t1, t2)
5694+ or self.compare_partial(t1, t2)
5695+ )
5696+ if val > 0:
5697+ self.set_order(keys[i], keys[j])
5698+ elif val < 0:
5699+ self.set_order(keys[j], keys[i])
5700+
5701+ def tasks_in_parallel(self):
5702+ "(NORMAL) next list of tasks that may be executed in parallel"
5703+
5704+ if not self.ready: self.prepare()
5705+
5706+ keys = self.cstr_groups.keys()
5707+
5708+ unconnected = []
5709+ remainder = []
5710+
5711+ for u in keys:
5712+ for k in self.cstr_order.values():
5713+ if u in k:
5714+ remainder.append(u)
5715+ break
5716+ else:
5717+ unconnected.append(u)
5718+
5719+ toreturn = []
5720+ for y in unconnected:
5721+ toreturn.extend(self.cstr_groups[y])
5722+
5723+ # remove stuff only after
5724+ for y in unconnected:
5725+ try: self.cstr_order.__delitem__(y)
5726+ except KeyError: pass
5727+ self.cstr_groups.__delitem__(y)
5728+
5729+ if not toreturn and remainder:
5730+ raise Utils.WafError("circular order constraint detected %r" % remainder)
5731+
5732+ return toreturn
5733+
5734+ def tasks_by_max_jobs(self):
5735+ "(JOBCONTROL) returns the tasks that can run in parallel with the max amount of jobs"
5736+ if not self.ready: self.prepare()
5737+ if not self.temp_tasks: self.temp_tasks = self.tasks_in_parallel()
5738+ if not self.temp_tasks: return (None, None)
5739+
5740+ maxjobs = MAXJOBS
5741+ ret = []
5742+ remaining = []
5743+ for t in self.temp_tasks:
5744+ m = getattr(t, "maxjobs", getattr(self.__class__, "maxjobs", MAXJOBS))
5745+ if m > maxjobs:
5746+ remaining.append(t)
5747+ elif m < maxjobs:
5748+ remaining += ret
5749+ ret = [t]
5750+ maxjobs = m
5751+ else:
5752+ ret.append(t)
5753+ self.temp_tasks = remaining
5754+ return (maxjobs, ret)
5755+
5756+ def tasks_with_inner_constraints(self):
5757+		"""(MAXPARALLEL) returns all tasks in this group, but adds the constraints on each task instance;
5758+		as an optimization, it might be desirable to discard the tasks which do not have to run"""
5759+ if not self.ready: self.prepare()
5760+
5761+ if getattr(self, "done", None): return None
5762+
5763+ for p in self.cstr_order:
5764+ for v in self.cstr_order[p]:
5765+ for m in self.cstr_groups[p]:
5766+ for n in self.cstr_groups[v]:
5767+ n.set_run_after(m)
5768+ self.cstr_order = Utils.DefaultDict(set)
5769+ self.cstr_groups = Utils.DefaultDict(list)
5770+ self.done = 1
5771+ return self.tasks[:] # make a copy
5772+
5773+class store_task_type(type):
5774+ "store the task types that have a name ending in _task into a map (remember the existing task types)"
5775+ def __init__(cls, name, bases, dict):
5776+ super(store_task_type, cls).__init__(name, bases, dict)
5777+ name = cls.__name__
5778+
5779+ if name.endswith('_task'):
5780+ name = name.replace('_task', '')
5781+ if name != 'TaskBase':
5782+ TaskBase.classes[name] = cls
5783+
5784+class TaskBase(object):
5785+ """Base class for all Waf tasks
5786+
5787+ The most important methods are (by usual order of call):
5788+ 1 runnable_status: ask the task if it should be run, skipped, or if we have to ask later
5789+ 2 __str__: string to display to the user
5790+ 3 run: execute the task
5791+ 4 post_run: after the task is run, update the cache about the task
5792+
5793+	This class should be seen as an interface; it provides only the minimum necessary for the scheduler,
5794+	so it does not do much.
5795+
5796+ For illustration purposes, TaskBase instances try to execute self.fun (if provided)
5797+ """
5798+
5799+ __metaclass__ = store_task_type
5800+
5801+ color = "GREEN"
5802+ maxjobs = MAXJOBS
5803+ classes = {}
5804+ stat = None
5805+
5806+ def __init__(self, *k, **kw):
5807+ self.hasrun = NOT_RUN
5808+
5809+ try:
5810+ self.generator = kw['generator']
5811+ except KeyError:
5812+ self.generator = self
5813+ self.bld = Build.bld
5814+
5815+ if kw.get('normal', 1):
5816+ self.generator.bld.task_manager.add_task(self)
5817+
5818+ def __repr__(self):
5819+ "used for debugging"
5820+ return '\n\t{task: %s %s}' % (self.__class__.__name__, str(getattr(self, "fun", "")))
5821+
5822+ def __str__(self):
5823+ "string to display to the user"
5824+ if hasattr(self, 'fun'):
5825+ return 'executing: %s\n' % self.fun.__name__
5826+ return self.__class__.__name__ + '\n'
5827+
5828+ def exec_command(self, *k, **kw):
5829+ "use this for executing commands from tasks"
5830+ # TODO in waf 1.6, eliminate bld.exec_command, and move the cwd processing to here
5831+ if self.env['env']:
5832+ kw['env'] = self.env['env']
5833+ return self.generator.bld.exec_command(*k, **kw)
5834+
5835+ def runnable_status(self):
5836+ "RUN_ME SKIP_ME or ASK_LATER"
5837+ return RUN_ME
5838+
5839+ def can_retrieve_cache(self):
5840+ return False
5841+
5842+ def call_run(self):
5843+ if self.can_retrieve_cache():
5844+ return 0
5845+ return self.run()
5846+
5847+ def run(self):
5848+ "called if the task must run"
5849+ if hasattr(self, 'fun'):
5850+ return self.fun(self)
5851+ return 0
5852+
5853+ def post_run(self):
5854+ "update the dependency tree (node stats)"
5855+ pass
5856+
5857+ def display(self):
5858+ "print either the description (using __str__) or the progress bar or the ide output"
5859+ col1 = Logs.colors(self.color)
5860+ col2 = Logs.colors.NORMAL
5861+
5862+ if Options.options.progress_bar == 1:
5863+ return self.generator.bld.progress_line(self.position[0], self.position[1], col1, col2)
5864+
5865+ if Options.options.progress_bar == 2:
5866+ ela = Utils.get_elapsed_time(self.generator.bld.ini)
5867+ try:
5868+ ins = ','.join([n.name for n in self.inputs])
5869+ except AttributeError:
5870+ ins = ''
5871+ try:
5872+ outs = ','.join([n.name for n in self.outputs])
5873+ except AttributeError:
5874+ outs = ''
5875+ return '|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n' % (self.position[1], self.position[0], ins, outs, ela)
5876+
5877+ total = self.position[1]
5878+ n = len(str(total))
5879+ fs = '[%%%dd/%%%dd] %%s%%s%%s' % (n, n)
5880+ return fs % (self.position[0], self.position[1], col1, str(self), col2)
5881+
5882+ def attr(self, att, default=None):
5883+		"retrieve an attribute from the instance or from the class (micro-optimization here)"
5884+ ret = getattr(self, att, self)
5885+ if ret is self: return getattr(self.__class__, att, default)
5886+ return ret
5887+
5888+ def hash_constraints(self):
5889+ "identify a task type for all the constraints relevant for the scheduler: precedence, file production"
5890+ a = self.attr
5891+ sum = hash((self.__class__.__name__,
5892+ str(a('before', '')),
5893+ str(a('after', '')),
5894+ str(a('ext_in', '')),
5895+ str(a('ext_out', '')),
5896+ self.__class__.maxjobs))
5897+ return sum
5898+
5899+ def format_error(self):
5900+ "error message to display to the user (when a build fails)"
5901+ if getattr(self, "err_msg", None):
5902+ return self.err_msg
5903+ elif self.hasrun == CRASHED:
5904+ try:
5905+ return " -> task failed (err #%d): %r" % (self.err_code, self)
5906+ except AttributeError:
5907+ return " -> task failed: %r" % self
5908+ elif self.hasrun == MISSING:
5909+ return " -> missing files: %r" % self
5910+ else:
5911+ return ''
5912+
5913+ def install(self):
5914+ """
5915+ installation is performed by looking at the task attributes:
5916+ * install_path: installation path like "${PREFIX}/bin"
5917+ * filename: install the first node in the outputs as a file with a particular name, be certain to give os.sep
5918+ * chmod: permissions
5919+ """
5920+ bld = self.generator.bld
5921+ d = self.attr('install')
5922+
5923+ if self.attr('install_path'):
5924+ lst = [a.relpath_gen(bld.srcnode) for a in self.outputs]
5925+ perm = self.attr('chmod', O644)
5926+ if self.attr('src'):
5927+ # if src is given, install the sources too
5928+ lst += [a.relpath_gen(bld.srcnode) for a in self.inputs]
5929+ if self.attr('filename'):
5930+ dir = self.install_path.rstrip(os.sep) + os.sep + self.attr('filename')
5931+ bld.install_as(dir, lst[0], self.env, perm)
5932+ else:
5933+ bld.install_files(self.install_path, lst, self.env, perm)
5934+
5935+class Task(TaskBase):
5936+	"""Compared to the limited parent class, this version adds:
5937+ * file system interaction: input and output nodes
5938+ * persistence: do not re-execute tasks that have already run
5939+ * caching: same files can be saved and retrieved from a cache directory
5940+ * dependencies:
5941+ implicit, like .c files depending on .h files
5942+ explicit, like the input nodes or the dep_nodes
5943+ environment variables, like the CXXFLAGS in self.env
5944+ """
5945+ vars = []
5946+ def __init__(self, env, **kw):
5947+ TaskBase.__init__(self, **kw)
5948+ self.env = env
5949+
5950+ # inputs and outputs are nodes
5951+ # use setters when possible
5952+ self.inputs = []
5953+ self.outputs = []
5954+
5955+ self.dep_nodes = []
5956+ self.run_after = []
5957+
5958+ # Additionally, you may define the following
5959+ #self.dep_vars = 'PREFIX DATADIR'
5960+
5961+ def __str__(self):
5962+ "string to display to the user"
5963+ env = self.env
5964+ src_str = ' '.join([a.nice_path(env) for a in self.inputs])
5965+ tgt_str = ' '.join([a.nice_path(env) for a in self.outputs])
5966+ if self.outputs: sep = ' -> '
5967+ else: sep = ''
5968+ return '%s: %s%s%s\n' % (self.__class__.__name__.replace('_task', ''), src_str, sep, tgt_str)
5969+
5970+ def __repr__(self):
5971+ return "".join(['\n\t{task: ', self.__class__.__name__, " ", ",".join([x.name for x in self.inputs]), " -> ", ",".join([x.name for x in self.outputs]), '}'])
5972+
5973+ def unique_id(self):
5974+ "get a unique id: hash the node paths, the variant, the class, the function"
5975+ try:
5976+ return self.uid
5977+ except AttributeError:
5978+			"this is not a real hot zone, but we want to avoid surprises here"
5979+ m = md5()
5980+ up = m.update
5981+ up(self.__class__.__name__)
5982+ up(self.env.variant())
5983+ p = None
5984+ for x in self.inputs + self.outputs:
5985+ if p != x.parent.id:
5986+ p = x.parent.id
5987+ up(x.parent.abspath())
5988+ up(x.name)
5989+ self.uid = m.digest()
5990+ return self.uid
5991+
5992+ def set_inputs(self, inp):
5993+ if isinstance(inp, list): self.inputs += inp
5994+ else: self.inputs.append(inp)
5995+
5996+ def set_outputs(self, out):
5997+ if isinstance(out, list): self.outputs += out
5998+ else: self.outputs.append(out)
5999+
6000+ def set_run_after(self, task):
6001+ "set (scheduler) order on another task"
6002+ # TODO: handle list or object
6003+ assert isinstance(task, TaskBase)
6004+ self.run_after.append(task)
6005+
6006+ def add_file_dependency(self, filename):
6007+ "TODO user-provided file dependencies"
6008+ node = self.generator.bld.path.find_resource(filename)
6009+ self.dep_nodes.append(node)
6010+
6011+ def signature(self):
6012+ # compute the result one time, and suppose the scan_signature will give the good result
6013+ try: return self.cache_sig[0]
6014+ except AttributeError: pass
6015+
6016+ self.m = md5()
6017+
6018+ # explicit deps
6019+ exp_sig = self.sig_explicit_deps()
6020+
6021+ # env vars
6022+ var_sig = self.sig_vars()
6023+
6024+ # implicit deps
6025+
6026+ imp_sig = SIG_NIL
6027+ if self.scan:
6028+ try:
6029+ imp_sig = self.sig_implicit_deps()
6030+ except ValueError:
6031+ return self.signature()
6032+
6033+ # we now have the signature (first element) and the details (for debugging)
6034+ ret = self.m.digest()
6035+ self.cache_sig = (ret, exp_sig, imp_sig, var_sig)
6036+ return ret
6037+
6038+ def runnable_status(self):
6039+ "SKIP_ME RUN_ME or ASK_LATER"
6040+ #return 0 # benchmarking
6041+
6042+ if self.inputs and (not self.outputs):
6043+ if not getattr(self.__class__, 'quiet', None):
6044+ warn("invalid task (no inputs OR outputs): override in a Task subclass or set the attribute 'quiet' %r" % self)
6045+
6046+ for t in self.run_after:
6047+ if not t.hasrun:
6048+ return ASK_LATER
6049+
6050+ env = self.env
6051+ bld = self.generator.bld
6052+
6053+ # first compute the signature
6054+ new_sig = self.signature()
6055+
6056+ # compare the signature to a signature computed previously
6057+ key = self.unique_id()
6058+ try:
6059+ prev_sig = bld.task_sigs[key][0]
6060+ except KeyError:
6061+ debug("task: task %r must run as it was never run before or the task code changed", self)
6062+ return RUN_ME
6063+
6064+ # compare the signatures of the outputs
6065+ for node in self.outputs:
6066+ variant = node.variant(env)
6067+ try:
6068+ if bld.node_sigs[variant][node.id] != new_sig:
6069+ return RUN_ME
6070+ except KeyError:
6071+ debug("task: task %r must run as the output nodes do not exist", self)
6072+ return RUN_ME
6073+
6074+ # debug if asked to
6075+ if Logs.verbose: self.debug_why(bld.task_sigs[key])
6076+
6077+ if new_sig != prev_sig:
6078+ return RUN_ME
6079+ return SKIP_ME
6080+
6081+ def post_run(self):
6082+ "called after a successful task run"
6083+ bld = self.generator.bld
6084+ env = self.env
6085+ sig = self.signature()
6086+ ssig = sig.encode('hex')
6087+
6088+ variant = env.variant()
6089+ for node in self.outputs:
6090+ # check if the node exists ..
6091+ try:
6092+ os.stat(node.abspath(env))
6093+ except OSError:
6094+ self.hasrun = MISSING
6095+ self.err_msg = '-> missing file: %r' % node.abspath(env)
6096+ raise Utils.WafError
6097+
6098+ # important, store the signature for the next run
6099+ bld.node_sigs[variant][node.id] = sig
6100+ bld.task_sigs[self.unique_id()] = self.cache_sig
6101+
6102+ # file caching, if possible
6103+ # try to avoid data corruption as much as possible
6104+ if not Options.cache_global or Options.options.nocache or not self.outputs:
6105+ return None
6106+
6107+ if getattr(self, 'cached', None):
6108+ return None
6109+
6110+ dname = os.path.join(Options.cache_global, ssig)
6111+ tmpdir = tempfile.mkdtemp(prefix=Options.cache_global + os.sep + 'waf')
6112+
6113+ try:
6114+ shutil.rmtree(dname)
6115+ except:
6116+ pass
6117+
6118+ try:
6119+ i = 0
6120+ for node in self.outputs:
6121+ variant = node.variant(env)
6122+ dest = os.path.join(tmpdir, str(i) + node.name)
6123+ shutil.copy2(node.abspath(env), dest)
6124+ i += 1
6125+ except (OSError, IOError):
6126+ try:
6127+ shutil.rmtree(tmpdir)
6128+ except:
6129+ pass
6130+ else:
6131+ try:
6132+ os.rename(tmpdir, dname)
6133+ except OSError:
6134+ try:
6135+ shutil.rmtree(tmpdir)
6136+ except:
6137+ pass
6138+ else:
6139+ try:
6140+ os.chmod(dname, O755)
6141+ except:
6142+ pass
6143+
6144+ def can_retrieve_cache(self):
6145+ """
6146+ Retrieve build nodes from the cache
6147+ update the file timestamps to help cleaning the least used entries from the cache
6148+ additionally, set an attribute 'cached' to avoid re-creating the same cache files
6149+
6150+ suppose there are files in cache/dir1/file1 and cache/dir2/file2
6151+ first, read the timestamp of dir1
6152+ then try to copy the files
6153+ then look at the timestamp again, if it has changed, the data may have been corrupt (cache update by another process)
6154+ should an exception occur, ignore the data
6155+ """
6156+ if not Options.cache_global or Options.options.nocache or not self.outputs:
6157+ return None
6158+
6159+ env = self.env
6160+ sig = self.signature()
6161+ ssig = sig.encode('hex')
6162+
6163+ # first try to access the cache folder for the task
6164+ dname = os.path.join(Options.cache_global, ssig)
6165+ try:
6166+ t1 = os.stat(dname).st_mtime
6167+ except OSError:
6168+ return None
6169+
6170+ i = 0
6171+ for node in self.outputs:
6172+ variant = node.variant(env)
6173+
6174+ orig = os.path.join(dname, str(i) + node.name)
6175+ try:
6176+ shutil.copy2(orig, node.abspath(env))
6177+ # mark the cache file as used recently (modified)
6178+ os.utime(orig, None)
6179+ except (OSError, IOError):
6180+ debug('task: failed retrieving file')
6181+ return None
6182+ i += 1
6183+
6184+ # is it the same folder?
6185+ try:
6186+ t2 = os.stat(dname).st_mtime
6187+ except OSError:
6188+ return None
6189+
6190+ if t1 != t2:
6191+ return None
6192+
6193+ for node in self.outputs:
6194+ self.generator.bld.node_sigs[variant][node.id] = sig
6195+ if Options.options.progress_bar < 1:
6196+ self.generator.bld.printout('restoring from cache %r\n' % node.bldpath(env))
6197+
6198+ self.cached = True
6199+ return 1
6200+
6201+ def debug_why(self, old_sigs):
6202+ "explains why a task is run"
6203+
6204+ new_sigs = self.cache_sig
6205+ def v(x):
6206+ return x.encode('hex')
6207+
6208+ debug("Task %r", self)
6209+ msgs = ['Task must run', '* Source file or manual dependency', '* Implicit dependency', '* Environment variable']
6210+ tmp = 'task: -> %s: %s %s'
6211+ for x in xrange(len(msgs)):
6212+ if (new_sigs[x] != old_sigs[x]):
6213+ debug(tmp, msgs[x], v(old_sigs[x]), v(new_sigs[x]))
6214+
6215+ def sig_explicit_deps(self):
6216+ bld = self.generator.bld
6217+ up = self.m.update
6218+
6219+ # the inputs
6220+ for x in self.inputs + getattr(self, 'dep_nodes', []):
6221+ if not x.parent.id in bld.cache_scanned_folders:
6222+ bld.rescan(x.parent)
6223+
6224+ variant = x.variant(self.env)
6225+ try:
6226+ up(bld.node_sigs[variant][x.id])
6227+ except KeyError:
6228+ raise Utils.WafError('Missing node signature for %r (required by %r)' % (x, self))
6229+
6230+ # manual dependencies, they can slow down the builds
6231+ if bld.deps_man:
6232+ additional_deps = bld.deps_man
6233+ for x in self.inputs + self.outputs:
6234+ try:
6235+ d = additional_deps[x.id]
6236+ except KeyError:
6237+ continue
6238+
6239+ for v in d:
6240+ if isinstance(v, Node.Node):
6241+ bld.rescan(v.parent)
6242+ variant = v.variant(self.env)
6243+ try:
6244+ v = bld.node_sigs[variant][v.id]
6245+ except KeyError:
6246+ raise Utils.WafError('Missing node signature for %r (required by %r)' % (v, self))
6247+ elif hasattr(v, '__call__'):
6248+ v = v() # dependency is a function, call it
6249+ up(v)
6250+
6251+ for x in self.dep_nodes:
6252+ v = bld.node_sigs[x.variant(self.env)][x.id]
6253+ up(v)
6254+
6255+ return self.m.digest()
6256+
6257+ def sig_vars(self):
6258+ bld = self.generator.bld
6259+ env = self.env
6260+
6261+ # dependencies on the environment vars
6262+ act_sig = bld.hash_env_vars(env, self.__class__.vars)
6263+ self.m.update(act_sig)
6264+
6265+ # additional variable dependencies, if provided
6266+ dep_vars = getattr(self, 'dep_vars', None)
6267+ if dep_vars:
6268+ self.m.update(bld.hash_env_vars(env, dep_vars))
6269+
6270+ return self.m.digest()
6271+
6272+ #def scan(self, node):
6273+ # """this method returns a tuple containing:
6274+ # * a list of nodes corresponding to real files
6275+ # * a list of names for files not found in path_lst
6276+	#	the input parameters may have more parameters than the ones used below
6277+ # """
6278+ # return ((), ())
6279+ scan = None
6280+
6281+ # compute the signature, recompute it if there is no match in the cache
6282+ def sig_implicit_deps(self):
6283+		"the signature obtained may be stale if the files have changed, so we do it in two steps"
6284+
6285+ bld = self.generator.bld
6286+
6287+ # get the task signatures from previous runs
6288+ key = self.unique_id()
6289+ prev_sigs = bld.task_sigs.get(key, ())
6290+ if prev_sigs:
6291+ try:
6292+ # for issue #379
6293+ if prev_sigs[2] == self.compute_sig_implicit_deps():
6294+ return prev_sigs[2]
6295+ except (KeyError, OSError):
6296+ pass
6297+ del bld.task_sigs[key]
6298+ raise ValueError('rescan')
6299+
6300+ # no previous run or the signature of the dependencies has changed, rescan the dependencies
6301+ (nodes, names) = self.scan()
6302+ if Logs.verbose:
6303+ debug('deps: scanner for %s returned %s %s', str(self), str(nodes), str(names))
6304+
6305+ # store the dependencies in the cache
6306+ bld.node_deps[key] = nodes
6307+ bld.raw_deps[key] = names
6308+
6309+ # recompute the signature and return it
6310+ try:
6311+ sig = self.compute_sig_implicit_deps()
6312+ except KeyError:
6313+ try:
6314+ nodes = []
6315+ for k in bld.node_deps.get(self.unique_id(), []):
6316+ if k.id & 3 == 2: # Node.FILE:
6317+ if not k.id in bld.node_sigs[0]:
6318+ nodes.append(k)
6319+ else:
6320+ if not k.id in bld.node_sigs[self.env.variant()]:
6321+ nodes.append(k)
6322+ except:
6323+ nodes = '?'
6324+ raise Utils.WafError('Missing node signature for %r (for implicit dependencies %r)' % (nodes, self))
6325+
6326+ return sig
6327+
6328+ def compute_sig_implicit_deps(self):
6329+ """it is intended for .cpp and inferred .h files
6330+ there is a single list (no tree traversal)
6331+ this is the hot spot so ... do not touch"""
6332+ upd = self.m.update
6333+
6334+ bld = self.generator.bld
6335+ tstamp = bld.node_sigs
6336+ env = self.env
6337+
6338+ for k in bld.node_deps.get(self.unique_id(), []):
6339+ # unlikely but necessary if it happens
6340+ if not k.parent.id in bld.cache_scanned_folders:
6341+ # if the parent folder is removed, an OSError may be thrown
6342+ bld.rescan(k.parent)
6343+
6344+ # if the parent folder is removed, a KeyError will be thrown
6345+ if k.id & 3 == 2: # Node.FILE:
6346+ upd(tstamp[0][k.id])
6347+ else:
6348+ upd(tstamp[env.variant()][k.id])
6349+
6350+ return self.m.digest()
6351+
6352+def funex(c):
6353+ dc = {}
6354+ exec(c, dc)
6355+ return dc['f']
6356+
6357+reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})", re.M)
6358+def compile_fun_shell(name, line):
6359+ """Compiles a string (once) into a function, eg:
6360+ simple_task_type('c++', '${CXX} -o ${TGT[0]} ${SRC} -I ${SRC[0].parent.bldpath()}')
6361+
6362+ The env variables (CXX, ..) on the task must not hold dicts (order)
6363+ The reserved keywords TGT and SRC represent the task input and output nodes
6364+
6365+ quick test:
6366+ bld(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"')
6367+ """
6368+
6369+ extr = []
6370+ def repl(match):
6371+ g = match.group
6372+ if g('dollar'): return "$"
6373+ elif g('backslash'): return '\\\\'
6374+ elif g('subst'): extr.append((g('var'), g('code'))); return "%s"
6375+ return None
6376+
6377+ line = reg_act.sub(repl, line) or line
6378+
6379+ parm = []
6380+ dvars = []
6381+ app = parm.append
6382+ for (var, meth) in extr:
6383+ if var == 'SRC':
6384+ if meth: app('task.inputs%s' % meth)
6385+ else: app('" ".join([a.srcpath(env) for a in task.inputs])')
6386+ elif var == 'TGT':
6387+ if meth: app('task.outputs%s' % meth)
6388+ else: app('" ".join([a.bldpath(env) for a in task.outputs])')
6389+ else:
6390+ if not var in dvars: dvars.append(var)
6391+ app("p('%s')" % var)
6392+ if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm))
6393+ else: parm = ''
6394+
6395+ c = COMPILE_TEMPLATE_SHELL % (line, parm)
6396+
6397+ debug('action: %s', c)
6398+ return (funex(c), dvars)
6399+
6400+def compile_fun_noshell(name, line):
6401+
6402+ extr = []
6403+ def repl(match):
6404+ g = match.group
6405+ if g('dollar'): return "$"
6406+ elif g('subst'): extr.append((g('var'), g('code'))); return "<<|@|>>"
6407+ return None
6408+
6409+ line2 = reg_act.sub(repl, line)
6410+ params = line2.split('<<|@|>>')
6411+
6412+ buf = []
6413+ dvars = []
6414+ app = buf.append
6415+ for x in xrange(len(extr)):
6416+ params[x] = params[x].strip()
6417+ if params[x]:
6418+ app("lst.extend(%r)" % params[x].split())
6419+ (var, meth) = extr[x]
6420+ if var == 'SRC':
6421+ if meth: app('lst.append(task.inputs%s)' % meth)
6422+ else: app("lst.extend([a.srcpath(env) for a in task.inputs])")
6423+ elif var == 'TGT':
6424+ if meth: app('lst.append(task.outputs%s)' % meth)
6425+ else: app("lst.extend([a.bldpath(env) for a in task.outputs])")
6426+ else:
6427+ app('lst.extend(to_list(env[%r]))' % var)
6428+ if not var in dvars: dvars.append(var)
6429+
6430+ if params[-1]:
6431+ app("lst.extend(%r)" % shlex.split(params[-1]))
6432+
6433+ fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
6434+ debug('action: %s', fun)
6435+ return (funex(fun), dvars)
6436+
6437+def compile_fun(name, line, shell=None):
6438+ "commands can be launched by the shell or not"
6439+ if line.find('<') > 0 or line.find('>') > 0 or line.find('&&') > 0:
6440+ shell = True
6441+ #else:
6442+ # shell = False
6443+
6444+ if shell is None:
6445+ if sys.platform == 'win32':
6446+ shell = False
6447+ else:
6448+ shell = True
6449+
6450+ if shell:
6451+ return compile_fun_shell(name, line)
6452+ else:
6453+ return compile_fun_noshell(name, line)
6454+
6455+def simple_task_type(name, line, color='GREEN', vars=[], ext_in=[], ext_out=[], before=[], after=[], shell=None):
6456+ """return a new Task subclass with the function run compiled from the line given"""
6457+ (fun, dvars) = compile_fun(name, line, shell)
6458+ fun.code = line
6459+ return task_type_from_func(name, fun, vars or dvars, color, ext_in, ext_out, before, after)
6460+
6461+def task_type_from_func(name, func, vars=[], color='GREEN', ext_in=[], ext_out=[], before=[], after=[]):
6462+ """return a new Task subclass with the function run compiled from the line given"""
6463+ params = {
6464+ 'run': func,
6465+ 'vars': vars,
6466+ 'color': color,
6467+ 'name': name,
6468+ 'ext_in': Utils.to_list(ext_in),
6469+ 'ext_out': Utils.to_list(ext_out),
6470+ 'before': Utils.to_list(before),
6471+ 'after': Utils.to_list(after),
6472+ }
6473+
6474+ cls = type(Task)(name, (Task,), params)
6475+ TaskBase.classes[name] = cls
6476+ return cls
6477+
6478+def always_run(cls):
6479+ """Set all task instances of this class to be executed whenever a build is started
6480+	The task signature is calculated, but the result of the comparison between
6481+ task signatures is bypassed
6482+ """
6483+ old = cls.runnable_status
6484+ def always(self):
6485+ ret = old(self)
6486+ if ret == SKIP_ME:
6487+ return RUN_ME
6488+ return ret
6489+ cls.runnable_status = always
6490+
6491+def update_outputs(cls):
6492+	"""When a command is always run, it is possible that the output only changes
6493+	sometimes. By default the build nodes have as a hash the signature of the task,
6494+	which may not change. With this, the produced output nodes are hashed,
6495+	and the hashes are set on the build nodes
6496+
6497+ This may avoid unnecessary recompilations, but it uses more resources
6498+ (hashing the output files) so it is not used by default
6499+ """
6500+ old_post_run = cls.post_run
6501+ def post_run(self):
6502+ old_post_run(self)
6503+ bld = self.generator.bld
6504+ for output in self.outputs:
6505+ bld.node_sigs[self.env.variant()][output.id] = Utils.h_file(output.abspath(self.env))
6506+ bld.task_sigs[output.id] = self.unique_id()
6507+ cls.post_run = post_run
6508+
6509+ old_runnable_status = cls.runnable_status
6510+ def runnable_status(self):
6511+ status = old_runnable_status(self)
6512+ if status != RUN_ME:
6513+ return status
6514+
6515+ uid = self.unique_id()
6516+ try:
6517+ bld = self.outputs[0].__class__.bld
6518+ new_sig = self.signature()
6519+ prev_sig = bld.task_sigs[uid][0]
6520+ if prev_sig == new_sig:
6521+ for x in self.outputs:
6522+ if not x.id in bld.node_sigs[self.env.variant()]:
6523+ return RUN_ME
6524+ if bld.task_sigs[x.id] != uid: # ensure the outputs are associated with *this* task
6525+ return RUN_ME
6526+ return SKIP_ME
6527+ except KeyError:
6528+ pass
6529+ except IndexError:
6530+ pass
6531+ return RUN_ME
6532+ cls.runnable_status = runnable_status
6533+
6534+def extract_outputs(tasks):
6535+ """file_deps: Infer additional dependencies from task input and output nodes
6536+ """
6537+ v = {}
6538+ for x in tasks:
6539+ try:
6540+ (ins, outs) = v[x.env.variant()]
6541+ except KeyError:
6542+ ins = {}
6543+ outs = {}
6544+ v[x.env.variant()] = (ins, outs)
6545+
6546+ for a in getattr(x, 'inputs', []):
6547+ try: ins[a.id].append(x)
6548+ except KeyError: ins[a.id] = [x]
6549+ for a in getattr(x, 'outputs', []):
6550+ try: outs[a.id].append(x)
6551+ except KeyError: outs[a.id] = [x]
6552+
6553+ for (ins, outs) in v.values():
6554+ links = set(ins.iterkeys()).intersection(outs.iterkeys())
6555+ for k in links:
6556+ for a in ins[k]:
6557+ for b in outs[k]:
6558+ a.set_run_after(b)
6559+
6560+def extract_deps(tasks):
6561+ """file_deps: Infer additional dependencies from task input and output nodes and from implicit dependencies
6562+ returned by the scanners - that will only work if all tasks are created
6563+
6564+ this is aimed at people who have pathological builds and who do not care enough
6565+ to implement the build dependencies properly
6566+
6567+ with two loops over the list of tasks, do not expect this to be really fast
6568+ """
6569+
6570+ # first reuse the function above
6571+ extract_outputs(tasks)
6572+
6573+ # map the output nodes to the tasks producing them
6574+ out_to_task = {}
6575+ for x in tasks:
6576+ v = x.env.variant()
6577+ try:
6578+ lst = x.outputs
6579+ except AttributeError:
6580+ pass
6581+ else:
6582+ for node in lst:
6583+ out_to_task[(v, node.id)] = x
6584+
6585+ # map the dependencies found to the tasks compiled
6586+ dep_to_task = {}
6587+ for x in tasks:
6588+ try:
6589+ x.signature()
6590+ except: # this is on purpose
6591+ pass
6592+
6593+ v = x.env.variant()
6594+ key = x.unique_id()
6595+ for k in x.generator.bld.node_deps.get(x.unique_id(), []):
6596+ try: dep_to_task[(v, k.id)].append(x)
6597+ except KeyError: dep_to_task[(v, k.id)] = [x]
6598+
6599+ # now get the intersection
6600+ deps = set(dep_to_task.keys()).intersection(set(out_to_task.keys()))
6601+
6602+ # and add the dependencies from task to task
6603+ for idx in deps:
6604+ for k in dep_to_task[idx]:
6605+ k.set_run_after(out_to_task[idx])
6606+
6607+ # cleanup, remove the signatures
6608+ for x in tasks:
6609+ try:
6610+ delattr(x, 'cache_sig')
6611+ except AttributeError:
6612+ pass
6613+
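The Task module added above is normally driven through its factory helpers rather than by subclassing TaskBase directly. A minimal sketch of that usage, assuming a configured waf 1.5 build context; the 'copy' task name and the cp rule are hypothetical:

	import Task

	# hypothetical task type compiled from a one-line rule string (${SRC}/${TGT} substitution)
	copy_cls = Task.simple_task_type('copy', 'cp ${SRC} ${TGT}', color='BLUE', before='cc', shell=True)

	# re-run on every build: the signature is still computed, but a SKIP_ME verdict becomes RUN_ME
	Task.always_run(copy_cls)

	# also hash the produced files, so unchanged outputs do not trigger downstream rebuilds
	Task.update_outputs(copy_cls)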
6614diff --git a/buildtools/wafadmin/TaskGen.py b/buildtools/wafadmin/TaskGen.py
6615new file mode 100644
6616index 0000000..ae1834a
6617--- /dev/null
6618+++ b/buildtools/wafadmin/TaskGen.py
6619@@ -0,0 +1,612 @@
6620+#!/usr/bin/env python
6621+# encoding: utf-8
6622+# Thomas Nagy, 2005-2008 (ita)
6623+
6624+"""
6625+The class task_gen encapsulates the creation of task objects (low-level code)
6626+The instances can have various parameters, but the creation of task nodes (Task.py)
6627+is delayed. To achieve this, various methods are called from the method "apply"
6628+
6629+The class task_gen contains lots of methods, and a configuration table:
6630+* the methods to call (self.meths) can be specified dynamically (removing, adding, ..)
6631+* the order of the methods (self.prec or by default task_gen.prec) is configurable
6632+* new methods can be inserted dynamically without pasting old code
6633+
6634+Additionally, task_gen provides the method apply_core
6635+* file extensions are mapped to methods: def meth(self, name_or_node)
6636+* if a mapping is not found in self.mappings, it is searched in task_gen.mappings
6637+* when called, the functions may modify self.allnodes to re-add source to process
6638+* the mappings can map an extension or a filename (see the code below)
6639+
6640+WARNING: subclasses must reimplement the clone method
6641+"""
6642+
6643+import os, traceback, copy
6644+import Build, Task, Utils, Logs, Options
6645+from Logs import debug, error, warn
6646+from Constants import *
6647+
6648+typos = {
6649+'sources':'source',
6650+'targets':'target',
6651+'include':'includes',
6652+'define':'defines',
6653+'importpath':'importpaths',
6654+'install_var':'install_path',
6655+'install_subdir':'install_path',
6656+'inst_var':'install_path',
6657+'inst_dir':'install_path',
6658+'feature':'features',
6659+}
6660+
6661+class register_obj(type):
6662+ """no decorators for classes, so we use a metaclass
6663+ we store into task_gen.classes the classes that inherit task_gen
6664+ and whose names end in '_taskgen'
6665+ """
6666+ def __init__(cls, name, bases, dict):
6667+ super(register_obj, cls).__init__(name, bases, dict)
6668+ name = cls.__name__
6669+ suffix = '_taskgen'
6670+ if name.endswith(suffix):
6671+ task_gen.classes[name.replace(suffix, '')] = cls
6672+
6673+class task_gen(object):
6674+ """
6675+ Most methods are of the form 'def meth(self):' without any parameters
6676+ there are many of them, and they do many different things:
6677+ * task creation
6678+ * task results installation
6679+ * environment modification
6680+ * attribute addition/removal
6681+
6682+ The inheritance approach is complicated
6683+ * mixing several languages at once
6684+ * subclassing is needed even for small changes
6685+ * inserting new methods is complicated
6686+
6687+ This new class uses a configuration table:
6688+ * adding new methods easily
6689+ * obtaining the order in which to call the methods
6690+ * postponing the method calls (post() -> apply)
6691+
6692+ Additionally, a 'traits' static attribute is provided:
6693+ * this list contains methods
6694+ * the methods can remove or add methods from self.meths
6695+ Example1: the attribute 'staticlib' is set on an instance
6696+ a method set in the list of traits is executed when the
6697+ instance is posted, it finds that flag and adds another method for execution
6698+ Example2: a method set in the list of traits finds the msvc
6699+ compiler (from self.env['MSVC']==1); more methods are added to self.meths
6700+ """
6701+
6702+ __metaclass__ = register_obj
6703+ mappings = {}
6704+ mapped = {}
6705+ prec = Utils.DefaultDict(list)
6706+ traits = Utils.DefaultDict(set)
6707+ classes = {}
6708+
6709+ def __init__(self, *kw, **kwargs):
6710+ self.prec = Utils.DefaultDict(list)
6711+ "map precedence of function names to call"
6712+ # so we will have to play with directed acyclic graphs
6713+ # detect cycles, etc
6714+
6715+ self.source = ''
6716+ self.target = ''
6717+
6718+		# list of methods to execute - do not touch it by hand unless you know what you are doing
6719+ self.meths = []
6720+
6721+ # list of mappings extension -> function
6722+ self.mappings = {}
6723+
6724+ # list of features (see the documentation on traits)
6725+ self.features = list(kw)
6726+
6727+ # not always a good idea
6728+ self.tasks = []
6729+
6730+ self.default_chmod = O644
6731+ self.default_install_path = None
6732+
6733+ # kind of private, beware of what you put in it, also, the contents are consumed
6734+ self.allnodes = []
6735+
6736+ self.bld = kwargs.get('bld', Build.bld)
6737+ self.env = self.bld.env.copy()
6738+
6739+ self.path = self.bld.path # emulate chdir when reading scripts
6740+		self.name = '' # give a name to the target (disambiguates e.g. a static lib and a shared lib built from the same target name)
6741+
6742+ # provide a unique id
6743+ self.idx = self.bld.idx[self.path.id] = self.bld.idx.get(self.path.id, 0) + 1
6744+
6745+ for key, val in kwargs.iteritems():
6746+ setattr(self, key, val)
6747+
6748+ self.bld.task_manager.add_task_gen(self)
6749+ self.bld.all_task_gen.append(self)
6750+
6751+ def __str__(self):
6752+ return ("<task_gen '%s' of type %s defined in %s>"
6753+ % (self.name or self.target, self.__class__.__name__, str(self.path)))
6754+
6755+ def __setattr__(self, name, attr):
6756+ real = typos.get(name, name)
6757+ if real != name:
6758+ warn('typo %s -> %s' % (name, real))
6759+ if Logs.verbose > 0:
6760+ traceback.print_stack()
6761+ object.__setattr__(self, real, attr)
6762+
6763+ def to_list(self, value):
6764+ "helper: returns a list"
6765+ if isinstance(value, str): return value.split()
6766+ else: return value
6767+
6768+ def apply(self):
6769+ "order the methods to execute using self.prec or task_gen.prec"
6770+ keys = set(self.meths)
6771+
6772+ # add the methods listed in the features
6773+ self.features = Utils.to_list(self.features)
6774+ for x in self.features + ['*']:
6775+ st = task_gen.traits[x]
6776+ if not st:
6777+ warn('feature %r does not exist - bind at least one method to it' % x)
6778+ keys.update(st)
6779+
6780+ # copy the precedence table
6781+ prec = {}
6782+ prec_tbl = self.prec or task_gen.prec
6783+ for x in prec_tbl:
6784+ if x in keys:
6785+ prec[x] = prec_tbl[x]
6786+
6787+ # elements disconnected
6788+ tmp = []
6789+ for a in keys:
6790+ for x in prec.values():
6791+ if a in x: break
6792+ else:
6793+ tmp.append(a)
6794+
6795+ # topological sort
6796+ out = []
6797+ while tmp:
6798+ e = tmp.pop()
6799+ if e in keys: out.append(e)
6800+ try:
6801+ nlst = prec[e]
6802+ except KeyError:
6803+ pass
6804+ else:
6805+ del prec[e]
6806+ for x in nlst:
6807+ for y in prec:
6808+ if x in prec[y]:
6809+ break
6810+ else:
6811+ tmp.append(x)
6812+
6813+ if prec: raise Utils.WafError("graph has a cycle %s" % str(prec))
6814+ out.reverse()
6815+ self.meths = out
6816+
6817+ # then we run the methods in order
6818+ debug('task_gen: posting %s %d', self, id(self))
6819+ for x in out:
6820+ try:
6821+ v = getattr(self, x)
6822+ except AttributeError:
6823+ raise Utils.WafError("tried to retrieve %s which is not a valid method" % x)
6824+ debug('task_gen: -> %s (%d)', x, id(self))
6825+ v()
6826+
6827+ def post(self):
6828+ "runs the code to create the tasks, do not subclass"
6829+ if not self.name:
6830+ if isinstance(self.target, list):
6831+ self.name = ' '.join(self.target)
6832+ else:
6833+ self.name = self.target
6834+
6835+ if getattr(self, 'posted', None):
6836+ #error("OBJECT ALREADY POSTED" + str( self))
6837+ return
6838+
6839+ self.apply()
6840+ self.posted = True
6841+ debug('task_gen: posted %s', self.name)
6842+
6843+ def get_hook(self, ext):
6844+ try: return self.mappings[ext]
6845+ except KeyError:
6846+ try: return task_gen.mappings[ext]
6847+ except KeyError: return None
6848+
6849+ # TODO waf 1.6: always set the environment
6850+ # TODO waf 1.6: create_task(self, name, inputs, outputs)
6851+ def create_task(self, name, src=None, tgt=None, env=None):
6852+ env = env or self.env
6853+ task = Task.TaskBase.classes[name](env.copy(), generator=self)
6854+ if src:
6855+ task.set_inputs(src)
6856+ if tgt:
6857+ task.set_outputs(tgt)
6858+ self.tasks.append(task)
6859+ return task
6860+
6861+ def name_to_obj(self, name):
6862+ return self.bld.name_to_obj(name, self.env)
6863+
6864+ def find_sources_in_dirs(self, dirnames, excludes=[], exts=[]):
6865+ """
6866+		The attributes "excludes" and "exts" must be lists, to avoid confusion between
6867+		find_sources_in_dirs('a', 'b', 'c') and find_sources_in_dirs('a b c')
6868+
6869+ do not use absolute paths
6870+ do not use paths outside of the source tree
6871+		files or folders whose names begin with '.' are not returned
6872+
6873+ # TODO: remove in Waf 1.6
6874+ """
6875+
6876+ err_msg = "'%s' attribute must be a list"
6877+ if not isinstance(excludes, list):
6878+ raise Utils.WscriptError(err_msg % 'excludes')
6879+ if not isinstance(exts, list):
6880+ raise Utils.WscriptError(err_msg % 'exts')
6881+
6882+ lst = []
6883+
6884+		# make sure dirnames is a list; this helps with dirnames containing spaces
6885+ dirnames = self.to_list(dirnames)
6886+
6887+ ext_lst = exts or list(self.mappings.keys()) + list(task_gen.mappings.keys())
6888+
6889+ for name in dirnames:
6890+ anode = self.path.find_dir(name)
6891+
6892+ if not anode or not anode.is_child_of(self.bld.srcnode):
6893+ raise Utils.WscriptError("Unable to use '%s' - either because it's not a relative path" \
6894+ ", or it's not child of '%s'." % (name, self.bld.srcnode))
6895+
6896+ self.bld.rescan(anode)
6897+ for name in self.bld.cache_dir_contents[anode.id]:
6898+
6899+ # ignore hidden files
6900+ if name.startswith('.'):
6901+ continue
6902+
6903+ (base, ext) = os.path.splitext(name)
6904+ if ext in ext_lst and not name in lst and not name in excludes:
6905+ lst.append((anode.relpath_gen(self.path) or '.') + os.path.sep + name)
6906+
6907+ lst.sort()
6908+ self.source = self.to_list(self.source)
6909+ if not self.source: self.source = lst
6910+ else: self.source += lst
6911+
6912+ def clone(self, env):
6913+ """when creating a clone in a task generator method,
6914+ make sure to set posted=False on the clone
6915+ else the other task generator will not create its tasks"""
6916+ newobj = task_gen(bld=self.bld)
6917+ for x in self.__dict__:
6918+ if x in ['env', 'bld']:
6919+ continue
6920+ elif x in ["path", "features"]:
6921+ setattr(newobj, x, getattr(self, x))
6922+ else:
6923+ setattr(newobj, x, copy.copy(getattr(self, x)))
6924+
6925+ newobj.__class__ = self.__class__
6926+ if isinstance(env, str):
6927+ newobj.env = self.bld.all_envs[env].copy()
6928+ else:
6929+ newobj.env = env.copy()
6930+
6931+ return newobj
6932+
6933+ def get_inst_path(self):
6934+ return getattr(self, '_install_path', getattr(self, 'default_install_path', ''))
6935+
6936+ def set_inst_path(self, val):
6937+ self._install_path = val
6938+
6939+ install_path = property(get_inst_path, set_inst_path)
6940+
6941+
6942+ def get_chmod(self):
6943+ return getattr(self, '_chmod', getattr(self, 'default_chmod', O644))
6944+
6945+ def set_chmod(self, val):
6946+ self._chmod = val
6947+
6948+ chmod = property(get_chmod, set_chmod)
6949+
6950+def declare_extension(var, func):
6951+ try:
6952+ for x in Utils.to_list(var):
6953+ task_gen.mappings[x] = func
6954+ except:
6955+ raise Utils.WscriptError('declare_extension takes either a list or a string %r' % var)
6956+ task_gen.mapped[func.__name__] = func
6957+
6958+def declare_order(*k):
6959+ assert(len(k) > 1)
6960+ n = len(k) - 1
6961+ for i in xrange(n):
6962+ f1 = k[i]
6963+ f2 = k[i+1]
6964+ if not f1 in task_gen.prec[f2]:
6965+ task_gen.prec[f2].append(f1)
6966+
6967+def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=True, color='BLUE',
6968+ install=0, before=[], after=[], decider=None, rule=None, scan=None):
6969+ """
6970+ see Tools/flex.py for an example
6971+	while I do not like such wrappers, some people really do
6972+ """
6973+
6974+ action = action or rule
6975+ if isinstance(action, str):
6976+ act = Task.simple_task_type(name, action, color=color)
6977+ else:
6978+ act = Task.task_type_from_func(name, action, color=color)
6979+ act.ext_in = tuple(Utils.to_list(ext_in))
6980+ act.ext_out = tuple(Utils.to_list(ext_out))
6981+ act.before = Utils.to_list(before)
6982+ act.after = Utils.to_list(after)
6983+ act.scan = scan
6984+
6985+ def x_file(self, node):
6986+ if decider:
6987+ ext = decider(self, node)
6988+ else:
6989+ ext = ext_out
6990+
6991+ if isinstance(ext, str):
6992+ out_source = node.change_ext(ext)
6993+ if reentrant:
6994+ self.allnodes.append(out_source)
6995+ elif isinstance(ext, list):
6996+ out_source = [node.change_ext(x) for x in ext]
6997+ if reentrant:
6998+ for i in xrange((reentrant is True) and len(out_source) or reentrant):
6999+ self.allnodes.append(out_source[i])
7000+ else:
7001+ # XXX: useless: it will fail on Utils.to_list above...
7002+ raise Utils.WafError("do not know how to process %s" % str(ext))
7003+
7004+ tsk = self.create_task(name, node, out_source)
7005+
7006+ if node.__class__.bld.is_install:
7007+ tsk.install = install
7008+
7009+ declare_extension(act.ext_in, x_file)
7010+ return x_file
7011+
7012+def bind_feature(name, methods):
7013+ lst = Utils.to_list(methods)
7014+ task_gen.traits[name].update(lst)
7015+
7016+"""
7017+All the following decorators are registration decorators, i.e. they add an attribute to the current class
7018+ (task_gen and its derivatives) with the same name as func, pointing to func itself.
7019+For example:
7020+ @taskgen
7021+ def sayHi(self):
7022+ print("hi")
7023+Now sayHi() may be called on any task_gen instance
7024+
7025+If Python were really smart, it could infer the order of the methods by itself by looking at the
7026+attributes. A prerequisite for execution is to have the attribute set beforehand.
7027+Intelligent compilers combining aspect-oriented programming and parallelization - what a nice topic for studies.
7028+"""
7029+def taskgen(func):
7030+ """
7031+ register a method as a task generator method
7032+ """
7033+ setattr(task_gen, func.__name__, func)
7034+ return func
7035+
7036+def feature(*k):
7037+ """
7038+ declare a task generator method that will be executed when the
7039+ object attribute 'feature' contains the corresponding key(s)
7040+ """
7041+ def deco(func):
7042+ setattr(task_gen, func.__name__, func)
7043+ for name in k:
7044+ task_gen.traits[name].update([func.__name__])
7045+ return func
7046+ return deco
7047+
7048+def before(*k):
7049+ """
7050+ declare a task generator method which will be executed
7051+ before the functions of given name(s)
7052+ """
7053+ def deco(func):
7054+ setattr(task_gen, func.__name__, func)
7055+ for fun_name in k:
7056+ if not func.__name__ in task_gen.prec[fun_name]:
7057+ task_gen.prec[fun_name].append(func.__name__)
7058+ return func
7059+ return deco
7060+
7061+def after(*k):
7062+ """
7063+ declare a task generator method which will be executed
7064+ after the functions of given name(s)
7065+ """
7066+ def deco(func):
7067+ setattr(task_gen, func.__name__, func)
7068+ for fun_name in k:
7069+ if not fun_name in task_gen.prec[func.__name__]:
7070+ task_gen.prec[func.__name__].append(fun_name)
7071+ return func
7072+ return deco
7073+
7074+def extension(var):
7075+ """
7076+ declare a task generator method which will be invoked during
7077+ the processing of source files for the extension given
7078+ """
7079+ def deco(func):
7080+ setattr(task_gen, func.__name__, func)
7081+ try:
7082+ for x in Utils.to_list(var):
7083+ task_gen.mappings[x] = func
7084+ except:
7085+ raise Utils.WafError('extension takes either a list or a string %r' % var)
7086+ task_gen.mapped[func.__name__] = func
7087+ return func
7088+ return deco
7089+
7090+# TODO make certain the decorators may be used here
7091+
7092+def apply_core(self):
7093+ """Process the attribute source
7094+ transform the names into file nodes
7095+ try to process the files by name first, later by extension"""
7096+ # get the list of folders to use by the scanners
7097+ # all our objects share the same include paths anyway
7098+ find_resource = self.path.find_resource
7099+
7100+ for filename in self.to_list(self.source):
7101+ # if self.mappings or task_gen.mappings contains a file of the same name
7102+ x = self.get_hook(filename)
7103+ if x:
7104+ x(self, filename)
7105+ else:
7106+ node = find_resource(filename)
7107+ if not node: raise Utils.WafError("source not found: '%s' in '%s'" % (filename, str(self.path)))
7108+ self.allnodes.append(node)
7109+
7110+ for node in self.allnodes:
7111+ # self.mappings or task_gen.mappings map the file extension to a function
7112+ x = self.get_hook(node.suffix())
7113+
7114+ if not x:
7115+ raise Utils.WafError("Cannot guess how to process %s (got mappings %r in %r) -> try conf.check_tool(..)?" % \
7116+ (str(node), self.__class__.mappings.keys(), self.__class__))
7117+ x(self, node)
7118+feature('*')(apply_core)
7119+
7120+def exec_rule(self):
7121+ """Process the attribute rule, when provided the method apply_core will be disabled
7122+ """
7123+ if not getattr(self, 'rule', None):
7124+ return
7125+
7126+ # someone may have removed it already
7127+ try:
7128+ self.meths.remove('apply_core')
7129+ except ValueError:
7130+ pass
7131+
7132+ # get the function and the variables
7133+ func = self.rule
7134+
7135+ vars2 = []
7136+ if isinstance(func, str):
7137+ # use the shell by default for user-defined commands
7138+ (func, vars2) = Task.compile_fun('', self.rule, shell=getattr(self, 'shell', True))
7139+ func.code = self.rule
7140+
7141+ # create the task class
7142+ name = getattr(self, 'name', None) or self.target or self.rule
7143+ if not isinstance(name, str):
7144+ name = str(self.idx)
7145+ cls = Task.task_type_from_func(name, func, getattr(self, 'vars', vars2))
7146+ cls.color = getattr(self, 'color', 'BLUE')
7147+
7148+ # now create one instance
7149+ tsk = self.create_task(name)
7150+
7151+ dep_vars = getattr(self, 'dep_vars', ['ruledeps'])
7152+ if dep_vars:
7153+ tsk.dep_vars = dep_vars
7154+ if isinstance(self.rule, str):
7155+ tsk.env.ruledeps = self.rule
7156+ else:
7157+ # only works if the function is in a global module such as a waf tool
7158+ tsk.env.ruledeps = Utils.h_fun(self.rule)
7159+
7160+ # we assume that the user knows that without inputs or outputs
7161+ #if not getattr(self, 'target', None) and not getattr(self, 'source', None):
7162+ # cls.quiet = True
7163+
7164+ if getattr(self, 'target', None):
7165+ cls.quiet = True
7166+ tsk.outputs = [self.path.find_or_declare(x) for x in self.to_list(self.target)]
7167+
7168+ if getattr(self, 'source', None):
7169+ cls.quiet = True
7170+ tsk.inputs = []
7171+ for x in self.to_list(self.source):
7172+ y = self.path.find_resource(x)
7173+ if not y:
7174+ raise Utils.WafError('input file %r could not be found (%r)' % (x, self.path.abspath()))
7175+ tsk.inputs.append(y)
7176+
7177+ if self.allnodes:
7178+ tsk.inputs.extend(self.allnodes)
7179+
7180+ if getattr(self, 'scan', None):
7181+ cls.scan = self.scan
7182+
7183+ if getattr(self, 'install_path', None):
7184+ tsk.install_path = self.install_path
7185+
7186+ if getattr(self, 'cwd', None):
7187+ tsk.cwd = self.cwd
7188+
7189+ if getattr(self, 'on_results', None):
7190+ Task.update_outputs(cls)
7191+
7192+ if getattr(self, 'always', None):
7193+ Task.always_run(cls)
7194+
7195+ for x in ['after', 'before', 'ext_in', 'ext_out']:
7196+ setattr(cls, x, getattr(self, x, []))
7197+feature('*')(exec_rule)
7198+before('apply_core')(exec_rule)
7199+
7200+def sequence_order(self):
7201+ """
7202+ add a strict sequential constraint between the tasks generated by task generators
7203+ it uses the fact that task generators are posted in order
7204+ it will not post objects which belong to other folders
7205+ there is also an awesome trick for executing the method in last position
7206+
7207+ to use:
7208+ bld(features='javac seq')
7209+ bld(features='jar seq')
7210+
7211+ to start a new sequence, set the attribute seq_start, for example:
7212+ obj.seq_start = True
7213+ """
7214+ if self.meths and self.meths[-1] != 'sequence_order':
7215+ self.meths.append('sequence_order')
7216+ return
7217+
7218+ if getattr(self, 'seq_start', None):
7219+ return
7220+
7221+ # all the tasks previously declared must be run before these
7222+ if getattr(self.bld, 'prev', None):
7223+ self.bld.prev.post()
7224+ for x in self.bld.prev.tasks:
7225+ for y in self.tasks:
7226+ y.set_run_after(x)
7227+
7228+ self.bld.prev = self
7229+
7230+feature('seq')(sequence_order)
7231+
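The decorators and helpers in TaskGen.py above are typically used from a waf tool module. A minimal sketch, assuming waf 1.5 semantics; the '.cow' extension, the COWC variable and the tool itself are hypothetical:

	import Task
	from TaskGen import feature, before, extension

	# hypothetical rule: compile .cow sources into .c files that are fed back to the c compiler
	Task.simple_task_type('cowc', '${COWC} ${SRC} -o ${TGT}', color='BLUE', before='cc', shell=False)

	@extension('.cow')
	def process_cow(self, node):
		out = node.change_ext('.c')
		tsk = self.create_task('cowc', node, out)
		self.allnodes.append(out)	# reentrant: the generated .c goes through the usual c hook
		return tsk

	@feature('cow')
	@before('apply_core')
	def init_cow(self):
		# runs on task generators carrying the 'cow' feature, before the sources are processed
		self.default_install_path = '${PREFIX}/share/cow'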
7232diff --git a/buildtools/wafadmin/Tools/__init__.py b/buildtools/wafadmin/Tools/__init__.py
7233new file mode 100644
7234index 0000000..bc6ca23
7235--- /dev/null
7236+++ b/buildtools/wafadmin/Tools/__init__.py
7237@@ -0,0 +1,4 @@
7238+#!/usr/bin/env python
7239+# encoding: utf-8
7240+# Thomas Nagy, 2006 (ita)
7241+
7242diff --git a/buildtools/wafadmin/Tools/ar.py b/buildtools/wafadmin/Tools/ar.py
7243new file mode 100644
7244index 0000000..af9b17f
7245--- /dev/null
7246+++ b/buildtools/wafadmin/Tools/ar.py
7247@@ -0,0 +1,36 @@
7248+#!/usr/bin/env python
7249+# encoding: utf-8
7250+# Thomas Nagy, 2006-2008 (ita)
7251+# Ralf Habacker, 2006 (rh)
7252+
7253+"ar and ranlib"
7254+
7255+import os, sys
7256+import Task, Utils
7257+from Configure import conftest
7258+
7259+ar_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
7260+cls = Task.simple_task_type('static_link', ar_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
7261+cls.maxjobs = 1
7262+cls.install = Utils.nada
7263+
7264+# remove the output in case it already exists
7265+old = cls.run
7266+def wrap(self):
7267+ try: os.remove(self.outputs[0].abspath(self.env))
7268+ except OSError: pass
7269+ return old(self)
7270+setattr(cls, 'run', wrap)
7271+
7272+def detect(conf):
7273+ conf.find_program('ar', var='AR')
7274+ conf.find_program('ranlib', var='RANLIB')
7275+ conf.env.ARFLAGS = 'rcs'
7276+
7277+@conftest
7278+def find_ar(conf):
7279+ v = conf.env
7280+ conf.check_tool('ar')
7281+ if not v['AR']: conf.fatal('ar is required for static libraries - not found')
7282+
7283+
7284diff --git a/buildtools/wafadmin/Tools/bison.py b/buildtools/wafadmin/Tools/bison.py
7285new file mode 100644
7286index 0000000..49c6051
7287--- /dev/null
7288+++ b/buildtools/wafadmin/Tools/bison.py
7289@@ -0,0 +1,38 @@
7290+#!/usr/bin/env python
7291+# encoding: utf-8
7292+# John O'Meara, 2006
7293+# Thomas Nagy 2009
7294+
7295+"Bison processing"
7296+
7297+import Task
7298+from TaskGen import extension
7299+
7300+bison = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
7301+cls = Task.simple_task_type('bison', bison, 'GREEN', ext_in='.yc .y .yy', ext_out='.c .cxx .h .l', shell=False)
7302+
7303+@extension(['.y', '.yc', '.yy'])
7304+def big_bison(self, node):
7305+ """when it becomes complicated (unlike flex), the old recipes work better (cwd)"""
7306+ has_h = '-d' in self.env['BISONFLAGS']
7307+
7308+ outs = []
7309+ if node.name.endswith('.yc'):
7310+ outs.append(node.change_ext('.tab.cc'))
7311+ if has_h:
7312+ outs.append(node.change_ext('.tab.hh'))
7313+ else:
7314+ outs.append(node.change_ext('.tab.c'))
7315+ if has_h:
7316+ outs.append(node.change_ext('.tab.h'))
7317+
7318+ tsk = self.create_task('bison', node, outs)
7319+ tsk.cwd = node.bld_dir(tsk.env)
7320+
7321+ # and the c/cxx file must be compiled too
7322+ self.allnodes.append(outs[0])
7323+
7324+def detect(conf):
7325+ bison = conf.find_program('bison', var='BISON', mandatory=True)
7326+ conf.env['BISONFLAGS'] = '-d'
7327+
7328diff --git a/buildtools/wafadmin/Tools/cc.py b/buildtools/wafadmin/Tools/cc.py
7329new file mode 100644
7330index 0000000..903a1c5
7331--- /dev/null
7332+++ b/buildtools/wafadmin/Tools/cc.py
7333@@ -0,0 +1,100 @@
7334+#!/usr/bin/env python
7335+# encoding: utf-8
7336+# Thomas Nagy, 2006 (ita)
7337+
7338+"Base for c programs/libraries"
7339+
7340+import os
7341+import TaskGen, Build, Utils, Task
7342+from Logs import debug
7343+import ccroot
7344+from TaskGen import feature, before, extension, after
7345+
7346+g_cc_flag_vars = [
7347+'CCDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
7348+'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
7349+'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CCDEFINES']
7350+
7351+EXT_CC = ['.c']
7352+
7353+g_cc_type_vars = ['CCFLAGS', 'LINKFLAGS']
7354+
7355+# TODO remove in waf 1.6
7356+class cc_taskgen(ccroot.ccroot_abstract):
7357+ pass
7358+
7359+@feature('cc')
7360+@before('apply_type_vars')
7361+@after('default_cc')
7362+def init_cc(self):
7363+ self.p_flag_vars = set(self.p_flag_vars).union(g_cc_flag_vars)
7364+ self.p_type_vars = set(self.p_type_vars).union(g_cc_type_vars)
7365+
7366+ if not self.env['CC_NAME']:
7367+ raise Utils.WafError("At least one compiler (gcc, ..) must be selected")
7368+
7369+@feature('cc')
7370+@after('apply_incpaths')
7371+def apply_obj_vars_cc(self):
7372+ """after apply_incpaths for INC_PATHS"""
7373+ env = self.env
7374+ app = env.append_unique
7375+ cpppath_st = env['CPPPATH_ST']
7376+
7377+ # local flags come first
7378+ # set the user-defined includes paths
7379+ for i in env['INC_PATHS']:
7380+ app('_CCINCFLAGS', cpppath_st % i.bldpath(env))
7381+ app('_CCINCFLAGS', cpppath_st % i.srcpath(env))
7382+
7383+ # set the library include paths
7384+ for i in env['CPPPATH']:
7385+ app('_CCINCFLAGS', cpppath_st % i)
7386+
7387+@feature('cc')
7388+@after('apply_lib_vars')
7389+def apply_defines_cc(self):
7390+ """after uselib is set for CCDEFINES"""
7391+ self.defines = getattr(self, 'defines', [])
7392+ lst = self.to_list(self.defines) + self.to_list(self.env['CCDEFINES'])
7393+ milst = []
7394+
7395+ # now process the local defines
7396+ for defi in lst:
7397+ if not defi in milst:
7398+ milst.append(defi)
7399+
7400+ # CCDEFINES_
7401+ libs = self.to_list(self.uselib)
7402+ for l in libs:
7403+ val = self.env['CCDEFINES_'+l]
7404+ if val: milst += val
7405+ self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
7406+ y = self.env['CCDEFINES_ST']
7407+ self.env.append_unique('_CCDEFFLAGS', [y%x for x in milst])
7408+
7409+@extension(EXT_CC)
7410+def c_hook(self, node):
7411+ # create the compilation task: cpp or cc
7412+ if getattr(self, 'obj_ext', None):
7413+ obj_ext = self.obj_ext
7414+ else:
7415+ obj_ext = '_%d.o' % self.idx
7416+
7417+ task = self.create_task('cc', node, node.change_ext(obj_ext))
7418+ try:
7419+ self.compiled_tasks.append(task)
7420+ except AttributeError:
7421+ raise Utils.WafError('Have you forgotten to set the feature "cc" on %s?' % str(self))
7422+ return task
7423+
7424+cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
7425+cls = Task.simple_task_type('cc', cc_str, 'GREEN', ext_out='.o', ext_in='.c', shell=False)
7426+cls.scan = ccroot.scan
7427+cls.vars.append('CCDEPS')
7428+
7429+link_str = '${LINK_CC} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
7430+cls = Task.simple_task_type('cc_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
7431+cls.maxjobs = 1
7432+cls.install = Utils.nada
7433+
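From the wscript side, the cc tool above is driven through a task generator. A minimal, hypothetical fragment, assuming the compiler was detected at configure time and that the 'cprogram' feature is provided by the other ccroot-based tools in this series:

	def build(bld):
		bld(features='cc cprogram',
			source='main.c util.c',	# each .c file goes through c_hook -> one 'cc' task per file
			target='demo',
			includes='include',	# handled by apply_incpaths in ccroot (not shown in this excerpt)
			defines='VERSION=1')	# picked up by apply_defines_cc above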
7434diff --git a/buildtools/wafadmin/Tools/ccroot.py b/buildtools/wafadmin/Tools/ccroot.py
7435new file mode 100644
7436index 0000000..f54c82f
7437--- /dev/null
7438+++ b/buildtools/wafadmin/Tools/ccroot.py
7439@@ -0,0 +1,629 @@
7440+#!/usr/bin/env python
7441+# encoding: utf-8
7442+# Thomas Nagy, 2005-2008 (ita)
7443+
7444+"base for all c/c++ programs and libraries"
7445+
7446+import os, sys, re
7447+import TaskGen, Task, Utils, preproc, Logs, Build, Options
7448+from Logs import error, debug, warn
7449+from Utils import md5
7450+from TaskGen import taskgen, after, before, feature
7451+from Constants import *
7452+from Configure import conftest
7453+try:
7454+ from cStringIO import StringIO
7455+except ImportError:
7456+ from io import StringIO
7457+
7458+import config_c # <- necessary for the configuration, do not touch
7459+
7460+USE_TOP_LEVEL = False
7461+
7462+def get_cc_version(conf, cc, gcc=False, icc=False):
7463+
7464+ cmd = cc + ['-dM', '-E', '-']
7465+ try:
7466+ p = Utils.pproc.Popen(cmd, stdin=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
7467+ p.stdin.write('\n')
7468+ out = p.communicate()[0]
7469+ except:
7470+ conf.fatal('could not determine the compiler version %r' % cmd)
7471+
7472+ # PY3K: do not touch
7473+ out = str(out)
7474+
7475+ if gcc:
7476+ if out.find('__INTEL_COMPILER') >= 0:
7477+ conf.fatal('The intel compiler pretends to be gcc')
7478+ if out.find('__GNUC__') < 0:
7479+ conf.fatal('Could not determine the compiler type')
7480+
7481+ if icc and out.find('__INTEL_COMPILER') < 0:
7482+ conf.fatal('Not icc/icpc')
7483+
7484+ k = {}
7485+ if icc or gcc:
7486+ out = out.split('\n')
7487+ import shlex
7488+
7489+ for line in out:
7490+ lst = shlex.split(line)
7491+ if len(lst)>2:
7492+ key = lst[1]
7493+ val = lst[2]
7494+ k[key] = val
7495+
7496+ def isD(var):
7497+ return var in k
7498+
7499+ def isT(var):
7500+ return var in k and k[var] != '0'
7501+
7502+ # Some documentation is available at http://predef.sourceforge.net
7503+ # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
7504+ mp1 = {
7505+ '__linux__' : 'linux',
7506+ '__GNU__' : 'gnu',
7507+ '__FreeBSD__' : 'freebsd',
7508+ '__NetBSD__' : 'netbsd',
7509+ '__OpenBSD__' : 'openbsd',
7510+ '__sun' : 'sunos',
7511+ '__hpux' : 'hpux',
7512+ '__sgi' : 'irix',
7513+ '_AIX' : 'aix',
7514+ '__CYGWIN__' : 'cygwin',
7515+ '__MSYS__' : 'msys',
7516+ '_UWIN' : 'uwin',
7517+ '_WIN64' : 'win32',
7518+ '_WIN32' : 'win32',
7519+ '__POWERPC__' : 'powerpc',
7520+ }
7521+
7522+ for i in mp1:
7523+ if isD(i):
7524+ conf.env.DEST_OS = mp1[i]
7525+ break
7526+ else:
7527+ if isD('__APPLE__') and isD('__MACH__'):
7528+ conf.env.DEST_OS = 'darwin'
7529+ elif isD('__unix__'): # unix must be tested last as it's a generic fallback
7530+ conf.env.DEST_OS = 'generic'
7531+
7532+ if isD('__ELF__'):
7533+ conf.env.DEST_BINFMT = 'elf'
7534+ elif isD('__WINNT__') or isD('__CYGWIN__'):
7535+ conf.env.DEST_BINFMT = 'pe'
7536+ elif isD('__APPLE__'):
7537+ conf.env.DEST_BINFMT = 'mac-o'
7538+
7539+ mp2 = {
7540+ '__x86_64__' : 'x86_64',
7541+ '__i386__' : 'x86',
7542+ '__ia64__' : 'ia',
7543+ '__mips__' : 'mips',
7544+ '__sparc__' : 'sparc',
7545+ '__alpha__' : 'alpha',
7546+ '__arm__' : 'arm',
7547+ '__hppa__' : 'hppa',
7548+ '__powerpc__' : 'powerpc',
7549+ }
7550+ for i in mp2:
7551+ if isD(i):
7552+ conf.env.DEST_CPU = mp2[i]
7553+ break
7554+
7555+ debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
7556+ conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
7557+ return k
7558+
7559+class DEBUG_LEVELS:
7560+ """Will disappear in waf 1.6"""
7561+ ULTRADEBUG = "ultradebug"
7562+ DEBUG = "debug"
7563+ RELEASE = "release"
7564+ OPTIMIZED = "optimized"
7565+ CUSTOM = "custom"
7566+
7567+ ALL = [ULTRADEBUG, DEBUG, RELEASE, OPTIMIZED, CUSTOM]
7568+
7569+def scan(self):
7570+	"look for the .h files that the .c/.cpp files need"
7571+ debug('ccroot: _scan_preprocessor(self, node, env, path_lst)')
7572+
7573+ # TODO waf 1.6 - assume the default input has exactly one file
7574+
7575+ if len(self.inputs) == 1:
7576+ node = self.inputs[0]
7577+ (nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
7578+ if Logs.verbose:
7579+ debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
7580+ return (nodes, names)
7581+
7582+ all_nodes = []
7583+ all_names = []
7584+ seen = set()
7585+ for node in self.inputs:
7586+ (nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
7587+ if Logs.verbose:
7588+ debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
7589+ for x in nodes:
7590+ if id(x) in seen: continue
7591+ seen.add(id(x))
7592+ all_nodes.append(x)
7593+ for x in names:
7594+ if not x in all_names:
7595+ all_names.append(x)
7596+ return (all_nodes, all_names)
7597+
7598+class ccroot_abstract(TaskGen.task_gen):
7599+ "Parent class for programs and libraries in languages c, c++ and moc (Qt)"
7600+ def __init__(self, *k, **kw):
7601+ # COMPAT remove in waf 1.6 TODO
7602+ if len(k) > 1:
7603+ k = list(k)
7604+ if k[1][0] != 'c':
7605+ k[1] = 'c' + k[1]
7606+ TaskGen.task_gen.__init__(self, *k, **kw)
7607+
7608+def get_target_name(self):
7609+ tp = 'program'
7610+ for x in self.features:
7611+ if x in ['cshlib', 'cstaticlib']:
7612+ tp = x.lstrip('c')
7613+
7614+ pattern = self.env[tp + '_PATTERN']
7615+ if not pattern: pattern = '%s'
7616+
7617+ dir, name = os.path.split(self.target)
7618+
7619+ if self.env.DEST_BINFMT == 'pe' and getattr(self, 'vnum', None) and 'cshlib' in self.features:
7620+ # include the version in the dll file name,
7621+		# the import lib file name stays unversioned.
7622+ name = name + '-' + self.vnum.split('.')[0]
7623+
7624+ return os.path.join(dir, pattern % name)
7625+
7626+@feature('cc', 'cxx')
7627+@before('apply_core')
7628+def default_cc(self):
7629+ """compiled_tasks attribute must be set before the '.c->.o' tasks can be created"""
7630+ Utils.def_attrs(self,
7631+ includes = '',
7632+ defines= '',
7633+ rpaths = '',
7634+ uselib = '',
7635+ uselib_local = '',
7636+ add_objects = '',
7637+ p_flag_vars = [],
7638+ p_type_vars = [],
7639+ compiled_tasks = [],
7640+ link_task = None)
7641+
7642+ # The only thing we need for cross-compilation is DEST_BINFMT.
7643+ # At some point, we may reach a case where DEST_BINFMT is not enough, but for now it's sufficient.
7644+ # Currently, cross-compilation is auto-detected only for the gnu and intel compilers.
7645+ if not self.env.DEST_BINFMT:
7646+ # Infer the binary format from the os name.
7647+ self.env.DEST_BINFMT = Utils.unversioned_sys_platform_to_binary_format(
7648+ self.env.DEST_OS or Utils.unversioned_sys_platform())
7649+
7650+ if not self.env.BINDIR: self.env.BINDIR = Utils.subst_vars('${PREFIX}/bin', self.env)
7651+ if not self.env.LIBDIR: self.env.LIBDIR = Utils.subst_vars('${PREFIX}/lib${LIB_EXT}', self.env)
7652+
7653+@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
7654+def apply_verif(self):
7655+	"""no particular order, used for diagnostics"""
7656+ if not (self.source or getattr(self, 'add_objects', None) or getattr(self, 'uselib_local', None) or getattr(self, 'obj_files', None)):
7657+ raise Utils.WafError('no source files specified for %s' % self)
7658+ if not self.target:
7659+ raise Utils.WafError('no target for %s' % self)
7660+
7661+# TODO reference the d programs, shlibs in d.py, not here
7662+
7663+@feature('cprogram', 'dprogram')
7664+@after('default_cc')
7665+@before('apply_core')
7666+def vars_target_cprogram(self):
7667+ self.default_install_path = self.env.BINDIR
7668+ self.default_chmod = O755
7669+
7670+@after('default_cc')
7671+@feature('cshlib', 'dshlib')
7672+@before('apply_core')
7673+def vars_target_cshlib(self):
7674+ if self.env.DEST_BINFMT == 'pe':
7675+ # set execute bit on libs to avoid 'permission denied' (issue 283)
7676+ self.default_chmod = O755
7677+ self.default_install_path = self.env.BINDIR
7678+ else:
7679+ self.default_install_path = self.env.LIBDIR
7680+
7681+@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
7682+@after('apply_link', 'vars_target_cprogram', 'vars_target_cshlib')
7683+def default_link_install(self):
7684+ """you may kill this method to inject your own installation for the first element
7685+ any other install should only process its own nodes and not those from the others"""
7686+ if self.install_path:
7687+ self.bld.install_files(self.install_path, self.link_task.outputs[0], env=self.env, chmod=self.chmod)
7688+
7689+@feature('cc', 'cxx')
7690+@after('apply_type_vars', 'apply_lib_vars', 'apply_core')
7691+def apply_incpaths(self):
7692+ """used by the scanner
7693+ after processing the uselib for CPPPATH
7694+ after apply_core because some processing may add include paths
7695+ """
7696+ lst = []
7697+ # TODO move the uselib processing out of here
7698+ for lib in self.to_list(self.uselib):
7699+ for path in self.env['CPPPATH_' + lib]:
7700+ if not path in lst:
7701+ lst.append(path)
7702+ if preproc.go_absolute:
7703+ for path in preproc.standard_includes:
7704+ if not path in lst:
7705+ lst.append(path)
7706+
7707+ for path in self.to_list(self.includes):
7708+ if not path in lst:
7709+ if preproc.go_absolute or not os.path.isabs(path):
7710+ lst.append(path)
7711+ else:
7712+ self.env.prepend_value('CPPPATH', path)
7713+
7714+ for path in lst:
7715+ node = None
7716+ if os.path.isabs(path):
7717+ if preproc.go_absolute:
7718+ node = self.bld.root.find_dir(path)
7719+ elif path[0] == '#':
7720+ node = self.bld.srcnode
7721+ if len(path) > 1:
7722+ node = node.find_dir(path[1:])
7723+ else:
7724+ node = self.path.find_dir(path)
7725+
7726+ if node:
7727+ self.env.append_value('INC_PATHS', node)
7728+
7729+ # TODO WAF 1.6
7730+ if USE_TOP_LEVEL:
7731+ self.env.append_value('INC_PATHS', self.bld.srcnode)
7732+
7733+@feature('cc', 'cxx')
7734+@after('init_cc', 'init_cxx')
7735+@before('apply_lib_vars')
7736+def apply_type_vars(self):
7737+ """before apply_lib_vars because we modify uselib
7738+	after init_cc and init_cxx because we need p_type_vars
7739+ """
7740+ for x in self.features:
7741+ if not x in ['cprogram', 'cstaticlib', 'cshlib']:
7742+ continue
7743+ x = x.lstrip('c')
7744+
7745+ # if the type defines uselib to add, add them
7746+ st = self.env[x + '_USELIB']
7747+ if st: self.uselib = self.uselib + ' ' + st
7748+
7749+ # each compiler defines variables like 'shlib_CXXFLAGS', 'shlib_LINKFLAGS', etc
7750+ # so when we make a task generator of the type shlib, CXXFLAGS are modified accordingly
7751+ for var in self.p_type_vars:
7752+ compvar = '%s_%s' % (x, var)
7753+ #print compvar
7754+ value = self.env[compvar]
7755+ if value: self.env.append_value(var, value)
7756+
7757+@feature('cprogram', 'cshlib', 'cstaticlib')
7758+@after('apply_core')
7759+def apply_link(self):
7760+ """executes after apply_core for collecting 'compiled_tasks'
7761+ use a custom linker if specified (self.link='name-of-custom-link-task')"""
7762+ link = getattr(self, 'link', None)
7763+ if not link:
7764+ if 'cstaticlib' in self.features: link = 'static_link'
7765+ elif 'cxx' in self.features: link = 'cxx_link'
7766+ else: link = 'cc_link'
7767+
7768+ tsk = self.create_task(link)
7769+ outputs = [t.outputs[0] for t in self.compiled_tasks]
7770+ tsk.set_inputs(outputs)
7771+ tsk.set_outputs(self.path.find_or_declare(get_target_name(self)))
7772+
7773+ self.link_task = tsk
7774+
7775+@feature('cc', 'cxx')
7776+@after('apply_link', 'init_cc', 'init_cxx', 'apply_core')
7777+def apply_lib_vars(self):
7778+ """after apply_link because of 'link_task'
7779+ after default_cc because of the attribute 'uselib'"""
7780+
7781+	# after 'apply_core' in case of 'cc' when there is no link
7782+
7783+ env = self.env
7784+
7785+ # 1. the case of the libs defined in the project (visit ancestors first)
7786+ # the ancestors external libraries (uselib) will be prepended
7787+ self.uselib = self.to_list(self.uselib)
7788+ names = self.to_list(self.uselib_local)
7789+
7790+ seen = set([])
7791+ tmp = Utils.deque(names) # consume a copy of the list of names
7792+ while tmp:
7793+ lib_name = tmp.popleft()
7794+ # visit dependencies only once
7795+ if lib_name in seen:
7796+ continue
7797+
7798+ y = self.name_to_obj(lib_name)
7799+ if not y:
7800+ raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
7801+ y.post()
7802+ seen.add(lib_name)
7803+
7804+ # object has ancestors to process (shared libraries): add them to the end of the list
7805+ if getattr(y, 'uselib_local', None):
7806+ lst = y.to_list(y.uselib_local)
7807+ if 'cshlib' in y.features or 'cprogram' in y.features:
7808+ lst = [x for x in lst if not 'cstaticlib' in self.name_to_obj(x).features]
7809+ tmp.extend(lst)
7810+
7811+ # link task and flags
7812+ if getattr(y, 'link_task', None):
7813+
7814+ link_name = y.target[y.target.rfind(os.sep) + 1:]
7815+ if 'cstaticlib' in y.features:
7816+ env.append_value('STATICLIB', link_name)
7817+ elif 'cshlib' in y.features or 'cprogram' in y.features:
7818+ # WARNING some linkers can link against programs
7819+ env.append_value('LIB', link_name)
7820+
7821+ # the order
7822+ self.link_task.set_run_after(y.link_task)
7823+
7824+ # for the recompilation
7825+ dep_nodes = getattr(self.link_task, 'dep_nodes', [])
7826+ self.link_task.dep_nodes = dep_nodes + y.link_task.outputs
7827+
7828+ # add the link path too
7829+ tmp_path = y.link_task.outputs[0].parent.bldpath(self.env)
7830+ if not tmp_path in env['LIBPATH']: env.prepend_value('LIBPATH', tmp_path)
7831+
7832+ # add ancestors uselib too - but only propagate those that have no staticlib
7833+ for v in self.to_list(y.uselib):
7834+ if not env['STATICLIB_' + v]:
7835+ if not v in self.uselib:
7836+ self.uselib.insert(0, v)
7837+
7838+ # if the library task generator provides 'export_incdirs', add to the include path
7839+ # the export_incdirs must be a list of paths relative to the other library
7840+ if getattr(y, 'export_incdirs', None):
7841+ for x in self.to_list(y.export_incdirs):
7842+ node = y.path.find_dir(x)
7843+ if not node:
7844+ raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x))
7845+ self.env.append_unique('INC_PATHS', node)
7846+
7847+ # 2. the case of the libs defined outside
7848+ for x in self.uselib:
7849+ for v in self.p_flag_vars:
7850+ val = self.env[v + '_' + x]
7851+ if val: self.env.append_value(v, val)
7852+
7853+@feature('cprogram', 'cstaticlib', 'cshlib')
7854+@after('init_cc', 'init_cxx', 'apply_link')
7855+def apply_objdeps(self):
7856+ "add the .o files produced by some other object files in the same manner as uselib_local"
7857+ if not getattr(self, 'add_objects', None): return
7858+
7859+ seen = []
7860+ names = self.to_list(self.add_objects)
7861+ while names:
7862+ x = names[0]
7863+
7864+ # visit dependencies only once
7865+ if x in seen:
7866+ names = names[1:]
7867+ continue
7868+
7869+ # object does not exist ?
7870+ y = self.name_to_obj(x)
7871+ if not y:
7872+ raise Utils.WafError('object %r was not found in uselib_local (required by add_objects %r)' % (x, self.name))
7873+
7874+ # object has ancestors to process first ? update the list of names
7875+ if getattr(y, 'add_objects', None):
7876+ added = 0
7877+ lst = y.to_list(y.add_objects)
7878+ lst.reverse()
7879+ for u in lst:
7880+ if u in seen: continue
7881+ added = 1
7882+ names = [u]+names
7883+ if added: continue # list of names modified, loop
7884+
7885+ # safe to process the current object
7886+ y.post()
7887+ seen.append(x)
7888+
7889+ for t in y.compiled_tasks:
7890+ self.link_task.inputs.extend(t.outputs)
7891+
7892+@feature('cprogram', 'cshlib', 'cstaticlib')
7893+@after('apply_lib_vars')
7894+def apply_obj_vars(self):
7895+ """after apply_lib_vars for uselib"""
7896+ v = self.env
7897+ lib_st = v['LIB_ST']
7898+ staticlib_st = v['STATICLIB_ST']
7899+ libpath_st = v['LIBPATH_ST']
7900+ staticlibpath_st = v['STATICLIBPATH_ST']
7901+ rpath_st = v['RPATH_ST']
7902+
7903+ app = v.append_unique
7904+
7905+ if v['FULLSTATIC']:
7906+ v.append_value('LINKFLAGS', v['FULLSTATIC_MARKER'])
7907+
7908+ for i in v['RPATH']:
7909+ if i and rpath_st:
7910+ app('LINKFLAGS', rpath_st % i)
7911+
7912+ for i in v['LIBPATH']:
7913+ app('LINKFLAGS', libpath_st % i)
7914+ app('LINKFLAGS', staticlibpath_st % i)
7915+
7916+ if v['STATICLIB']:
7917+ v.append_value('LINKFLAGS', v['STATICLIB_MARKER'])
7918+ k = [(staticlib_st % i) for i in v['STATICLIB']]
7919+ app('LINKFLAGS', k)
7920+
7921+ # fully static binaries ?
7922+ if not v['FULLSTATIC']:
7923+ if v['STATICLIB'] or v['LIB']:
7924+ v.append_value('LINKFLAGS', v['SHLIB_MARKER'])
7925+
7926+ app('LINKFLAGS', [lib_st % i for i in v['LIB']])
7927+
7928+@after('apply_link')
7929+def process_obj_files(self):
7930+ if not hasattr(self, 'obj_files'): return
7931+ for x in self.obj_files:
7932+ node = self.path.find_resource(x)
7933+ self.link_task.inputs.append(node)
7934+
7935+@taskgen
7936+def add_obj_file(self, file):
7937+ """Small example on how to link object files as if they were source
7938+ obj = bld.create_obj('cc')
7939+ obj.add_obj_file('foo.o')"""
7940+ if not hasattr(self, 'obj_files'): self.obj_files = []
7941+ if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files')
7942+ self.obj_files.append(file)
7943+
7944+c_attrs = {
7945+'cxxflag' : 'CXXFLAGS',
7946+'cflag' : 'CCFLAGS',
7947+'ccflag' : 'CCFLAGS',
7948+'linkflag' : 'LINKFLAGS',
7949+'ldflag' : 'LINKFLAGS',
7950+'lib' : 'LIB',
7951+'libpath' : 'LIBPATH',
7952+'staticlib': 'STATICLIB',
7953+'staticlibpath': 'STATICLIBPATH',
7954+'rpath' : 'RPATH',
7955+'framework' : 'FRAMEWORK',
7956+'frameworkpath' : 'FRAMEWORKPATH'
7957+}
7958+
7959+@feature('cc', 'cxx')
7960+@before('init_cxx', 'init_cc')
7961+@before('apply_lib_vars', 'apply_obj_vars', 'apply_incpaths', 'init_cc')
7962+def add_extra_flags(self):
7963+ """case and plural insensitive
7964+ before apply_obj_vars for processing the library attributes
7965+ """
7966+ for x in self.__dict__.keys():
7967+ y = x.lower()
7968+ if y[-1] == 's':
7969+ y = y[:-1]
7970+ if c_attrs.get(y, None):
7971+ self.env.append_unique(c_attrs[y], getattr(self, x))
7972+
7973+# ============ the code above must not know anything about import libs ==========
7974+
7975+@feature('cshlib')
7976+@after('apply_link', 'default_cc')
7977+@before('apply_lib_vars', 'apply_objdeps', 'default_link_install')
7978+def apply_implib(self):
7979+	"""On MS Windows, handle dlls and their import libs;
7980+	the .dll.a is the import lib and, since it is required for linking, it is installed too
7981+ """
7982+ if not self.env.DEST_BINFMT == 'pe':
7983+ return
7984+
7985+ self.meths.remove('default_link_install')
7986+
7987+ bindir = self.install_path
7988+ if not bindir: return
7989+
7990+ # install the dll in the bin dir
7991+ dll = self.link_task.outputs[0]
7992+ self.bld.install_files(bindir, dll, self.env, self.chmod)
7993+
7994+ # add linker flags to generate the import lib
7995+ implib = self.env['implib_PATTERN'] % os.path.split(self.target)[1]
7996+
7997+ implib = dll.parent.find_or_declare(implib)
7998+ self.link_task.outputs.append(implib)
7999+ self.bld.install_as('${LIBDIR}/%s' % implib.name, implib, self.env)
8000+
8001+ self.env.append_value('LINKFLAGS', (self.env['IMPLIB_ST'] % implib.bldpath(self.env)).split())
8002+
8003+# ============ the code above must not know anything about vnum processing on unix platforms =========
8004+
8005+@feature('cshlib')
8006+@after('apply_link')
8007+@before('apply_lib_vars', 'default_link_install')
8008+def apply_vnum(self):
8009+ """
8010+ libfoo.so is installed as libfoo.so.1.2.3
8011+ """
8012+ if not getattr(self, 'vnum', '') or not 'cshlib' in self.features or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
8013+ return
8014+
8015+ self.meths.remove('default_link_install')
8016+
8017+ link = self.link_task
8018+ nums = self.vnum.split('.')
8019+ node = link.outputs[0]
8020+
8021+ libname = node.name
8022+ if libname.endswith('.dylib'):
8023+ name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
8024+ name2 = libname.replace('.dylib', '.%s.dylib' % nums[0])
8025+ else:
8026+ name3 = libname + '.' + self.vnum
8027+ name2 = libname + '.' + nums[0]
8028+
8029+ if self.env.SONAME_ST:
8030+ v = self.env.SONAME_ST % name2
8031+ self.env.append_value('LINKFLAGS', v.split())
8032+
8033+ bld = self.bld
8034+ nums = self.vnum.split('.')
8035+
8036+ path = self.install_path
8037+ if not path: return
8038+
8039+ bld.install_as(path + os.sep + name3, node, env=self.env)
8040+ bld.symlink_as(path + os.sep + name2, name3)
8041+ bld.symlink_as(path + os.sep + libname, name3)
8042+
8043+ # the following task is just to enable execution from the build dir :-/
8044+ self.create_task('vnum', node, [node.parent.find_or_declare(name2), node.parent.find_or_declare(name3)])
8045+
8046+def exec_vnum_link(self):
8047+ for x in self.outputs:
8048+ path = x.abspath(self.env)
8049+ try:
8050+ os.remove(path)
8051+ except OSError:
8052+ pass
8053+
8054+ try:
8055+ os.symlink(self.inputs[0].name, path)
8056+ except OSError:
8057+ return 1
8058+
8059+cls = Task.task_type_from_func('vnum', func=exec_vnum_link, ext_in='.bin', color='CYAN')
8060+cls.quiet = 1
8061+
8062+# ============ the --as-needed flag should be added during the configuration, not at runtime =========
8063+
8064+@conftest
8065+def add_as_needed(conf):
8066+ if conf.env.DEST_BINFMT == 'elf' and 'gcc' in (conf.env.CXX_NAME, conf.env.CC_NAME):
8067+ conf.env.append_unique('LINKFLAGS', '--as-needed')
8068+
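To illustrate the uselib_local and vnum handling implemented above, a hedged wscript sketch (library and target names are made up):

	def build(bld):
		# shared lib installed as libfoo.so.1.2.3 with libfoo.so.1 / libfoo.so symlinks (apply_vnum)
		bld(features='cc cshlib', source='foo.c', target='foo', vnum='1.2.3')
		# the program links against the local lib; apply_lib_vars propagates flags and orders the link tasks
		bld(features='cc cprogram', source='main.c', target='app', uselib_local='foo')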
8069diff --git a/buildtools/wafadmin/Tools/compiler_cc.py b/buildtools/wafadmin/Tools/compiler_cc.py
8070new file mode 100644
8071index 0000000..0421503
8072--- /dev/null
8073+++ b/buildtools/wafadmin/Tools/compiler_cc.py
8074@@ -0,0 +1,67 @@
8075+#!/usr/bin/env python
8076+# encoding: utf-8
8077+# Matthias Jahn jahn dôt matthias ât freenet dôt de, 2007 (pmarat)
8078+
8079+import os, sys, imp, types, ccroot
8080+import optparse
8081+import Utils, Configure, Options
8082+from Logs import debug
8083+
8084+c_compiler = {
8085+ 'win32': ['msvc', 'gcc'],
8086+ 'cygwin': ['gcc'],
8087+ 'darwin': ['gcc'],
8088+ 'aix': ['xlc', 'gcc'],
8089+ 'linux': ['gcc', 'icc', 'suncc'],
8090+ 'sunos': ['gcc', 'suncc'],
8091+ 'irix': ['gcc'],
8092+ 'hpux': ['gcc'],
8093+ 'gnu': ['gcc'],
8094+ 'default': ['gcc']
8095+}
8096+
8097+def __list_possible_compiler(platform):
8098+ try:
8099+ return c_compiler[platform]
8100+ except KeyError:
8101+ return c_compiler["default"]
8102+
8103+def detect(conf):
8104+ """
8105+ for each compiler for the platform, try to configure the compiler
8106+ in theory the tools should raise a configuration error if the compiler
8107+ pretends to be something it is not (setting CC=icc and trying to configure gcc)
8108+ """
8109+ try: test_for_compiler = Options.options.check_c_compiler
8110+ except AttributeError: conf.fatal("Add set_options(opt): opt.tool_options('compiler_cc')")
8111+ orig = conf.env
8112+ for compiler in test_for_compiler.split():
8113+ conf.env = orig.copy()
8114+ try:
8115+ conf.check_tool(compiler)
8116+ except Configure.ConfigurationError, e:
8117+ debug('compiler_cc: %r' % e)
8118+ else:
8119+ if conf.env['CC']:
8120+ orig.table = conf.env.get_merged_dict()
8121+ conf.env = orig
8122+ conf.check_message(compiler, '', True)
8123+ conf.env['COMPILER_CC'] = compiler
8124+ break
8125+ conf.check_message(compiler, '', False)
8126+ break
8127+ else:
8128+ conf.fatal('could not configure a c compiler!')
8129+
8130+def set_options(opt):
8131+ build_platform = Utils.unversioned_sys_platform()
8132+ possible_compiler_list = __list_possible_compiler(build_platform)
8133+ test_for_compiler = ' '.join(possible_compiler_list)
8134+ cc_compiler_opts = opt.add_option_group("C Compiler Options")
8135+ cc_compiler_opts.add_option('--check-c-compiler', default="%s" % test_for_compiler,
8136+ help='On this platform (%s) the following C-Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
8137+ dest="check_c_compiler")
8138+
8139+ for c_compiler in test_for_compiler.split():
8140+ opt.tool_options('%s' % c_compiler, option_group=cc_compiler_opts)
8141+
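A minimal configuration sketch for the compiler selection above; the --check-c-compiler option it registers can override the per-platform default list:

	def set_options(opt):
		opt.tool_options('compiler_cc')

	def configure(conf):
		conf.check_tool('compiler_cc')   # tries each candidate (msvc, gcc, icc, ...) in order

	# e.g. ./waf configure --check-c-compiler=gcc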
8142diff --git a/buildtools/wafadmin/Tools/compiler_cxx.py b/buildtools/wafadmin/Tools/compiler_cxx.py
8143new file mode 100644
8144index 0000000..5308ea9
8145--- /dev/null
8146+++ b/buildtools/wafadmin/Tools/compiler_cxx.py
8147@@ -0,0 +1,62 @@
8148+#!/usr/bin/env python
8149+# encoding: utf-8
8150+# Matthias Jahn jahn dôt matthias ât freenet dôt de 2007 (pmarat)
8151+
8152+import os, sys, imp, types, ccroot
8153+import optparse
8154+import Utils, Configure, Options
8155+from Logs import debug
8156+
8157+cxx_compiler = {
8158+'win32': ['msvc', 'g++'],
8159+'cygwin': ['g++'],
8160+'darwin': ['g++'],
8161+'aix': ['xlc++', 'g++'],
8162+'linux': ['g++', 'icpc', 'sunc++'],
8163+'sunos': ['g++', 'sunc++'],
8164+'irix': ['g++'],
8165+'hpux': ['g++'],
8166+'gnu': ['g++'],
8167+'default': ['g++']
8168+}
8169+
8170+def __list_possible_compiler(platform):
8171+ try:
8172+ return cxx_compiler[platform]
8173+ except KeyError:
8174+ return cxx_compiler["default"]
8175+
8176+def detect(conf):
8177+ try: test_for_compiler = Options.options.check_cxx_compiler
8178+ except AttributeError: raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cxx')")
8179+ orig = conf.env
8180+ for compiler in test_for_compiler.split():
8181+ try:
8182+ conf.env = orig.copy()
8183+ conf.check_tool(compiler)
8184+ except Configure.ConfigurationError, e:
8185+ debug('compiler_cxx: %r' % e)
8186+ else:
8187+ if conf.env['CXX']:
8188+ orig.table = conf.env.get_merged_dict()
8189+ conf.env = orig
8190+ conf.check_message(compiler, '', True)
8191+ conf.env['COMPILER_CXX'] = compiler
8192+ break
8193+ conf.check_message(compiler, '', False)
8194+ break
8195+ else:
8196+ conf.fatal('could not configure a cxx compiler!')
8197+
8198+def set_options(opt):
8199+ build_platform = Utils.unversioned_sys_platform()
8200+ possible_compiler_list = __list_possible_compiler(build_platform)
8201+ test_for_compiler = ' '.join(possible_compiler_list)
8202+ cxx_compiler_opts = opt.add_option_group('C++ Compiler Options')
8203+ cxx_compiler_opts.add_option('--check-cxx-compiler', default="%s" % test_for_compiler,
8204+ help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
8205+ dest="check_cxx_compiler")
8206+
8207+ for cxx_compiler in test_for_compiler.split():
8208+ opt.tool_options('%s' % cxx_compiler, option_group=cxx_compiler_opts)
8209+
8210diff --git a/buildtools/wafadmin/Tools/compiler_d.py b/buildtools/wafadmin/Tools/compiler_d.py
8211new file mode 100644
8212index 0000000..1ea5efa
8213--- /dev/null
8214+++ b/buildtools/wafadmin/Tools/compiler_d.py
8215@@ -0,0 +1,33 @@
8216+#!/usr/bin/env python
8217+# encoding: utf-8
8218+# Carlos Rafael Giani, 2007 (dv)
8219+
8220+import os, sys, imp, types
8221+import Utils, Configure, Options
8222+
8223+def detect(conf):
8224+ if getattr(Options.options, 'check_dmd_first', None):
8225+ test_for_compiler = ['dmd', 'gdc']
8226+ else:
8227+ test_for_compiler = ['gdc', 'dmd']
8228+
8229+ for d_compiler in test_for_compiler:
8230+ try:
8231+ conf.check_tool(d_compiler)
8232+ except:
8233+ pass
8234+ else:
8235+ break
8236+ else:
8237+ conf.fatal('no suitable d compiler was found')
8238+
8239+def set_options(opt):
8240+ d_compiler_opts = opt.add_option_group('D Compiler Options')
8241+ d_compiler_opts.add_option('--check-dmd-first', action='store_true',
8242+ help='checks for the gdc compiler before dmd (default is the other way round)',
8243+ dest='check_dmd_first',
8244+ default=False)
8245+
8246+ for d_compiler in ['gdc', 'dmd']:
8247+ opt.tool_options('%s' % d_compiler, option_group=d_compiler_opts)
8248+
8249diff --git a/buildtools/wafadmin/Tools/config_c.py b/buildtools/wafadmin/Tools/config_c.py
8250new file mode 100644
8251index 0000000..a32d8aa
8252--- /dev/null
8253+++ b/buildtools/wafadmin/Tools/config_c.py
8254@@ -0,0 +1,736 @@
8255+#!/usr/bin/env python
8256+# encoding: utf-8
8257+# Thomas Nagy, 2005-2008 (ita)
8258+
8259+"""
8260+c/c++ configuration routines
8261+"""
8262+
8263+import os, imp, sys, shlex, shutil
8264+from Utils import md5
8265+import Build, Utils, Configure, Task, Options, Logs, TaskGen
8266+from Constants import *
8267+from Configure import conf, conftest
8268+
8269+cfg_ver = {
8270+ 'atleast-version': '>=',
8271+ 'exact-version': '==',
8272+ 'max-version': '<=',
8273+}
8274+
8275+SNIP1 = '''
8276+ int main() {
8277+ void *p;
8278+ p=(void*)(%s);
8279+ return 0;
8280+}
8281+'''
8282+
8283+SNIP2 = '''
8284+int main() {
8285+ if ((%(type_name)s *) 0) return 0;
8286+ if (sizeof (%(type_name)s)) return 0;
8287+}
8288+'''
8289+
8290+SNIP3 = '''
8291+int main() {
8292+ return 0;
8293+}
8294+'''
8295+
8296+def parse_flags(line, uselib, env):
8297+	"""pkg-config still has bugs on some platforms, and there are many -config programs, so parsing flags is necessary :-/"""
8298+
8299+ lst = shlex.split(line)
8300+ while lst:
8301+ x = lst.pop(0)
8302+ st = x[:2]
8303+ ot = x[2:]
8304+ app = env.append_value
8305+ if st == '-I' or st == '/I':
8306+ if not ot: ot = lst.pop(0)
8307+ app('CPPPATH_' + uselib, ot)
8308+ elif st == '-D':
8309+ if not ot: ot = lst.pop(0)
8310+ app('CXXDEFINES_' + uselib, ot)
8311+ app('CCDEFINES_' + uselib, ot)
8312+ elif st == '-l':
8313+ if not ot: ot = lst.pop(0)
8314+ app('LIB_' + uselib, ot)
8315+ elif st == '-L':
8316+ if not ot: ot = lst.pop(0)
8317+ app('LIBPATH_' + uselib, ot)
8318+ elif x == '-pthread' or x.startswith('+'):
8319+ app('CCFLAGS_' + uselib, x)
8320+ app('CXXFLAGS_' + uselib, x)
8321+ app('LINKFLAGS_' + uselib, x)
8322+ elif x == '-framework':
8323+ app('FRAMEWORK_' + uselib, lst.pop(0))
8324+ elif x.startswith('-F'):
8325+ app('FRAMEWORKPATH_' + uselib, x[2:])
8326+ elif x.startswith('-std'):
8327+ app('CCFLAGS_' + uselib, x)
8328+ app('CXXFLAGS_' + uselib, x)
8329+ app('LINKFLAGS_' + uselib, x)
8330+ elif x.startswith('-Wl'):
8331+ app('LINKFLAGS_' + uselib, x)
8332+ elif x.startswith('-m') or x.startswith('-f'):
8333+ app('CCFLAGS_' + uselib, x)
8334+ app('CXXFLAGS_' + uselib, x)
8335+
8336+@conf
8337+def ret_msg(self, f, kw):
8338+ """execute a function, when provided"""
8339+ if isinstance(f, str):
8340+ return f
8341+ return f(kw)
8342+
8343+@conf
8344+def validate_cfg(self, kw):
8345+ if not 'path' in kw:
8346+ kw['path'] = 'pkg-config --errors-to-stdout --print-errors'
8347+
8348+ # pkg-config version
8349+ if 'atleast_pkgconfig_version' in kw:
8350+ if not 'msg' in kw:
8351+ kw['msg'] = 'Checking for pkg-config version >= %s' % kw['atleast_pkgconfig_version']
8352+ return
8353+
8354+ # pkg-config --modversion
8355+ if 'modversion' in kw:
8356+ return
8357+
8358+ if 'variables' in kw:
8359+ if not 'msg' in kw:
8360+ kw['msg'] = 'Checking for %s variables' % kw['package']
8361+ return
8362+
8363+ # checking for the version of a module, for the moment, one thing at a time
8364+ for x in cfg_ver.keys():
8365+ y = x.replace('-', '_')
8366+ if y in kw:
8367+ if not 'package' in kw:
8368+ raise ValueError('%s requires a package' % x)
8369+
8370+ if not 'msg' in kw:
8371+ kw['msg'] = 'Checking for %s %s %s' % (kw['package'], cfg_ver[x], kw[y])
8372+ return
8373+
8374+ if not 'msg' in kw:
8375+ kw['msg'] = 'Checking for %s' % (kw['package'] or kw['path'])
8376+ if not 'okmsg' in kw:
8377+ kw['okmsg'] = 'yes'
8378+ if not 'errmsg' in kw:
8379+ kw['errmsg'] = 'not found'
8380+
8381+@conf
8382+def cmd_and_log(self, cmd, kw):
8383+ Logs.debug('runner: %s\n' % cmd)
8384+ if self.log:
8385+ self.log.write('%s\n' % cmd)
8386+
8387+ try:
8388+ p = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE, shell=True)
8389+ (out, err) = p.communicate()
8390+ except OSError, e:
8391+ self.log.write('error %r' % e)
8392+ self.fatal(str(e))
8393+
8394+ # placeholder, don't touch
8395+ out = str(out)
8396+ err = str(err)
8397+
8398+ if self.log:
8399+ self.log.write(out)
8400+ self.log.write(err)
8401+
8402+ if p.returncode:
8403+ if not kw.get('errmsg', ''):
8404+ if kw.get('mandatory', False):
8405+ kw['errmsg'] = out.strip()
8406+ else:
8407+ kw['errmsg'] = 'no'
8408+ self.fatal('fail')
8409+ return out
8410+
8411+@conf
8412+def exec_cfg(self, kw):
8413+
8414+ # pkg-config version
8415+ if 'atleast_pkgconfig_version' in kw:
8416+ cmd = '%s --atleast-pkgconfig-version=%s' % (kw['path'], kw['atleast_pkgconfig_version'])
8417+ self.cmd_and_log(cmd, kw)
8418+ if not 'okmsg' in kw:
8419+ kw['okmsg'] = 'yes'
8420+ return
8421+
8422+ # checking for the version of a module
8423+ for x in cfg_ver:
8424+ y = x.replace('-', '_')
8425+ if y in kw:
8426+ self.cmd_and_log('%s --%s=%s %s' % (kw['path'], x, kw[y], kw['package']), kw)
8427+ if not 'okmsg' in kw:
8428+ kw['okmsg'] = 'yes'
8429+ self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
8430+ break
8431+
8432+ # retrieving the version of a module
8433+ if 'modversion' in kw:
8434+ version = self.cmd_and_log('%s --modversion %s' % (kw['path'], kw['modversion']), kw).strip()
8435+ self.define('%s_VERSION' % Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])), version)
8436+ return version
8437+
8438+ # retrieving variables of a module
8439+ if 'variables' in kw:
8440+ env = kw.get('env', self.env)
8441+ uselib = kw.get('uselib_store', kw['package'].upper())
8442+ vars = Utils.to_list(kw['variables'])
8443+ for v in vars:
8444+ val = self.cmd_and_log('%s --variable=%s %s' % (kw['path'], v, kw['package']), kw).strip()
8445+ var = '%s_%s' % (uselib, v)
8446+ env[var] = val
8447+ if not 'okmsg' in kw:
8448+ kw['okmsg'] = 'yes'
8449+ return
8450+
8451+ lst = [kw['path']]
8452+
8453+
8454+ defi = kw.get('define_variable', None)
8455+ if not defi:
8456+ defi = self.env.PKG_CONFIG_DEFINES or {}
8457+ for key, val in defi.iteritems():
8458+ lst.append('--define-variable=%s=%s' % (key, val))
8459+
8460+ lst.append(kw.get('args', ''))
8461+ lst.append(kw['package'])
8462+
8463+ # so we assume the command-line will output flags to be parsed afterwards
8464+ cmd = ' '.join(lst)
8465+ ret = self.cmd_and_log(cmd, kw)
8466+ if not 'okmsg' in kw:
8467+ kw['okmsg'] = 'yes'
8468+
8469+ self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
8470+ parse_flags(ret, kw.get('uselib_store', kw['package'].upper()), kw.get('env', self.env))
8471+ return ret
8472+
8473+@conf
8474+def check_cfg(self, *k, **kw):
8475+ """
8476+ for pkg-config mostly, but also all the -config tools
8477+ conf.check_cfg(path='mpicc', args='--showme:compile --showme:link', package='', uselib_store='OPEN_MPI')
8478+ conf.check_cfg(package='dbus-1', variables='system_bus_default_address session_bus_services_dir')
8479+ """
8480+
8481+ self.validate_cfg(kw)
8482+ if 'msg' in kw:
8483+ self.check_message_1(kw['msg'])
8484+ ret = None
8485+ try:
8486+ ret = self.exec_cfg(kw)
8487+ except Configure.ConfigurationError, e:
8488+ if 'errmsg' in kw:
8489+ self.check_message_2(kw['errmsg'], 'YELLOW')
8490+ if 'mandatory' in kw and kw['mandatory']:
8491+ if Logs.verbose > 1:
8492+ raise
8493+ else:
8494+ self.fatal('the configuration failed (see %r)' % self.log.name)
8495+ else:
8496+ kw['success'] = ret
8497+ if 'okmsg' in kw:
8498+ self.check_message_2(self.ret_msg(kw['okmsg'], kw))
8499+
8500+ return ret
8501+
8502+# the idea is the following: now that we are certain
8503+# that all the code here is only for c or c++, it is
8504+# easy to put all the logic in one function
8505+#
8506+# this should prevent code duplication (ita)
8507+
8508+# env: an optional environment (modified -> provide a copy)
8509+# compiler: cc or cxx - it tries to guess what is best
8510+# type: cprogram, cshlib, cstaticlib
8511+# code: a c code to execute
8512+# uselib_store: where to add the variables
8513+# uselib: parameters to use for building
8514+# define: define to set, like FOO in #define FOO, if not set, add /* #undef FOO */
8515+# execute: True or False - will return the result of the execution
8516+
8517+@conf
8518+def validate_c(self, kw):
8519+ """validate the parameters for the test method"""
8520+
8521+ if not 'env' in kw:
8522+ kw['env'] = self.env.copy()
8523+
8524+ env = kw['env']
8525+ if not 'compiler' in kw:
8526+ kw['compiler'] = 'cc'
8527+ if env['CXX_NAME'] and Task.TaskBase.classes.get('cxx', None):
8528+ kw['compiler'] = 'cxx'
8529+ if not self.env['CXX']:
8530+ self.fatal('a c++ compiler is required')
8531+ else:
8532+ if not self.env['CC']:
8533+ self.fatal('a c compiler is required')
8534+
8535+ if not 'type' in kw:
8536+ kw['type'] = 'cprogram'
8537+
8538+ assert not(kw['type'] != 'cprogram' and kw.get('execute', 0)), 'can only execute programs'
8539+
8540+
8541+ #if kw['type'] != 'program' and kw.get('execute', 0):
8542+ # raise ValueError, 'can only execute programs'
8543+
8544+ def to_header(dct):
8545+ if 'header_name' in dct:
8546+ dct = Utils.to_list(dct['header_name'])
8547+ return ''.join(['#include <%s>\n' % x for x in dct])
8548+ return ''
8549+
8550+ # set the file name
8551+ if not 'compile_mode' in kw:
8552+ kw['compile_mode'] = (kw['compiler'] == 'cxx') and 'cxx' or 'cc'
8553+
8554+ if not 'compile_filename' in kw:
8555+ kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '')
8556+
8557+ #OSX
8558+ if 'framework_name' in kw:
8559+ try: TaskGen.task_gen.create_task_macapp
8560+ except AttributeError: self.fatal('frameworks require the osx tool')
8561+
8562+ fwkname = kw['framework_name']
8563+ if not 'uselib_store' in kw:
8564+ kw['uselib_store'] = fwkname.upper()
8565+
8566+ if not kw.get('no_header', False):
8567+ if not 'header_name' in kw:
8568+ kw['header_name'] = []
8569+ fwk = '%s/%s.h' % (fwkname, fwkname)
8570+ if kw.get('remove_dot_h', None):
8571+ fwk = fwk[:-2]
8572+ kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk]
8573+
8574+ kw['msg'] = 'Checking for framework %s' % fwkname
8575+ kw['framework'] = fwkname
8576+ #kw['frameworkpath'] = set it yourself
8577+
8578+ if 'function_name' in kw:
8579+ fu = kw['function_name']
8580+ if not 'msg' in kw:
8581+ kw['msg'] = 'Checking for function %s' % fu
8582+ kw['code'] = to_header(kw) + SNIP1 % fu
8583+ if not 'uselib_store' in kw:
8584+ kw['uselib_store'] = fu.upper()
8585+ if not 'define_name' in kw:
8586+ kw['define_name'] = self.have_define(fu)
8587+
8588+ elif 'type_name' in kw:
8589+ tu = kw['type_name']
8590+ if not 'msg' in kw:
8591+ kw['msg'] = 'Checking for type %s' % tu
8592+ if not 'header_name' in kw:
8593+ kw['header_name'] = 'stdint.h'
8594+ kw['code'] = to_header(kw) + SNIP2 % {'type_name' : tu}
8595+ if not 'define_name' in kw:
8596+ kw['define_name'] = self.have_define(tu.upper())
8597+
8598+ elif 'header_name' in kw:
8599+ if not 'msg' in kw:
8600+ kw['msg'] = 'Checking for header %s' % kw['header_name']
8601+
8602+ l = Utils.to_list(kw['header_name'])
8603+ assert len(l)>0, 'list of headers in header_name is empty'
8604+
8605+ kw['code'] = to_header(kw) + SNIP3
8606+
8607+ if not 'uselib_store' in kw:
8608+ kw['uselib_store'] = l[0].upper()
8609+
8610+ if not 'define_name' in kw:
8611+ kw['define_name'] = self.have_define(l[0])
8612+
8613+ if 'lib' in kw:
8614+ if not 'msg' in kw:
8615+ kw['msg'] = 'Checking for library %s' % kw['lib']
8616+ if not 'uselib_store' in kw:
8617+ kw['uselib_store'] = kw['lib'].upper()
8618+
8619+ if 'staticlib' in kw:
8620+ if not 'msg' in kw:
8621+ kw['msg'] = 'Checking for static library %s' % kw['staticlib']
8622+ if not 'uselib_store' in kw:
8623+ kw['uselib_store'] = kw['staticlib'].upper()
8624+
8625+ if 'fragment' in kw:
8626+ # an additional code fragment may be provided to replace the predefined code
8627+ # in custom headers
8628+ kw['code'] = kw['fragment']
8629+ if not 'msg' in kw:
8630+ kw['msg'] = 'Checking for custom code'
8631+ if not 'errmsg' in kw:
8632+ kw['errmsg'] = 'no'
8633+
8634+ for (flagsname,flagstype) in [('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')]:
8635+ if flagsname in kw:
8636+ if not 'msg' in kw:
8637+ kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname])
8638+ if not 'errmsg' in kw:
8639+ kw['errmsg'] = 'no'
8640+
8641+ if not 'execute' in kw:
8642+ kw['execute'] = False
8643+
8644+ if not 'errmsg' in kw:
8645+ kw['errmsg'] = 'not found'
8646+
8647+ if not 'okmsg' in kw:
8648+ kw['okmsg'] = 'yes'
8649+
8650+ if not 'code' in kw:
8651+ kw['code'] = SNIP3
8652+
8653+ if not kw.get('success'): kw['success'] = None
8654+
8655+ assert 'msg' in kw, 'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c'
8656+
8657+@conf
8658+def post_check(self, *k, **kw):
8659+ "set the variables after a test was run successfully"
8660+
8661+ is_success = False
8662+ if kw['execute']:
8663+ if kw['success'] is not None:
8664+ is_success = True
8665+ else:
8666+ is_success = (kw['success'] == 0)
8667+
8668+ if 'define_name' in kw:
8669+ if 'header_name' in kw or 'function_name' in kw or 'type_name' in kw or 'fragment' in kw:
8670+ if kw['execute']:
8671+ key = kw['success']
8672+ if isinstance(key, str):
8673+ if key:
8674+ self.define(kw['define_name'], key, quote=kw.get('quote', 1))
8675+ else:
8676+ self.define_cond(kw['define_name'], True)
8677+ else:
8678+ self.define_cond(kw['define_name'], False)
8679+ else:
8680+ self.define_cond(kw['define_name'], is_success)
8681+
8682+ if is_success and 'uselib_store' in kw:
8683+ import cc, cxx
8684+ for k in set(cc.g_cc_flag_vars).union(cxx.g_cxx_flag_vars):
8685+ lk = k.lower()
8686+ # inconsistency: includes -> CPPPATH
8687+ if k == 'CPPPATH': lk = 'includes'
8688+ if k == 'CXXDEFINES': lk = 'defines'
8689+ if k == 'CCDEFINES': lk = 'defines'
8690+ if lk in kw:
8691+ val = kw[lk]
8692+ # remove trailing slash
8693+ if isinstance(val, str):
8694+ val = val.rstrip(os.path.sep)
8695+ self.env.append_unique(k + '_' + kw['uselib_store'], val)
8696+
8697+@conf
8698+def check(self, *k, **kw):
8699+ # so this will be the generic function
8700+ # it will be safer to use check_cxx or check_cc
8701+ self.validate_c(kw)
8702+ self.check_message_1(kw['msg'])
8703+ ret = None
8704+ try:
8705+ ret = self.run_c_code(*k, **kw)
8706+ except Configure.ConfigurationError, e:
8707+ self.check_message_2(kw['errmsg'], 'YELLOW')
8708+ if 'mandatory' in kw and kw['mandatory']:
8709+ if Logs.verbose > 1:
8710+ raise
8711+ else:
8712+ self.fatal('the configuration failed (see %r)' % self.log.name)
8713+ else:
8714+ kw['success'] = ret
8715+ self.check_message_2(self.ret_msg(kw['okmsg'], kw))
8716+
8717+ self.post_check(*k, **kw)
8718+ if not kw.get('execute', False):
8719+ return ret == 0
8720+ return ret
8721+
8722+@conf
8723+def run_c_code(self, *k, **kw):
8724+ test_f_name = kw['compile_filename']
8725+
8726+ k = 0
8727+ while k < 10000:
8728+ # make certain to use a fresh folder - necessary for win32
8729+ dir = os.path.join(self.blddir, '.conf_check_%d' % k)
8730+
8731+ # if the folder already exists, remove it
8732+ try:
8733+ shutil.rmtree(dir)
8734+ except OSError:
8735+ pass
8736+
8737+ try:
8738+ os.stat(dir)
8739+ except OSError:
8740+ break
8741+
8742+ k += 1
8743+
8744+ try:
8745+ os.makedirs(dir)
8746+ except:
8747+ self.fatal('cannot create a configuration test folder %r' % dir)
8748+
8749+ try:
8750+ os.stat(dir)
8751+ except:
8752+ self.fatal('cannot use the configuration test folder %r' % dir)
8753+
8754+ bdir = os.path.join(dir, 'testbuild')
8755+
8756+ if not os.path.exists(bdir):
8757+ os.makedirs(bdir)
8758+
8759+ env = kw['env']
8760+
8761+ dest = open(os.path.join(dir, test_f_name), 'w')
8762+ dest.write(kw['code'])
8763+ dest.close()
8764+
8765+ back = os.path.abspath('.')
8766+
8767+ bld = Build.BuildContext()
8768+ bld.log = self.log
8769+ bld.all_envs.update(self.all_envs)
8770+ bld.all_envs['default'] = env
8771+ bld.lst_variants = bld.all_envs.keys()
8772+ bld.load_dirs(dir, bdir)
8773+
8774+ os.chdir(dir)
8775+
8776+ bld.rescan(bld.srcnode)
8777+
8778+ if not 'features' in kw:
8779+ # conf.check(features='cc cprogram pyext', ...)
8780+ kw['features'] = [kw['compile_mode'], kw['type']] # "cprogram cc"
8781+
8782+ o = bld(features=kw['features'], source=test_f_name, target='testprog')
8783+
8784+ for k, v in kw.iteritems():
8785+ setattr(o, k, v)
8786+
8787+ self.log.write("==>\n%s\n<==\n" % kw['code'])
8788+
8789+ # compile the program
8790+ try:
8791+ bld.compile()
8792+ except Utils.WafError:
8793+ ret = Utils.ex_stack()
8794+ else:
8795+ ret = 0
8796+
8797+ # chdir before returning
8798+ os.chdir(back)
8799+
8800+ if ret:
8801+ self.log.write('command returned %r' % ret)
8802+ self.fatal(str(ret))
8803+
8804+ # if we need to run the program, try to get its result
8805+ # keep the name of the program to execute
8806+ if kw['execute']:
8807+ lastprog = o.link_task.outputs[0].abspath(env)
8808+
8809+ args = Utils.to_list(kw.get('exec_args', []))
8810+ proc = Utils.pproc.Popen([lastprog] + args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
8811+ (out, err) = proc.communicate()
8812+ w = self.log.write
8813+ w(str(out))
8814+ w('\n')
8815+ w(str(err))
8816+ w('\n')
8817+ w('returncode %r' % proc.returncode)
8818+ w('\n')
8819+ if proc.returncode:
8820+ self.fatal(Utils.ex_stack())
8821+ ret = out
8822+
8823+ return ret
8824+
8825+@conf
8826+def check_cxx(self, *k, **kw):
8827+ kw['compiler'] = 'cxx'
8828+ return self.check(*k, **kw)
8829+
8830+@conf
8831+def check_cc(self, *k, **kw):
8832+ kw['compiler'] = 'cc'
8833+ return self.check(*k, **kw)
8834+
8835+@conf
8836+def define(self, define, value, quote=1):
8837+ """store a single define and its state into an internal list for later
8838+ writing to a config header file. Value can only be
8839+ a string or int; other types not supported. String
8840+ values will appear properly quoted in the generated
8841+ header file."""
8842+ assert define and isinstance(define, str)
8843+
8844+ # ordered_dict is for writing the configuration header in order
8845+ tbl = self.env[DEFINES] or Utils.ordered_dict()
8846+
8847+ # the user forgot to tell if the value is quoted or not
8848+ if isinstance(value, str):
8849+ if quote:
8850+ tbl[define] = '"%s"' % repr('"'+value)[2:-1].replace('"', '\\"')
8851+ else:
8852+ tbl[define] = value
8853+ elif isinstance(value, int):
8854+ tbl[define] = value
8855+ else:
8856+ raise TypeError('define %r -> %r must be a string or an int' % (define, value))
8857+
8858+ # add later to make reconfiguring faster
8859+ self.env[DEFINES] = tbl
8860+ self.env[define] = value # <- not certain this is necessary
8861+
8862+@conf
8863+def undefine(self, define):
8864+ """store a single define and its state into an internal list
8865+ for later writing to a config header file"""
8866+ assert define and isinstance(define, str)
8867+
8868+ tbl = self.env[DEFINES] or Utils.ordered_dict()
8869+
8870+ value = UNDEFINED
8871+ tbl[define] = value
8872+
8873+ # add later to make reconfiguring faster
8874+ self.env[DEFINES] = tbl
8875+ self.env[define] = value
8876+
8877+@conf
8878+def define_cond(self, name, value):
8879+ """Conditionally define a name.
8880+ Formally equivalent to: if value: define(name, 1) else: undefine(name)"""
8881+ if value:
8882+ self.define(name, 1)
8883+ else:
8884+ self.undefine(name)
8885+
8886+@conf
8887+def is_defined(self, key):
8888+ defines = self.env[DEFINES]
8889+ if not defines:
8890+ return False
8891+ try:
8892+ value = defines[key]
8893+ except KeyError:
8894+ return False
8895+ else:
8896+ return value != UNDEFINED
8897+
8898+@conf
8899+def get_define(self, define):
8900+ "get the value of a previously stored define"
8901+ try: return self.env[DEFINES][define]
8902+ except KeyError: return None
8903+
8904+@conf
8905+def have_define(self, name):
8906+ "prefix the define with 'HAVE_' and make sure it has valid characters."
8907+ return self.__dict__.get('HAVE_PAT', 'HAVE_%s') % Utils.quote_define_name(name)
8908+
8909+@conf
8910+def write_config_header(self, configfile='', env='', guard='', top=False):
8911+ "save the defines into a file"
8912+ if not configfile: configfile = WAF_CONFIG_H
8913+ waf_guard = guard or '_%s_WAF' % Utils.quote_define_name(configfile)
8914+
8915+ # configfile -> absolute path
8916+ # there is a good reason to concatenate first and to split afterwards
8917+ if not env: env = self.env
8918+ if top:
8919+ diff = ''
8920+ else:
8921+ diff = Utils.diff_path(self.srcdir, self.curdir)
8922+ full = os.sep.join([self.blddir, env.variant(), diff, configfile])
8923+ full = os.path.normpath(full)
8924+ (dir, base) = os.path.split(full)
8925+
8926+ try: os.makedirs(dir)
8927+ except: pass
8928+
8929+ dest = open(full, 'w')
8930+ dest.write('/* Configuration header created by Waf - do not edit */\n')
8931+ dest.write('#ifndef %s\n#define %s\n\n' % (waf_guard, waf_guard))
8932+
8933+ dest.write(self.get_config_header())
8934+
8935+ # config files are not removed on "waf clean"
8936+ env.append_unique(CFG_FILES, os.path.join(diff, configfile))
8937+
8938+ dest.write('\n#endif /* %s */\n' % waf_guard)
8939+ dest.close()
8940+
8941+@conf
8942+def get_config_header(self):
8943+	"""Fill in the contents of the config header. Override when you need to write your own config header."""
8944+ config_header = []
8945+
8946+ tbl = self.env[DEFINES] or Utils.ordered_dict()
8947+ for key in tbl.allkeys:
8948+ value = tbl[key]
8949+ if value is None:
8950+ config_header.append('#define %s' % key)
8951+ elif value is UNDEFINED:
8952+ config_header.append('/* #undef %s */' % key)
8953+ else:
8954+ config_header.append('#define %s %s' % (key, value))
8955+ return "\n".join(config_header)
8956+
8957+@conftest
8958+def find_cpp(conf):
8959+ v = conf.env
8960+ cpp = []
8961+ if v['CPP']: cpp = v['CPP']
8962+ elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
8963+ if not cpp: cpp = conf.find_program('cpp', var='CPP')
8964+ #if not cpp: cpp = v['CC']
8965+ #if not cpp: cpp = v['CXX']
8966+ v['CPP'] = cpp
8967+
8968+@conftest
8969+def cc_add_flags(conf):
8970+ conf.add_os_flags('CFLAGS', 'CCFLAGS')
8971+ conf.add_os_flags('CPPFLAGS')
8972+
8973+@conftest
8974+def cxx_add_flags(conf):
8975+ conf.add_os_flags('CXXFLAGS')
8976+ conf.add_os_flags('CPPFLAGS')
8977+
8978+@conftest
8979+def link_add_flags(conf):
8980+ conf.add_os_flags('LINKFLAGS')
8981+ conf.add_os_flags('LDFLAGS', 'LINKFLAGS')
8982+
8983+@conftest
8984+def cc_load_tools(conf):
8985+ conf.check_tool('cc')
8986+
8987+@conftest
8988+def cxx_load_tools(conf):
8989+ conf.check_tool('cxx')
8990+
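Tying the configuration helpers together, a hedged configure sketch (package, header and store names are illustrative):

	def configure(conf):
		conf.check_tool('compiler_cc')
		conf.check_cc(header_name='stdint.h')                      # defines HAVE_STDINT_H on success
		conf.check_cfg(package='zlib', args='--cflags --libs',
		               uselib_store='Z', mandatory=False)          # parsed flags land in the *_Z variables
		conf.write_config_header('config.h')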
8991diff --git a/buildtools/wafadmin/Tools/cs.py b/buildtools/wafadmin/Tools/cs.py
8992new file mode 100644
8993index 0000000..4354485
8994--- /dev/null
8995+++ b/buildtools/wafadmin/Tools/cs.py
8996@@ -0,0 +1,68 @@
8997+#!/usr/bin/env python
8998+# encoding: utf-8
8999+# Thomas Nagy, 2006 (ita)
9000+
9001+"C# support"
9002+
9003+import TaskGen, Utils, Task, Options
9004+from Logs import error
9005+from TaskGen import before, after, taskgen, feature
9006+
9007+flag_vars= ['FLAGS', 'ASSEMBLIES']
9008+
9009+@feature('cs')
9010+def init_cs(self):
9011+ Utils.def_attrs(self,
9012+ flags = '',
9013+ assemblies = '',
9014+ resources = '',
9015+ uselib = '')
9016+
9017+@feature('cs')
9018+@after('init_cs')
9019+def apply_uselib_cs(self):
9020+ if not self.uselib:
9021+ return
9022+ global flag_vars
9023+ for var in self.to_list(self.uselib):
9024+ for v in self.flag_vars:
9025+ val = self.env[v+'_'+var]
9026+ if val: self.env.append_value(v, val)
9027+
9028+@feature('cs')
9029+@after('apply_uselib_cs')
9030+@before('apply_core')
9031+def apply_cs(self):
9032+ try: self.meths.remove('apply_core')
9033+ except ValueError: pass
9034+
9035+ # process the flags for the assemblies
9036+ for i in self.to_list(self.assemblies) + self.env['ASSEMBLIES']:
9037+ self.env.append_unique('_ASSEMBLIES', '/r:'+i)
9038+
9039+ # process the flags for the resources
9040+ for i in self.to_list(self.resources):
9041+ self.env.append_unique('_RESOURCES', '/resource:'+i)
9042+
9043+ # what kind of assembly are we generating?
9044+ self.env['_TYPE'] = getattr(self, 'type', 'exe')
9045+
9046+ # additional flags
9047+ self.env.append_unique('_FLAGS', self.to_list(self.flags))
9048+ self.env.append_unique('_FLAGS', self.env.FLAGS)
9049+
9050+ # process the sources
9051+ nodes = [self.path.find_resource(i) for i in self.to_list(self.source)]
9052+ self.create_task('mcs', nodes, self.path.find_or_declare(self.target))
9053+
9054+Task.simple_task_type('mcs', '${MCS} ${SRC} /target:${_TYPE} /out:${TGT} ${_FLAGS} ${_ASSEMBLIES} ${_RESOURCES}', color='YELLOW')
9055+
9056+def detect(conf):
9057+ csc = getattr(Options.options, 'cscbinary', None)
9058+ if csc:
9059+ conf.env.MCS = csc
9060+ conf.find_program(['gmcs', 'mcs'], var='MCS')
9061+
9062+def set_options(opt):
9063+ opt.add_option('--with-csc-binary', type='string', dest='cscbinary')
9064+
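A hedged usage sketch for the C# support above (source and target names are illustrative):

	def configure(conf):
		conf.check_tool('cs')            # locates gmcs or mcs

	def build(bld):
		bld(features='cs', source='main.cs', target='hello.exe', type='exe')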
9065diff --git a/buildtools/wafadmin/Tools/cxx.py b/buildtools/wafadmin/Tools/cxx.py
9066new file mode 100644
9067index 0000000..719b821
9068--- /dev/null
9069+++ b/buildtools/wafadmin/Tools/cxx.py
9070@@ -0,0 +1,104 @@
9071+#!/usr/bin/env python
9072+# encoding: utf-8
9073+# Thomas Nagy, 2005 (ita)
9074+
9075+"Base for c++ programs and libraries"
9076+
9077+import TaskGen, Task, Utils
9078+from Logs import debug
9079+import ccroot # <- do not remove
9080+from TaskGen import feature, before, extension, after
9081+
9082+g_cxx_flag_vars = [
9083+'CXXDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
9084+'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
9085+'CXXFLAGS', 'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CXXDEFINES']
9086+"main cpp variables"
9087+
9088+EXT_CXX = ['.cpp', '.cc', '.cxx', '.C', '.c++']
9089+
9090+g_cxx_type_vars=['CXXFLAGS', 'LINKFLAGS']
9091+
9092+# TODO remove in waf 1.6
9093+class cxx_taskgen(ccroot.ccroot_abstract):
9094+ pass
9095+
9096+@feature('cxx')
9097+@before('apply_type_vars')
9098+@after('default_cc')
9099+def init_cxx(self):
9100+ if not 'cc' in self.features:
9101+ self.mappings['.c'] = TaskGen.task_gen.mappings['.cxx']
9102+
9103+ self.p_flag_vars = set(self.p_flag_vars).union(g_cxx_flag_vars)
9104+ self.p_type_vars = set(self.p_type_vars).union(g_cxx_type_vars)
9105+
9106+ if not self.env['CXX_NAME']:
9107+ raise Utils.WafError("At least one compiler (g++, ..) must be selected")
9108+
9109+@feature('cxx')
9110+@after('apply_incpaths')
9111+def apply_obj_vars_cxx(self):
9112+ """after apply_incpaths for INC_PATHS"""
9113+ env = self.env
9114+ app = env.append_unique
9115+ cxxpath_st = env['CPPPATH_ST']
9116+
9117+ # local flags come first
9118+ # set the user-defined includes paths
9119+ for i in env['INC_PATHS']:
9120+ app('_CXXINCFLAGS', cxxpath_st % i.bldpath(env))
9121+ app('_CXXINCFLAGS', cxxpath_st % i.srcpath(env))
9122+
9123+ # set the library include paths
9124+ for i in env['CPPPATH']:
9125+ app('_CXXINCFLAGS', cxxpath_st % i)
9126+
9127+@feature('cxx')
9128+@after('apply_lib_vars')
9129+def apply_defines_cxx(self):
9130+ """after uselib is set for CXXDEFINES"""
9131+ self.defines = getattr(self, 'defines', [])
9132+ lst = self.to_list(self.defines) + self.to_list(self.env['CXXDEFINES'])
9133+ milst = []
9134+
9135+ # now process the local defines
9136+ for defi in lst:
9137+ if not defi in milst:
9138+ milst.append(defi)
9139+
9140+ # CXXDEFINES_USELIB
9141+ libs = self.to_list(self.uselib)
9142+ for l in libs:
9143+ val = self.env['CXXDEFINES_'+l]
9144+ if val: milst += self.to_list(val)
9145+
9146+ self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
9147+ y = self.env['CXXDEFINES_ST']
9148+ self.env.append_unique('_CXXDEFFLAGS', [y%x for x in milst])
9149+
9150+@extension(EXT_CXX)
9151+def cxx_hook(self, node):
9152+ # create the compilation task: cpp or cc
9153+ if getattr(self, 'obj_ext', None):
9154+ obj_ext = self.obj_ext
9155+ else:
9156+ obj_ext = '_%d.o' % self.idx
9157+
9158+ task = self.create_task('cxx', node, node.change_ext(obj_ext))
9159+ try:
9160+ self.compiled_tasks.append(task)
9161+ except AttributeError:
9162+ raise Utils.WafError('Have you forgotten to set the feature "cxx" on %s?' % str(self))
9163+ return task
9164+
9165+cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}'
9166+cls = Task.simple_task_type('cxx', cxx_str, color='GREEN', ext_out='.o', ext_in='.cxx', shell=False)
9167+cls.scan = ccroot.scan
9168+cls.vars.append('CXXDEPS')
9169+
9170+link_str = '${LINK_CXX} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
9171+cls = Task.simple_task_type('cxx_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
9172+cls.maxjobs = 1
9173+cls.install = Utils.nada
9174+
9175diff --git a/buildtools/wafadmin/Tools/d.py b/buildtools/wafadmin/Tools/d.py
9176new file mode 100644
9177index 0000000..1a22821
9178--- /dev/null
9179+++ b/buildtools/wafadmin/Tools/d.py
9180@@ -0,0 +1,535 @@
9181+#!/usr/bin/env python
9182+# encoding: utf-8
9183+# Carlos Rafael Giani, 2007 (dv)
9184+# Thomas Nagy, 2007-2008 (ita)
9185+
9186+import os, sys, re, optparse
9187+import ccroot # <- leave this
9188+import TaskGen, Utils, Task, Configure, Logs, Build
9189+from Logs import debug, error
9190+from TaskGen import taskgen, feature, after, before, extension
9191+from Configure import conftest
9192+
9193+EXT_D = ['.d', '.di', '.D']
9194+D_METHS = ['apply_core', 'apply_vnum', 'apply_objdeps'] # additional d methods
9195+
9196+DLIB = """
9197+version(D_Version2) {
9198+ import std.stdio;
9199+ int main() {
9200+ writefln("phobos2");
9201+ return 0;
9202+ }
9203+} else {
9204+ version(Tango) {
9205+ import tango.stdc.stdio;
9206+ int main() {
9207+ printf("tango");
9208+ return 0;
9209+ }
9210+ } else {
9211+ import std.stdio;
9212+ int main() {
9213+ writefln("phobos1");
9214+ return 0;
9215+ }
9216+ }
9217+}
9218+"""
9219+
9220+def filter_comments(filename):
9221+ txt = Utils.readf(filename)
9222+ i = 0
9223+ buf = []
9224+ max = len(txt)
9225+ begin = 0
9226+ while i < max:
9227+ c = txt[i]
9228+ if c == '"' or c == "'": # skip a string or character literal
9229+ buf.append(txt[begin:i])
9230+ delim = c
9231+ i += 1
9232+ while i < max:
9233+ c = txt[i]
9234+ if c == delim: break
9235+ elif c == '\\': # skip the character following backslash
9236+ i += 1
9237+ i += 1
9238+ i += 1
9239+ begin = i
9240+ elif c == '/': # try to replace a comment with whitespace
9241+ buf.append(txt[begin:i])
9242+ i += 1
9243+ if i == max: break
9244+ c = txt[i]
9245+ if c == '+': # eat nesting /+ +/ comment
9246+ i += 1
9247+ nesting = 1
9248+ c = None
9249+ while i < max:
9250+ prev = c
9251+ c = txt[i]
9252+ if prev == '/' and c == '+':
9253+ nesting += 1
9254+ c = None
9255+ elif prev == '+' and c == '/':
9256+ nesting -= 1
9257+ if nesting == 0: break
9258+ c = None
9259+ i += 1
9260+ elif c == '*': # eat /* */ comment
9261+ i += 1
9262+ c = None
9263+ while i < max:
9264+ prev = c
9265+ c = txt[i]
9266+ if prev == '*' and c == '/': break
9267+ i += 1
9268+ elif c == '/': # eat // comment
9269+ i += 1
9270+ while i < max and txt[i] != '\n':
9271+ i += 1
9272+ else: # no comment
9273+ begin = i - 1
9274+ continue
9275+ i += 1
9276+ begin = i
9277+ buf.append(' ')
9278+ else:
9279+ i += 1
9280+ buf.append(txt[begin:])
9281+ return buf
9282+
9283+class d_parser(object):
9284+ def __init__(self, env, incpaths):
9285+ #self.code = ''
9286+ #self.module = ''
9287+ #self.imports = []
9288+
9289+ self.allnames = []
9290+
9291+ self.re_module = re.compile("module\s+([^;]+)")
9292+ self.re_import = re.compile("import\s+([^;]+)")
9293+ self.re_import_bindings = re.compile("([^:]+):(.*)")
9294+ self.re_import_alias = re.compile("[^=]+=(.+)")
9295+
9296+ self.env = env
9297+
9298+ self.nodes = []
9299+ self.names = []
9300+
9301+ self.incpaths = incpaths
9302+
9303+ def tryfind(self, filename):
9304+ found = 0
9305+ for n in self.incpaths:
9306+ found = n.find_resource(filename.replace('.', '/') + '.d')
9307+ if found:
9308+ self.nodes.append(found)
9309+ self.waiting.append(found)
9310+ break
9311+ if not found:
9312+ if not filename in self.names:
9313+ self.names.append(filename)
9314+
9315+ def get_strings(self, code):
9316+ #self.imports = []
9317+ self.module = ''
9318+ lst = []
9319+
9320+ # get the module name (if present)
9321+
9322+ mod_name = self.re_module.search(code)
9323+ if mod_name:
9324+ self.module = re.sub('\s+', '', mod_name.group(1)) # strip all whitespaces
9325+
9326+ # go through the code, have a look at all import occurrences
9327+
9328+ # first, lets look at anything beginning with "import" and ending with ";"
9329+ import_iterator = self.re_import.finditer(code)
9330+ if import_iterator:
9331+ for import_match in import_iterator:
9332+ import_match_str = re.sub('\s+', '', import_match.group(1)) # strip all whitespaces
9333+
9334+ # does this end with an import bindings declaration?
9335+ # (import bindings always terminate the list of imports)
9336+ bindings_match = self.re_import_bindings.match(import_match_str)
9337+ if bindings_match:
9338+ import_match_str = bindings_match.group(1)
9339+ # if so, extract the part before the ":" (since the module declaration(s) is/are located there)
9340+
9341+ # split the matching string into a bunch of strings, separated by a comma
9342+ matches = import_match_str.split(',')
9343+
9344+ for match in matches:
9345+ alias_match = self.re_import_alias.match(match)
9346+ if alias_match:
9347+ # is this an alias declaration? (alias = module name) if so, extract the module name
9348+ match = alias_match.group(1)
9349+
9350+ lst.append(match)
9351+ return lst
9352+
9353+ def start(self, node):
9354+ self.waiting = [node]
9355+ # while the stack is not empty, add the dependencies
9356+ while self.waiting:
9357+ nd = self.waiting.pop(0)
9358+ self.iter(nd)
9359+
9360+ def iter(self, node):
9361+ path = node.abspath(self.env) # obtain the absolute path
9362+ code = "".join(filter_comments(path)) # read the file and filter the comments
9363+ names = self.get_strings(code) # obtain the import strings
9364+ for x in names:
9365+ # optimization
9366+ if x in self.allnames: continue
9367+ self.allnames.append(x)
9368+
9369+ # for each name, see if it is like a node or not
9370+ self.tryfind(x)
9371+
9372+def scan(self):
9373+ "look for the .d/.di files that the .d source needs"
9374+ env = self.env
9375+ gruik = d_parser(env, env['INC_PATHS'])
9376+ gruik.start(self.inputs[0])
9377+
9378+ if Logs.verbose:
9379+ debug('deps: nodes found for %s: %s %s' % (str(self.inputs[0]), str(gruik.nodes), str(gruik.names)))
9380+ #debug("deps found for %s: %s" % (str(node), str(gruik.deps)), 'deps')
9381+ return (gruik.nodes, gruik.names)
9382+
9383+def get_target_name(self):
9384+ "for d programs and libs"
9385+ v = self.env
9386+ tp = 'program'
9387+ for x in self.features:
9388+ if x in ['dshlib', 'dstaticlib']:
9389+ tp = x.lstrip('d')
9390+ return v['D_%s_PATTERN' % tp] % self.target
9391+
9392+d_params = {
9393+'dflags': '',
9394+'importpaths':'',
9395+'libs':'',
9396+'libpaths':'',
9397+'generate_headers':False,
9398+}
9399+
9400+@feature('d')
9401+@before('apply_type_vars')
9402+def init_d(self):
9403+ for x in d_params:
9404+ setattr(self, x, getattr(self, x, d_params[x]))
9405+
9406+class d_taskgen(TaskGen.task_gen):
9407+ def __init__(self, *k, **kw):
9408+ TaskGen.task_gen.__init__(self, *k, **kw)
9409+
9410+ # COMPAT
9411+ if len(k) > 1:
9412+ self.features.append('d' + k[1])
9413+
9414+# okay, we borrow a few methods from ccroot
9415+TaskGen.bind_feature('d', D_METHS)
9416+
9417+@feature('d')
9418+@before('apply_d_libs')
9419+def init_d(self):
9420+ Utils.def_attrs(self,
9421+ dflags='',
9422+ importpaths='',
9423+ libs='',
9424+ libpaths='',
9425+ uselib='',
9426+ uselib_local='',
9427+ generate_headers=False, # set to true if you want .di files as well as .o
9428+ compiled_tasks=[],
9429+ add_objects=[],
9430+ link_task=None)
9431+
9432+@feature('d')
9433+@after('apply_d_link', 'init_d')
9434+@before('apply_vnum', 'apply_d_vars')
9435+def apply_d_libs(self):
9436+ """after apply_link because of 'link_task'
9437+ after default_cc because of the attribute 'uselib'"""
9438+ env = self.env
9439+
9440+ # 1. the case of the libs defined in the project (visit ancestors first)
9441+ # the ancestors external libraries (uselib) will be prepended
9442+ self.uselib = self.to_list(self.uselib)
9443+ names = self.to_list(self.uselib_local)
9444+
9445+ seen = set([])
9446+ tmp = Utils.deque(names) # consume a copy of the list of names
9447+ while tmp:
9448+ lib_name = tmp.popleft()
9449+ # visit dependencies only once
9450+ if lib_name in seen:
9451+ continue
9452+
9453+ y = self.name_to_obj(lib_name)
9454+ if not y:
9455+ raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
9456+ y.post()
9457+ seen.add(lib_name)
9458+
9459+ # object has ancestors to process (shared libraries): add them to the end of the list
9460+ if getattr(y, 'uselib_local', None):
9461+ lst = y.to_list(y.uselib_local)
9462+ if 'dshlib' in y.features or 'dprogram' in y.features:
9463+ lst = [x for x in lst if not 'dstaticlib' in self.name_to_obj(x).features]
9464+ tmp.extend(lst)
9465+
9466+ # link task and flags
9467+ if getattr(y, 'link_task', None):
9468+
9469+ link_name = y.target[y.target.rfind(os.sep) + 1:]
9470+ if 'dstaticlib' in y.features or 'dshlib' in y.features:
9471+ env.append_unique('DLINKFLAGS', env.DLIB_ST % link_name)
9472+ env.append_unique('DLINKFLAGS', env.DLIBPATH_ST % y.link_task.outputs[0].parent.bldpath(env))
9473+
9474+ # the order
9475+ self.link_task.set_run_after(y.link_task)
9476+
9477+ # for the recompilation
9478+ dep_nodes = getattr(self.link_task, 'dep_nodes', [])
9479+ self.link_task.dep_nodes = dep_nodes + y.link_task.outputs
9480+
9481+ # add ancestors uselib too - but only propagate those that have no staticlib
9482+ for v in self.to_list(y.uselib):
9483+ if not v in self.uselib:
9484+ self.uselib.insert(0, v)
9485+
9486+ # if the library task generator provides 'export_incdirs', add to the include path
9487+ # the export_incdirs must be a list of paths relative to the other library
9488+ if getattr(y, 'export_incdirs', None):
9489+ for x in self.to_list(y.export_incdirs):
9490+ node = y.path.find_dir(x)
9491+ if not node:
9492+ raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x))
9493+ self.env.append_unique('INC_PATHS', node)
9494+
9495+@feature('dprogram', 'dshlib', 'dstaticlib')
9496+@after('apply_core')
9497+def apply_d_link(self):
9498+ link = getattr(self, 'link', None)
9499+ if not link:
9500+ if 'dstaticlib' in self.features: link = 'static_link'
9501+ else: link = 'd_link'
9502+
9503+ outputs = [t.outputs[0] for t in self.compiled_tasks]
9504+ self.link_task = self.create_task(link, outputs, self.path.find_or_declare(get_target_name(self)))
9505+
9506+@feature('d')
9507+@after('apply_core')
9508+def apply_d_vars(self):
9509+ env = self.env
9510+ dpath_st = env['DPATH_ST']
9511+ lib_st = env['DLIB_ST']
9512+ libpath_st = env['DLIBPATH_ST']
9513+
9514+ importpaths = self.to_list(self.importpaths)
9515+ libpaths = []
9516+ libs = []
9517+ uselib = self.to_list(self.uselib)
9518+
9519+ for i in uselib:
9520+ if env['DFLAGS_' + i]:
9521+ env.append_unique('DFLAGS', env['DFLAGS_' + i])
9522+
9523+ for x in self.features:
9524+ if not x in ['dprogram', 'dstaticlib', 'dshlib']:
9525+ continue
9526+ x.lstrip('d')
9527+ d_shlib_dflags = env['D_' + x + '_DFLAGS']
9528+ if d_shlib_dflags:
9529+ env.append_unique('DFLAGS', d_shlib_dflags)
9530+
9531+ # add import paths
9532+ for i in uselib:
9533+ if env['DPATH_' + i]:
9534+ for entry in self.to_list(env['DPATH_' + i]):
9535+ if not entry in importpaths:
9536+ importpaths.append(entry)
9537+
9538+ # now process the import paths
9539+ for path in importpaths:
9540+ if os.path.isabs(path):
9541+ env.append_unique('_DIMPORTFLAGS', dpath_st % path)
9542+ else:
9543+ node = self.path.find_dir(path)
9544+ self.env.append_unique('INC_PATHS', node)
9545+ env.append_unique('_DIMPORTFLAGS', dpath_st % node.srcpath(env))
9546+ env.append_unique('_DIMPORTFLAGS', dpath_st % node.bldpath(env))
9547+
9548+ # add library paths
9549+ for i in uselib:
9550+ if env['LIBPATH_' + i]:
9551+ for entry in self.to_list(env['LIBPATH_' + i]):
9552+ if not entry in libpaths:
9553+ libpaths.append(entry)
9554+ libpaths = self.to_list(self.libpaths) + libpaths
9555+
9556+ # now process the library paths
9557+ # apply same path manipulation as used with import paths
9558+ for path in libpaths:
9559+ if not os.path.isabs(path):
9560+ node = self.path.find_resource(path)
9561+ if not node:
9562+ raise Utils.WafError('could not find libpath %r from %r' % (path, self))
9563+ path = node.abspath(self.env)
9564+
9565+ env.append_unique('DLINKFLAGS', libpath_st % path)
9566+
9567+ # add libraries
9568+ for i in uselib:
9569+ if env['LIB_' + i]:
9570+ for entry in self.to_list(env['LIB_' + i]):
9571+ if not entry in libs:
9572+ libs.append(entry)
9573+ libs.extend(self.to_list(self.libs))
9574+
9575+ # process user flags
9576+ for flag in self.to_list(self.dflags):
9577+ env.append_unique('DFLAGS', flag)
9578+
9579+ # now process the libraries
9580+ for lib in libs:
9581+ env.append_unique('DLINKFLAGS', lib_st % lib)
9582+
9583+ # add linker flags
9584+ for i in uselib:
9585+ dlinkflags = env['DLINKFLAGS_' + i]
9586+ if dlinkflags:
9587+ for linkflag in dlinkflags:
9588+ env.append_unique('DLINKFLAGS', linkflag)
9589+
9590+@feature('dshlib')
9591+@after('apply_d_vars')
9592+def add_shlib_d_flags(self):
9593+ for linkflag in self.env['D_shlib_LINKFLAGS']:
9594+ self.env.append_unique('DLINKFLAGS', linkflag)
9595+
9596+@extension(EXT_D)
9597+def d_hook(self, node):
9598+ # create the D compilation task
9599+ task = self.create_task(self.generate_headers and 'd_with_header' or 'd')
9600+ try: obj_ext = self.obj_ext
9601+ except AttributeError: obj_ext = '_%d.o' % self.idx
9602+
9603+ task.inputs = [node]
9604+ task.outputs = [node.change_ext(obj_ext)]
9605+ self.compiled_tasks.append(task)
9606+
9607+ if self.generate_headers:
9608+ header_node = node.change_ext(self.env['DHEADER_ext'])
9609+ task.outputs += [header_node]
9610+
9611+d_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} ${D_SRC_F}${SRC} ${D_TGT_F}${TGT}'
9612+d_with_header_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} \
9613+${D_HDR_F}${TGT[1].bldpath(env)} \
9614+${D_SRC_F}${SRC} \
9615+${D_TGT_F}${TGT[0].bldpath(env)}'
9616+link_str = '${D_LINKER} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F}${TGT} ${DLINKFLAGS}'
9617+
9618+def override_exec(cls):
9619+ """stupid dmd wants -of stuck to the file name"""
9620+ old_exec = cls.exec_command
9621+ def exec_command(self, *k, **kw):
9622+ if isinstance(k[0], list):
9623+ lst = k[0]
9624+ for i in xrange(len(lst)):
9625+ if lst[i] == '-of':
9626+ del lst[i]
9627+ lst[i] = '-of' + lst[i]
9628+ break
9629+ return old_exec(self, *k, **kw)
9630+ cls.exec_command = exec_command
9631+
9632+cls = Task.simple_task_type('d', d_str, 'GREEN', before='static_link d_link', shell=False)
9633+cls.scan = scan
9634+override_exec(cls)
9635+
9636+cls = Task.simple_task_type('d_with_header', d_with_header_str, 'GREEN', before='static_link d_link', shell=False)
9637+override_exec(cls)
9638+
9639+cls = Task.simple_task_type('d_link', link_str, color='YELLOW', shell=False)
9640+override_exec(cls)
9641+
9642+# for feature request #104
9643+@taskgen
9644+def generate_header(self, filename, install_path):
9645+ if not hasattr(self, 'header_lst'): self.header_lst = []
9646+ self.meths.append('process_header')
9647+ self.header_lst.append([filename, install_path])
9648+
9649+@before('apply_core')
9650+def process_header(self):
9651+ env = self.env
9652+ for i in getattr(self, 'header_lst', []):
9653+ node = self.path.find_resource(i[0])
9654+
9655+ if not node:
9656+ raise Utils.WafError('file not found on d obj '+i[0])
9657+
9658+ task = self.create_task('d_header')
9659+ task.set_inputs(node)
9660+ task.set_outputs(node.change_ext('.di'))
9661+
9662+d_header_str = '${D_COMPILER} ${D_HEADER} ${SRC}'
9663+Task.simple_task_type('d_header', d_header_str, color='BLUE', shell=False)
9664+
9665+@conftest
9666+def d_platform_flags(conf):
9667+ v = conf.env
9668+ binfmt = v.DEST_BINFMT or Utils.unversioned_sys_platform_to_binary_format(
9669+ v.DEST_OS or Utils.unversioned_sys_platform())
9670+ if binfmt == 'pe':
9671+ v['D_program_PATTERN'] = '%s.exe'
9672+ v['D_shlib_PATTERN'] = 'lib%s.dll'
9673+ v['D_staticlib_PATTERN'] = 'lib%s.a'
9674+ else:
9675+ v['D_program_PATTERN'] = '%s'
9676+ v['D_shlib_PATTERN'] = 'lib%s.so'
9677+ v['D_staticlib_PATTERN'] = 'lib%s.a'
9678+
9679+@conftest
9680+def check_dlibrary(conf):
9681+ ret = conf.check_cc(features='d dprogram', fragment=DLIB, mandatory=True, compile_filename='test.d', execute=True)
9682+ conf.env.DLIBRARY = ret.strip()
9683+
9684+# quick test #
9685+if __name__ == "__main__":
9686+ #Logs.verbose = 2
9687+
9688+ try: arg = sys.argv[1]
9689+ except IndexError: arg = "file.d"
9690+
9691+ print("".join(filter_comments(arg)))
9692+ # TODO
9693+ paths = ['.']
9694+
9695+ #gruik = filter()
9696+ #gruik.start(arg)
9697+
9698+ #code = "".join(gruik.buf)
9699+
9700+ #print "we have found the following code"
9701+ #print code
9702+
9703+ #print "now parsing"
9704+ #print "-------------------------------------------"
9705+ """
9706+ parser_ = d_parser()
9707+ parser_.start(arg)
9708+
9709+ print "module: %s" % parser_.module
9710+ print "imports: ",
9711+ for imp in parser_.imports:
9712+ print imp + " ",
9713+ print
9714+"""
9715+
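A hedged usage sketch for the D support above (illustrative, not part of the patch); it assumes one of the D compiler tools from this same patch (dmd or gdc) is available, and the source name is hypothetical.

def configure(conf):
	conf.check_tool('dmd')  # or 'gdc'; both load the 'd' tool above from their detect()

def build(bld):
	# 'd' enables init_d/apply_d_vars; 'dprogram' makes apply_d_link create the d_link task
	bld.new_task_gen(features='d dprogram', source='hello.d', target='hello')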
9716diff --git a/buildtools/wafadmin/Tools/dbus.py b/buildtools/wafadmin/Tools/dbus.py
9717new file mode 100644
9718index 0000000..3179999
9719--- /dev/null
9720+++ b/buildtools/wafadmin/Tools/dbus.py
9721@@ -0,0 +1,34 @@
9722+#!/usr/bin/env python
9723+# encoding: utf-8
9724+# Ali Sabil, 2007
9725+
9726+import Task, Utils
9727+from TaskGen import taskgen, before, after, feature
9728+
9729+@taskgen
9730+def add_dbus_file(self, filename, prefix, mode):
9731+ if not hasattr(self, 'dbus_lst'):
9732+ self.dbus_lst = []
9733+ self.meths.append('process_dbus')
9734+ self.dbus_lst.append([filename, prefix, mode])
9735+
9736+@before('apply_core')
9737+def process_dbus(self):
9738+ for filename, prefix, mode in getattr(self, 'dbus_lst', []):
9739+ node = self.path.find_resource(filename)
9740+
9741+ if not node:
9742+ raise Utils.WafError('file not found ' + filename)
9743+
9744+ tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
9745+
9746+ tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix
9747+ tsk.env.DBUS_BINDING_TOOL_MODE = mode
9748+
9749+Task.simple_task_type('dbus_binding_tool',
9750+ '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}',
9751+ color='BLUE', before='cc')
9752+
9753+def detect(conf):
9754+ dbus_binding_tool = conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
9755+
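An illustrative sketch (hypothetical names, not from the patch) of how the add_dbus_file() helper registered above is meant to be called from a wscript:

def build(bld):
	obj = bld.new_task_gen(features='cc cprogram', source='main.c', target='daemon')
	# queues a dbus_binding_tool task producing service.h from service.xml before compilation
	obj.add_dbus_file('service.xml', 'my_service', 'glib-server')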
9756diff --git a/buildtools/wafadmin/Tools/dmd.py b/buildtools/wafadmin/Tools/dmd.py
9757new file mode 100644
9758index 0000000..9c74908
9759--- /dev/null
9760+++ b/buildtools/wafadmin/Tools/dmd.py
9761@@ -0,0 +1,64 @@
9762+#!/usr/bin/env python
9763+# encoding: utf-8
9764+# Carlos Rafael Giani, 2007 (dv)
9765+# Thomas Nagy, 2008 (ita)
9766+
9767+import sys
9768+import Utils, ar
9769+from Configure import conftest
9770+
9771+@conftest
9772+def find_dmd(conf):
9773+ conf.find_program(['dmd', 'ldc'], var='D_COMPILER', mandatory=True)
9774+
9775+@conftest
9776+def common_flags_ldc(conf):
9777+ v = conf.env
9778+ v['DFLAGS'] = ['-d-version=Posix']
9779+ v['DLINKFLAGS'] = []
9780+ v['D_shlib_DFLAGS'] = ['-relocation-model=pic']
9781+
9782+@conftest
9783+def common_flags_dmd(conf):
9784+ v = conf.env
9785+
9786+ # _DFLAGS _DIMPORTFLAGS
9787+
9788+ # Compiler is dmd so 'gdc' part will be ignored, just
9789+ # ensure key is there, so wscript can append flags to it
9790+ v['DFLAGS'] = ['-version=Posix']
9791+
9792+ v['D_SRC_F'] = ''
9793+ v['D_TGT_F'] = ['-c', '-of']
9794+ v['DPATH_ST'] = '-I%s' # template for adding import paths
9795+
9796+ # linker
9797+ v['D_LINKER'] = v['D_COMPILER']
9798+ v['DLNK_SRC_F'] = ''
9799+ v['DLNK_TGT_F'] = '-of'
9800+
9801+ v['DLIB_ST'] = '-L-l%s' # template for adding libs
9802+ v['DLIBPATH_ST'] = '-L-L%s' # template for adding libpaths
9803+
9804+ # linker debug levels
9805+ v['DFLAGS_OPTIMIZED'] = ['-O']
9806+ v['DFLAGS_DEBUG'] = ['-g', '-debug']
9807+ v['DFLAGS_ULTRADEBUG'] = ['-g', '-debug']
9808+ v['DLINKFLAGS'] = ['-quiet']
9809+
9810+ v['D_shlib_DFLAGS'] = ['-fPIC']
9811+ v['D_shlib_LINKFLAGS'] = ['-L-shared']
9812+
9813+ v['DHEADER_ext'] = '.di'
9814+ v['D_HDR_F'] = ['-H', '-Hf']
9815+
9816+def detect(conf):
9817+ conf.find_dmd()
9818+ conf.check_tool('ar')
9819+ conf.check_tool('d')
9820+ conf.common_flags_dmd()
9821+ conf.d_platform_flags()
9822+
9823+ if conf.env.D_COMPILER.find('ldc') > -1:
9824+ conf.common_flags_ldc()
9825+
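For reference, a hedged configuration sketch (not part of the patch) of how the dmd tool above is typically loaded; the resulting environment then carries the -L-l / -L-L style templates set in common_flags_dmd().

def configure(conf):
	conf.check_tool('dmd')  # runs detect() above: find_dmd, ar, d, common_flags_dmd, d_platform_flags
	# e.g. conf.env['DLIB_ST'] is now '-L-l%s', so a library 'foo' becomes -L-lfoo at link time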
9826diff --git a/buildtools/wafadmin/Tools/flex.py b/buildtools/wafadmin/Tools/flex.py
9827new file mode 100644
9828index 0000000..5ce9f22
9829--- /dev/null
9830+++ b/buildtools/wafadmin/Tools/flex.py
9831@@ -0,0 +1,25 @@
9832+#!/usr/bin/env python
9833+# encoding: utf-8
9834+# John O'Meara, 2006
9835+# Thomas Nagy, 2006-2008
9836+
9837+"Flex processing"
9838+
9839+import TaskGen
9840+
9841+def decide_ext(self, node):
9842+ if 'cxx' in self.features: return '.lex.cc'
9843+ else: return '.lex.c'
9844+
9845+TaskGen.declare_chain(
9846+ name = 'flex',
9847+ rule = '${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}',
9848+ ext_in = '.l',
9849+ ext_out = '.c .cxx',
9850+ decider = decide_ext
9851+)
9852+
9853+def detect(conf):
9854+ conf.find_program('flex', var='FLEX', mandatory=True)
9855+ conf.env['FLEXFLAGS'] = ''
9856+
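A small illustrative sketch (hypothetical file names) of the chain declared above: listing a .l file among the sources is enough, and decide_ext() chooses .lex.cc instead of .lex.c when the generator also carries the 'cxx' feature.

def build(bld):
	# scanner.l -> scanner.lex.c via '${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}', then compiled as C
	bld.new_task_gen(features='cc cprogram', source='main.c scanner.l', target='parser')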
9857diff --git a/buildtools/wafadmin/Tools/gas.py b/buildtools/wafadmin/Tools/gas.py
9858new file mode 100644
9859index 0000000..c983b0a
9860--- /dev/null
9861+++ b/buildtools/wafadmin/Tools/gas.py
9862@@ -0,0 +1,38 @@
9863+#!/usr/bin/env python
9864+# encoding: utf-8
9865+# Thomas Nagy, 2008 (ita)
9866+
9867+"as and gas"
9868+
9869+import os, sys
9870+import Task
9871+from TaskGen import extension, taskgen, after, before
9872+
9873+EXT_ASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']
9874+
9875+as_str = '${AS} ${ASFLAGS} ${_ASINCFLAGS} ${SRC} -o ${TGT}'
9876+Task.simple_task_type('asm', as_str, 'PINK', ext_out='.o', shell=False)
9877+
9878+@extension(EXT_ASM)
9879+def asm_hook(self, node):
9880+ # create the assembly task
9881+ try: obj_ext = self.obj_ext
9882+ except AttributeError: obj_ext = '_%d.o' % self.idx
9883+
9884+ task = self.create_task('asm', node, node.change_ext(obj_ext))
9885+ self.compiled_tasks.append(task)
9886+ self.meths.append('asm_incflags')
9887+
9888+@after('apply_obj_vars_cc')
9889+@after('apply_obj_vars_cxx')
9890+@before('apply_link')
9891+def asm_incflags(self):
9892+ self.env.append_value('_ASINCFLAGS', self.env.ASINCFLAGS)
9893+ var = ('cxx' in self.features) and 'CXX' or 'CC'
9894+ self.env.append_value('_ASINCFLAGS', self.env['_%sINCFLAGS' % var])
9895+
9896+def detect(conf):
9897+ conf.find_program(['gas', 'as'], var='AS')
9898+ if not conf.env.AS: conf.env.AS = conf.env.CC
9899+ #conf.env.ASFLAGS = ['-c'] <- may be necessary for .S files
9900+
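An illustrative sketch (hypothetical file names) of the assembler support above: .s/.S sources listed next to C sources are picked up by asm_hook() and linked into the program, with asm_incflags() reusing the C include flags.

def build(bld):
	# start.S is assembled with '${AS} ${ASFLAGS} ${_ASINCFLAGS} ${SRC} -o ${TGT}'
	bld.new_task_gen(features='cc cprogram', source='main.c start.S', target='firmware')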
9901diff --git a/buildtools/wafadmin/Tools/gcc.py b/buildtools/wafadmin/Tools/gcc.py
9902new file mode 100644
9903index 0000000..420b44f
9904--- /dev/null
9905+++ b/buildtools/wafadmin/Tools/gcc.py
9906@@ -0,0 +1,135 @@
9907+#!/usr/bin/env python
9908+# encoding: utf-8
9909+# Thomas Nagy, 2006-2008 (ita)
9910+# Ralf Habacker, 2006 (rh)
9911+# Yinon Ehrlich, 2009
9912+
9913+import os, sys
9914+import Configure, Options, Utils
9915+import ccroot, ar
9916+from Configure import conftest
9917+
9918+@conftest
9919+def find_gcc(conf):
9920+ cc = conf.find_program(['gcc', 'cc'], var='CC', mandatory=True)
9921+ cc = conf.cmd_to_list(cc)
9922+ ccroot.get_cc_version(conf, cc, gcc=True)
9923+ conf.env.CC_NAME = 'gcc'
9924+ conf.env.CC = cc
9925+
9926+@conftest
9927+def gcc_common_flags(conf):
9928+ v = conf.env
9929+
9930+ # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
9931+
9932+ v['CCFLAGS_DEBUG'] = ['-g']
9933+
9934+ v['CCFLAGS_RELEASE'] = ['-O2']
9935+
9936+ v['CC_SRC_F'] = ''
9937+ v['CC_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
9938+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
9939+
9940+ # linker
9941+ if not v['LINK_CC']: v['LINK_CC'] = v['CC']
9942+ v['CCLNK_SRC_F'] = ''
9943+ v['CCLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
9944+
9945+ v['LIB_ST'] = '-l%s' # template for adding libs
9946+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
9947+ v['STATICLIB_ST'] = '-l%s'
9948+ v['STATICLIBPATH_ST'] = '-L%s'
9949+ v['RPATH_ST'] = '-Wl,-rpath,%s'
9950+ v['CCDEFINES_ST'] = '-D%s'
9951+
9952+ v['SONAME_ST'] = '-Wl,-h,%s'
9953+ v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
9954+ v['STATICLIB_MARKER'] = '-Wl,-Bstatic'
9955+ v['FULLSTATIC_MARKER'] = '-static'
9956+
9957+ # program
9958+ v['program_PATTERN'] = '%s'
9959+
9960+ # shared library
9961+ v['shlib_CCFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
9962+ v['shlib_LINKFLAGS'] = ['-shared']
9963+ v['shlib_PATTERN'] = 'lib%s.so'
9964+
9965+ # static lib
9966+ v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
9967+ v['staticlib_PATTERN'] = 'lib%s.a'
9968+
9969+ # osx stuff
9970+ v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
9971+ v['CCFLAGS_MACBUNDLE'] = ['-fPIC']
9972+ v['macbundle_PATTERN'] = '%s.bundle'
9973+
9974+@conftest
9975+def gcc_modifier_win32(conf):
9976+ v = conf.env
9977+ v['program_PATTERN'] = '%s.exe'
9978+
9979+ v['shlib_PATTERN'] = '%s.dll'
9980+ v['implib_PATTERN'] = 'lib%s.dll.a'
9981+ v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
9982+
9983+ dest_arch = v['DEST_CPU']
9984+ v['shlib_CCFLAGS'] = ['-DPIC']
9985+
9986+ v.append_value('shlib_CCFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea
9987+
9988+ # Auto-import is enabled by default even without this option,
9989+ # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
9990+ # that the linker emits otherwise.
9991+ v.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
9992+
9993+@conftest
9994+def gcc_modifier_cygwin(conf):
9995+ gcc_modifier_win32(conf)
9996+ v = conf.env
9997+ v['shlib_PATTERN'] = 'cyg%s.dll'
9998+ v.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
9999+
10000+@conftest
10001+def gcc_modifier_darwin(conf):
10002+ v = conf.env
10003+ v['shlib_CCFLAGS'] = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
10004+ v['shlib_LINKFLAGS'] = ['-dynamiclib']
10005+ v['shlib_PATTERN'] = 'lib%s.dylib'
10006+
10007+ v['staticlib_LINKFLAGS'] = []
10008+
10009+ v['SHLIB_MARKER'] = ''
10010+ v['STATICLIB_MARKER'] = ''
10011+ v['SONAME_ST'] = ''
10012+
10013+@conftest
10014+def gcc_modifier_aix(conf):
10015+ v = conf.env
10016+ v['program_LINKFLAGS'] = ['-Wl,-brtl']
10017+
10018+ v['shlib_LINKFLAGS'] = ['-shared','-Wl,-brtl,-bexpfull']
10019+
10020+ v['SHLIB_MARKER'] = ''
10021+
10022+@conftest
10023+def gcc_modifier_platform(conf):
10024+ # * set configuration specific to a platform.
10025+ # * the destination platform is detected automatically by looking at the macros the compiler predefines,
10026+ # and if it's not recognised, it falls back to sys.platform.
10027+ dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
10028+ gcc_modifier_func = globals().get('gcc_modifier_' + dest_os)
10029+ if gcc_modifier_func:
10030+ gcc_modifier_func(conf)
10031+
10032+def detect(conf):
10033+ conf.find_gcc()
10034+ conf.find_cpp()
10035+ conf.find_ar()
10036+ conf.gcc_common_flags()
10037+ conf.gcc_modifier_platform()
10038+ conf.cc_load_tools()
10039+ conf.cc_add_flags()
10040+ conf.link_add_flags()
10041+
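For orientation, a hedged sketch (not in the patch; the 'M' uselib name is made up) of how the gcc tool above is loaded and how uselib variables map onto the templates it defines:

def configure(conf):
	conf.check_tool('gcc')      # runs detect() above: find_gcc, gcc_common_flags, gcc_modifier_platform, ...
	conf.env['LIB_M'] = ['m']   # LIB_ST ('-l%s') will expand this to -lm at link time

def build(bld):
	bld.new_task_gen(features='cc cprogram', source='main.c', target='app', uselib='M')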
10042diff --git a/buildtools/wafadmin/Tools/gdc.py b/buildtools/wafadmin/Tools/gdc.py
10043new file mode 100644
10044index 0000000..4d2a321
10045--- /dev/null
10046+++ b/buildtools/wafadmin/Tools/gdc.py
10047@@ -0,0 +1,52 @@
10048+#!/usr/bin/env python
10049+# encoding: utf-8
10050+# Carlos Rafael Giani, 2007 (dv)
10051+
10052+import sys
10053+import Utils, ar
10054+from Configure import conftest
10055+
10056+@conftest
10057+def find_gdc(conf):
10058+ conf.find_program('gdc', var='D_COMPILER', mandatory=True)
10059+
10060+@conftest
10061+def common_flags_gdc(conf):
10062+ v = conf.env
10063+
10064+ # _DFLAGS _DIMPORTFLAGS
10065+
10066+ # for more info about the meaning of this dict see dmd.py
10067+ v['DFLAGS'] = []
10068+
10069+ v['D_SRC_F'] = ''
10070+ v['D_TGT_F'] = ['-c', '-o', '']
10071+ v['DPATH_ST'] = '-I%s' # template for adding import paths
10072+
10073+ # linker
10074+ v['D_LINKER'] = v['D_COMPILER']
10075+ v['DLNK_SRC_F'] = ''
10076+ v['DLNK_TGT_F'] = ['-o', '']
10077+
10078+ v['DLIB_ST'] = '-l%s' # template for adding libs
10079+ v['DLIBPATH_ST'] = '-L%s' # template for adding libpaths
10080+
10081+ # debug levels
10082+ v['DLINKFLAGS'] = []
10083+ v['DFLAGS_OPTIMIZED'] = ['-O3']
10084+ v['DFLAGS_DEBUG'] = ['-O0']
10085+ v['DFLAGS_ULTRADEBUG'] = ['-O0']
10086+
10087+ v['D_shlib_DFLAGS'] = []
10088+ v['D_shlib_LINKFLAGS'] = ['-shared']
10089+
10090+ v['DHEADER_ext'] = '.di'
10091+ v['D_HDR_F'] = '-fintfc -fintfc-file='
10092+
10093+def detect(conf):
10094+ conf.find_gdc()
10095+ conf.check_tool('ar')
10096+ conf.check_tool('d')
10097+ conf.common_flags_gdc()
10098+ conf.d_platform_flags()
10099+
10100diff --git a/buildtools/wafadmin/Tools/glib2.py b/buildtools/wafadmin/Tools/glib2.py
10101new file mode 100644
10102index 0000000..042d612
10103--- /dev/null
10104+++ b/buildtools/wafadmin/Tools/glib2.py
10105@@ -0,0 +1,164 @@
10106+#! /usr/bin/env python
10107+# encoding: utf-8
10108+# Thomas Nagy, 2006-2008 (ita)
10109+
10110+"GLib2 support"
10111+
10112+import Task, Utils
10113+from TaskGen import taskgen, before, after, feature
10114+
10115+#
10116+# glib-genmarshal
10117+#
10118+
10119+@taskgen
10120+def add_marshal_file(self, filename, prefix):
10121+ if not hasattr(self, 'marshal_list'):
10122+ self.marshal_list = []
10123+ self.meths.append('process_marshal')
10124+ self.marshal_list.append((filename, prefix))
10125+
10126+@before('apply_core')
10127+def process_marshal(self):
10128+ for f, prefix in getattr(self, 'marshal_list', []):
10129+ node = self.path.find_resource(f)
10130+
10131+ if not node:
10132+ raise Utils.WafError('file not found %r' % f)
10133+
10134+ h_node = node.change_ext('.h')
10135+ c_node = node.change_ext('.c')
10136+
10137+ task = self.create_task('glib_genmarshal', node, [h_node, c_node])
10138+ task.env.GLIB_GENMARSHAL_PREFIX = prefix
10139+ self.allnodes.append(c_node)
10140+
10141+def genmarshal_func(self):
10142+
10143+ bld = self.inputs[0].__class__.bld
10144+
10145+ get = self.env.get_flat
10146+ cmd1 = "%s %s --prefix=%s --header > %s" % (
10147+ get('GLIB_GENMARSHAL'),
10148+ self.inputs[0].srcpath(self.env),
10149+ get('GLIB_GENMARSHAL_PREFIX'),
10150+ self.outputs[0].abspath(self.env)
10151+ )
10152+
10153+ ret = bld.exec_command(cmd1)
10154+ if ret: return ret
10155+
10156+ #print self.outputs[1].abspath(self.env)
10157+ f = open(self.outputs[1].abspath(self.env), 'wb')
10158+ c = '''#include "%s"\n''' % self.outputs[0].name
10159+ f.write(c)
10160+ f.close()
10161+
10162+ cmd2 = "%s %s --prefix=%s --body >> %s" % (
10163+ get('GLIB_GENMARSHAL'),
10164+ self.inputs[0].srcpath(self.env),
10165+ get('GLIB_GENMARSHAL_PREFIX'),
10166+ self.outputs[1].abspath(self.env)
10167+ )
10168+ ret = Utils.exec_command(cmd2)
10169+ if ret: return ret
10170+
10171+#
10172+# glib-mkenums
10173+#
10174+
10175+@taskgen
10176+def add_enums_from_template(self, source='', target='', template='', comments=''):
10177+ if not hasattr(self, 'enums_list'):
10178+ self.enums_list = []
10179+ self.meths.append('process_enums')
10180+ self.enums_list.append({'source': source,
10181+ 'target': target,
10182+ 'template': template,
10183+ 'file-head': '',
10184+ 'file-prod': '',
10185+ 'file-tail': '',
10186+ 'enum-prod': '',
10187+ 'value-head': '',
10188+ 'value-prod': '',
10189+ 'value-tail': '',
10190+ 'comments': comments})
10191+
10192+@taskgen
10193+def add_enums(self, source='', target='',
10194+ file_head='', file_prod='', file_tail='', enum_prod='',
10195+ value_head='', value_prod='', value_tail='', comments=''):
10196+ if not hasattr(self, 'enums_list'):
10197+ self.enums_list = []
10198+ self.meths.append('process_enums')
10199+ self.enums_list.append({'source': source,
10200+ 'template': '',
10201+ 'target': target,
10202+ 'file-head': file_head,
10203+ 'file-prod': file_prod,
10204+ 'file-tail': file_tail,
10205+ 'enum-prod': enum_prod,
10206+ 'value-head': value_head,
10207+ 'value-prod': value_prod,
10208+ 'value-tail': value_tail,
10209+ 'comments': comments})
10210+
10211+@before('apply_core')
10212+def process_enums(self):
10213+ for enum in getattr(self, 'enums_list', []):
10214+ task = self.create_task('glib_mkenums')
10215+ env = task.env
10216+
10217+ inputs = []
10218+
10219+ # process the source
10220+ source_list = self.to_list(enum['source'])
10221+ if not source_list:
10222+ raise Utils.WafError('missing source ' + str(enum))
10223+ source_list = [self.path.find_resource(k) for k in source_list]
10224+ inputs += source_list
10225+ env['GLIB_MKENUMS_SOURCE'] = [k.srcpath(env) for k in source_list]
10226+
10227+ # find the target
10228+ if not enum['target']:
10229+ raise Utils.WafError('missing target ' + str(enum))
10230+ tgt_node = self.path.find_or_declare(enum['target'])
10231+ if tgt_node.name.endswith('.c'):
10232+ self.allnodes.append(tgt_node)
10233+ env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath(env)
10234+
10235+
10236+ options = []
10237+
10238+ if enum['template']: # template, if provided
10239+ template_node = self.path.find_resource(enum['template'])
10240+ options.append('--template %s' % (template_node.abspath(env)))
10241+ inputs.append(template_node)
10242+ params = {'file-head' : '--fhead',
10243+ 'file-prod' : '--fprod',
10244+ 'file-tail' : '--ftail',
10245+ 'enum-prod' : '--eprod',
10246+ 'value-head' : '--vhead',
10247+ 'value-prod' : '--vprod',
10248+ 'value-tail' : '--vtail',
10249+ 'comments': '--comments'}
10250+ for param, option in params.iteritems():
10251+ if enum[param]:
10252+ options.append('%s %r' % (option, enum[param]))
10253+
10254+ env['GLIB_MKENUMS_OPTIONS'] = ' '.join(options)
10255+
10256+ # update the task instance
10257+ task.set_inputs(inputs)
10258+ task.set_outputs(tgt_node)
10259+
10260+Task.task_type_from_func('glib_genmarshal', func=genmarshal_func, vars=['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL'],
10261+ color='BLUE', before='cc cxx')
10262+Task.simple_task_type('glib_mkenums',
10263+ '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}',
10264+ color='PINK', before='cc cxx')
10265+
10266+def detect(conf):
10267+ glib_genmarshal = conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
10268+ mk_enums_tool = conf.find_program('glib-mkenums', var='GLIB_MKENUMS')
10269+
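A hedged wscript sketch (hypothetical file names and uselib variables) of the two helpers registered above; both append their generated .c file to self.allnodes so it is compiled along with the rest of the sources.

def build(bld):
	obj = bld.new_task_gen(features='cc cshlib', source='widget.c', target='mywidget', uselib='GLIB GOBJECT')
	# marshal.list -> marshal.h + marshal.c through glib-genmarshal
	obj.add_marshal_file('marshal.list', 'my_marshal')
	# widget.h -> widget-enums.c through glib-mkenums, driven by the template file
	obj.add_enums_from_template(source='widget.h', target='widget-enums.c', template='widget-enums.c.tmpl')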
10270diff --git a/buildtools/wafadmin/Tools/gnome.py b/buildtools/wafadmin/Tools/gnome.py
10271new file mode 100644
10272index 0000000..c098a41
10273--- /dev/null
10274+++ b/buildtools/wafadmin/Tools/gnome.py
10275@@ -0,0 +1,223 @@
10276+#!/usr/bin/env python
10277+# encoding: utf-8
10278+# Thomas Nagy, 2006-2008 (ita)
10279+
10280+"Gnome support"
10281+
10282+import os, re
10283+import TaskGen, Utils, Runner, Task, Build, Options, Logs
10284+import cc
10285+from Logs import error
10286+from TaskGen import taskgen, before, after, feature
10287+
10288+n1_regexp = re.compile('<refentrytitle>(.*)</refentrytitle>', re.M)
10289+n2_regexp = re.compile('<manvolnum>(.*)</manvolnum>', re.M)
10290+
10291+def postinstall_schemas(prog_name):
10292+ if Build.bld.is_install:
10293+ dir = Build.bld.get_install_path('${PREFIX}/etc/gconf/schemas/%s.schemas' % prog_name)
10294+ if not Options.options.destdir:
10295+ # add the gconf schema
10296+ Utils.pprint('YELLOW', 'Installing GConf schema')
10297+ command = 'gconftool-2 --install-schema-file=%s 1> /dev/null' % dir
10298+ ret = Utils.exec_command(command)
10299+ else:
10300+ Utils.pprint('YELLOW', 'GConf schema not installed. After install, run this:')
10301+ Utils.pprint('YELLOW', 'gconftool-2 --install-schema-file=%s' % dir)
10302+
10303+def postinstall_icons():
10304+ dir = Build.bld.get_install_path('${DATADIR}/icons/hicolor')
10305+ if Build.bld.is_install:
10306+ if not Options.options.destdir:
10307+ # update the pixmap cache directory
10308+ Utils.pprint('YELLOW', "Updating Gtk icon cache.")
10309+ command = 'gtk-update-icon-cache -q -f -t %s' % dir
10310+ ret = Utils.exec_command(command)
10311+ else:
10312+ Utils.pprint('YELLOW', 'Icon cache not updated. After install, run this:')
10313+ Utils.pprint('YELLOW', 'gtk-update-icon-cache -q -f -t %s' % dir)
10314+
10315+def postinstall_scrollkeeper(prog_name):
10316+ if Build.bld.is_install:
10317+ # now the scrollkeeper update if we can write to the log file
10318+ if os.access('/var/log/scrollkeeper.log', os.W_OK):
10319+ dir1 = Build.bld.get_install_path('${PREFIX}/var/scrollkeeper')
10320+ dir2 = Build.bld.get_install_path('${DATADIR}/omf/%s' % prog_name)
10321+ command = 'scrollkeeper-update -q -p %s -o %s' % (dir1, dir2)
10322+ ret = Utils.exec_command(command)
10323+
10324+def postinstall(prog_name='myapp', schemas=1, icons=1, scrollkeeper=1):
10325+ if schemas: postinstall_schemas(prog_name)
10326+ if icons: postinstall_icons()
10327+ if scrollkeeper: postinstall_scrollkeeper(prog_name)
10328+
10329+# OBSOLETE
10330+class gnome_doc_taskgen(TaskGen.task_gen):
10331+ def __init__(self, *k, **kw):
10332+ TaskGen.task_gen.__init__(self, *k, **kw)
10333+
10334+@feature('gnome_doc')
10335+def init_gnome_doc(self):
10336+ self.default_install_path = '${PREFIX}/share'
10337+
10338+@feature('gnome_doc')
10339+@after('init_gnome_doc')
10340+def apply_gnome_doc(self):
10341+ self.env['APPNAME'] = self.doc_module
10342+ lst = self.to_list(self.doc_linguas)
10343+ bld = self.bld
10344+ lst.append('C')
10345+
10346+ for x in lst:
10347+ if not x == 'C':
10348+ tsk = self.create_task('xml2po')
10349+ node = self.path.find_resource(x+'/'+x+'.po')
10350+ src = self.path.find_resource('C/%s.xml' % self.doc_module)
10351+ out = self.path.find_or_declare('%s/%s.xml' % (x, self.doc_module))
10352+ tsk.set_inputs([node, src])
10353+ tsk.set_outputs(out)
10354+ else:
10355+ out = self.path.find_resource('%s/%s.xml' % (x, self.doc_module))
10356+
10357+ tsk2 = self.create_task('xsltproc2po')
10358+ out2 = self.path.find_or_declare('%s/%s-%s.omf' % (x, self.doc_module, x))
10359+ tsk2.set_outputs(out2)
10360+ node = self.path.find_resource(self.doc_module+".omf.in")
10361+ tsk2.inputs = [node, out]
10362+
10363+ tsk2.run_after.append(tsk)
10364+
10365+ if bld.is_install:
10366+ path = self.install_path + '/gnome/help/%s/%s' % (self.doc_module, x)
10367+ bld.install_files(self.install_path + '/omf', out2, env=self.env)
10368+ for y in self.to_list(self.doc_figures):
10369+ try:
10370+ os.stat(self.path.abspath() + '/' + x + '/' + y)
10371+ bld.install_as(path + '/' + y, self.path.abspath() + '/' + x + '/' + y)
10372+ except:
10373+ bld.install_as(path + '/' + y, self.path.abspath() + '/C/' + y)
10374+ bld.install_as(path + '/%s.xml' % self.doc_module, out.abspath(self.env))
10375+ if x == 'C':
10376+ xmls = self.to_list(self.doc_includes)
10377+ xmls.append(self.doc_entities)
10378+ for z in xmls:
10379+ out = self.path.find_resource('%s/%s' % (x, z))
10380+ bld.install_as(path + '/%s' % z, out.abspath(self.env))
10381+
10382+# OBSOLETE
10383+class xml_to_taskgen(TaskGen.task_gen):
10384+ def __init__(self, *k, **kw):
10385+ TaskGen.task_gen.__init__(self, *k, **kw)
10386+
10387+@feature('xml_to')
10388+def init_xml_to(self):
10389+ Utils.def_attrs(self,
10390+ source = 'xmlfile',
10391+ xslt = 'xlsltfile',
10392+ target = 'hey',
10393+ default_install_path = '${PREFIX}',
10394+ task_created = None)
10395+
10396+@feature('xml_to')
10397+@after('init_xml_to')
10398+def apply_xml_to(self):
10399+ xmlfile = self.path.find_resource(self.source)
10400+ xsltfile = self.path.find_resource(self.xslt)
10401+ tsk = self.create_task('xmlto', [xmlfile, xsltfile], xmlfile.change_ext('html'))
10402+ tsk.install_path = self.install_path
10403+
10404+def sgml_scan(self):
10405+ node = self.inputs[0]
10406+
10407+ env = self.env
10408+ variant = node.variant(env)
10409+
10410+ fi = open(node.abspath(env), 'r')
10411+ content = fi.read()
10412+ fi.close()
10413+
10414+ # we should use a sgml parser :-/
10415+ name = n1_regexp.findall(content)[0]
10416+ num = n2_regexp.findall(content)[0]
10417+
10418+ doc_name = name+'.'+num
10419+
10420+ if not self.outputs:
10421+ self.outputs = [self.generator.path.find_or_declare(doc_name)]
10422+
10423+ return ([], [doc_name])
10424+
10425+class gnome_sgml2man_taskgen(TaskGen.task_gen):
10426+ def __init__(self, *k, **kw):
10427+ TaskGen.task_gen.__init__(self, *k, **kw)
10428+
10429+@feature('gnome_sgml2man')
10430+def apply_gnome_sgml2man(self):
10431+ """
10432+ we could make it more complicated, but for now we just scan the document each time
10433+ """
10434+ assert(getattr(self, 'appname', None))
10435+
10436+ def install_result(task):
10437+ out = task.outputs[0]
10438+ name = out.name
10439+ ext = name[-1]
10440+ env = task.env
10441+ self.bld.install_files('${DATADIR}/man/man%s/' % ext, out, env)
10442+
10443+ self.bld.rescan(self.path)
10444+ for name in self.bld.cache_dir_contents[self.path.id]:
10445+ base, ext = os.path.splitext(name)
10446+ if ext != '.sgml': continue
10447+
10448+ task = self.create_task('sgml2man')
10449+ task.set_inputs(self.path.find_resource(name))
10450+ task.task_generator = self
10451+ if self.bld.is_install: task.install = install_result
10452+ # no outputs, the scanner does it
10453+ # no caching for now, this is not a time-critical feature
10454+ # in the future the scanner can be used to do more things (find dependencies, etc)
10455+ task.scan()
10456+
10457+cls = Task.simple_task_type('sgml2man', '${SGML2MAN} -o ${TGT[0].bld_dir(env)} ${SRC} > /dev/null', color='BLUE')
10458+cls.scan = sgml_scan
10459+cls.quiet = 1
10460+
10461+Task.simple_task_type('xmlto', '${XMLTO} html -m ${SRC[1].abspath(env)} ${SRC[0].abspath(env)}')
10462+
10463+Task.simple_task_type('xml2po', '${XML2PO} ${XML2POFLAGS} ${SRC} > ${TGT}', color='BLUE')
10464+
10465+# how do you expect someone to understand this?!
10466+xslt_magic = """${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \
10467+--stringparam db2omf.basename ${APPNAME} \
10468+--stringparam db2omf.format docbook \
10469+--stringparam db2omf.lang ${TGT[0].abspath(env)[:-4].split('-')[-1]} \
10470+--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
10471+--stringparam db2omf.omf_dir ${PREFIX}/share/omf \
10472+--stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \
10473+--stringparam db2omf.omf_in ${SRC[0].abspath(env)} \
10474+--stringparam db2omf.scrollkeeper_cl ${SCROLLKEEPER_DATADIR}/Templates/C/scrollkeeper_cl.xml \
10475+${DB2OMF} ${SRC[1].abspath(env)}"""
10476+
10477+#--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
10478+Task.simple_task_type('xsltproc2po', xslt_magic, color='BLUE')
10479+
10480+def detect(conf):
10481+ conf.check_tool('gnu_dirs glib2 dbus')
10482+ sgml2man = conf.find_program('docbook2man', var='SGML2MAN')
10483+
10484+ def getstr(varname):
10485+ return getattr(Options.options, varname, '')
10486+
10487+ # conf.define() also sets the variable in the env
10488+ conf.define('GNOMELOCALEDIR', os.path.join(conf.env['DATADIR'], 'locale'))
10489+
10490+ xml2po = conf.find_program('xml2po', var='XML2PO')
10491+ xsltproc2po = conf.find_program('xsltproc', var='XSLTPROC2PO')
10492+ conf.env['XML2POFLAGS'] = '-e -p'
10493+ conf.env['SCROLLKEEPER_DATADIR'] = Utils.cmd_output("scrollkeeper-config --pkgdatadir", silent=1).strip()
10494+ conf.env['DB2OMF'] = Utils.cmd_output("/usr/bin/pkg-config --variable db2omf gnome-doc-utils", silent=1).strip()
10495+
10496+def set_options(opt):
10497+ opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
10498+
10499diff --git a/buildtools/wafadmin/Tools/gnu_dirs.py b/buildtools/wafadmin/Tools/gnu_dirs.py
10500new file mode 100644
10501index 0000000..856e4a7
10502--- /dev/null
10503+++ b/buildtools/wafadmin/Tools/gnu_dirs.py
10504@@ -0,0 +1,111 @@
10505+#!/usr/bin/env python
10506+# encoding: utf-8
10507+# Ali Sabil, 2007
10508+
10509+"""
10510+To use this module do not forget to call
10511+opt.tool_options('gnu_dirs')
10512+AND
10513+conf.check_tool('gnu_dirs')
10514+
10515+Add options for the standard GNU directories, this tool will add the options
10516+found in autotools, and will update the environment with the following
10517+installation variables:
10518+
10519+ * PREFIX : architecture-independent files [/usr/local]
10520+ * EXEC_PREFIX : architecture-dependent files [PREFIX]
10521+ * BINDIR : user executables [EXEC_PREFIX/bin]
10522+ * SBINDIR : user executables [EXEC_PREFIX/sbin]
10523+ * LIBEXECDIR : program executables [EXEC_PREFIX/libexec]
10524+ * SYSCONFDIR : read-only single-machine data [PREFIX/etc]
10525+ * SHAREDSTATEDIR : modifiable architecture-independent data [PREFIX/com]
10526+ * LOCALSTATEDIR : modifiable single-machine data [PREFIX/var]
10527+ * LIBDIR : object code libraries [EXEC_PREFIX/lib]
10528+ * INCLUDEDIR : C header files [PREFIX/include]
10529+ * OLDINCLUDEDIR : C header files for non-gcc [/usr/include]
10530+ * DATAROOTDIR : read-only arch.-independent data root [PREFIX/share]
10531+ * DATADIR : read-only architecture-independent data [DATAROOTDIR]
10532+ * INFODIR : info documentation [DATAROOTDIR/info]
10533+ * LOCALEDIR : locale-dependent data [DATAROOTDIR/locale]
10534+ * MANDIR : man documentation [DATAROOTDIR/man]
10535+ * DOCDIR : documentation root [DATAROOTDIR/doc/telepathy-glib]
10536+ * HTMLDIR : html documentation [DOCDIR]
10537+ * DVIDIR : dvi documentation [DOCDIR]
10538+ * PDFDIR : pdf documentation [DOCDIR]
10539+ * PSDIR : ps documentation [DOCDIR]
10540+"""
10541+
10542+import Utils, Options
10543+
10544+_options = [x.split(', ') for x in '''
10545+bindir, user executables, ${EXEC_PREFIX}/bin
10546+sbindir, system admin executables, ${EXEC_PREFIX}/sbin
10547+libexecdir, program executables, ${EXEC_PREFIX}/libexec
10548+sysconfdir, read-only single-machine data, ${PREFIX}/etc
10549+sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
10550+localstatedir, modifiable single-machine data, ${PREFIX}/var
10551+libdir, object code libraries, ${EXEC_PREFIX}/lib
10552+includedir, C header files, ${PREFIX}/include
10553+oldincludedir, C header files for non-gcc, /usr/include
10554+datarootdir, read-only arch.-independent data root, ${PREFIX}/share
10555+datadir, read-only architecture-independent data, ${DATAROOTDIR}
10556+infodir, info documentation, ${DATAROOTDIR}/info
10557+localedir, locale-dependent data, ${DATAROOTDIR}/locale
10558+mandir, man documentation, ${DATAROOTDIR}/man
10559+docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
10560+htmldir, html documentation, ${DOCDIR}
10561+dvidir, dvi documentation, ${DOCDIR}
10562+pdfdir, pdf documentation, ${DOCDIR}
10563+psdir, ps documentation, ${DOCDIR}
10564+'''.split('\n') if x]
10565+
10566+def detect(conf):
10567+ def get_param(varname, default):
10568+ return getattr(Options.options, varname, '') or default
10569+
10570+ env = conf.env
10571+ env['EXEC_PREFIX'] = get_param('EXEC_PREFIX', env['PREFIX'])
10572+ env['PACKAGE'] = Utils.g_module.APPNAME
10573+
10574+ complete = False
10575+ iter = 0
10576+ while not complete and iter < len(_options) + 1:
10577+ iter += 1
10578+ complete = True
10579+ for name, help, default in _options:
10580+ name = name.upper()
10581+ if not env[name]:
10582+ try:
10583+ env[name] = Utils.subst_vars(get_param(name, default), env)
10584+ except TypeError:
10585+ complete = False
10586+ if not complete:
10587+ lst = [name for name, _, _ in _options if not env[name.upper()]]
10588+ raise Utils.WafError('Variable substitution failure %r' % lst)
10589+
10590+def set_options(opt):
10591+
10592+ inst_dir = opt.add_option_group('Installation directories',
10593+'By default, "waf install" will put the files in\
10594+ "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
10595+ than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
10596+
10597+ for k in ('--prefix', '--destdir'):
10598+ option = opt.parser.get_option(k)
10599+ if option:
10600+ opt.parser.remove_option(k)
10601+ inst_dir.add_option(option)
10602+
10603+ inst_dir.add_option('--exec-prefix',
10604+ help = 'installation prefix [Default: ${PREFIX}]',
10605+ default = '',
10606+ dest = 'EXEC_PREFIX')
10607+
10608+ dirs_options = opt.add_option_group('Pre-defined installation directories', '')
10609+
10610+ for name, help, default in _options:
10611+ option_name = '--' + name
10612+ str_default = default
10613+ str_help = '%s [Default: %s]' % (help, str_default)
10614+ dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
10615+
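An illustrative sketch (not part of the patch) of the two calls the module docstring above asks for, plus one way the resulting variables could be consumed; note that the top-level wscript must define APPNAME, since detect() copies it into PACKAGE.

def set_options(opt):
	opt.tool_options('gnu_dirs')    # adds --bindir, --datadir, ... to the option parser

def configure(conf):
	conf.check_tool('gnu_dirs')     # resolves BINDIR, DATADIR, LOCALEDIR, DOCDIR, ... from PREFIX

def build(bld):
	# install a data file under the resolved ${DATADIR}
	bld.install_files('${DATADIR}/myapp', 'data/default.conf')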
10616diff --git a/buildtools/wafadmin/Tools/gob2.py b/buildtools/wafadmin/Tools/gob2.py
10617new file mode 100644
10618index 0000000..00aaa32
10619--- /dev/null
10620+++ b/buildtools/wafadmin/Tools/gob2.py
10621@@ -0,0 +1,18 @@
10622+#!/usr/bin/env python
10623+# encoding: utf-8
10624+# Ali Sabil, 2007
10625+
10626+import TaskGen
10627+
10628+TaskGen.declare_chain(
10629+ name = 'gob2',
10630+ rule = '${GOB2} -o ${TGT[0].bld_dir(env)} ${GOB2FLAGS} ${SRC}',
10631+ ext_in = '.gob',
10632+ ext_out = '.c'
10633+)
10634+
10635+def detect(conf):
10636+ gob2 = conf.find_program('gob2', var='GOB2', mandatory=True)
10637+ conf.env['GOB2'] = gob2
10638+ conf.env['GOB2FLAGS'] = ''
10639+
10640diff --git a/buildtools/wafadmin/Tools/gxx.py b/buildtools/wafadmin/Tools/gxx.py
10641new file mode 100644
10642index 0000000..8f4a0bf
10643--- /dev/null
10644+++ b/buildtools/wafadmin/Tools/gxx.py
10645@@ -0,0 +1,133 @@
10646+#!/usr/bin/env python
10647+# encoding: utf-8
10648+# Thomas Nagy, 2006 (ita)
10649+# Ralf Habacker, 2006 (rh)
10650+# Yinon Ehrlich, 2009
10651+
10652+import os, sys
10653+import Configure, Options, Utils
10654+import ccroot, ar
10655+from Configure import conftest
10656+
10657+@conftest
10658+def find_gxx(conf):
10659+ cxx = conf.find_program(['g++', 'c++'], var='CXX', mandatory=True)
10660+ cxx = conf.cmd_to_list(cxx)
10661+ ccroot.get_cc_version(conf, cxx, gcc=True)
10662+ conf.env.CXX_NAME = 'gcc'
10663+ conf.env.CXX = cxx
10664+
10665+@conftest
10666+def gxx_common_flags(conf):
10667+ v = conf.env
10668+
10669+ # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
10670+ v['CXXFLAGS_DEBUG'] = ['-g']
10671+ v['CXXFLAGS_RELEASE'] = ['-O2']
10672+
10673+ v['CXX_SRC_F'] = ''
10674+ v['CXX_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
10675+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
10676+
10677+ # linker
10678+ if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
10679+ v['CXXLNK_SRC_F'] = ''
10680+ v['CXXLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
10681+
10682+ v['LIB_ST'] = '-l%s' # template for adding libs
10683+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
10684+ v['STATICLIB_ST'] = '-l%s'
10685+ v['STATICLIBPATH_ST'] = '-L%s'
10686+ v['RPATH_ST'] = '-Wl,-rpath,%s'
10687+ v['CXXDEFINES_ST'] = '-D%s'
10688+
10689+ v['SONAME_ST'] = '-Wl,-h,%s'
10690+ v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
10691+ v['STATICLIB_MARKER'] = '-Wl,-Bstatic'
10692+ v['FULLSTATIC_MARKER'] = '-static'
10693+
10694+ # program
10695+ v['program_PATTERN'] = '%s'
10696+
10697+ # shared library
10698+ v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
10699+ v['shlib_LINKFLAGS'] = ['-shared']
10700+ v['shlib_PATTERN'] = 'lib%s.so'
10701+
10702+ # static lib
10703+ v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
10704+ v['staticlib_PATTERN'] = 'lib%s.a'
10705+
10706+ # osx stuff
10707+ v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
10708+ v['CCFLAGS_MACBUNDLE'] = ['-fPIC']
10709+ v['macbundle_PATTERN'] = '%s.bundle'
10710+
10711+@conftest
10712+def gxx_modifier_win32(conf):
10713+ v = conf.env
10714+ v['program_PATTERN'] = '%s.exe'
10715+
10716+ v['shlib_PATTERN'] = '%s.dll'
10717+ v['implib_PATTERN'] = 'lib%s.dll.a'
10718+ v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
10719+
10720+ dest_arch = v['DEST_CPU']
10721+ v['shlib_CXXFLAGS'] = []
10722+
10723+ v.append_value('shlib_CXXFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea
10724+
10725+ # Auto-import is enabled by default even without this option,
10726+ # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
10727+ # that the linker emits otherwise.
10728+ v.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
10729+
10730+@conftest
10731+def gxx_modifier_cygwin(conf):
10732+ gxx_modifier_win32(conf)
10733+ v = conf.env
10734+ v['shlib_PATTERN'] = 'cyg%s.dll'
10735+ v.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
10736+
10737+@conftest
10738+def gxx_modifier_darwin(conf):
10739+ v = conf.env
10740+ v['shlib_CXXFLAGS'] = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
10741+ v['shlib_LINKFLAGS'] = ['-dynamiclib']
10742+ v['shlib_PATTERN'] = 'lib%s.dylib'
10743+
10744+ v['staticlib_LINKFLAGS'] = []
10745+
10746+ v['SHLIB_MARKER'] = ''
10747+ v['STATICLIB_MARKER'] = ''
10748+ v['SONAME_ST'] = ''
10749+
10750+@conftest
10751+def gxx_modifier_aix(conf):
10752+ v = conf.env
10753+ v['program_LINKFLAGS'] = ['-Wl,-brtl']
10754+
10755+ v['shlib_LINKFLAGS'] = ['-shared', '-Wl,-brtl,-bexpfull']
10756+
10757+ v['SHLIB_MARKER'] = ''
10758+
10759+@conftest
10760+def gxx_modifier_platform(conf):
10761+ # * set configuration specific to a platform.
10762+ # * the destination platform is detected automatically by looking at the macros the compiler predefines,
10763+ # and if it's not recognised, it falls back to sys.platform.
10764+ dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
10765+ gxx_modifier_func = globals().get('gxx_modifier_' + dest_os)
10766+ if gxx_modifier_func:
10767+ gxx_modifier_func(conf)
10768+
10769+def detect(conf):
10770+ conf.find_gxx()
10771+ conf.find_cpp()
10772+ conf.find_ar()
10773+ conf.gxx_common_flags()
10774+ conf.gxx_modifier_platform()
10775+ conf.cxx_load_tools()
10776+ conf.cxx_add_flags()
10777+ conf.link_add_flags()
10778+
10779diff --git a/buildtools/wafadmin/Tools/icc.py b/buildtools/wafadmin/Tools/icc.py
10780new file mode 100644
10781index 0000000..9c9a926
10782--- /dev/null
10783+++ b/buildtools/wafadmin/Tools/icc.py
10784@@ -0,0 +1,37 @@
10785+#!/usr/bin/env python
10786+# encoding: utf-8
10787+# Stian Selnes, 2008
10788+# Thomas Nagy 2009
10789+
10790+import os, sys
10791+import Configure, Options, Utils
10792+import ccroot, ar, gcc
10793+from Configure import conftest
10794+
10795+@conftest
10796+def find_icc(conf):
10797+ if sys.platform == 'cygwin':
10798+ conf.fatal('The Intel compiler does not work on Cygwin')
10799+
10800+ v = conf.env
10801+ cc = None
10802+ if v['CC']: cc = v['CC']
10803+ elif 'CC' in conf.environ: cc = conf.environ['CC']
10804+ if not cc: cc = conf.find_program('icc', var='CC')
10805+ if not cc: cc = conf.find_program('ICL', var='CC')
10806+ if not cc: conf.fatal('Intel C Compiler (icc) was not found')
10807+ cc = conf.cmd_to_list(cc)
10808+
10809+ ccroot.get_cc_version(conf, cc, icc=True)
10810+ v['CC'] = cc
10811+ v['CC_NAME'] = 'icc'
10812+
10813+detect = '''
10814+find_icc
10815+find_ar
10816+gcc_common_flags
10817+gcc_modifier_platform
10818+cc_load_tools
10819+cc_add_flags
10820+link_add_flags
10821+'''
10822diff --git a/buildtools/wafadmin/Tools/icpc.py b/buildtools/wafadmin/Tools/icpc.py
10823new file mode 100644
10824index 0000000..7d79c57
10825--- /dev/null
10826+++ b/buildtools/wafadmin/Tools/icpc.py
10827@@ -0,0 +1,35 @@
10828+#!/usr/bin/env python
10829+# encoding: utf-8
10830+# Thomas Nagy 2009
10831+
10832+import os, sys
10833+import Configure, Options, Utils
10834+import ccroot, ar, gxx
10835+from Configure import conftest
10836+
10837+@conftest
10838+def find_icpc(conf):
10839+ if sys.platform == 'cygwin':
10840+ conf.fatal('The Intel compiler does not work on Cygwin')
10841+
10842+ v = conf.env
10843+ cxx = None
10844+ if v['CXX']: cxx = v['CXX']
10845+ elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
10846+ if not cxx: cxx = conf.find_program('icpc', var='CXX')
10847+ if not cxx: conf.fatal('Intel C++ Compiler (icpc) was not found')
10848+ cxx = conf.cmd_to_list(cxx)
10849+
10850+ ccroot.get_cc_version(conf, cxx, icc=True)
10851+ v['CXX'] = cxx
10852+ v['CXX_NAME'] = 'icc'
10853+
10854+detect = '''
10855+find_icpc
10856+find_ar
10857+gxx_common_flags
10858+gxx_modifier_platform
10859+cxx_load_tools
10860+cxx_add_flags
10861+link_add_flags
10862+'''
10863diff --git a/buildtools/wafadmin/Tools/intltool.py b/buildtools/wafadmin/Tools/intltool.py
10864new file mode 100644
10865index 0000000..deb8f4a
10866--- /dev/null
10867+++ b/buildtools/wafadmin/Tools/intltool.py
10868@@ -0,0 +1,139 @@
10869+#!/usr/bin/env python
10870+# encoding: utf-8
10871+# Thomas Nagy, 2006 (ita)
10872+
10873+"intltool support"
10874+
10875+import os, re
10876+import Configure, TaskGen, Task, Utils, Runner, Options, Build, config_c
10877+from TaskGen import feature, before, taskgen
10878+from Logs import error
10879+
10880+"""
10881+Usage:
10882+
10883+bld(features='intltool_in', source='a.po b.po', podir='po', cache='.intlcache', flags='')
10884+
10885+"""
10886+
10887+class intltool_in_taskgen(TaskGen.task_gen):
10888+ """deprecated"""
10889+ def __init__(self, *k, **kw):
10890+ TaskGen.task_gen.__init__(self, *k, **kw)
10891+
10892+@before('apply_core')
10893+@feature('intltool_in')
10894+def iapply_intltool_in_f(self):
10895+ try: self.meths.remove('apply_core')
10896+ except ValueError: pass
10897+
10898+ for i in self.to_list(self.source):
10899+ node = self.path.find_resource(i)
10900+
10901+ podir = getattr(self, 'podir', 'po')
10902+ podirnode = self.path.find_dir(podir)
10903+ if not podirnode:
10904+ error("could not find the podir %r" % podir)
10905+ continue
10906+
10907+ cache = getattr(self, 'intlcache', '.intlcache')
10908+ self.env['INTLCACHE'] = os.path.join(self.path.bldpath(self.env), podir, cache)
10909+ self.env['INTLPODIR'] = podirnode.srcpath(self.env)
10910+ self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c'])
10911+
10912+ task = self.create_task('intltool', node, node.change_ext(''))
10913+ task.install_path = self.install_path
10914+
10915+class intltool_po_taskgen(TaskGen.task_gen):
10916+ """deprecated"""
10917+ def __init__(self, *k, **kw):
10918+ TaskGen.task_gen.__init__(self, *k, **kw)
10919+
10920+
10921+@feature('intltool_po')
10922+def apply_intltool_po(self):
10923+ try: self.meths.remove('apply_core')
10924+ except ValueError: pass
10925+
10926+ self.default_install_path = '${LOCALEDIR}'
10927+ appname = getattr(self, 'appname', 'set_your_app_name')
10928+ podir = getattr(self, 'podir', '')
10929+
10930+ def install_translation(task):
10931+ out = task.outputs[0]
10932+ filename = out.name
10933+ (langname, ext) = os.path.splitext(filename)
10934+ inst_file = langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
10935+ self.bld.install_as(os.path.join(self.install_path, inst_file), out, self.env, self.chmod)
10936+
10937+ linguas = self.path.find_resource(os.path.join(podir, 'LINGUAS'))
10938+ if linguas:
10939+ # scan LINGUAS file for locales to process
10940+ file = open(linguas.abspath())
10941+ langs = []
10942+ for line in file.readlines():
10943+ # ignore lines containing comments
10944+ if not line.startswith('#'):
10945+ langs += line.split()
10946+ file.close()
10947+ re_linguas = re.compile('[-a-zA-Z_@.]+')
10948+ for lang in langs:
10949+ # Make sure that we only process lines which contain locales
10950+ if re_linguas.match(lang):
10951+ node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po'))
10952+ task = self.create_task('po')
10953+ task.set_inputs(node)
10954+ task.set_outputs(node.change_ext('.mo'))
10955+ if self.bld.is_install: task.install = install_translation
10956+ else:
10957+ Utils.pprint('RED', "Error: no LINGUAS file found in the po directory")
10958+
10959+Task.simple_task_type('po', '${POCOM} -o ${TGT} ${SRC}', color='BLUE', shell=False)
10960+Task.simple_task_type('intltool',
10961+ '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}',
10962+ color='BLUE', after="cc_link cxx_link", shell=False)
10963+
10964+def detect(conf):
10965+ pocom = conf.find_program('msgfmt')
10966+ if not pocom:
10967+ # if msgfmt should not be mandatory, catch the thrown exception in your wscript
10968+ conf.fatal('The program msgfmt (gettext) is mandatory!')
10969+ conf.env['POCOM'] = pocom
10970+
10971+ # NOTE: it is possible to set INTLTOOL in the environment, but it must not have spaces in it
10972+
10973+ intltool = conf.find_program('intltool-merge', var='INTLTOOL')
10974+ if not intltool:
10975+ # if intltool-merge should not be mandatory, catch the thrown exception in your wscript
10976+ if Options.platform == 'win32':
10977+ perl = conf.find_program('perl', var='PERL')
10978+ if not perl:
10979+ conf.fatal('The program perl (required by intltool) could not be found')
10980+
10981+ intltooldir = Configure.find_file('intltool-merge', os.environ['PATH'].split(os.pathsep))
10982+ if not intltooldir:
10983+ conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
10984+
10985+ conf.env['INTLTOOL'] = Utils.to_list(conf.env['PERL']) + [intltooldir + os.sep + 'intltool-merge']
10986+ conf.check_message('intltool', '', True, ' '.join(conf.env['INTLTOOL']))
10987+ else:
10988+ conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
10989+
10990+ def getstr(varname):
10991+ return getattr(Options.options, varname, '')
10992+
10993+ prefix = conf.env['PREFIX']
10994+ datadir = getstr('datadir')
10995+ if not datadir: datadir = os.path.join(prefix,'share')
10996+
10997+ conf.define('LOCALEDIR', os.path.join(datadir, 'locale'))
10998+ conf.define('DATADIR', datadir)
10999+
11000+ if conf.env['CC'] or conf.env['CXX']:
11001+ # Define to 1 if <locale.h> is present
11002+ conf.check(header_name='locale.h')
11003+
11004+def set_options(opt):
11005+ opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
11006+ opt.add_option('--datadir', type='string', default='', dest='datadir', help='read-only application data')
11007+
11008diff --git a/buildtools/wafadmin/Tools/javaw.py b/buildtools/wafadmin/Tools/javaw.py
11009new file mode 100644
11010index 0000000..301ebc4
11011--- /dev/null
11012+++ b/buildtools/wafadmin/Tools/javaw.py
11013@@ -0,0 +1,255 @@
11014+#!/usr/bin/env python
11015+# encoding: utf-8
11016+# Thomas Nagy, 2006-2008 (ita)
11017+
11018+"""
11019+Java support
11020+
11021+Javac is one of the few compilers that behaves very badly:
11022+* it outputs files where it wants to (-d is only for the package root)
11023+* it recompiles files silently behind your back
11024+* it outputs an undefined amount of files (inner classes)
11025+
11026+Fortunately, the convention makes it possible to use the build dir without
11027+too many problems for the moment
11028+
11029+Inner classes must be located and cleaned when a problem arises,
11030+for the moment waf does not track the production of inner classes.
11031+
11032+Adding all the files to a task and executing it if any of the input files
11033+change is only annoying for the compilation times
11034+
11035+Compilation can be run using Jython[1] rather than regular Python. Instead of
11036+running one of the following commands:
11037+ ./waf configure
11038+ python waf configure
11039+You would have to run:
11040+ java -jar /path/to/jython.jar waf configure
11041+
11042+[1] http://www.jython.org/
11043+"""
11044+
11045+import os, re
11046+from Configure import conf
11047+import TaskGen, Task, Utils, Options, Build
11048+from TaskGen import feature, before, taskgen
11049+
11050+class_check_source = '''
11051+public class Test {
11052+ public static void main(String[] argv) {
11053+ Class lib;
11054+ if (argv.length < 1) {
11055+ System.err.println("Missing argument");
11056+ System.exit(77);
11057+ }
11058+ try {
11059+ lib = Class.forName(argv[0]);
11060+ } catch (ClassNotFoundException e) {
11061+ System.err.println("ClassNotFoundException");
11062+ System.exit(1);
11063+ }
11064+ lib = null;
11065+ System.exit(0);
11066+ }
11067+}
11068+'''
11069+
11070+@feature('jar')
11071+@before('apply_core')
11072+def jar_files(self):
11073+ basedir = getattr(self, 'basedir', '.')
11074+ destfile = getattr(self, 'destfile', 'test.jar')
11075+ jaropts = getattr(self, 'jaropts', [])
11076+ jarcreate = getattr(self, 'jarcreate', 'cf')
11077+
11078+ dir = self.path.find_dir(basedir)
11079+ if not dir: raise Utils.WafError('cannot find the basedir %r for jar' % basedir)
11080+
11081+ jaropts.append('-C')
11082+ jaropts.append(dir.abspath(self.env))
11083+ jaropts.append('.')
11084+
11085+ out = self.path.find_or_declare(destfile)
11086+
11087+ tsk = self.create_task('jar_create')
11088+ tsk.set_outputs(out)
11089+ tsk.inputs = [x for x in dir.find_iter(src=0, bld=1) if x.id != out.id]
11090+ tsk.env['JAROPTS'] = jaropts
11091+ tsk.env['JARCREATE'] = jarcreate
11092+
11093+@feature('javac')
11094+@before('apply_core')
11095+def apply_java(self):
11096+ Utils.def_attrs(self, jarname='', jaropts='', classpath='',
11097+ sourcepath='.', srcdir='.', source_re='**/*.java',
11098+ jar_mf_attributes={}, jar_mf_classpath=[])
11099+
11100+ if getattr(self, 'source_root', None):
11101+ # old stuff
11102+ self.srcdir = self.source_root
11103+
11104+
11105+ nodes_lst = []
11106+
11107+ if not self.classpath:
11108+ if not self.env['CLASSPATH']:
11109+ self.env['CLASSPATH'] = '..' + os.pathsep + '.'
11110+ else:
11111+ self.env['CLASSPATH'] = self.classpath
11112+
11113+ srcdir_node = self.path.find_dir(self.srcdir)
11114+ if not srcdir_node:
11115+ raise Utils.WafError('could not find srcdir %r' % self.srcdir)
11116+
11117+ src_nodes = [x for x in srcdir_node.ant_glob(self.source_re, flat=False)]
11118+ bld_nodes = [x.change_ext('.class') for x in src_nodes]
11119+
11120+ self.env['OUTDIR'] = [srcdir_node.bldpath(self.env)]
11121+
11122+ tsk = self.create_task('javac')
11123+ tsk.set_inputs(src_nodes)
11124+ tsk.set_outputs(bld_nodes)
11125+
11126+ if getattr(self, 'compat', None):
11127+ tsk.env.append_value('JAVACFLAGS', ['-source', self.compat])
11128+
11129+ if hasattr(self, 'sourcepath'):
11130+ fold = [self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
11131+ names = os.pathsep.join([x.srcpath() for x in fold])
11132+ else:
11133+ names = srcdir_node.srcpath()
11134+
11135+ if names:
11136+ tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
11137+
11138+ if self.jarname:
11139+ jtsk = self.create_task('jar_create', bld_nodes, self.path.find_or_declare(self.jarname))
11140+ jtsk.set_run_after(tsk)
11141+
11142+ if not self.env.JAROPTS:
11143+ if self.jaropts:
11144+ self.env.JAROPTS = self.jaropts
11145+ else:
11146+ dirs = '.'
11147+ self.env.JAROPTS = ['-C', ''.join(self.env['OUTDIR']), dirs]
11148+
11149+Task.simple_task_type('jar_create', '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}', color='GREEN', shell=False)
11150+cls = Task.simple_task_type('javac', '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}', shell=False)
11151+cls.color = 'BLUE'
11152+def post_run_javac(self):
11153+ """this is for cleaning the folder
11154+ javac creates single files for inner classes
11155+ but it is not possible to know which inner classes in advance"""
11156+
11157+ par = {}
11158+ for x in self.inputs:
11159+ par[x.parent.id] = x.parent
11160+
11161+ inner = {}
11162+ for k in par.values():
11163+ path = k.abspath(self.env)
11164+ lst = os.listdir(path)
11165+
11166+ for u in lst:
11167+ if u.find('$') >= 0:
11168+ inner_class_node = k.find_or_declare(u)
11169+ inner[inner_class_node.id] = inner_class_node
11170+
11171+ to_add = set(inner.keys()) - set([x.id for x in self.outputs])
11172+ for x in to_add:
11173+ self.outputs.append(inner[x])
11174+
11175+ self.cached = True # disable the cache here - inner classes are a problem
11176+ return Task.Task.post_run(self)
11177+cls.post_run = post_run_javac
11178+
11179+def detect(conf):
11180+ # If JAVA_HOME is set, we prepend its bin directory to the path list
11181+ java_path = conf.environ['PATH'].split(os.pathsep)
11182+ v = conf.env
11183+
11184+ if 'JAVA_HOME' in conf.environ:
11185+ java_path = [os.path.join(conf.environ['JAVA_HOME'], 'bin')] + java_path
11186+ conf.env['JAVA_HOME'] = [conf.environ['JAVA_HOME']]
11187+
11188+ for x in 'javac java jar'.split():
11189+ conf.find_program(x, var=x.upper(), path_list=java_path)
11190+ conf.env[x.upper()] = conf.cmd_to_list(conf.env[x.upper()])
11191+ v['JAVA_EXT'] = ['.java']
11192+
11193+ if 'CLASSPATH' in conf.environ:
11194+ v['CLASSPATH'] = conf.environ['CLASSPATH']
11195+
11196+ if not v['JAR']: conf.fatal('jar is required for making java packages')
11197+ if not v['JAVAC']: conf.fatal('javac is required for compiling java classes')
11198+ v['JARCREATE'] = 'cf' # can use cvf
11199+
11200+@conf
11201+def check_java_class(self, classname, with_classpath=None):
11202+ """Check if the specified java class is installed"""
11203+
11204+ import shutil
11205+
11206+ javatestdir = '.waf-javatest'
11207+
11208+ classpath = javatestdir
11209+ if self.env['CLASSPATH']:
11210+ classpath += os.pathsep + self.env['CLASSPATH']
11211+ if isinstance(with_classpath, str):
11212+ classpath += os.pathsep + with_classpath
11213+
11214+ shutil.rmtree(javatestdir, True)
11215+ os.mkdir(javatestdir)
11216+
11217+ java_file = open(os.path.join(javatestdir, 'Test.java'), 'w')
11218+ java_file.write(class_check_source)
11219+ java_file.close()
11220+
11221+ # Compile the source
11222+ Utils.exec_command(self.env['JAVAC'] + [os.path.join(javatestdir, 'Test.java')], shell=False)
11223+
11224+ # Try to run the app
11225+ cmd = self.env['JAVA'] + ['-cp', classpath, 'Test', classname]
11226+ self.log.write("%s\n" % str(cmd))
11227+ found = Utils.exec_command(cmd, shell=False, log=self.log)
11228+
11229+ self.check_message('Java class %s' % classname, "", not found)
11230+
11231+ shutil.rmtree(javatestdir, True)
11232+
11233+ return found
11234+
11235+@conf
11236+def check_jni_headers(conf):
11237+ """
11238+ Check for jni headers and libraries
11239+
11240+ On success the environment variable xxx_JAVA is added for uselib
11241+ """
11242+
11243+ if not conf.env.CC_NAME and not conf.env.CXX_NAME:
11244+ conf.fatal('load a compiler first (gcc, g++, ..)')
11245+
11246+ if not conf.env.JAVA_HOME:
11247+ conf.fatal('set JAVA_HOME in the system environment')
11248+
11249+ # jni requires the jvm
11250+ javaHome = conf.env['JAVA_HOME'][0]
11251+
11252+ b = Build.BuildContext()
11253+ b.load_dirs(conf.srcdir, conf.blddir)
11254+ dir = b.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
11255+ f = dir.ant_glob('**/(jni|jni_md).h', flat=False)
11256+ incDirs = [x.parent.abspath() for x in f]
11257+
11258+ dir = b.root.find_dir(conf.env.JAVA_HOME[0])
11259+ f = dir.ant_glob('**/*jvm.(so|dll)', flat=False)
11260+ libDirs = [x.parent.abspath() for x in f] or [javaHome]
11261+
11262+ for i, d in enumerate(libDirs):
11263+ if conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
11264+ libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA'):
11265+ break
11266+ else:
11267+ conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
11268+
11269diff --git a/buildtools/wafadmin/Tools/kde4.py b/buildtools/wafadmin/Tools/kde4.py
11270new file mode 100644
11271index 0000000..f480929
11272--- /dev/null
11273+++ b/buildtools/wafadmin/Tools/kde4.py
11274@@ -0,0 +1,74 @@
11275+#!/usr/bin/env python
11276+# encoding: utf-8
11277+# Thomas Nagy, 2006 (ita)
11278+
11279+import os, sys, re
11280+import Options, TaskGen, Task, Utils
11281+from TaskGen import taskgen, feature, after
11282+
11283+class msgfmt_taskgen(TaskGen.task_gen):
11284+ def __init__(self, *k, **kw):
11285+ TaskGen.task_gen.__init__(self, *k, **kw)
11286+
11287+@feature('msgfmt')
11288+def init_msgfmt(self):
11289+ #langs = '' # for example "foo/fr foo/br"
11290+ self.default_install_path = '${KDE4_LOCALE_INSTALL_DIR}'
11291+
11292+@feature('msgfmt')
11293+@after('init_msgfmt')
11294+def apply_msgfmt(self):
11295+ for lang in self.to_list(self.langs):
11296+ node = self.path.find_resource(lang+'.po')
11297+ task = self.create_task('msgfmt', node, node.change_ext('.mo'))
11298+
11299+ if not self.bld.is_install: continue
11300+ langname = lang.split('/')
11301+ langname = langname[-1]
11302+ task.install_path = self.install_path + os.sep + langname + os.sep + 'LC_MESSAGES'
11303+ task.filename = getattr(self, 'appname', 'set_your_appname') + '.mo'
11304+ task.chmod = self.chmod
11305+
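# A hedged usage sketch for the 'msgfmt' feature above ('po/fr', 'po/de' and
# 'myapp' are illustrative values, not taken from this tree):
#
#   bld(features='msgfmt', langs='po/fr po/de', appname='myapp')
#
# Each <lang>.po is compiled to a .mo file and installed under
# ${KDE4_LOCALE_INSTALL_DIR}/<lang>/LC_MESSAGES/myapp.mo.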
11306+def detect(conf):
11307+ kdeconfig = conf.find_program('kde4-config')
11308+ if not kdeconfig:
11309+ conf.fatal('we need kde4-config')
11310+ prefix = Utils.cmd_output('%s --prefix' % kdeconfig, silent=True).strip()
11311+ file = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
11312+ try: os.stat(file)
11313+ except OSError:
11314+ file = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
11315+ try: os.stat(file)
11316+ except OSError: conf.fatal('could not open %s' % file)
11317+
11318+ try:
11319+ txt = Utils.readf(file)
11320+ except (OSError, IOError):
11321+ conf.fatal('could not read %s' % file)
11322+
11323+ txt = txt.replace('\\\n', '\n')
11324+ fu = re.compile('#(.*)\n')
11325+ txt = fu.sub('', txt)
11326+
11327+ setregexp = re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
11328+ found = setregexp.findall(txt)
11329+
11330+ for (_, key, val) in found:
11331+ #print key, val
11332+ conf.env[key] = val
11333+
11334+ # well well, i could just write an interpreter for cmake files
11335+ conf.env['LIB_KDECORE']='kdecore'
11336+ conf.env['LIB_KDEUI'] ='kdeui'
11337+ conf.env['LIB_KIO'] ='kio'
11338+ conf.env['LIB_KHTML'] ='khtml'
11339+ conf.env['LIB_KPARTS'] ='kparts'
11340+
11341+ conf.env['LIBPATH_KDECORE'] = conf.env['KDE4_LIB_INSTALL_DIR']
11342+ conf.env['CPPPATH_KDECORE'] = conf.env['KDE4_INCLUDE_INSTALL_DIR']
11343+ conf.env.append_value('CPPPATH_KDECORE', conf.env['KDE4_INCLUDE_INSTALL_DIR']+"/KDE")
11344+
11345+ conf.env['MSGFMT'] = conf.find_program('msgfmt')
11346+
11347+Task.simple_task_type('msgfmt', '${MSGFMT} ${SRC} -o ${TGT}', color='BLUE', shell=False)
11348+
11349diff --git a/buildtools/wafadmin/Tools/libtool.py b/buildtools/wafadmin/Tools/libtool.py
11350new file mode 100644
11351index 0000000..47fa906
11352--- /dev/null
11353+++ b/buildtools/wafadmin/Tools/libtool.py
11354@@ -0,0 +1,330 @@
11355+#!/usr/bin/env python
11356+# encoding: utf-8
11357+# Matthias Jahn, 2008, jahn matthias ath freenet punto de
11358+# Thomas Nagy, 2008 (ita)
11359+
11360+import sys, re, os, optparse
11361+
11362+import TaskGen, Task, Utils, preproc
11363+from Logs import error, debug, warn
11364+from TaskGen import taskgen, after, before, feature
11365+
11366+REVISION="0.1.3"
11367+
11368+"""
11369+if you want to use the code here, you must use something like this:
11370+obj = obj.create(...)
11371+obj.features.append("libtool")
11372+obj.vnum = "1.2.3" # optional, but versioned libraries are common
11373+"""
11374+
11375+# fake libtool files
11376+fakelibtool_vardeps = ['CXX', 'PREFIX']
11377+def fakelibtool_build(task):
11378+ # Writes a .la file, used by libtool
11379+ env = task.env
11380+ dest = open(task.outputs[0].abspath(env), 'w')
11381+ sname = task.inputs[0].name
11382+ fu = dest.write
11383+ fu("# Generated by ltmain.sh - GNU libtool 1.5.18 - (pwn3d by BKsys II code name WAF)\n")
11384+ if env['vnum']:
11385+ nums = env['vnum'].split('.')
11386+ libname = task.inputs[0].name
11387+ name3 = libname+'.'+env['vnum']
11388+ name2 = libname+'.'+nums[0]
11389+ name1 = libname
11390+ fu("dlname='%s'\n" % name2)
11391+ strn = " ".join([name3, name2, name1])
11392+ fu("library_names='%s'\n" % (strn) )
11393+ else:
11394+ fu("dlname='%s'\n" % sname)
11395+ fu("library_names='%s %s %s'\n" % (sname, sname, sname) )
11396+ fu("old_library=''\n")
11397+ vars = ' '.join(env['libtoolvars']+env['LINKFLAGS'])
11398+ fu("dependency_libs='%s'\n" % vars)
11399+ fu("current=0\n")
11400+ fu("age=0\nrevision=0\ninstalled=yes\nshouldnotlink=no\n")
11401+ fu("dlopen=''\ndlpreopen=''\n")
11402+ fu("libdir='%s/lib'\n" % env['PREFIX'])
11403+ dest.close()
11404+ return 0
11405+
11406+def read_la_file(path):
11407+ sp = re.compile(r'^([^=]+)=\'(.*)\'$')
11408+ dc={}
11409+ file = open(path, "r")
11410+ for line in file.readlines():
11411+ try:
11412+ #print sp.split(line.strip())
11413+ _, left, right, _ = sp.split(line.strip())
11414+ dc[left]=right
11415+ except ValueError:
11416+ pass
11417+ file.close()
11418+ return dc
11419+
11420+@feature("libtool")
11421+@after('apply_link')
11422+def apply_link_libtool(self):
11423+ if self.type != 'program':
11424+ linktask = self.link_task
11425+ self.latask = self.create_task('fakelibtool', linktask.outputs, linktask.outputs[0].change_ext('.la'))
11426+
11427+ if self.bld.is_install:
11428+ self.bld.install_files('${PREFIX}/lib', linktask.outputs[0], self.env)
11429+
11430+@feature("libtool")
11431+@before('apply_core')
11432+def apply_libtool(self):
11433+ self.env['vnum']=self.vnum
11434+
11435+ paths=[]
11436+ libs=[]
11437+ libtool_files=[]
11438+ libtool_vars=[]
11439+
11440+ for l in self.env['LINKFLAGS']:
11441+ if l[:2]=='-L':
11442+ paths.append(l[2:])
11443+ elif l[:2]=='-l':
11444+ libs.append(l[2:])
11445+
11446+ for l in libs:
11447+ for p in paths:
11448+ dict = read_la_file(p+'/lib'+l+'.la')
11449+ linkflags2 = dict.get('dependency_libs', '')
11450+ for v in linkflags2.split():
11451+ if v.endswith('.la'):
11452+ libtool_files.append(v)
11453+ libtool_vars.append(v)
11454+ continue
11455+ self.env.append_unique('LINKFLAGS', v)
11456+ break
11457+
11458+ self.env['libtoolvars']=libtool_vars
11459+
11460+ while libtool_files:
11461+ file = libtool_files.pop()
11462+ dict = read_la_file(file)
11463+ for v in dict['dependency_libs'].split():
11464+ if v[-3:] == '.la':
11465+ libtool_files.append(v)
11466+ continue
11467+ self.env.append_unique('LINKFLAGS', v)
11468+
11469+Task.task_type_from_func('fakelibtool', vars=fakelibtool_vardeps, func=fakelibtool_build, color='BLUE', after="cc_link cxx_link static_link")
11470+
11471+class libtool_la_file:
11472+ def __init__ (self, la_filename):
11473+ self.__la_filename = la_filename
11474+ #remove path and .la suffix
11475+ self.linkname = str(os.path.split(la_filename)[-1])[:-3]
11476+ if self.linkname.startswith("lib"):
11477+ self.linkname = self.linkname[3:]
11478+ # The name that we can dlopen(3).
11479+ self.dlname = None
11480+ # Names of this library
11481+ self.library_names = None
11482+ # The name of the static archive.
11483+ self.old_library = None
11484+ # Libraries that this one depends upon.
11485+ self.dependency_libs = None
11486+ # Version information for this library.
11487+ self.current = None
11488+ self.age = None
11489+ self.revision = None
11490+ # Is this an already installed library?
11491+ self.installed = None
11492+ # Should we warn about portability when linking against -modules?
11493+ self.shouldnotlink = None
11494+ # Files to dlopen/dlpreopen
11495+ self.dlopen = None
11496+ self.dlpreopen = None
11497+ # Directory that this library needs to be installed in:
11498+ self.libdir = '/usr/lib'
11499+ if not self.__parse():
11500+ raise ValueError("file %s not found!!" %(la_filename))
11501+
11502+ def __parse(self):
11503+ "Retrieve the variables from a file"
11504+ if not os.path.isfile(self.__la_filename): return 0
11505+ la_file=open(self.__la_filename, 'r')
11506+ for line in la_file:
11507+ ln = line.strip()
11508+ if not ln: continue
11509+ if ln[0]=='#': continue
11510+ (key, value) = str(ln).split('=', 1)
11511+ key = key.strip()
11512+ value = value.strip()
11513+ if value == "no": value = False
11514+ elif value == "yes": value = True
11515+ else:
11516+ try: value = int(value)
11517+ except ValueError: value = value.strip("'")
11518+ setattr(self, key, value)
11519+ la_file.close()
11520+ return 1
11521+
11522+ def get_libs(self):
11523+ """return linkflags for this lib"""
11524+ libs = []
11525+ if self.dependency_libs:
11526+ libs = str(self.dependency_libs).strip().split()
11527+ if libs == None:
11528+ libs = []
11529+ # add la lib and libdir
11530+ libs.insert(0, "-l%s" % self.linkname.strip())
11531+ libs.insert(0, "-L%s" % self.libdir.strip())
11532+ return libs
11533+
11534+ def __str__(self):
11535+ return '''\
11536+dlname = "%(dlname)s"
11537+library_names = "%(library_names)s"
11538+old_library = "%(old_library)s"
11539+dependency_libs = "%(dependency_libs)s"
11540+version = %(current)s.%(age)s.%(revision)s
11541+installed = "%(installed)s"
11542+shouldnotlink = "%(shouldnotlink)s"
11543+dlopen = "%(dlopen)s"
11544+dlpreopen = "%(dlpreopen)s"
11545+libdir = "%(libdir)s"''' % self.__dict__
11546+
11547+class libtool_config:
11548+ def __init__ (self, la_filename):
11549+ self.__libtool_la_file = libtool_la_file(la_filename)
11550+ tmp = self.__libtool_la_file
11551+ self.__version = [int(tmp.current), int(tmp.age), int(tmp.revision)]
11552+ self.__sub_la_files = []
11553+ self.__sub_la_files.append(la_filename)
11554+ self.__libs = None
11555+
11556+ def __cmp__(self, other):
11557+ """make it compareable with X.Y.Z versions (Y and Z are optional)"""
11558+ if not other:
11559+ return 1
11560+ othervers = [int(s) for s in str(other).split(".")]
11561+ selfvers = self.__version
11562+ return cmp(selfvers, othervers)
11563+
11564+ def __str__(self):
11565+ return "\n".join([
11566+ str(self.__libtool_la_file),
11567+ ' '.join(self.__libtool_la_file.get_libs()),
11568+ '* New getlibs:',
11569+ ' '.join(self.get_libs())
11570+ ])
11571+
11572+ def __get_la_libs(self, la_filename):
11573+ return libtool_la_file(la_filename).get_libs()
11574+
11575+ def get_libs(self):
11576+ """return the complete uniqe linkflags that do not
11577+ contain .la files anymore"""
11578+ libs_list = list(self.__libtool_la_file.get_libs())
11579+ libs_map = {}
11580+ while len(libs_list) > 0:
11581+ entry = libs_list.pop(0)
11582+ if entry:
11583+ if str(entry).endswith(".la"):
11584+ ## prevents duplicate .la checks
11585+ if entry not in self.__sub_la_files:
11586+ self.__sub_la_files.append(entry)
11587+ libs_list.extend(self.__get_la_libs(entry))
11588+ else:
11589+ libs_map[entry]=1
11590+ self.__libs = libs_map.keys()
11591+ return self.__libs
11592+
11593+ def get_libs_only_L(self):
11594+ if not self.__libs: self.get_libs()
11595+ libs = self.__libs
11596+ libs = [s for s in libs if str(s).startswith('-L')]
11597+ return libs
11598+
11599+ def get_libs_only_l(self):
11600+ if not self.__libs: self.get_libs()
11601+ libs = self.__libs
11602+ libs = [s for s in libs if str(s).startswith('-l')]
11603+ return libs
11604+
11605+ def get_libs_only_other(self):
11606+ if not self.__libs: self.get_libs()
11607+ libs = self.__libs
11608+ libs = [s for s in libs if not(str(s).startswith('-L')or str(s).startswith('-l'))]
11609+ return libs
11610+
11611+def useCmdLine():
11612+ """parse cmdline args and control build"""
11613+ usage = '''Usage: %prog [options] PathToFile.la
11614+example: %prog --atleast-version=2.0.0 /usr/lib/libIlmImf.la
11615+or: %prog --libs /usr/lib/libamarok.la'''
11616+ parser = optparse.OptionParser(usage)
11617+ a = parser.add_option
11618+ a("--version", dest = "versionNumber",
11619+ action = "store_true", default = False,
11620+ help = "output version of libtool-config"
11621+ )
11622+ a("--debug", dest = "debug",
11623+ action = "store_true", default = False,
11624+ help = "enable debug"
11625+ )
11626+ a("--libs", dest = "libs",
11627+ action = "store_true", default = False,
11628+ help = "output all linker flags"
11629+ )
11630+ a("--libs-only-l", dest = "libs_only_l",
11631+ action = "store_true", default = False,
11632+ help = "output -l flags"
11633+ )
11634+ a("--libs-only-L", dest = "libs_only_L",
11635+ action = "store_true", default = False,
11636+ help = "output -L flags"
11637+ )
11638+ a("--libs-only-other", dest = "libs_only_other",
11639+ action = "store_true", default = False,
11640+ help = "output other libs (e.g. -pthread)"
11641+ )
11642+ a("--atleast-version", dest = "atleast_version",
11643+ default=None,
11644+ help = "return 0 if the module is at least version ATLEAST_VERSION"
11645+ )
11646+ a("--exact-version", dest = "exact_version",
11647+ default=None,
11648+ help = "return 0 if the module is exactly version EXACT_VERSION"
11649+ )
11650+ a("--max-version", dest = "max_version",
11651+ default=None,
11652+ help = "return 0 if the module is at no newer than version MAX_VERSION"
11653+ )
11654+
11655+ (options, args) = parser.parse_args()
11656+ if len(args) != 1 and not options.versionNumber:
11657+ parser.error("incorrect number of arguments")
11658+ if options.versionNumber:
11659+ print("libtool-config version %s" % REVISION)
11660+ return 0
11661+ ltf = libtool_config(args[0])
11662+ if options.debug:
11663+ print(ltf)
11664+ if options.atleast_version:
11665+ if ltf >= options.atleast_version: return 0
11666+ sys.exit(1)
11667+ if options.exact_version:
11668+ if ltf == options.exact_version: return 0
11669+ sys.exit(1)
11670+ if options.max_version:
11671+ if ltf <= options.max_version: return 0
11672+ sys.exit(1)
11673+
11674+ def p(x):
11675+ print(" ".join(x))
11676+ if options.libs: p(ltf.get_libs())
11677+ elif options.libs_only_l: p(ltf.get_libs_only_l())
11678+ elif options.libs_only_L: p(ltf.get_libs_only_L())
11679+ elif options.libs_only_other: p(ltf.get_libs_only_other())
11680+ return 0
11681+
11682+if __name__ == '__main__':
11683+ useCmdLine()
11684+
11685diff --git a/buildtools/wafadmin/Tools/lua.py b/buildtools/wafadmin/Tools/lua.py
11686new file mode 100644
11687index 0000000..5b181e1
11688--- /dev/null
11689+++ b/buildtools/wafadmin/Tools/lua.py
11690@@ -0,0 +1,25 @@
11691+#!/usr/bin/env python
11692+# encoding: utf-8
11693+# Sebastian Schlingmann, 2008
11694+# Thomas Nagy, 2008 (ita)
11695+
11696+import TaskGen
11697+from TaskGen import taskgen, feature
11698+from Constants import *
11699+
11700+TaskGen.declare_chain(
11701+ name = 'luac',
11702+ rule = '${LUAC} -s -o ${TGT} ${SRC}',
11703+ ext_in = '.lua',
11704+ ext_out = '.luac',
11705+ reentrant = False,
11706+ install = 'LUADIR', # env variable
11707+)
11708+
11709+@feature('lua')
11710+def init_lua(self):
11711+ self.default_chmod = O755
11712+
11713+def detect(conf):
11714+ conf.find_program('luac', var='LUAC', mandatory = True)
11715+
11716diff --git a/buildtools/wafadmin/Tools/misc.py b/buildtools/wafadmin/Tools/misc.py
11717new file mode 100644
11718index 0000000..9903ee4
11719--- /dev/null
11720+++ b/buildtools/wafadmin/Tools/misc.py
11721@@ -0,0 +1,430 @@
11722+#!/usr/bin/env python
11723+# encoding: utf-8
11724+# Thomas Nagy, 2006 (ita)
11725+
11726+"""
11727+Custom objects:
11728+ - execute a function every time
11729+ - copy a file somewhere else
11730+"""
11731+
11732+import shutil, re, os
11733+import TaskGen, Node, Task, Utils, Build, Constants
11734+from TaskGen import feature, taskgen, after, before
11735+from Logs import debug
11736+
11737+def copy_func(tsk):
11738+ "Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)"
11739+ env = tsk.env
11740+ infile = tsk.inputs[0].abspath(env)
11741+ outfile = tsk.outputs[0].abspath(env)
11742+ try:
11743+ shutil.copy2(infile, outfile)
11744+ except (OSError, IOError):
11745+ return 1
11746+ else:
11747+ if tsk.chmod: os.chmod(outfile, tsk.chmod)
11748+ return 0
11749+
11750+def action_process_file_func(tsk):
11751+ "Ask the function attached to the task to process it"
11752+ if not tsk.fun: raise Utils.WafError('task must have a function attached to it for copy_func to work!')
11753+ return tsk.fun(tsk)
11754+
11755+class cmd_taskgen(TaskGen.task_gen):
11756+ def __init__(self, *k, **kw):
11757+ TaskGen.task_gen.__init__(self, *k, **kw)
11758+
11759+@feature('cmd')
11760+def apply_cmd(self):
11761+ "call a command everytime"
11762+ if not self.fun: raise Utils.WafError('cmdobj needs a function!')
11763+ tsk = Task.TaskBase()
11764+ tsk.fun = self.fun
11765+ tsk.env = self.env
11766+ self.tasks.append(tsk)
11767+ tsk.install_path = self.install_path
11768+
11769+class copy_taskgen(TaskGen.task_gen):
11770+ "By default, make a file copy, if fun is provided, fun will make the copy (or call a compiler, etc)"
11771+ def __init__(self, *k, **kw):
11772+ TaskGen.task_gen.__init__(self, *k, **kw)
11773+
11774+@feature('copy')
11775+@before('apply_core')
11776+def apply_copy(self):
11777+ Utils.def_attrs(self, fun=copy_func)
11778+ self.default_install_path = 0
11779+
11780+ lst = self.to_list(self.source)
11781+ self.meths.remove('apply_core')
11782+
11783+ for filename in lst:
11784+ node = self.path.find_resource(filename)
11785+ if not node: raise Utils.WafError('cannot find input file %s for processing' % filename)
11786+
11787+ target = self.target
11788+ if not target or len(lst)>1: target = node.name
11789+
11790+ # TODO the file path may be incorrect
11791+ newnode = self.path.find_or_declare(target)
11792+
11793+ tsk = self.create_task('copy', node, newnode)
11794+ tsk.fun = self.fun
11795+ tsk.chmod = self.chmod
11796+ tsk.install_path = self.install_path
11797+
11798+ if not tsk.env:
11799+ tsk.debug()
11800+ raise Utils.WafError('task without an environment')
11801+
11802+def subst_func(tsk):
11803+ "Substitutes variables in a .in file"
11804+
11805+ m4_re = re.compile('@(\w+)@', re.M)
11806+
11807+ env = tsk.env
11808+ infile = tsk.inputs[0].abspath(env)
11809+ outfile = tsk.outputs[0].abspath(env)
11810+
11811+ code = Utils.readf(infile)
11812+
11813+ # replace all % by %% to prevent errors by % signs in the input file while string formatting
11814+ code = code.replace('%', '%%')
11815+
11816+ s = m4_re.sub(r'%(\1)s', code)
11817+
11818+ di = tsk.dict or {}
11819+ if not di:
11820+ names = m4_re.findall(code)
11821+ for i in names:
11822+ di[i] = env.get_flat(i) or env.get_flat(i.upper())
11823+
11824+ file = open(outfile, 'w')
11825+ file.write(s % di)
11826+ file.close()
11827+ if tsk.chmod: os.chmod(outfile, tsk.chmod)
11828+
11829+class subst_taskgen(TaskGen.task_gen):
11830+ def __init__(self, *k, **kw):
11831+ TaskGen.task_gen.__init__(self, *k, **kw)
11832+
11833+@feature('subst')
11834+@before('apply_core')
11835+def apply_subst(self):
11836+ Utils.def_attrs(self, fun=subst_func)
11837+ self.default_install_path = 0
11838+ lst = self.to_list(self.source)
11839+ self.meths.remove('apply_core')
11840+
11841+ self.dict = getattr(self, 'dict', {})
11842+
11843+ for filename in lst:
11844+ node = self.path.find_resource(filename)
11845+ if not node: raise Utils.WafError('cannot find input file %s for processing' % filename)
11846+
11847+ if self.target:
11848+ newnode = self.path.find_or_declare(self.target)
11849+ else:
11850+ newnode = node.change_ext('')
11851+
11852+ try:
11853+ self.dict = self.dict.get_merged_dict()
11854+ except AttributeError:
11855+ pass
11856+
11857+ if self.dict and not self.env['DICT_HASH']:
11858+ self.env = self.env.copy()
11859+ keys = list(self.dict.keys())
11860+ keys.sort()
11861+ lst = [self.dict[x] for x in keys]
11862+ self.env['DICT_HASH'] = str(Utils.h_list(lst))
11863+
11864+ tsk = self.create_task('copy', node, newnode)
11865+ tsk.fun = self.fun
11866+ tsk.dict = self.dict
11867+ tsk.dep_vars = ['DICT_HASH']
11868+ tsk.install_path = self.install_path
11869+ tsk.chmod = self.chmod
11870+
11871+ if not tsk.env:
11872+ tsk.debug()
11873+ raise Utils.WafError('task without an environment')
11874+
11875+####################
11876+## command-output ####
11877+####################
11878+
11879+class cmd_arg(object):
11880+ """command-output arguments for representing files or folders"""
11881+ def __init__(self, name, template='%s'):
11882+ self.name = name
11883+ self.template = template
11884+ self.node = None
11885+
11886+class input_file(cmd_arg):
11887+ def find_node(self, base_path):
11888+ assert isinstance(base_path, Node.Node)
11889+ self.node = base_path.find_resource(self.name)
11890+ if self.node is None:
11891+ raise Utils.WafError("Input file %s not found in " % (self.name, base_path))
11892+
11893+ def get_path(self, env, absolute):
11894+ if absolute:
11895+ return self.template % self.node.abspath(env)
11896+ else:
11897+ return self.template % self.node.srcpath(env)
11898+
11899+class output_file(cmd_arg):
11900+ def find_node(self, base_path):
11901+ assert isinstance(base_path, Node.Node)
11902+ self.node = base_path.find_or_declare(self.name)
11903+ if self.node is None:
11904+ raise Utils.WafError("Output file %s not found in " % (self.name, base_path))
11905+
11906+ def get_path(self, env, absolute):
11907+ if absolute:
11908+ return self.template % self.node.abspath(env)
11909+ else:
11910+ return self.template % self.node.bldpath(env)
11911+
11912+class cmd_dir_arg(cmd_arg):
11913+ def find_node(self, base_path):
11914+ assert isinstance(base_path, Node.Node)
11915+ self.node = base_path.find_dir(self.name)
11916+ if self.node is None:
11917+ raise Utils.WafError("Directory %s not found in " % (self.name, base_path))
11918+
11919+class input_dir(cmd_dir_arg):
11920+ def get_path(self, dummy_env, dummy_absolute):
11921+ return self.template % self.node.abspath()
11922+
11923+class output_dir(cmd_dir_arg):
11924+ def get_path(self, env, dummy_absolute):
11925+ return self.template % self.node.abspath(env)
11926+
11927+
11928+class command_output(Task.Task):
11929+ color = "BLUE"
11930+ def __init__(self, env, command, command_node, command_args, stdin, stdout, cwd, os_env, stderr):
11931+ Task.Task.__init__(self, env, normal=1)
11932+ assert isinstance(command, (str, Node.Node))
11933+ self.command = command
11934+ self.command_args = command_args
11935+ self.stdin = stdin
11936+ self.stdout = stdout
11937+ self.cwd = cwd
11938+ self.os_env = os_env
11939+ self.stderr = stderr
11940+
11941+ if command_node is not None: self.dep_nodes = [command_node]
11942+ self.dep_vars = [] # additional environment variables to look at
11943+
11944+ def run(self):
11945+ task = self
11946+ #assert len(task.inputs) > 0
11947+
11948+ def input_path(node, template):
11949+ if task.cwd is None:
11950+ return template % node.bldpath(task.env)
11951+ else:
11952+ return template % node.abspath()
11953+ def output_path(node, template):
11954+ fun = node.abspath
11955+ if task.cwd is None: fun = node.bldpath
11956+ return template % fun(task.env)
11957+
11958+ if isinstance(task.command, Node.Node):
11959+ argv = [input_path(task.command, '%s')]
11960+ else:
11961+ argv = [task.command]
11962+
11963+ for arg in task.command_args:
11964+ if isinstance(arg, str):
11965+ argv.append(arg)
11966+ else:
11967+ assert isinstance(arg, cmd_arg)
11968+ argv.append(arg.get_path(task.env, (task.cwd is not None)))
11969+
11970+ if task.stdin:
11971+ stdin = open(input_path(task.stdin, '%s'))
11972+ else:
11973+ stdin = None
11974+
11975+ if task.stdout:
11976+ stdout = open(output_path(task.stdout, '%s'), "w")
11977+ else:
11978+ stdout = None
11979+
11980+ if task.stderr:
11981+ stderr = open(output_path(task.stderr, '%s'), "w")
11982+ else:
11983+ stderr = None
11984+
11985+ if task.cwd is None:
11986+ cwd = ('None (actually %r)' % os.getcwd())
11987+ else:
11988+ cwd = repr(task.cwd)
11989+ debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" %
11990+ (cwd, stdin, stdout, argv))
11991+
11992+ if task.os_env is None:
11993+ os_env = os.environ
11994+ else:
11995+ os_env = task.os_env
11996+ command = Utils.pproc.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env)
11997+ return command.wait()
11998+
11999+class cmd_output_taskgen(TaskGen.task_gen):
12000+ def __init__(self, *k, **kw):
12001+ TaskGen.task_gen.__init__(self, *k, **kw)
12002+
12003+@feature('command-output')
12004+def init_cmd_output(self):
12005+ Utils.def_attrs(self,
12006+ stdin = None,
12007+ stdout = None,
12008+ stderr = None,
12009+ # the command to execute
12010+ command = None,
12011+
12012+ # whether it is an external command; otherwise it is assumed
12013+ # to be an executable binary or script that lives in the
12014+ # source or build tree.
12015+ command_is_external = False,
12016+
12017+ # extra parameters (argv) to pass to the command (excluding
12018+ # the command itself)
12019+ argv = [],
12020+
12021+ # dependencies to other objects -> this is probably not what you want (ita)
12022+ # values must be 'task_gen' instances (not names!)
12023+ dependencies = [],
12024+
12025+ # dependencies on env variable contents
12026+ dep_vars = [],
12027+
12028+ # input files that are implicit, i.e. they are not
12029+ # stdin, nor are they mentioned explicitly in argv
12030+ hidden_inputs = [],
12031+
12032+ # output files that are implicit, i.e. they are not
12033+ # stdout, nor are they mentioned explicitly in argv
12034+ hidden_outputs = [],
12035+
12036+ # change the subprocess to this cwd (must use obj.input_dir() or output_dir() here)
12037+ cwd = None,
12038+
12039+ # OS environment variables to pass to the subprocess
12040+ # if None, use the default environment variables unchanged
12041+ os_env = None)
12042+
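# A hedged usage sketch for the 'command-output' feature described above and
# applied below; 'gen.sh', 'input.txt' and 'report.txt' are illustrative names
# (input_file/output_file are the helper classes defined earlier in this module):
#
#   bld(features='command-output', command='gen.sh',
#       argv=[input_file('input.txt'), output_file('report.txt')])
#
# With command_is_external left False, 'gen.sh' is looked up in the source tree
# and becomes a dependency of the generated task.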
12043+@feature('command-output')
12044+@after('init_cmd_output')
12045+def apply_cmd_output(self):
12046+ if self.command is None:
12047+ raise Utils.WafError("command-output missing command")
12048+ if self.command_is_external:
12049+ cmd = self.command
12050+ cmd_node = None
12051+ else:
12052+ cmd_node = self.path.find_resource(self.command)
12053+ assert cmd_node is not None, ('''Could not find command '%s' in source tree.
12054+Hint: if this is an external command,
12055+use command_is_external=True''') % (self.command,)
12056+ cmd = cmd_node
12057+
12058+ if self.cwd is None:
12059+ cwd = None
12060+ else:
12061+ assert isinstance(self.cwd, cmd_dir_arg)
12062+ self.cwd.find_node(self.path); cwd = self.cwd
12063+
12064+ args = []
12065+ inputs = []
12066+ outputs = []
12067+
12068+ for arg in self.argv:
12069+ if isinstance(arg, cmd_arg):
12070+ arg.find_node(self.path)
12071+ if isinstance(arg, input_file):
12072+ inputs.append(arg.node)
12073+ if isinstance(arg, output_file):
12074+ outputs.append(arg.node)
12075+
12076+ if self.stdout is None:
12077+ stdout = None
12078+ else:
12079+ assert isinstance(self.stdout, str)
12080+ stdout = self.path.find_or_declare(self.stdout)
12081+ if stdout is None:
12082+ raise Utils.WafError("File %s not found" % (self.stdout,))
12083+ outputs.append(stdout)
12084+
12085+ if self.stderr is None:
12086+ stderr = None
12087+ else:
12088+ assert isinstance(self.stderr, str)
12089+ stderr = self.path.find_or_declare(self.stderr)
12090+ if stderr is None:
12091+ raise Utils.WafError("File %s not found" % (self.stderr,))
12092+ outputs.append(stderr)
12093+
12094+ if self.stdin is None:
12095+ stdin = None
12096+ else:
12097+ assert isinstance(self.stdin, str)
12098+ stdin = self.path.find_resource(self.stdin)
12099+ if stdin is None:
12100+ raise Utils.WafError("File %s not found" % (self.stdin,))
12101+ inputs.append(stdin)
12102+
12103+ for hidden_input in self.to_list(self.hidden_inputs):
12104+ node = self.path.find_resource(hidden_input)
12105+ if node is None:
12106+ raise Utils.WafError("File %s not found in dir %s" % (hidden_input, self.path))
12107+ inputs.append(node)
12108+
12109+ for hidden_output in self.to_list(self.hidden_outputs):
12110+ node = self.path.find_or_declare(hidden_output)
12111+ if node is None:
12112+ raise Utils.WafError("File %s not found in dir %s" % (hidden_output, self.path))
12113+ outputs.append(node)
12114+
12115+ if not (inputs or getattr(self, 'no_inputs', None)):
12116+ raise Utils.WafError('command-output objects must have at least one input file or give self.no_inputs')
12117+ if not (outputs or getattr(self, 'no_outputs', None)):
12118+ raise Utils.WafError('command-output objects must have at least one output file or give self.no_outputs')
12119+
12120+ task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr)
12121+ Utils.copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
12122+ self.tasks.append(task)
12123+
12124+ task.inputs = inputs
12125+ task.outputs = outputs
12126+ task.dep_vars = self.to_list(self.dep_vars)
12127+
12128+ for dep in self.dependencies:
12129+ assert dep is not self
12130+ dep.post()
12131+ for dep_task in dep.tasks:
12132+ task.set_run_after(dep_task)
12133+
12134+ if not task.inputs:
12135+ # the case for svnversion, always run, and update the output nodes
12136+ task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run
12137+ task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__)
12138+
12139+ # TODO the case with no outputs?
12140+
12141+def post_run(self):
12142+ for x in self.outputs:
12143+ h = Utils.h_file(x.abspath(self.env))
12144+ self.generator.bld.node_sigs[self.env.variant()][x.id] = h
12145+
12146+def runnable_status(self):
12147+ return Constants.RUN_ME
12148+
12149+Task.task_type_from_func('copy', vars=[], func=action_process_file_func)
12150+TaskGen.task_gen.classes['command-output'] = cmd_output_taskgen
12151+
12152diff --git a/buildtools/wafadmin/Tools/msvc.py b/buildtools/wafadmin/Tools/msvc.py
12153new file mode 100644
12154index 0000000..4fde8b1
12155--- /dev/null
12156+++ b/buildtools/wafadmin/Tools/msvc.py
12157@@ -0,0 +1,797 @@
12158+#!/usr/bin/env python
12159+# encoding: utf-8
12160+# Carlos Rafael Giani, 2006 (dv)
12161+# Tamas Pal, 2007 (folti)
12162+# Nicolas Mercier, 2009
12163+# Microsoft Visual C++/Intel C++ compiler support - beta, needs more testing
12164+
12165+# usage:
12166+#
12167+# conf.env['MSVC_VERSIONS'] = ['msvc 9.0', 'msvc 8.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0']
12168+# conf.env['MSVC_TARGETS'] = ['x64']
12169+# conf.check_tool('msvc')
12170+# OR conf.check_tool('msvc', funs='no_autodetect')
12171+# conf.check_lib_msvc('gdi32')
12172+# conf.check_libs_msvc('kernel32 user32', mandatory=True)
12173+# ...
12174+# obj.uselib = 'KERNEL32 USER32 GDI32'
12175+#
12176+# platforms and targets will be tested in the order they appear;
12177+# the first good configuration will be used
12178+# supported platforms :
12179+# ia64, x64, x86, x86_amd64, x86_ia64
12180+
12181+# compilers supported :
12182+# msvc => Visual Studio, versions 7.1 (2003), 8.0 (2005), 9.0 (2008)
12183+# wsdk => Windows SDK, versions 6.0, 6.1, 7.0
12184+# icl => Intel compiler, versions 9,10,11
12185+# Smartphone => Compiler/SDK for Smartphone devices (armv4/v4i)
12186+# PocketPC => Compiler/SDK for PocketPC devices (armv4/v4i)
12187+
12188+
12189+import os, sys, re, string, optparse
12190+import Utils, TaskGen, Runner, Configure, Task, Options
12191+from Logs import debug, info, warn, error
12192+from TaskGen import after, before, feature
12193+
12194+from Configure import conftest, conf
12195+import ccroot, cc, cxx, ar, winres
12196+from libtool import read_la_file
12197+
12198+try:
12199+ import _winreg
12200+except:
12201+ import winreg as _winreg
12202+
12203+pproc = Utils.pproc
12204+
12205+# importlibs provided by MSVC/Platform SDK. Do NOT search them....
12206+g_msvc_systemlibs = """
12207+aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
12208+cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
12209+credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
12210+ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
12211+faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glaux glu32 gpedit gpmuuid
12212+gtrts32w gtrtst32 hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
12213+kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
12214+mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
12215+msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
12216+netapi32 nmapi nmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
12217+odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlg olepro32 opends60 opengl32
12218+osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
12219+ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
12220+rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
12221+shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
12222+traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
12223+version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm
12224+wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
12225+""".split()
12226+
12227+
12228+all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'), ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64') ]
12229+all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ]
12230+all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
12231+
12232+def setup_msvc(conf, versions):
12233+ platforms = Utils.to_list(conf.env['MSVC_TARGETS']) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
12234+ desired_versions = conf.env['MSVC_VERSIONS'] or [v for v,_ in versions][::-1]
12235+ versiondict = dict(versions)
12236+
12237+ for version in desired_versions:
12238+ try:
12239+ targets = dict(versiondict [version])
12240+ for target in platforms:
12241+ try:
12242+ arch,(p1,p2,p3) = targets[target]
12243+ compiler,revision = version.split()
12244+ return compiler,revision,p1,p2,p3
12245+ except KeyError: continue
12246+ except KeyError: continue
12247+ conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
12248+
12249+@conf
12250+def get_msvc_version(conf, compiler, version, target, vcvars):
12251+ debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
12252+ batfile = os.path.join(conf.blddir, 'waf-print-msvc.bat')
12253+ f = open(batfile, 'w')
12254+ f.write("""@echo off
12255+set INCLUDE=
12256+set LIB=
12257+call "%s" %s
12258+echo PATH=%%PATH%%
12259+echo INCLUDE=%%INCLUDE%%
12260+echo LIB=%%LIB%%
12261+""" % (vcvars,target))
12262+ f.close()
12263+ sout = Utils.cmd_output(['cmd', '/E:on', '/V:on', '/C', batfile])
12264+ lines = sout.splitlines()
12265+
12266+ for x in ('Setting environment', 'Setting SDK environment', 'Intel(R) C++ Compiler'):
12267+ if lines[0].find(x) != -1:
12268+ break
12269+ else:
12270+ debug('msvc: get_msvc_version: %r %r %r -> not found', compiler, version, target)
12271+ conf.fatal('msvc: Impossible to find a valid architecture for building (in get_msvc_version)')
12272+
12273+ for line in lines[1:]:
12274+ if line.startswith('PATH='):
12275+ path = line[5:]
12276+ MSVC_PATH = path.split(';')
12277+ elif line.startswith('INCLUDE='):
12278+ MSVC_INCDIR = [i for i in line[8:].split(';') if i]
12279+ elif line.startswith('LIB='):
12280+ MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
12281+
12282+ # Check if the compiler is usable at all.
12283+ # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
12284+ env = {}
12285+ env.update(os.environ)
12286+ env.update(PATH = path)
12287+ compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
12288+ cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
12289+ # delete CL if it exists, because it could contain parameters which can change cl's behaviour rather catastrophically.
12290+ if env.has_key('CL'):
12291+ del(env['CL'])
12292+
12293+ try:
12294+ p = pproc.Popen([cxx, '/help'], env=env, stdout=pproc.PIPE, stderr=pproc.PIPE)
12295+ out, err = p.communicate()
12296+ if p.returncode != 0:
12297+ raise Exception('return code: %r: %r' % (p.returncode, err))
12298+ except Exception, e:
12299+ debug('msvc: get_msvc_version: %r %r %r -> failure', compiler, version, target)
12300+ debug(str(e))
12301+ conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
12302+ else:
12303+ debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
12304+
12305+ return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
12306+
12307+@conf
12308+def gather_wsdk_versions(conf, versions):
12309+ version_pattern = re.compile('^v..?.?\...?.?')
12310+ try:
12311+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
12312+ except WindowsError:
12313+ try:
12314+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
12315+ except WindowsError:
12316+ return
12317+ index = 0
12318+ while 1:
12319+ try:
12320+ version = _winreg.EnumKey(all_versions, index)
12321+ except WindowsError:
12322+ break
12323+ index = index + 1
12324+ if not version_pattern.match(version):
12325+ continue
12326+ try:
12327+ msvc_version = _winreg.OpenKey(all_versions, version)
12328+ path,type = _winreg.QueryValueEx(msvc_version,'InstallationFolder')
12329+ except WindowsError:
12330+ continue
12331+ if os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
12332+ targets = []
12333+ for target,arch in all_msvc_platforms:
12334+ try:
12335+ targets.append((target, (arch, conf.get_msvc_version('wsdk', version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd')))))
12336+ except Configure.ConfigurationError:
12337+ pass
12338+ versions.append(('wsdk ' + version[1:], targets))
12339+
12340+@conf
12341+def gather_msvc_versions(conf, versions):
12342+ # checks SmartPhones SDKs
12343+ try:
12344+ ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
12345+ except WindowsError:
12346+ try:
12347+ ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
12348+ except WindowsError:
12349+ ce_sdk = ''
12350+ if ce_sdk:
12351+ supported_wince_platforms = []
12352+ ce_index = 0
12353+ while 1:
12354+ try:
12355+ sdk_device = _winreg.EnumKey(ce_sdk, ce_index)
12356+ except WindowsError:
12357+ break
12358+ ce_index = ce_index + 1
12359+ sdk = _winreg.OpenKey(ce_sdk, sdk_device)
12360+ path,type = _winreg.QueryValueEx(sdk, 'SDKRootDir')
12361+ path=str(path)
12362+ path,device = os.path.split(path)
12363+ if not device:
12364+ path,device = os.path.split(path)
12365+ for arch,compiler in all_wince_platforms:
12366+ platforms = []
12367+ if os.path.isdir(os.path.join(path, device, 'Lib', arch)):
12368+ platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch)))
12369+ if platforms:
12370+ supported_wince_platforms.append((device, platforms))
12371+ # checks MSVC
12372+ version_pattern = re.compile('^..?\...?')
12373+ for vcver,vcvar in [('VCExpress','exp'), ('VisualStudio','')]:
12374+ try:
12375+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\'+vcver)
12376+ except WindowsError:
12377+ try:
12378+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\'+vcver)
12379+ except WindowsError:
12380+ continue
12381+ index = 0
12382+ while 1:
12383+ try:
12384+ version = _winreg.EnumKey(all_versions, index)
12385+ except WindowsError:
12386+ break
12387+ index = index + 1
12388+ if not version_pattern.match(version):
12389+ continue
12390+ try:
12391+ msvc_version = _winreg.OpenKey(all_versions, version + "\\Setup\\VS")
12392+ path,type = _winreg.QueryValueEx(msvc_version, 'ProductDir')
12393+ path=str(path)
12394+ targets = []
12395+ if ce_sdk:
12396+ for device,platforms in supported_wince_platforms:
12397+ cetargets = []
12398+ for platform,compiler,include,lib in platforms:
12399+ winCEpath = os.path.join(path, 'VC', 'ce')
12400+ if os.path.isdir(winCEpath):
12401+ common_bindirs,_1,_2 = conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat'))
12402+ if os.path.isdir(os.path.join(winCEpath, 'lib', platform)):
12403+ bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)] + common_bindirs
12404+ incdirs = [include, os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include')]
12405+ libdirs = [lib, os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform)]
12406+ cetargets.append((platform, (platform, (bindirs,incdirs,libdirs))))
12407+ versions.append((device+' '+version, cetargets))
12408+ if os.path.isfile(os.path.join(path, 'VC', 'vcvarsall.bat')):
12409+ for target,realtarget in all_msvc_platforms[::-1]:
12410+ try:
12411+ targets.append((target, (realtarget, conf.get_msvc_version('msvc', version, target, os.path.join(path, 'VC', 'vcvarsall.bat')))))
12412+ except:
12413+ pass
12414+ elif os.path.isfile(os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')):
12415+ try:
12416+ targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')))))
12417+ except Configure.ConfigurationError:
12418+ pass
12419+ versions.append(('msvc '+version, targets))
12420+
12421+ except WindowsError:
12422+ continue
12423+
12424+@conf
12425+def gather_icl_versions(conf, versions):
12426+ version_pattern = re.compile('^...?.?\....?.?')
12427+ try:
12428+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
12429+ except WindowsError:
12430+ try:
12431+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
12432+ except WindowsError:
12433+ return
12434+ index = 0
12435+ while 1:
12436+ try:
12437+ version = _winreg.EnumKey(all_versions, index)
12438+ except WindowsError:
12439+ break
12440+ index = index + 1
12441+ if not version_pattern.match(version):
12442+ continue
12443+ targets = []
12444+ for target,arch in all_icl_platforms:
12445+ try:
12446+ icl_version = _winreg.OpenKey(all_versions, version+'\\'+target)
12447+ path,type = _winreg.QueryValueEx(icl_version,'ProductDir')
12448+ if os.path.isfile(os.path.join(path, 'bin', 'iclvars.bat')):
12449+ try:
12450+ targets.append((target, (arch, conf.get_msvc_version('intel', version, target, os.path.join(path, 'bin', 'iclvars.bat')))))
12451+ except Configure.ConfigurationError:
12452+ pass
12453+ except WindowsError:
12454+ continue
12455+ major = version[0:2]
12456+ versions.append(('intel ' + major, targets))
12457+
12458+@conf
12459+def get_msvc_versions(conf):
12460+ if not conf.env.MSVC_INSTALLED_VERSIONS:
12461+ lst = []
12462+ conf.gather_msvc_versions(lst)
12463+ conf.gather_wsdk_versions(lst)
12464+ conf.gather_icl_versions(lst)
12465+ conf.env.MSVC_INSTALLED_VERSIONS = lst
12466+ return conf.env.MSVC_INSTALLED_VERSIONS
12467+
12468+@conf
12469+def print_all_msvc_detected(conf):
12470+ for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']:
12471+ info(version)
12472+ for target,l in targets:
12473+ info("\t"+target)
12474+
12475+def detect_msvc(conf):
12476+ versions = get_msvc_versions(conf)
12477+ return setup_msvc(conf, versions)
12478+
12479+@conf
12480+def find_lt_names_msvc(self, libname, is_static=False):
12481+ """
12482+	Win32/MSVC specific code to glean information from libtool la files.
12483+	This function is not attached to the task_gen class.
12484+ """
12485+ lt_names=[
12486+ 'lib%s.la' % libname,
12487+ '%s.la' % libname,
12488+ ]
12489+
12490+ for path in self.env['LIBPATH']:
12491+ for la in lt_names:
12492+ laf=os.path.join(path,la)
12493+ dll=None
12494+ if os.path.exists(laf):
12495+ ltdict=read_la_file(laf)
12496+ lt_libdir=None
12497+ if ltdict.get('libdir', ''):
12498+ lt_libdir = ltdict['libdir']
12499+ if not is_static and ltdict.get('library_names', ''):
12500+ dllnames=ltdict['library_names'].split()
12501+ dll=dllnames[0].lower()
12502+ dll=re.sub('\.dll$', '', dll)
12503+ return (lt_libdir, dll, False)
12504+ elif ltdict.get('old_library', ''):
12505+ olib=ltdict['old_library']
12506+ if os.path.exists(os.path.join(path,olib)):
12507+ return (path, olib, True)
12508+ elif lt_libdir != '' and os.path.exists(os.path.join(lt_libdir,olib)):
12509+ return (lt_libdir, olib, True)
12510+ else:
12511+ return (None, olib, True)
12512+ else:
12513+ raise Utils.WafError('invalid libtool object file: %s' % laf)
12514+ return (None, None, None)
12515+
12516+@conf
12517+def libname_msvc(self, libname, is_static=False, mandatory=False):
12518+ lib = libname.lower()
12519+ lib = re.sub('\.lib$','',lib)
12520+
12521+ if lib in g_msvc_systemlibs:
12522+ return lib
12523+
12524+ lib=re.sub('^lib','',lib)
12525+
12526+ if lib == 'm':
12527+ return None
12528+
12529+ (lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static)
12530+
12531+ if lt_path != None and lt_libname != None:
12532+ if lt_static == True:
12533+			# file existence check has been made by find_lt_names
12534+ return os.path.join(lt_path,lt_libname)
12535+
12536+ if lt_path != None:
12537+ _libpaths=[lt_path] + self.env['LIBPATH']
12538+ else:
12539+ _libpaths=self.env['LIBPATH']
12540+
12541+ static_libs=[
12542+ 'lib%ss.lib' % lib,
12543+ 'lib%s.lib' % lib,
12544+ '%ss.lib' % lib,
12545+ '%s.lib' %lib,
12546+ ]
12547+
12548+ dynamic_libs=[
12549+ 'lib%s.dll.lib' % lib,
12550+ 'lib%s.dll.a' % lib,
12551+ '%s.dll.lib' % lib,
12552+ '%s.dll.a' % lib,
12553+ 'lib%s_d.lib' % lib,
12554+ '%s_d.lib' % lib,
12555+ '%s.lib' %lib,
12556+ ]
12557+
12558+ libnames=static_libs
12559+ if not is_static:
12560+ libnames=dynamic_libs + static_libs
12561+
12562+ for path in _libpaths:
12563+ for libn in libnames:
12564+ if os.path.exists(os.path.join(path, libn)):
12565+ debug('msvc: lib found: %s', os.path.join(path,libn))
12566+ return re.sub('\.lib$', '',libn)
12567+
12568+ #if no lib can be found, just return the libname as msvc expects it
12569+ if mandatory:
12570+ self.fatal("The library %r could not be found" % libname)
12571+ return re.sub('\.lib$', '', libname)
12572+
12573+@conf
12574+def check_lib_msvc(self, libname, is_static=False, uselib_store=None, mandatory=False):
12575+ "This is the api to use"
12576+ libn = self.libname_msvc(libname, is_static, mandatory)
12577+
12578+ if not uselib_store:
12579+ uselib_store = libname.upper()
12580+
12581+ # Note: ideally we should be able to place the lib in the right env var, either STATICLIB or LIB,
12582+ # but we don't distinguish static libs from shared libs.
12583+ # This is ok since msvc doesn't have any special linker flag to select static libs (no env['STATICLIB_MARKER'])
12584+ if False and is_static: # disabled
12585+ self.env['STATICLIB_' + uselib_store] = [libn]
12586+ else:
12587+ self.env['LIB_' + uselib_store] = [libn]
12588+
12589+@conf
12590+def check_libs_msvc(self, libnames, is_static=False, mandatory=False):
12591+ for libname in Utils.to_list(libnames):
12592+ self.check_lib_msvc(libname, is_static, mandatory=mandatory)
12593+
12594+@conftest
12595+def no_autodetect(conf):
12596+ conf.eval_rules(detect.replace('autodetect', ''))
12597+
12598+
12599+detect = '''
12600+autodetect
12601+find_msvc
12602+msvc_common_flags
12603+cc_load_tools
12604+cxx_load_tools
12605+cc_add_flags
12606+cxx_add_flags
12607+link_add_flags
12608+'''
12609+
12610+@conftest
12611+def autodetect(conf):
12612+ v = conf.env
12613+ compiler, version, path, includes, libdirs = detect_msvc(conf)
12614+ v['PATH'] = path
12615+ v['CPPPATH'] = includes
12616+ v['LIBPATH'] = libdirs
12617+ v['MSVC_COMPILER'] = compiler
12618+
12619+def _get_prog_names(conf, compiler):
12620+ if compiler=='intel':
12621+ compiler_name = 'ICL'
12622+ linker_name = 'XILINK'
12623+ lib_name = 'XILIB'
12624+ else:
12625+ # assumes CL.exe
12626+ compiler_name = 'CL'
12627+ linker_name = 'LINK'
12628+ lib_name = 'LIB'
12629+ return compiler_name, linker_name, lib_name
12630+
12631+@conftest
12632+def find_msvc(conf):
12633+ # due to path format limitations, limit operation only to native Win32. Yeah it sucks.
12634+ if sys.platform != 'win32':
12635+ conf.fatal('MSVC module only works under native Win32 Python! cygwin is not supported yet')
12636+
12637+ v = conf.env
12638+
12639+ compiler, version, path, includes, libdirs = detect_msvc(conf)
12640+
12641+ compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
12642+ has_msvc_manifest = (compiler == 'msvc' and float(version) >= 8) or (compiler == 'wsdk' and float(version) >= 6) or (compiler == 'intel' and float(version) >= 11)
12643+
12644+ # compiler
12645+ cxx = None
12646+ if v.CXX: cxx = v.CXX
12647+ elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
12648+ if not cxx: cxx = conf.find_program(compiler_name, var='CXX', path_list=path, mandatory=True)
12649+ cxx = conf.cmd_to_list(cxx)
12650+
12651+ # before setting anything, check if the compiler is really msvc
12652+ env = dict(conf.environ)
12653+ env.update(PATH = ';'.join(path))
12654+ if not Utils.cmd_output([cxx, '/nologo', '/?'], silent=True, env=env):
12655+ conf.fatal('the msvc compiler could not be identified')
12656+
12657+ link = v.LINK_CXX
12658+ if not link:
12659+ link = conf.find_program(linker_name, path_list=path, mandatory=True)
12660+ ar = v.AR
12661+ if not ar:
12662+ ar = conf.find_program(lib_name, path_list=path, mandatory=True)
12663+
12664+ # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
12665+ mt = v.MT
12666+ if has_msvc_manifest:
12667+ mt = conf.find_program('MT', path_list=path, mandatory=True)
12668+
12669+ # no more possibility of failure means the data state will be consistent
12670+ # we may store the data safely now
12671+
12672+ v.MSVC_MANIFEST = has_msvc_manifest
12673+ v.PATH = path
12674+ v.CPPPATH = includes
12675+ v.LIBPATH = libdirs
12676+
12677+ # c/c++ compiler
12678+ v.CC = v.CXX = cxx
12679+ v.CC_NAME = v.CXX_NAME = 'msvc'
12680+
12681+ v.LINK = v.LINK_CXX = link
12682+ if not v.LINK_CC:
12683+ v.LINK_CC = v.LINK_CXX
12684+
12685+ v.AR = ar
12686+ v.MT = mt
12687+ v.MTFLAGS = v.ARFLAGS = ['/NOLOGO']
12688+
12689+
12690+ conf.check_tool('winres')
12691+
12692+ if not conf.env.WINRC:
12693+ warn('Resource compiler not found. Compiling resource file is disabled')
12694+
12695+ # environment flags
12696+ try: v.prepend_value('CPPPATH', conf.environ['INCLUDE'])
12697+ except KeyError: pass
12698+ try: v.prepend_value('LIBPATH', conf.environ['LIB'])
12699+ except KeyError: pass
12700+
12701+@conftest
12702+def msvc_common_flags(conf):
12703+ v = conf.env
12704+
12705+ v['CPPFLAGS'] = ['/W3', '/nologo']
12706+
12707+ v['CCDEFINES_ST'] = '/D%s'
12708+ v['CXXDEFINES_ST'] = '/D%s'
12709+
12710+	# TODO just use _WIN32, which is defined by the compiler itself!
12711+	v['CCDEFINES'] = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 macro anyway
12712+	v['CXXDEFINES'] = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 macro anyway
12713+
12714+ v['_CCINCFLAGS'] = []
12715+ v['_CCDEFFLAGS'] = []
12716+ v['_CXXINCFLAGS'] = []
12717+ v['_CXXDEFFLAGS'] = []
12718+
12719+ v['CC_SRC_F'] = ''
12720+ v['CC_TGT_F'] = ['/c', '/Fo']
12721+ v['CXX_SRC_F'] = ''
12722+ v['CXX_TGT_F'] = ['/c', '/Fo']
12723+
12724+ v['CPPPATH_ST'] = '/I%s' # template for adding include paths
12725+
12726+ v['AR_TGT_F'] = v['CCLNK_TGT_F'] = v['CXXLNK_TGT_F'] = '/OUT:'
12727+
12728+ # Subsystem specific flags
12729+ v['CPPFLAGS_CONSOLE'] = ['/SUBSYSTEM:CONSOLE']
12730+ v['CPPFLAGS_NATIVE'] = ['/SUBSYSTEM:NATIVE']
12731+ v['CPPFLAGS_POSIX'] = ['/SUBSYSTEM:POSIX']
12732+ v['CPPFLAGS_WINDOWS'] = ['/SUBSYSTEM:WINDOWS']
12733+ v['CPPFLAGS_WINDOWSCE'] = ['/SUBSYSTEM:WINDOWSCE']
12734+
12735+ # CRT specific flags
12736+ v['CPPFLAGS_CRT_MULTITHREADED'] = ['/MT']
12737+ v['CPPFLAGS_CRT_MULTITHREADED_DLL'] = ['/MD']
12738+
12739+ # TODO these are defined by the compiler itself!
12740+ v['CPPDEFINES_CRT_MULTITHREADED'] = ['_MT'] # this is defined by the compiler itself!
12741+ v['CPPDEFINES_CRT_MULTITHREADED_DLL'] = ['_MT', '_DLL'] # these are defined by the compiler itself!
12742+
12743+ v['CPPFLAGS_CRT_MULTITHREADED_DBG'] = ['/MTd']
12744+ v['CPPFLAGS_CRT_MULTITHREADED_DLL_DBG'] = ['/MDd']
12745+
12746+ # TODO these are defined by the compiler itself!
12747+ v['CPPDEFINES_CRT_MULTITHREADED_DBG'] = ['_DEBUG', '_MT'] # these are defined by the compiler itself!
12748+ v['CPPDEFINES_CRT_MULTITHREADED_DLL_DBG'] = ['_DEBUG', '_MT', '_DLL'] # these are defined by the compiler itself!
12749+
12750+ # compiler debug levels
12751+ v['CCFLAGS'] = ['/TC']
12752+ v['CCFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG']
12753+ v['CCFLAGS_RELEASE'] = ['/O2', '/DNDEBUG']
12754+ v['CCFLAGS_DEBUG'] = ['/Od', '/RTC1', '/ZI']
12755+ v['CCFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI']
12756+
12757+ v['CXXFLAGS'] = ['/TP', '/EHsc']
12758+ v['CXXFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG']
12759+ v['CXXFLAGS_RELEASE'] = ['/O2', '/DNDEBUG']
12760+
12761+ v['CXXFLAGS_DEBUG'] = ['/Od', '/RTC1', '/ZI']
12762+ v['CXXFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI']
12763+
12764+ # linker
12765+ v['LIB'] = []
12766+
12767+ v['LIB_ST'] = '%s.lib' # template for adding libs
12768+ v['LIBPATH_ST'] = '/LIBPATH:%s' # template for adding libpaths
12769+	v['STATICLIB_ST'] = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good practice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'
12770+ v['STATICLIBPATH_ST'] = '/LIBPATH:%s'
12771+
12772+ v['LINKFLAGS'] = ['/NOLOGO']
12773+ if v['MSVC_MANIFEST']:
12774+ v.append_value('LINKFLAGS', '/MANIFEST')
12775+ v['LINKFLAGS_DEBUG'] = ['/DEBUG']
12776+ v['LINKFLAGS_ULTRADEBUG'] = ['/DEBUG']
12777+
12778+ # shared library
12779+ v['shlib_CCFLAGS'] = ['']
12780+ v['shlib_CXXFLAGS'] = ['']
12781+ v['shlib_LINKFLAGS']= ['/DLL']
12782+ v['shlib_PATTERN'] = '%s.dll'
12783+ v['implib_PATTERN'] = '%s.lib'
12784+ v['IMPLIB_ST'] = '/IMPLIB:%s'
12785+
12786+ # static library
12787+ v['staticlib_LINKFLAGS'] = ['']
12788+	v['staticlib_PATTERN'] = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good practice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'
12789+
12790+ # program
12791+ v['program_PATTERN'] = '%s.exe'
12792+
12793+
12794+#######################################################################################################
12795+##### conf above, build below
12796+
12797+@after('apply_link')
12798+@feature('cc', 'cxx')
12799+def apply_flags_msvc(self):
12800+ if self.env.CC_NAME != 'msvc' or not self.link_task:
12801+ return
12802+
12803+ subsystem = getattr(self, 'subsystem', '')
12804+ if subsystem:
12805+ subsystem = '/subsystem:%s' % subsystem
12806+ flags = 'cstaticlib' in self.features and 'ARFLAGS' or 'LINKFLAGS'
12807+ self.env.append_value(flags, subsystem)
12808+
12809+ if getattr(self, 'link_task', None) and not 'cstaticlib' in self.features:
12810+ for f in self.env.LINKFLAGS:
12811+ d = f.lower()
12812+ if d[1:] == 'debug':
12813+ pdbnode = self.link_task.outputs[0].change_ext('.pdb')
12814+ pdbfile = pdbnode.bldpath(self.env)
12815+ self.link_task.outputs.append(pdbnode)
12816+ self.bld.install_files(self.install_path, [pdbnode], env=self.env)
12817+ break
12818+
12819+@feature('cprogram', 'cshlib', 'cstaticlib')
12820+@after('apply_lib_vars')
12821+@before('apply_obj_vars')
12822+def apply_obj_vars_msvc(self):
12823+ if self.env['CC_NAME'] != 'msvc':
12824+ return
12825+
12826+ try:
12827+ self.meths.remove('apply_obj_vars')
12828+ except ValueError:
12829+ pass
12830+
12831+ libpaths = getattr(self, 'libpaths', [])
12832+ if not libpaths: self.libpaths = libpaths
12833+
12834+ env = self.env
12835+ app = env.append_unique
12836+
12837+ cpppath_st = env['CPPPATH_ST']
12838+ lib_st = env['LIB_ST']
12839+ staticlib_st = env['STATICLIB_ST']
12840+ libpath_st = env['LIBPATH_ST']
12841+ staticlibpath_st = env['STATICLIBPATH_ST']
12842+
12843+ for i in env['LIBPATH']:
12844+ app('LINKFLAGS', libpath_st % i)
12845+ if not libpaths.count(i):
12846+ libpaths.append(i)
12847+
12848+ for i in env['LIBPATH']:
12849+ app('LINKFLAGS', staticlibpath_st % i)
12850+ if not libpaths.count(i):
12851+ libpaths.append(i)
12852+
12853+ # i doubt that anyone will make a fully static binary anyway
12854+ if not env['FULLSTATIC']:
12855+ if env['STATICLIB'] or env['LIB']:
12856+ app('LINKFLAGS', env['SHLIB_MARKER']) # TODO does SHLIB_MARKER work?
12857+
12858+ for i in env['STATICLIB']:
12859+ app('LINKFLAGS', staticlib_st % i)
12860+
12861+ for i in env['LIB']:
12862+ app('LINKFLAGS', lib_st % i)
12863+
12864+# split the manifest file processing from the link task, like for the rc processing
12865+
12866+@feature('cprogram', 'cshlib')
12867+@after('apply_link')
12868+def apply_manifest(self):
12869+	"""Special linker for MSVC with support for embedding manifests into DLLs
12870+	and executables compiled by Visual Studio 2005 or later. Without
12871+ the manifest file, the binaries are unusable.
12872+ See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx"""
12873+
12874+ if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST:
12875+ out_node = self.link_task.outputs[0]
12876+ man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
12877+ self.link_task.outputs.append(man_node)
12878+ self.link_task.do_manifest = True
12879+
12880+def exec_mf(self):
12881+ env = self.env
12882+ mtool = env['MT']
12883+ if not mtool:
12884+ return 0
12885+
12886+ self.do_manifest = False
12887+
12888+ outfile = self.outputs[0].bldpath(env)
12889+
12890+ manifest = None
12891+ for out_node in self.outputs:
12892+ if out_node.name.endswith('.manifest'):
12893+ manifest = out_node.bldpath(env)
12894+ break
12895+ if manifest is None:
12896+ # Should never get here. If we do, it means the manifest file was
12897+ # never added to the outputs list, thus we don't have a manifest file
12898+ # to embed, so we just return.
12899+ return 0
12900+
12901+ # embedding mode. Different for EXE's and DLL's.
12902+ # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
12903+ mode = ''
12904+ if 'cprogram' in self.generator.features:
12905+ mode = '1'
12906+ elif 'cshlib' in self.generator.features:
12907+ mode = '2'
12908+
12909+ debug('msvc: embedding manifest')
12910+ #flags = ' '.join(env['MTFLAGS'] or [])
12911+
12912+ lst = []
12913+ lst.extend([env['MT']])
12914+ lst.extend(Utils.to_list(env['MTFLAGS']))
12915+ lst.extend(Utils.to_list("-manifest"))
12916+ lst.extend(Utils.to_list(manifest))
12917+ lst.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))
12918+
12919+ #cmd='%s %s -manifest "%s" -outputresource:"%s";#%s' % (mtool, flags,
12920+ # manifest, outfile, mode)
12921+ lst = [lst]
12922+ return self.exec_command(*lst)
12923+
12924+########## stupid evil command modification: concatenate the tokens /Fx, /doc, and /x: with the next token
12925+
12926+def exec_command_msvc(self, *k, **kw):
12927+	"instead of quoting all the paths and keeping the shell, we can simply join the options msvc is interested in"
12928+ if self.env['CC_NAME'] == 'msvc':
12929+ if isinstance(k[0], list):
12930+ lst = []
12931+ carry = ''
12932+ for a in k[0]:
12933+ if len(a) == 3 and a.startswith('/F') or a == '/doc' or a[-1] == ':':
12934+ carry = a
12935+ else:
12936+ lst.append(carry + a)
12937+ carry = ''
12938+ k = [lst]
12939+
12940+ env = dict(os.environ)
12941+ env.update(PATH = ';'.join(self.env['PATH']))
12942+ kw['env'] = env
12943+
12944+ ret = self.generator.bld.exec_command(*k, **kw)
12945+ if ret: return ret
12946+ if getattr(self, 'do_manifest', None):
12947+ ret = exec_mf(self)
12948+ return ret
12949+
12950+for k in 'cc cxx winrc cc_link cxx_link static_link qxx'.split():
12951+ cls = Task.TaskBase.classes.get(k, None)
12952+ if cls:
12953+ cls.exec_command = exec_command_msvc
12954+
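For reference, the msvc tool added above is driven from a project wscript: check_libs_msvc() fills LIB_<NAME> variables at configure time, and a per-target 'subsystem' attribute becomes a /subsystem:... link flag via apply_flags_msvc. A minimal sketch with hypothetical file and library names, assuming waf 1.5.x with this module available as a tool:

	def configure(conf):
		conf.check_tool('msvc')
		# defines LIB_USER32 / LIB_GDI32 so targets can pull them in via uselib
		conf.check_libs_msvc('user32 gdi32', mandatory=True)

	def build(bld):
		bld(features='cc cprogram',
			source='main.c',
			target='hello',
			uselib='USER32 GDI32',
			subsystem='windows')	# emitted as /subsystem:windows on the link line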
12955diff --git a/buildtools/wafadmin/Tools/nasm.py b/buildtools/wafadmin/Tools/nasm.py
12956new file mode 100644
12957index 0000000..b99c3c7
12958--- /dev/null
12959+++ b/buildtools/wafadmin/Tools/nasm.py
12960@@ -0,0 +1,49 @@
12961+#!/usr/bin/env python
12962+# encoding: utf-8
12963+# Thomas Nagy, 2008
12964+
12965+"""
12966+Nasm processing
12967+"""
12968+
12969+import os
12970+import TaskGen, Task, Utils
12971+from TaskGen import taskgen, before, extension
12972+
12973+nasm_str = '${NASM} ${NASM_FLAGS} ${NASM_INCLUDES} ${SRC} -o ${TGT}'
12974+
12975+EXT_NASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']
12976+
12977+@before('apply_link')
12978+def apply_nasm_vars(self):
12979+
12980+ # flags
12981+ if hasattr(self, 'nasm_flags'):
12982+ for flag in self.to_list(self.nasm_flags):
12983+ self.env.append_value('NASM_FLAGS', flag)
12984+
12985+ # includes - well, if we suppose it works with c processing
12986+ if hasattr(self, 'includes'):
12987+ for inc in self.to_list(self.includes):
12988+ node = self.path.find_dir(inc)
12989+ if not node:
12990+				raise Utils.WafError('cannot find the dir ' + inc)
12991+ self.env.append_value('NASM_INCLUDES', '-I%s' % node.srcpath(self.env))
12992+ self.env.append_value('NASM_INCLUDES', '-I%s' % node.bldpath(self.env))
12993+
12994+@extension(EXT_NASM)
12995+def nasm_file(self, node):
12996+ try: obj_ext = self.obj_ext
12997+ except AttributeError: obj_ext = '_%d.o' % self.idx
12998+
12999+ task = self.create_task('nasm', node, node.change_ext(obj_ext))
13000+ self.compiled_tasks.append(task)
13001+
13002+ self.meths.append('apply_nasm_vars')
13003+
13004+# create our action here
13005+Task.simple_task_type('nasm', nasm_str, color='BLUE', ext_out='.o', shell=False)
13006+
13007+def detect(conf):
13008+ nasm = conf.find_program(['nasm', 'yasm'], var='NASM', mandatory=True)
13009+
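A usage sketch for the nasm tool above (hypothetical file names, assuming waf 1.5.x): assembler sources listed next to C sources are built with ${NASM}, and the per-target nasm_flags and includes attributes feed NASM_FLAGS / NASM_INCLUDES as handled by apply_nasm_vars:

	def configure(conf):
		conf.check_tool('gcc')
		conf.check_tool('nasm')

	def build(bld):
		bld(features='cc cprogram',
			source='main.c lowlevel.asm',
			target='demo',
			includes='asm',			# also searched by nasm through NASM_INCLUDES
			nasm_flags='-f elf32')	# appended to NASM_FLAGS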
13010diff --git a/buildtools/wafadmin/Tools/ocaml.py b/buildtools/wafadmin/Tools/ocaml.py
13011new file mode 100644
13012index 0000000..20c9269
13013--- /dev/null
13014+++ b/buildtools/wafadmin/Tools/ocaml.py
13015@@ -0,0 +1,298 @@
13016+#!/usr/bin/env python
13017+# encoding: utf-8
13018+# Thomas Nagy, 2006 (ita)
13019+
13020+"ocaml support"
13021+
13022+import os, re
13023+import TaskGen, Utils, Task, Build
13024+from Logs import error
13025+from TaskGen import taskgen, feature, before, after, extension
13026+
13027+EXT_MLL = ['.mll']
13028+EXT_MLY = ['.mly']
13029+EXT_MLI = ['.mli']
13030+EXT_MLC = ['.c']
13031+EXT_ML = ['.ml']
13032+
13033+open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
13034+foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
13035+def filter_comments(txt):
13036+ meh = [0]
13037+ def repl(m):
13038+ if m.group(1): meh[0] += 1
13039+ elif m.group(2): meh[0] -= 1
13040+ elif not meh[0]: return m.group(0)
13041+ return ''
13042+ return foo.sub(repl, txt)
13043+
13044+def scan(self):
13045+ node = self.inputs[0]
13046+ code = filter_comments(node.read(self.env))
13047+
13048+ global open_re
13049+ names = []
13050+ import_iterator = open_re.finditer(code)
13051+ if import_iterator:
13052+ for import_match in import_iterator:
13053+ names.append(import_match.group(1))
13054+ found_lst = []
13055+ raw_lst = []
13056+ for name in names:
13057+ nd = None
13058+ for x in self.incpaths:
13059+ nd = x.find_resource(name.lower()+'.ml')
13060+ if not nd: nd = x.find_resource(name+'.ml')
13061+ if nd:
13062+ found_lst.append(nd)
13063+ break
13064+ else:
13065+ raw_lst.append(name)
13066+
13067+ return (found_lst, raw_lst)
13068+
13069+native_lst=['native', 'all', 'c_object']
13070+bytecode_lst=['bytecode', 'all']
13071+class ocaml_taskgen(TaskGen.task_gen):
13072+ def __init__(self, *k, **kw):
13073+ TaskGen.task_gen.__init__(self, *k, **kw)
13074+
13075+@feature('ocaml')
13076+def init_ml(self):
13077+ Utils.def_attrs(self,
13078+ type = 'all',
13079+ incpaths_lst = [],
13080+ bld_incpaths_lst = [],
13081+ mlltasks = [],
13082+ mlytasks = [],
13083+ mlitasks = [],
13084+ native_tasks = [],
13085+ bytecode_tasks = [],
13086+ linktasks = [],
13087+ bytecode_env = None,
13088+ native_env = None,
13089+ compiled_tasks = [],
13090+ includes = '',
13091+ uselib = '',
13092+ are_deps_set = 0)
13093+
13094+@feature('ocaml')
13095+@after('init_ml')
13096+def init_envs_ml(self):
13097+
13098+ self.islibrary = getattr(self, 'islibrary', False)
13099+
13100+ global native_lst, bytecode_lst
13101+ self.native_env = None
13102+ if self.type in native_lst:
13103+ self.native_env = self.env.copy()
13104+ if self.islibrary: self.native_env['OCALINKFLAGS'] = '-a'
13105+
13106+ self.bytecode_env = None
13107+ if self.type in bytecode_lst:
13108+ self.bytecode_env = self.env.copy()
13109+ if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a'
13110+
13111+ if self.type == 'c_object':
13112+ self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
13113+
13114+@feature('ocaml')
13115+@before('apply_vars_ml')
13116+@after('init_envs_ml')
13117+def apply_incpaths_ml(self):
13118+ inc_lst = self.includes.split()
13119+ lst = self.incpaths_lst
13120+ for dir in inc_lst:
13121+ node = self.path.find_dir(dir)
13122+ if not node:
13123+ error("node not found: " + str(dir))
13124+ continue
13125+ self.bld.rescan(node)
13126+ if not node in lst: lst.append(node)
13127+ self.bld_incpaths_lst.append(node)
13128+ # now the nodes are added to self.incpaths_lst
13129+
13130+@feature('ocaml')
13131+@before('apply_core')
13132+def apply_vars_ml(self):
13133+ for i in self.incpaths_lst:
13134+ if self.bytecode_env:
13135+ app = self.bytecode_env.append_value
13136+ app('OCAMLPATH', '-I')
13137+ app('OCAMLPATH', i.srcpath(self.env))
13138+ app('OCAMLPATH', '-I')
13139+ app('OCAMLPATH', i.bldpath(self.env))
13140+
13141+ if self.native_env:
13142+ app = self.native_env.append_value
13143+ app('OCAMLPATH', '-I')
13144+ app('OCAMLPATH', i.bldpath(self.env))
13145+ app('OCAMLPATH', '-I')
13146+ app('OCAMLPATH', i.srcpath(self.env))
13147+
13148+ varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
13149+ for name in self.uselib.split():
13150+ for vname in varnames:
13151+ cnt = self.env[vname+'_'+name]
13152+ if cnt:
13153+ if self.bytecode_env: self.bytecode_env.append_value(vname, cnt)
13154+ if self.native_env: self.native_env.append_value(vname, cnt)
13155+
13156+@feature('ocaml')
13157+@after('apply_core')
13158+def apply_link_ml(self):
13159+
13160+ if self.bytecode_env:
13161+ ext = self.islibrary and '.cma' or '.run'
13162+
13163+ linktask = self.create_task('ocalink')
13164+ linktask.bytecode = 1
13165+ linktask.set_outputs(self.path.find_or_declare(self.target + ext))
13166+ linktask.obj = self
13167+ linktask.env = self.bytecode_env
13168+ self.linktasks.append(linktask)
13169+
13170+ if self.native_env:
13171+ if self.type == 'c_object': ext = '.o'
13172+ elif self.islibrary: ext = '.cmxa'
13173+ else: ext = ''
13174+
13175+ linktask = self.create_task('ocalinkx')
13176+ linktask.set_outputs(self.path.find_or_declare(self.target + ext))
13177+ linktask.obj = self
13178+ linktask.env = self.native_env
13179+ self.linktasks.append(linktask)
13180+
13181+ # we produce a .o file to be used by gcc
13182+ self.compiled_tasks.append(linktask)
13183+
13184+@extension(EXT_MLL)
13185+def mll_hook(self, node):
13186+ mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'), env=self.native_env)
13187+ self.mlltasks.append(mll_task)
13188+
13189+ self.allnodes.append(mll_task.outputs[0])
13190+
13191+@extension(EXT_MLY)
13192+def mly_hook(self, node):
13193+ mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')], env=self.native_env)
13194+ self.mlytasks.append(mly_task)
13195+ self.allnodes.append(mly_task.outputs[0])
13196+
13197+ task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'), env=self.native_env)
13198+
13199+@extension(EXT_MLI)
13200+def mli_hook(self, node):
13201+ task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'), env=self.native_env)
13202+ self.mlitasks.append(task)
13203+
13204+@extension(EXT_MLC)
13205+def mlc_hook(self, node):
13206+ task = self.create_task('ocamlcc', node, node.change_ext('.o'), env=self.native_env)
13207+ self.compiled_tasks.append(task)
13208+
13209+@extension(EXT_ML)
13210+def ml_hook(self, node):
13211+ if self.native_env:
13212+ task = self.create_task('ocamlx', node, node.change_ext('.cmx'), env=self.native_env)
13213+ task.obj = self
13214+ task.incpaths = self.bld_incpaths_lst
13215+ self.native_tasks.append(task)
13216+
13217+ if self.bytecode_env:
13218+ task = self.create_task('ocaml', node, node.change_ext('.cmo'), env=self.bytecode_env)
13219+ task.obj = self
13220+ task.bytecode = 1
13221+ task.incpaths = self.bld_incpaths_lst
13222+ self.bytecode_tasks.append(task)
13223+
13224+def compile_may_start(self):
13225+ if not getattr(self, 'flag_deps', ''):
13226+ self.flag_deps = 1
13227+
13228+ # the evil part is that we can only compute the dependencies after the
13229+ # source files can be read (this means actually producing the source files)
13230+ if getattr(self, 'bytecode', ''): alltasks = self.obj.bytecode_tasks
13231+ else: alltasks = self.obj.native_tasks
13232+
13233+ self.signature() # ensure that files are scanned - unfortunately
13234+ tree = self.generator.bld
13235+ env = self.env
13236+ for node in self.inputs:
13237+ lst = tree.node_deps[self.unique_id()]
13238+ for depnode in lst:
13239+ for t in alltasks:
13240+ if t == self: continue
13241+ if depnode in t.inputs:
13242+ self.set_run_after(t)
13243+
13244+ # TODO necessary to get the signature right - for now
13245+ delattr(self, 'cache_sig')
13246+ self.signature()
13247+
13248+ return Task.Task.runnable_status(self)
13249+
13250+b = Task.simple_task_type
13251+cls = b('ocamlx', '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
13252+cls.runnable_status = compile_may_start
13253+cls.scan = scan
13254+
13255+b = Task.simple_task_type
13256+cls = b('ocaml', '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
13257+cls.runnable_status = compile_may_start
13258+cls.scan = scan
13259+
13260+
13261+b('ocamlcmi', '${OCAMLC} ${OCAMLPATH} ${INCLUDES} -o ${TGT} -c ${SRC}', color='BLUE', before="ocaml ocamlcc ocamlx")
13262+b('ocamlcc', 'cd ${TGT[0].bld_dir(env)} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${INCLUDES} -c ${SRC[0].abspath(env)}', color='GREEN')
13263+
13264+b('ocamllex', '${OCAMLLEX} ${SRC} -o ${TGT}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
13265+b('ocamlyacc', '${OCAMLYACC} -b ${TGT[0].bld_base(env)} ${SRC}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
13266+
13267+
13268+def link_may_start(self):
13269+ if not getattr(self, 'order', ''):
13270+
13271+ # now reorder the inputs given the task dependencies
13272+ if getattr(self, 'bytecode', 0): alltasks = self.obj.bytecode_tasks
13273+ else: alltasks = self.obj.native_tasks
13274+
13275+ # this part is difficult, we do not have a total order on the tasks
13276+ # if the dependencies are wrong, this may not stop
13277+ seen = []
13278+ pendant = []+alltasks
13279+ while pendant:
13280+ task = pendant.pop(0)
13281+ if task in seen: continue
13282+ for x in task.run_after:
13283+ if not x in seen:
13284+ pendant.append(task)
13285+ break
13286+ else:
13287+ seen.append(task)
13288+ self.inputs = [x.outputs[0] for x in seen]
13289+ self.order = 1
13290+ return Task.Task.runnable_status(self)
13291+
13292+act = b('ocalink', '${OCAMLC} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS} ${SRC}', color='YELLOW', after="ocaml ocamlcc")
13293+act.runnable_status = link_may_start
13294+act = b('ocalinkx', '${OCAMLOPT} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS_OPT} ${SRC}', color='YELLOW', after="ocamlx ocamlcc")
13295+act.runnable_status = link_may_start
13296+
13297+def detect(conf):
13298+ opt = conf.find_program('ocamlopt', var='OCAMLOPT')
13299+ occ = conf.find_program('ocamlc', var='OCAMLC')
13300+ if (not opt) or (not occ):
13301+ conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
13302+
13303+ v = conf.env
13304+ v['OCAMLC'] = occ
13305+ v['OCAMLOPT'] = opt
13306+ v['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX')
13307+ v['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC')
13308+ v['OCAMLFLAGS'] = ''
13309+ v['OCAMLLIB'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
13310+ v['LIBPATH_OCAML'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
13311+ v['CPPPATH_OCAML'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
13312+ v['LIB_OCAML'] = 'camlrun'
13313+
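A configure/build sketch for the ocaml tool above (hypothetical file names, assuming waf 1.5.x). The 'type' attribute selects native and/or bytecode output as listed in native_lst/bytecode_lst, and islibrary switches the link step to the -a archive mode:

	def configure(conf):
		conf.check_tool('ocaml')

	def build(bld):
		bld(features='ocaml',
			type='all',			# both native (.cmxa) and bytecode (.cma) variants
			islibrary=True,
			source='util.ml lexer.mll parser.mly api.mli',
			target='mylib',
			includes='.')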
13314diff --git a/buildtools/wafadmin/Tools/osx.py b/buildtools/wafadmin/Tools/osx.py
13315new file mode 100644
13316index 0000000..561eca4
13317--- /dev/null
13318+++ b/buildtools/wafadmin/Tools/osx.py
13319@@ -0,0 +1,188 @@
13320+#!/usr/bin/env python
13321+# encoding: utf-8
13322+# Thomas Nagy 2008
13323+
13324+"""MacOSX related tools
13325+
13326+To compile an executable into a Mac application bundle (a .app), set its 'mac_app' attribute
13327+ obj.mac_app = True
13328+
13329+To make a bundled shared library (a .bundle), set the 'mac_bundle' attribute:
13330+ obj.mac_bundle = True
13331+"""
13332+
13333+import os, shutil, sys, platform
13334+import TaskGen, Task, Build, Options, Utils
13335+from TaskGen import taskgen, feature, after, before
13336+from Logs import error, debug
13337+
13338+# plist template
13339+app_info = '''
13340+<?xml version="1.0" encoding="UTF-8"?>
13341+<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
13342+<plist version="0.9">
13343+<dict>
13344+ <key>CFBundlePackageType</key>
13345+ <string>APPL</string>
13346+ <key>CFBundleGetInfoString</key>
13347+ <string>Created by Waf</string>
13348+ <key>CFBundleSignature</key>
13349+ <string>????</string>
13350+ <key>NOTE</key>
13351+ <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
13352+ <key>CFBundleExecutable</key>
13353+ <string>%s</string>
13354+</dict>
13355+</plist>
13356+'''
13357+
13358+# see WAF issue 285
13359+# and also http://trac.macports.org/ticket/17059
13360+@feature('cc', 'cxx')
13361+@before('apply_lib_vars')
13362+def set_macosx_deployment_target(self):
13363+ if self.env['MACOSX_DEPLOYMENT_TARGET']:
13364+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env['MACOSX_DEPLOYMENT_TARGET']
13365+ elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
13366+ if sys.platform == 'darwin':
13367+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
13368+
13369+@feature('cc', 'cxx')
13370+@after('apply_lib_vars')
13371+def apply_framework(self):
13372+ for x in self.to_list(self.env['FRAMEWORKPATH']):
13373+ frameworkpath_st = '-F%s'
13374+ self.env.append_unique('CXXFLAGS', frameworkpath_st % x)
13375+ self.env.append_unique('CCFLAGS', frameworkpath_st % x)
13376+ self.env.append_unique('LINKFLAGS', frameworkpath_st % x)
13377+
13378+ for x in self.to_list(self.env['FRAMEWORK']):
13379+ self.env.append_value('LINKFLAGS', ['-framework', x])
13380+
13381+@taskgen
13382+def create_bundle_dirs(self, name, out):
13383+ bld = self.bld
13384+ dir = out.parent.get_dir(name)
13385+
13386+ if not dir:
13387+ dir = out.__class__(name, out.parent, 1)
13388+ bld.rescan(dir)
13389+ contents = out.__class__('Contents', dir, 1)
13390+ bld.rescan(contents)
13391+ macos = out.__class__('MacOS', contents, 1)
13392+ bld.rescan(macos)
13393+ return dir
13394+
13395+def bundle_name_for_output(out):
13396+ name = out.name
13397+ k = name.rfind('.')
13398+ if k >= 0:
13399+ name = name[:k] + '.app'
13400+ else:
13401+ name = name + '.app'
13402+ return name
13403+
13404+@taskgen
13405+@after('apply_link')
13406+@feature('cprogram')
13407+def create_task_macapp(self):
13408+ """Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
13409+ or use obj.mac_app = True to build specific targets as Mac apps"""
13410+ if self.env['MACAPP'] or getattr(self, 'mac_app', False):
13411+ apptask = self.create_task('macapp')
13412+ apptask.set_inputs(self.link_task.outputs)
13413+
13414+ out = self.link_task.outputs[0]
13415+
13416+ name = bundle_name_for_output(out)
13417+ dir = self.create_bundle_dirs(name, out)
13418+
13419+ n1 = dir.find_or_declare(['Contents', 'MacOS', out.name])
13420+
13421+ apptask.set_outputs([n1])
13422+ apptask.chmod = 0755
13423+ apptask.install_path = os.path.join(self.install_path, name, 'Contents', 'MacOS')
13424+ self.apptask = apptask
13425+
13426+@after('apply_link')
13427+@feature('cprogram')
13428+def create_task_macplist(self):
13429+ """Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
13430+ or use obj.mac_app = True to build specific targets as Mac apps"""
13431+ if self.env['MACAPP'] or getattr(self, 'mac_app', False):
13432+ # check if the user specified a plist before using our template
13433+ if not getattr(self, 'mac_plist', False):
13434+ self.mac_plist = app_info
13435+
13436+ plisttask = self.create_task('macplist')
13437+ plisttask.set_inputs(self.link_task.outputs)
13438+
13439+ out = self.link_task.outputs[0]
13440+ self.mac_plist = self.mac_plist % (out.name)
13441+
13442+ name = bundle_name_for_output(out)
13443+ dir = self.create_bundle_dirs(name, out)
13444+
13445+ n1 = dir.find_or_declare(['Contents', 'Info.plist'])
13446+
13447+ plisttask.set_outputs([n1])
13448+ plisttask.mac_plist = self.mac_plist
13449+ plisttask.install_path = os.path.join(self.install_path, name, 'Contents')
13450+ self.plisttask = plisttask
13451+
13452+@after('apply_link')
13453+@feature('cshlib')
13454+def apply_link_osx(self):
13455+ name = self.link_task.outputs[0].name
13456+ if not self.install_path:
13457+ return
13458+ if getattr(self, 'vnum', None):
13459+ name = name.replace('.dylib', '.%s.dylib' % self.vnum)
13460+
13461+ path = os.path.join(Utils.subst_vars(self.install_path, self.env), name)
13462+ if '-dynamiclib' in self.env['LINKFLAGS']:
13463+ self.env.append_value('LINKFLAGS', '-install_name')
13464+ self.env.append_value('LINKFLAGS', path)
13465+
13466+@before('apply_link', 'apply_lib_vars')
13467+@feature('cc', 'cxx')
13468+def apply_bundle(self):
13469+ """use env['MACBUNDLE'] to force all shlibs into mac bundles
13470+ or use obj.mac_bundle = True for specific targets only"""
13471+ if not ('cshlib' in self.features or 'shlib' in self.features): return
13472+ if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
13473+ self.env['shlib_PATTERN'] = self.env['macbundle_PATTERN']
13474+ uselib = self.uselib = self.to_list(self.uselib)
13475+ if not 'MACBUNDLE' in uselib: uselib.append('MACBUNDLE')
13476+
13477+@after('apply_link')
13478+@feature('cshlib')
13479+def apply_bundle_remove_dynamiclib(self):
13480+ if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
13481+ if not getattr(self, 'vnum', None):
13482+ try:
13483+ self.env['LINKFLAGS'].remove('-dynamiclib')
13484+ self.env['LINKFLAGS'].remove('-single_module')
13485+ except ValueError:
13486+ pass
13487+
13488+# TODO REMOVE IN 1.6 (global variable)
13489+app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
13490+
13491+def app_build(task):
13492+ env = task.env
13493+ shutil.copy2(task.inputs[0].srcpath(env), task.outputs[0].abspath(env))
13494+
13495+ return 0
13496+
13497+def plist_build(task):
13498+ env = task.env
13499+ f = open(task.outputs[0].abspath(env), "w")
13500+ f.write(task.mac_plist)
13501+ f.close()
13502+
13503+ return 0
13504+
13505+Task.task_type_from_func('macapp', vars=[], func=app_build, after="cxx_link cc_link static_link")
13506+Task.task_type_from_func('macplist', vars=[], func=plist_build, after="cxx_link cc_link static_link")
13507+
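Following the module docstring above, a short sketch of how a wscript might opt targets into the .app / .bundle handling (hypothetical names, assuming waf 1.5.x on darwin):

	def build(bld):
		app = bld(features='cc cprogram', source='main.c', target='MyTool')
		app.mac_app = True		# wrapped into MyTool.app/Contents/MacOS/ with a generated Info.plist

		plug = bld(features='cc cshlib', source='plugin.c', target='plugin')
		plug.mac_bundle = True	# linked with macbundle_PATTERN instead of a plain dylib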
13508diff --git a/buildtools/wafadmin/Tools/perl.py b/buildtools/wafadmin/Tools/perl.py
13509new file mode 100644
13510index 0000000..a6787a8
13511--- /dev/null
13512+++ b/buildtools/wafadmin/Tools/perl.py
13513@@ -0,0 +1,109 @@
13514+#!/usr/bin/env python
13515+# encoding: utf-8
13516+# andersg at 0x63.nu 2007
13517+
13518+import os
13519+import Task, Options, Utils
13520+from Configure import conf
13521+from TaskGen import extension, taskgen, feature, before
13522+
13523+xsubpp_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
13524+EXT_XS = ['.xs']
13525+
13526+@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars')
13527+@feature('perlext')
13528+def init_perlext(self):
13529+ self.uselib = self.to_list(getattr(self, 'uselib', ''))
13530+ if not 'PERL' in self.uselib: self.uselib.append('PERL')
13531+ if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT')
13532+ self.env['shlib_PATTERN'] = self.env['perlext_PATTERN']
13533+
13534+@extension(EXT_XS)
13535+def xsubpp_file(self, node):
13536+ outnode = node.change_ext('.c')
13537+ self.create_task('xsubpp', node, outnode)
13538+ self.allnodes.append(outnode)
13539+
13540+Task.simple_task_type('xsubpp', xsubpp_str, color='BLUE', before='cc cxx', shell=False)
13541+
13542+@conf
13543+def check_perl_version(conf, minver=None):
13544+ """
13545+ Checks if perl is installed.
13546+
13547+	If installed, the variable PERL will be set in the environment.
13548+
13549+	The perl binary can be overridden with the --with-perl-binary configuration option.
13550+
13551+ """
13552+
13553+ if getattr(Options.options, 'perlbinary', None):
13554+ conf.env.PERL = Options.options.perlbinary
13555+ else:
13556+ conf.find_program('perl', var='PERL', mandatory=True)
13557+
13558+ try:
13559+ version = Utils.cmd_output([conf.env.PERL, '-e', 'printf "%vd",$^V'])
13560+ except:
13561+ conf.fatal('could not determine the perl version')
13562+
13563+ conf.env.PERL_VERSION = version
13564+ cver = ''
13565+ if minver:
13566+ try:
13567+ ver = tuple(map(int, version.split('.')))
13568+ except:
13569+ conf.fatal('unsupported perl version %r' % version)
13570+ if ver < minver:
13571+ conf.fatal('perl is too old')
13572+
13573+ cver = '.'.join(map(str,minver))
13574+ conf.check_message('perl', cver, True, version)
13575+
13576+@conf
13577+def check_perl_module(conf, module):
13578+	Check if the specified perl module is installed.
13579+
13580+	A minimum version can be requested by appending it to the module
13581+	name, like this:
13582+ like this:
13583+
13584+ conf.check_perl_module("Some::Module 2.92")
13585+ """
13586+ cmd = [conf.env['PERL'], '-e', 'use %s' % module]
13587+ r = Utils.pproc.call(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE) == 0
13588+ conf.check_message("perl module %s" % module, "", r)
13589+ return r
13590+
13591+@conf
13592+def check_perl_ext_devel(conf):
13593+ """
13594+ Check for configuration needed to build perl extensions.
13595+
13596+	Sets the various *_PERLEXT variables in the environment.
13597+
13598+	Also sets the ARCHDIR_PERL variable, useful as an installation path,
13599+	which can be overridden with --with-perl-archdir.
13600+ """
13601+ if not conf.env.PERL:
13602+ conf.fatal('perl detection is required first')
13603+
13604+ def read_out(cmd):
13605+ return Utils.to_list(Utils.cmd_output([conf.env.PERL, '-MConfig', '-e', cmd]))
13606+
13607+ conf.env.LINKFLAGS_PERLEXT = read_out('print $Config{lddlflags}')
13608+ conf.env.CPPPATH_PERLEXT = read_out('print "$Config{archlib}/CORE"')
13609+ conf.env.CCFLAGS_PERLEXT = read_out('print "$Config{ccflags} $Config{cccdlflags}"')
13610+ conf.env.XSUBPP = read_out('print "$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}"')
13611+ conf.env.EXTUTILS_TYPEMAP = read_out('print "$Config{privlib}/ExtUtils/typemap"')
13612+ conf.env.perlext_PATTERN = '%s.' + read_out('print $Config{dlext}')[0]
13613+
13614+ if getattr(Options.options, 'perlarchdir', None):
13615+ conf.env.ARCHDIR_PERL = Options.options.perlarchdir
13616+ else:
13617+ conf.env.ARCHDIR_PERL = read_out('print $Config{sitearch}')[0]
13618+
13619+def set_options(opt):
13620+ opt.add_option("--with-perl-binary", type="string", dest="perlbinary", help = 'Specify alternate perl binary', default=None)
13621+ opt.add_option("--with-perl-archdir", type="string", dest="perlarchdir", help = 'Specify directory where to install arch specific files', default=None)
13622+
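A configure/build sketch for the perl tool above (hypothetical extension name Foo, assuming waf 1.5.x). check_perl_ext_devel() fills the *_PERLEXT variables and ARCHDIR_PERL used below; the version tuple and the module string follow the docstrings in this file:

	def set_options(opt):
		opt.tool_options('perl')

	def configure(conf):
		conf.check_tool('perl')
		conf.check_perl_version((5, 8, 0))
		conf.check_perl_ext_devel()
		conf.check_perl_module("Some::Module 2.92")	# example string from the docstring above

	def build(bld):
		bld(features='cc cshlib perlext',
			source='Foo.xs glue.c',
			target='Foo',
			install_path='${ARCHDIR_PERL}/auto/Foo')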
13623diff --git a/buildtools/wafadmin/Tools/preproc.py b/buildtools/wafadmin/Tools/preproc.py
13624new file mode 100644
13625index 0000000..5055456
13626--- /dev/null
13627+++ b/buildtools/wafadmin/Tools/preproc.py
13628@@ -0,0 +1,836 @@
13629+#!/usr/bin/env python
13630+# encoding: utf-8
13631+# Thomas Nagy, 2006-2009 (ita)
13632+
13633+"""
13634+C/C++ preprocessor for finding dependencies
13635+
13636+Reasons for using the Waf preprocessor by default
13637+1. Some c/c++ extensions (Qt) require a custom preprocessor for obtaining the dependencies (.moc files)
13638+2. Not all compilers provide .d files for obtaining the dependencies (portability)
13639+3. A naive file scanner will not catch constructs such as "#include foo()"
13640+4. A naive file scanner will catch unnecessary dependencies (change an unused header -> recompile everything)
13641+
13642+Regarding the speed concerns:
13643+a. the preprocessing is performed only when files must be compiled
13644+b. the macros are evaluated only for #if/#elif/#include
13645+c. the time penalty is about 10%
13646+d. system headers are not scanned
13647+
13648+Now if you do not want the Waf preprocessor, the tool "gccdeps" uses the .d files produced
13649+during the compilation to track the dependencies (useful when used with the boost libraries).
13650+It only works with gcc though, and it cannot be used with Qt builds. A dumb
13651+file scanner will be added in the future, so we will have most behaviours.
13652+"""
13653+# TODO: more varargs, pragma once
13654+# TODO: dumb file scanner tracking all includes
13655+
13656+import re, sys, os, string
13657+import Logs, Build, Utils
13658+from Logs import debug, error
13659+import traceback
13660+
13661+class PreprocError(Utils.WafError):
13662+ pass
13663+
13664+POPFILE = '-'
13665+
13666+
13667+recursion_limit = 5000
13668+"do not loop too much on header inclusion"
13669+
13670+go_absolute = 0
13671+"set to 1 to track headers on files in /usr/include - else absolute paths are ignored"
13672+
13673+standard_includes = ['/usr/include']
13674+if sys.platform == "win32":
13675+ standard_includes = []
13676+
13677+use_trigraphs = 0
13678+'apply the trigraph rules first'
13679+
13680+strict_quotes = 0
13681+"Keep <> for system includes (do not search for those includes)"
13682+
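# As a sketch of how the module-level switches above are meant to be used (assuming
# the wafadmin Tools directory is on sys.path, as it is when waf runs), a project
# wscript could tweak them before any scanning happens:
#
#	import preproc
#	preproc.go_absolute = 1		# also follow headers reached through absolute paths such as /usr/include
#	preproc.strict_quotes = 1	# do not resolve <...> system includes
#	preproc.use_trigraphs = 1	# apply trigraph translation before scanning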
13683+g_optrans = {
13684+'not':'!',
13685+'and':'&&',
13686+'bitand':'&',
13687+'and_eq':'&=',
13688+'or':'||',
13689+'bitor':'|',
13690+'or_eq':'|=',
13691+'xor':'^',
13692+'xor_eq':'^=',
13693+'compl':'~',
13694+}
13695+"these ops are for c++; to reset, set an empty dict"
13696+
13697+# ignore #warning and #error
13698+re_lines = re.compile(\
13699+ '^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
13700+ re.IGNORECASE | re.MULTILINE)
13701+
13702+re_mac = re.compile("^[a-zA-Z_]\w*")
13703+re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
13704+re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE)
13705+re_nl = re.compile('\\\\\r*\n', re.MULTILINE)
13706+re_cpp = re.compile(
13707+ r"""(/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)|//[^\n]*|("(?:\\.|[^"\\])*"|'(?:\\.|[^'\\])*'|.[^/"'\\]*)""",
13708+ re.MULTILINE)
13709+trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')]
13710+chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39}
13711+
13712+NUM = 'i'
13713+OP = 'O'
13714+IDENT = 'T'
13715+STR = 's'
13716+CHAR = 'c'
13717+
13718+tok_types = [NUM, STR, IDENT, OP]
13719+exp_types = [
13720+ r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",
13721+ r'L?"([^"\\]|\\.)*"',
13722+ r'[a-zA-Z_]\w*',
13723+ r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',
13724+]
13725+re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, exp_types)]), re.M)
13726+
13727+accepted = 'a'
13728+ignored = 'i'
13729+undefined = 'u'
13730+skipped = 's'
13731+
13732+def repl(m):
13733+ if m.group(1):
13734+ return ' '
13735+ s = m.group(2)
13736+ if s is None:
13737+ return ''
13738+ return s
13739+
13740+def filter_comments(filename):
13741+ # return a list of tuples : keyword, line
13742+ code = Utils.readf(filename)
13743+ if use_trigraphs:
13744+ for (a, b) in trig_def: code = code.split(a).join(b)
13745+ code = re_nl.sub('', code)
13746+ code = re_cpp.sub(repl, code)
13747+ return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)]
13748+
13749+prec = {}
13750+# op -> number, needed for such expressions: #if 1 && 2 != 0
13751+ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
13752+for x in range(len(ops)):
13753+ syms = ops[x]
13754+ for u in syms.split():
13755+ prec[u] = x
13756+
13757+def reduce_nums(val_1, val_2, val_op):
13758+ """apply arithmetic rules and try to return an integer result"""
13759+ #print val_1, val_2, val_op
13760+
13761+ # now perform the operation, make certain a and b are numeric
13762+ try: a = 0 + val_1
13763+ except TypeError: a = int(val_1)
13764+ try: b = 0 + val_2
13765+ except TypeError: b = int(val_2)
13766+
13767+ d = val_op
13768+ if d == '%': c = a%b
13769+ elif d=='+': c = a+b
13770+ elif d=='-': c = a-b
13771+ elif d=='*': c = a*b
13772+ elif d=='/': c = a/b
13773+ elif d=='^': c = a^b
13774+ elif d=='|': c = a|b
13775+ elif d=='||': c = int(a or b)
13776+ elif d=='&': c = a&b
13777+ elif d=='&&': c = int(a and b)
13778+ elif d=='==': c = int(a == b)
13779+ elif d=='!=': c = int(a != b)
13780+ elif d=='<=': c = int(a <= b)
13781+ elif d=='<': c = int(a < b)
13782+ elif d=='>': c = int(a > b)
13783+ elif d=='>=': c = int(a >= b)
13784+ elif d=='^': c = int(a^b)
13785+ elif d=='<<': c = a<<b
13786+ elif d=='>>': c = a>>b
13787+ else: c = 0
13788+ return c
13789+
13790+def get_num(lst):
13791+ if not lst: raise PreprocError("empty list for get_num")
13792+ (p, v) = lst[0]
13793+ if p == OP:
13794+ if v == '(':
13795+ count_par = 1
13796+ i = 1
13797+ while i < len(lst):
13798+ (p, v) = lst[i]
13799+
13800+ if p == OP:
13801+ if v == ')':
13802+ count_par -= 1
13803+ if count_par == 0:
13804+ break
13805+ elif v == '(':
13806+ count_par += 1
13807+ i += 1
13808+ else:
13809+ raise PreprocError("rparen expected %r" % lst)
13810+
13811+ (num, _) = get_term(lst[1:i])
13812+ return (num, lst[i+1:])
13813+
13814+ elif v == '+':
13815+ return get_num(lst[1:])
13816+ elif v == '-':
13817+ num, lst = get_num(lst[1:])
13818+ return (reduce_nums('-1', num, '*'), lst)
13819+ elif v == '!':
13820+ num, lst = get_num(lst[1:])
13821+ return (int(not int(num)), lst)
13822+ elif v == '~':
13823+			num, lst = get_num(lst[1:]); return (~ int(num), lst)
13824+ else:
13825+ raise PreprocError("invalid op token %r for get_num" % lst)
13826+ elif p == NUM:
13827+ return v, lst[1:]
13828+ elif p == IDENT:
13829+ # all macros should have been replaced, remaining identifiers eval to 0
13830+ return 0, lst[1:]
13831+ else:
13832+ raise PreprocError("invalid token %r for get_num" % lst)
13833+
13834+def get_term(lst):
13835+ if not lst: raise PreprocError("empty list for get_term")
13836+ num, lst = get_num(lst)
13837+ if not lst:
13838+ return (num, [])
13839+ (p, v) = lst[0]
13840+ if p == OP:
13841+ if v == '&&' and not num:
13842+ return (num, [])
13843+ elif v == '||' and num:
13844+ return (num, [])
13845+ elif v == ',':
13846+ # skip
13847+ return get_term(lst[1:])
13848+ elif v == '?':
13849+ count_par = 0
13850+ i = 1
13851+ while i < len(lst):
13852+ (p, v) = lst[i]
13853+
13854+ if p == OP:
13855+ if v == ')':
13856+ count_par -= 1
13857+ elif v == '(':
13858+ count_par += 1
13859+ elif v == ':':
13860+ if count_par == 0:
13861+ break
13862+ i += 1
13863+ else:
13864+ raise PreprocError("rparen expected %r" % lst)
13865+
13866+ if int(num):
13867+ return get_term(lst[1:i])
13868+ else:
13869+ return get_term(lst[i+1:])
13870+
13871+ else:
13872+ num2, lst = get_num(lst[1:])
13873+
13874+ if not lst:
13875+ # no more tokens to process
13876+ num2 = reduce_nums(num, num2, v)
13877+ return get_term([(NUM, num2)] + lst)
13878+
13879+ # operator precedence
13880+ p2, v2 = lst[0]
13881+ if p2 != OP:
13882+ raise PreprocError("op expected %r" % lst)
13883+
13884+ if prec[v2] >= prec[v]:
13885+ num2 = reduce_nums(num, num2, v)
13886+ return get_term([(NUM, num2)] + lst)
13887+ else:
13888+ num3, lst = get_num(lst[1:])
13889+ num3 = reduce_nums(num2, num3, v2)
13890+ return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)
13891+
13892+
13893+ raise PreprocError("cannot reduce %r" % lst)
13894+
13895+def reduce_eval(lst):
13896+ """take a list of tokens and output true or false (#if/#elif conditions)"""
13897+ num, lst = get_term(lst)
13898+ return (NUM, num)
13899+
13900+def stringize(lst):
13901+ """use for converting a list of tokens to a string"""
13902+ lst = [str(v2) for (p2, v2) in lst]
13903+ return "".join(lst)
13904+
13905+def paste_tokens(t1, t2):
13906+ """
13907+ here is what we can paste:
13908+ a ## b -> ab
13909+ > ## = -> >=
13910+ a ## 2 -> a2
13911+ """
13912+ p1 = None
13913+ if t1[0] == OP and t2[0] == OP:
13914+ p1 = OP
13915+ elif t1[0] == IDENT and (t2[0] == IDENT or t2[0] == NUM):
13916+ p1 = IDENT
13917+ elif t1[0] == NUM and t2[0] == NUM:
13918+ p1 = NUM
13919+ if not p1:
13920+ raise PreprocError('tokens do not make a valid paste %r and %r' % (t1, t2))
13921+ return (p1, t1[1] + t2[1])
13922+
13923+def reduce_tokens(lst, defs, ban=[]):
13924+ """replace the tokens in lst, using the macros provided in defs, and a list of macros that cannot be re-applied"""
13925+ i = 0
13926+
13927+ while i < len(lst):
13928+ (p, v) = lst[i]
13929+
13930+ if p == IDENT and v == "defined":
13931+ del lst[i]
13932+ if i < len(lst):
13933+ (p2, v2) = lst[i]
13934+ if p2 == IDENT:
13935+ if v2 in defs:
13936+ lst[i] = (NUM, 1)
13937+ else:
13938+ lst[i] = (NUM, 0)
13939+ elif p2 == OP and v2 == '(':
13940+ del lst[i]
13941+ (p2, v2) = lst[i]
13942+ del lst[i] # remove the ident, and change the ) for the value
13943+ if v2 in defs:
13944+ lst[i] = (NUM, 1)
13945+ else:
13946+ lst[i] = (NUM, 0)
13947+ else:
13948+ raise PreprocError("invalid define expression %r" % lst)
13949+
13950+ elif p == IDENT and v in defs:
13951+
13952+ if isinstance(defs[v], str):
13953+ a, b = extract_macro(defs[v])
13954+ defs[v] = b
13955+ macro_def = defs[v]
13956+ to_add = macro_def[1]
13957+
13958+ if isinstance(macro_def[0], list):
13959+ # macro without arguments
13960+ del lst[i]
13961+ for x in xrange(len(to_add)):
13962+ lst.insert(i, to_add[x])
13963+ i += 1
13964+ else:
13965+ # collect the arguments for the funcall
13966+
13967+ args = []
13968+ del lst[i]
13969+
13970+ if i >= len(lst):
13971+ raise PreprocError("expected '(' after %r (got nothing)" % v)
13972+
13973+ (p2, v2) = lst[i]
13974+ if p2 != OP or v2 != '(':
13975+ raise PreprocError("expected '(' after %r" % v)
13976+
13977+ del lst[i]
13978+
13979+ one_param = []
13980+ count_paren = 0
13981+ while i < len(lst):
13982+ p2, v2 = lst[i]
13983+
13984+ del lst[i]
13985+ if p2 == OP and count_paren == 0:
13986+ if v2 == '(':
13987+ one_param.append((p2, v2))
13988+ count_paren += 1
13989+ elif v2 == ')':
13990+ if one_param: args.append(one_param)
13991+ break
13992+ elif v2 == ',':
13993+ if not one_param: raise PreprocError("empty param in funcall %s" % p)
13994+ args.append(one_param)
13995+ one_param = []
13996+ else:
13997+ one_param.append((p2, v2))
13998+ else:
13999+ one_param.append((p2, v2))
14000+ if v2 == '(': count_paren += 1
14001+ elif v2 == ')': count_paren -= 1
14002+ else:
14003+ raise PreprocError('malformed macro')
14004+
14005+ # substitute the arguments within the define expression
14006+ accu = []
14007+ arg_table = macro_def[0]
14008+ j = 0
14009+ while j < len(to_add):
14010+ (p2, v2) = to_add[j]
14011+
14012+ if p2 == OP and v2 == '#':
14013+ # stringize is for arguments only
14014+ if j+1 < len(to_add) and to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
14015+ toks = args[arg_table[to_add[j+1][1]]]
14016+ accu.append((STR, stringize(toks)))
14017+ j += 1
14018+ else:
14019+ accu.append((p2, v2))
14020+ elif p2 == OP and v2 == '##':
14021+					# token pasting - how can anyone invent such a complicated system?
14022+ if accu and j+1 < len(to_add):
14023+ # we have at least two tokens
14024+
14025+ t1 = accu[-1]
14026+
14027+ if to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
14028+ toks = args[arg_table[to_add[j+1][1]]]
14029+
14030+ if toks:
14031+ accu[-1] = paste_tokens(t1, toks[0]) #(IDENT, accu[-1][1] + toks[0][1])
14032+ accu.extend(toks[1:])
14033+ else:
14034+ # error, case "a##"
14035+ accu.append((p2, v2))
14036+ accu.extend(toks)
14037+ elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
14038+ # TODO not sure
14039+ # first collect the tokens
14040+ va_toks = []
14041+ st = len(macro_def[0])
14042+ pt = len(args)
14043+ for x in args[pt-st+1:]:
14044+ va_toks.extend(x)
14045+ va_toks.append((OP, ','))
14046+ if va_toks: va_toks.pop() # extra comma
14047+ if len(accu)>1:
14048+ (p3, v3) = accu[-1]
14049+ (p4, v4) = accu[-2]
14050+ if v3 == '##':
14051+ # remove the token paste
14052+ accu.pop()
14053+ if v4 == ',' and pt < st:
14054+ # remove the comma
14055+ accu.pop()
14056+ accu += va_toks
14057+ else:
14058+ accu[-1] = paste_tokens(t1, to_add[j+1])
14059+
14060+ j += 1
14061+ else:
14062+ # invalid paste, case "##a" or "b##"
14063+ accu.append((p2, v2))
14064+
14065+ elif p2 == IDENT and v2 in arg_table:
14066+ toks = args[arg_table[v2]]
14067+ reduce_tokens(toks, defs, ban+[v])
14068+ accu.extend(toks)
14069+ else:
14070+ accu.append((p2, v2))
14071+
14072+ j += 1
14073+
14074+
14075+ reduce_tokens(accu, defs, ban+[v])
14076+
14077+ for x in xrange(len(accu)-1, -1, -1):
14078+ lst.insert(i, accu[x])
14079+
14080+ i += 1
14081+
14082+
14083+def eval_macro(lst, adefs):
14084+ """reduce the tokens from the list lst, and try to return a 0/1 result"""
14085+ reduce_tokens(lst, adefs, [])
14086+ if not lst: raise PreprocError("missing tokens to evaluate")
14087+ (p, v) = reduce_eval(lst)
14088+ return int(v) != 0
14089+
14090+def extract_macro(txt):
14091+ """process a macro definition from "#define f(x, y) x * y" into a function or a simple macro without arguments"""
14092+ t = tokenize(txt)
14093+ if re_fun.search(txt):
14094+ p, name = t[0]
14095+
14096+ p, v = t[1]
14097+ if p != OP: raise PreprocError("expected open parenthesis")
14098+
14099+ i = 1
14100+ pindex = 0
14101+ params = {}
14102+ prev = '('
14103+
14104+ while 1:
14105+ i += 1
14106+ p, v = t[i]
14107+
14108+ if prev == '(':
14109+ if p == IDENT:
14110+ params[v] = pindex
14111+ pindex += 1
14112+ prev = p
14113+ elif p == OP and v == ')':
14114+ break
14115+ else:
14116+ raise PreprocError("unexpected token (3)")
14117+ elif prev == IDENT:
14118+ if p == OP and v == ',':
14119+ prev = v
14120+ elif p == OP and v == ')':
14121+ break
14122+ else:
14123+ raise PreprocError("comma or ... expected")
14124+ elif prev == ',':
14125+ if p == IDENT:
14126+ params[v] = pindex
14127+ pindex += 1
14128+ prev = p
14129+ elif p == OP and v == '...':
14130+ raise PreprocError("not implemented (1)")
14131+ else:
14132+ raise PreprocError("comma or ... expected (2)")
14133+ elif prev == '...':
14134+ raise PreprocError("not implemented (2)")
14135+ else:
14136+ raise PreprocError("unexpected else")
14137+
14138+ #~ print (name, [params, t[i+1:]])
14139+ return (name, [params, t[i+1:]])
14140+ else:
14141+ (p, v) = t[0]
14142+ return (v, [[], t[1:]])
14143+
14144+re_include = re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
14145+def extract_include(txt, defs):
14146+ """process a line in the form "#include foo" to return a string representing the file"""
14147+ m = re_include.search(txt)
14148+ if m:
14149+ if m.group('a'): return '<', m.group('a')
14150+ if m.group('b'): return '"', m.group('b')
14151+
14152+ # perform preprocessing and look at the result, it must match an include
14153+ toks = tokenize(txt)
14154+ reduce_tokens(toks, defs, ['waf_include'])
14155+
14156+ if not toks:
14157+ raise PreprocError("could not parse include %s" % txt)
14158+
14159+ if len(toks) == 1:
14160+ if toks[0][0] == STR:
14161+ return '"', toks[0][1]
14162+ else:
14163+ if toks[0][1] == '<' and toks[-1][1] == '>':
14164+			return '<', stringize(toks).lstrip('<').rstrip('>')
14165+
14166+ raise PreprocError("could not parse include %s." % txt)
14167+
14168+def parse_char(txt):
14169+ if not txt: raise PreprocError("attempted to parse a null char")
14170+ if txt[0] != '\\':
14171+ return ord(txt)
14172+ c = txt[1]
14173+ if c == 'x':
14174+ if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16)
14175+ return int(txt[2:], 16)
14176+ elif c.isdigit():
14177+ if c == '0' and len(txt)==2: return 0
14178+ for i in 3, 2, 1:
14179+ if len(txt) > i and txt[1:1+i].isdigit():
14180+ return (1+i, int(txt[1:1+i], 8))
14181+ else:
14182+ try: return chr_esc[c]
14183+ except KeyError: raise PreprocError("could not parse char literal '%s'" % txt)
14184+
14185+@Utils.run_once
14186+def tokenize(s):
14187+ """convert a string into a list of tokens (shlex.split does not apply to c/c++/d)"""
14188+ ret = []
14189+ for match in re_clexer.finditer(s):
14190+ m = match.group
14191+ for name in tok_types:
14192+ v = m(name)
14193+ if v:
14194+ if name == IDENT:
14195+ try: v = g_optrans[v]; name = OP
14196+ except KeyError:
14197+ # c++ specific
14198+ if v.lower() == "true":
14199+ v = 1
14200+ name = NUM
14201+ elif v.lower() == "false":
14202+ v = 0
14203+ name = NUM
14204+ elif name == NUM:
14205+ if m('oct'): v = int(v, 8)
14206+ elif m('hex'): v = int(m('hex'), 16)
14207+ elif m('n0'): v = m('n0')
14208+ else:
14209+ v = m('char')
14210+ if v: v = parse_char(v)
14211+ else: v = m('n2') or m('n4')
14212+ elif name == OP:
14213+ if v == '%:': v = '#'
14214+ elif v == '%:%:': v = '##'
14215+ elif name == STR:
14216+ # remove the quotes around the string
14217+ v = v[1:-1]
14218+ ret.append((name, v))
14219+ break
14220+ return ret
14221+
14222+@Utils.run_once
14223+def define_name(line):
14224+ return re_mac.match(line).group(0)
14225+
14226+class c_parser(object):
14227+ def __init__(self, nodepaths=None, defines=None):
14228+ #self.lines = txt.split('\n')
14229+ self.lines = []
14230+
14231+ if defines is None:
14232+ self.defs = {}
14233+ else:
14234+ self.defs = dict(defines) # make a copy
14235+ self.state = []
14236+
14237+ self.env = None # needed for the variant when searching for files
14238+
14239+ self.count_files = 0
14240+ self.currentnode_stack = []
14241+
14242+ self.nodepaths = nodepaths or []
14243+
14244+ self.nodes = []
14245+ self.names = []
14246+
14247+ # file added
14248+ self.curfile = ''
14249+ self.ban_includes = set([])
14250+
14251+ def cached_find_resource(self, node, filename):
14252+ try:
14253+ nd = node.bld.cache_nd
14254+ except:
14255+ nd = node.bld.cache_nd = {}
14256+
14257+ tup = (node.id, filename)
14258+ try:
14259+ return nd[tup]
14260+ except KeyError:
14261+ ret = node.find_resource(filename)
14262+ nd[tup] = ret
14263+ return ret
14264+
14265+ def tryfind(self, filename):
14266+ self.curfile = filename
14267+
14268+ # for msvc it should be a for loop on the whole stack
14269+ found = self.cached_find_resource(self.currentnode_stack[-1], filename)
14270+
14271+ for n in self.nodepaths:
14272+ if found:
14273+ break
14274+ found = self.cached_find_resource(n, filename)
14275+
14276+ if found:
14277+ self.nodes.append(found)
14278+ if filename[-4:] != '.moc':
14279+ self.addlines(found)
14280+ else:
14281+ if not filename in self.names:
14282+ self.names.append(filename)
14283+ return found
14284+
14285+ def addlines(self, node):
14286+
14287+ self.currentnode_stack.append(node.parent)
14288+ filepath = node.abspath(self.env)
14289+
14290+ self.count_files += 1
14291+ if self.count_files > recursion_limit: raise PreprocError("recursion limit exceeded")
14292+ pc = self.parse_cache
14293+ debug('preproc: reading file %r', filepath)
14294+ try:
14295+ lns = pc[filepath]
14296+ except KeyError:
14297+ pass
14298+ else:
14299+ self.lines.extend(lns)
14300+ return
14301+
14302+ try:
14303+ lines = filter_comments(filepath)
14304+ lines.append((POPFILE, ''))
14305+ lines.reverse()
14306+ pc[filepath] = lines # cache the lines filtered
14307+ self.lines.extend(lines)
14308+ except IOError:
14309+ raise PreprocError("could not read the file %s" % filepath)
14310+ except Exception:
14311+ if Logs.verbose > 0:
14312+ error("parsing %s failed" % filepath)
14313+ traceback.print_exc()
14314+
14315+ def start(self, node, env):
14316+ debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
14317+
14318+ self.env = env
14319+ variant = node.variant(env)
14320+ bld = node.__class__.bld
14321+ try:
14322+ self.parse_cache = bld.parse_cache
14323+ except AttributeError:
14324+ bld.parse_cache = {}
14325+ self.parse_cache = bld.parse_cache
14326+
14327+ self.addlines(node)
14328+ if env['DEFLINES']:
14329+ lst = [('define', x) for x in env['DEFLINES']]
14330+ lst.reverse()
14331+ self.lines.extend(lst)
14332+
14333+ while self.lines:
14334+ (kind, line) = self.lines.pop()
14335+ if kind == POPFILE:
14336+ self.currentnode_stack.pop()
14337+ continue
14338+ try:
14339+ self.process_line(kind, line)
14340+ except Exception, e:
14341+ if Logs.verbose:
14342+ debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
14343+
14344+ def process_line(self, token, line):
14345+ """
14346+		WARNING: a new state must be pushed for each if* directive because the matching endif pops it
14347+ """
14348+ ve = Logs.verbose
14349+ if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state)
14350+ state = self.state
14351+
14352+		# make certain we define the state if we are about to enter an if block
14353+ if token in ['ifdef', 'ifndef', 'if']:
14354+ state.append(undefined)
14355+ elif token == 'endif':
14356+ state.pop()
14357+
14358+ # skip lines when in a dead 'if' branch, wait for the endif
14359+ if not token in ['else', 'elif', 'endif']:
14360+ if skipped in self.state or ignored in self.state:
14361+ return
14362+
14363+ if token == 'if':
14364+ ret = eval_macro(tokenize(line), self.defs)
14365+ if ret: state[-1] = accepted
14366+ else: state[-1] = ignored
14367+ elif token == 'ifdef':
14368+ m = re_mac.match(line)
14369+ if m and m.group(0) in self.defs: state[-1] = accepted
14370+ else: state[-1] = ignored
14371+ elif token == 'ifndef':
14372+ m = re_mac.match(line)
14373+ if m and m.group(0) in self.defs: state[-1] = ignored
14374+ else: state[-1] = accepted
14375+ elif token == 'include' or token == 'import':
14376+ (kind, inc) = extract_include(line, self.defs)
14377+ if inc in self.ban_includes: return
14378+ if token == 'import': self.ban_includes.add(inc)
14379+ if ve: debug('preproc: include found %s (%s) ', inc, kind)
14380+ if kind == '"' or not strict_quotes:
14381+ self.tryfind(inc)
14382+ elif token == 'elif':
14383+ if state[-1] == accepted:
14384+ state[-1] = skipped
14385+ elif state[-1] == ignored:
14386+ if eval_macro(tokenize(line), self.defs):
14387+ state[-1] = accepted
14388+ elif token == 'else':
14389+ if state[-1] == accepted: state[-1] = skipped
14390+ elif state[-1] == ignored: state[-1] = accepted
14391+ elif token == 'define':
14392+ try:
14393+ self.defs[define_name(line)] = line
14394+ except:
14395+ raise PreprocError("invalid define line %s" % line)
14396+ elif token == 'undef':
14397+ m = re_mac.match(line)
14398+ if m and m.group(0) in self.defs:
14399+ self.defs.__delitem__(m.group(0))
14400+ #print "undef %s" % name
14401+ elif token == 'pragma':
14402+ if re_pragma_once.match(line.lower()):
14403+ self.ban_includes.add(self.curfile)
14404+
14405+def get_deps(node, env, nodepaths=[]):
14406+ """
14407+ Get the dependencies using a c/c++ preprocessor, this is required for finding dependencies of the kind
14408+ #include some_macro()
14409+ """
14410+
14411+ gruik = c_parser(nodepaths)
14412+ gruik.start(node, env)
14413+ return (gruik.nodes, gruik.names)
14414+
14415+#################### dumb dependency scanner
14416+
14417+re_inc = re.compile(\
14418+ '^[ \t]*(#|%:)[ \t]*(include)[ \t]*(.*)\r*$',
14419+ re.IGNORECASE | re.MULTILINE)
14420+
14421+def lines_includes(filename):
14422+ code = Utils.readf(filename)
14423+ if use_trigraphs:
14424+ for (a, b) in trig_def: code = code.split(a).join(b)
14425+ code = re_nl.sub('', code)
14426+ code = re_cpp.sub(repl, code)
14427+ return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
14428+
14429+def get_deps_simple(node, env, nodepaths=[], defines={}):
14430+ """
14431+ Get the dependencies by just looking recursively at the #include statements
14432+ """
14433+
14434+ nodes = []
14435+ names = []
14436+
14437+ def find_deps(node):
14438+ lst = lines_includes(node.abspath(env))
14439+
14440+ for (_, line) in lst:
14441+ (t, filename) = extract_include(line, defines)
14442+ if filename in names:
14443+ continue
14444+
14445+ if filename.endswith('.moc'):
14446+ names.append(filename)
14447+
14448+ found = None
14449+ for n in nodepaths:
14450+ if found:
14451+ break
14452+ found = n.find_resource(filename)
14453+
14454+ if not found:
14455+ if not filename in names:
14456+ names.append(filename)
14457+ elif not found in nodes:
14458+ nodes.append(found)
14459+ find_deps(node)
14460+
14461+ find_deps(node)
14462+ return (nodes, names)
14463+
14464+
14465diff --git a/buildtools/wafadmin/Tools/python.py b/buildtools/wafadmin/Tools/python.py
14466new file mode 100644
14467index 0000000..4f73081
14468--- /dev/null
14469+++ b/buildtools/wafadmin/Tools/python.py
14470@@ -0,0 +1,413 @@
14471+#!/usr/bin/env python
14472+# encoding: utf-8
14473+# Thomas Nagy, 2007 (ita)
14474+# Gustavo Carneiro (gjc), 2007
14475+
14476+"Python support"
14477+
14478+import os, sys
14479+import TaskGen, Utils, Runner, Options, Build
14480+from Logs import debug, warn, info
14481+from TaskGen import extension, taskgen, before, after, feature
14482+from Configure import conf
14483+
14484+EXT_PY = ['.py']
14485+FRAG_2 = '''
14486+#include "Python.h"
14487+#ifdef __cplusplus
14488+extern "C" {
14489+#endif
14490+ void Py_Initialize(void);
14491+ void Py_Finalize(void);
14492+#ifdef __cplusplus
14493+}
14494+#endif
14495+int main()
14496+{
14497+ Py_Initialize();
14498+ Py_Finalize();
14499+ return 0;
14500+}
14501+'''
14502+
14503+@feature('pyext')
14504+@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars', 'apply_bundle')
14505+@after('vars_target_cshlib')
14506+def init_pyext(self):
14507+ self.default_install_path = '${PYTHONARCHDIR}'
14508+ self.uselib = self.to_list(getattr(self, 'uselib', ''))
14509+ if not 'PYEXT' in self.uselib:
14510+ self.uselib.append('PYEXT')
14511+ self.env['MACBUNDLE'] = True
14512+
14513+@before('apply_link', 'apply_lib_vars', 'apply_type_vars')
14514+@after('apply_bundle')
14515+@feature('pyext')
14516+def pyext_shlib_ext(self):
14517+ # override shlib_PATTERN set by the osx module
14518+ self.env['shlib_PATTERN'] = self.env['pyext_PATTERN']
14519+
14520+@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars')
14521+@feature('pyembed')
14522+def init_pyembed(self):
14523+ self.uselib = self.to_list(getattr(self, 'uselib', ''))
14524+ if not 'PYEMBED' in self.uselib:
14525+ self.uselib.append('PYEMBED')
14526+
14527+@extension(EXT_PY)
14528+def process_py(self, node):
14529+ if not (self.bld.is_install and self.install_path):
14530+ return
14531+ def inst_py(ctx):
14532+ install_pyfile(self, node)
14533+ self.bld.add_post_fun(inst_py)
14534+
14535+def install_pyfile(self, node):
14536+ path = self.bld.get_install_path(self.install_path + os.sep + node.name, self.env)
14537+
14538+ self.bld.install_files(self.install_path, [node], self.env, self.chmod, postpone=False)
14539+ if self.bld.is_install < 0:
14540+ info("* removing byte compiled python files")
14541+ for x in 'co':
14542+ try:
14543+ os.remove(path + x)
14544+ except OSError:
14545+ pass
14546+
14547+ if self.bld.is_install > 0:
14548+ if self.env['PYC'] or self.env['PYO']:
14549+ info("* byte compiling %r" % path)
14550+
14551+ if self.env['PYC']:
14552+ program = ("""
14553+import sys, py_compile
14554+for pyfile in sys.argv[1:]:
14555+ py_compile.compile(pyfile, pyfile + 'c')
14556+""")
14557+ argv = [self.env['PYTHON'], '-c', program, path]
14558+ ret = Utils.pproc.Popen(argv).wait()
14559+ if ret:
14560+ raise Utils.WafError('bytecode compilation failed %r' % path)
14561+
14562+ if self.env['PYO']:
14563+ program = ("""
14564+import sys, py_compile
14565+for pyfile in sys.argv[1:]:
14566+ py_compile.compile(pyfile, pyfile + 'o')
14567+""")
14568+ argv = [self.env['PYTHON'], self.env['PYFLAGS_OPT'], '-c', program, path]
14569+ ret = Utils.pproc.Popen(argv).wait()
14570+ if ret:
14571+ raise Utils.WafError('bytecode compilation failed %r' % path)
14572+
14573+# COMPAT
14574+class py_taskgen(TaskGen.task_gen):
14575+ def __init__(self, *k, **kw):
14576+ TaskGen.task_gen.__init__(self, *k, **kw)
14577+
14578+@before('apply_core')
14579+@after('vars_target_cprogram', 'vars_target_cshlib')
14580+@feature('py')
14581+def init_py(self):
14582+ self.default_install_path = '${PYTHONDIR}'
14583+
14584+def _get_python_variables(python_exe, variables, imports=['import sys']):
14585+ """Run a python interpreter and print some variables"""
14586+ program = list(imports)
14587+ program.append('')
14588+ for v in variables:
14589+ program.append("print(repr(%s))" % v)
14590+ os_env = dict(os.environ)
14591+ try:
14592+ del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
14593+ except KeyError:
14594+ pass
14595+ proc = Utils.pproc.Popen([python_exe, "-c", '\n'.join(program)], stdout=Utils.pproc.PIPE, env=os_env)
14596+ output = proc.communicate()[0].split("\n") # do not touch, python3
14597+ if proc.returncode:
14598+ if Options.options.verbose:
14599+ warn("Python program to extract python configuration variables failed:\n%s"
14600+ % '\n'.join(["line %03i: %s" % (lineno+1, line) for lineno, line in enumerate(program)]))
14601+ raise RuntimeError
14602+ return_values = []
14603+ for s in output:
14604+ s = s.strip()
14605+ if not s:
14606+ continue
14607+ if s == 'None':
14608+ return_values.append(None)
14609+ elif s[0] == "'" and s[-1] == "'":
14610+ return_values.append(s[1:-1])
14611+ elif s[0].isdigit():
14612+ return_values.append(int(s))
14613+ else: break
14614+ return return_values
14615+
14616+@conf
14617+def check_python_headers(conf, mandatory=True):
14618+ """Check for headers and libraries necessary to extend or embed python.
14619+
14620+ On success the environment variables xxx_PYEXT and xxx_PYEMBED are added for uselib
14621+
14622+ PYEXT: for compiling python extensions
14623+ PYEMBED: for embedding a python interpreter"""
14624+
14625+ if not conf.env['CC_NAME'] and not conf.env['CXX_NAME']:
14626+ conf.fatal('load a compiler first (gcc, g++, ..)')
14627+
14628+ if not conf.env['PYTHON_VERSION']:
14629+ conf.check_python_version()
14630+
14631+ env = conf.env
14632+ python = env['PYTHON']
14633+ if not python:
14634+ conf.fatal('could not find the python executable')
14635+
14636+ ## On Mac OSX we need to use mac bundles for python plugins
14637+ if Options.platform == 'darwin':
14638+ conf.check_tool('osx')
14639+
14640+ try:
14641+ # Get some python configuration variables using distutils
14642+ v = 'prefix SO SYSLIBS LDFLAGS SHLIBS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET'.split()
14643+ (python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
14644+ python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED,
14645+ python_MACOSX_DEPLOYMENT_TARGET) = \
14646+ _get_python_variables(python, ["get_config_var('%s')" % x for x in v],
14647+ ['from distutils.sysconfig import get_config_var'])
14648+ except RuntimeError:
14649+ conf.fatal("Python development headers not found (-v for details).")
14650+
14651+ conf.log.write("""Configuration returned from %r:
14652+python_prefix = %r
14653+python_SO = %r
14654+python_SYSLIBS = %r
14655+python_LDFLAGS = %r
14656+python_SHLIBS = %r
14657+python_LIBDIR = %r
14658+python_LIBPL = %r
14659+INCLUDEPY = %r
14660+Py_ENABLE_SHARED = %r
14661+MACOSX_DEPLOYMENT_TARGET = %r
14662+""" % (python, python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
14663+ python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED, python_MACOSX_DEPLOYMENT_TARGET))
14664+
14665+ if python_MACOSX_DEPLOYMENT_TARGET:
14666+ conf.env['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
14667+ conf.environ['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
14668+
14669+ env['pyext_PATTERN'] = '%s'+python_SO
14670+
14671+ # Check for python libraries for embedding
14672+ if python_SYSLIBS is not None:
14673+ for lib in python_SYSLIBS.split():
14674+ if lib.startswith('-l'):
14675+ lib = lib[2:] # strip '-l'
14676+ env.append_value('LIB_PYEMBED', lib)
14677+
14678+ if python_SHLIBS is not None:
14679+ for lib in python_SHLIBS.split():
14680+ if lib.startswith('-l'):
14681+ env.append_value('LIB_PYEMBED', lib[2:]) # strip '-l'
14682+ else:
14683+ env.append_value('LINKFLAGS_PYEMBED', lib)
14684+
14685+ if Options.platform != 'darwin' and python_LDFLAGS:
14686+ env.append_value('LINKFLAGS_PYEMBED', python_LDFLAGS.split())
14687+
14688+ result = False
14689+ name = 'python' + env['PYTHON_VERSION']
14690+
14691+ if python_LIBDIR is not None:
14692+ path = [python_LIBDIR]
14693+ conf.log.write("\n\n# Trying LIBDIR: %r\n" % path)
14694+ result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
14695+
14696+ if not result and python_LIBPL is not None:
14697+ conf.log.write("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
14698+ path = [python_LIBPL]
14699+ result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
14700+
14701+ if not result:
14702+ conf.log.write("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
14703+ path = [os.path.join(python_prefix, "libs")]
14704+ name = 'python' + env['PYTHON_VERSION'].replace('.', '')
14705+ result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
14706+
14707+ if result:
14708+ env['LIBPATH_PYEMBED'] = path
14709+ env.append_value('LIB_PYEMBED', name)
14710+ else:
14711+ conf.log.write("\n\n### LIB NOT FOUND\n")
14712+
14713+ # under certain conditions, python extensions must link to
14714+ # python libraries, not just python embedding programs.
14715+ if (sys.platform == 'win32' or sys.platform.startswith('os2')
14716+ or sys.platform == 'darwin' or Py_ENABLE_SHARED):
14717+ env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED']
14718+ env['LIB_PYEXT'] = env['LIB_PYEMBED']
14719+
14720+ # We check that pythonX.Y-config exists, and if it exists we
14721+ # use it to get only the includes, else fall back to distutils.
14722+ python_config = conf.find_program(
14723+ 'python%s-config' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
14724+ var='PYTHON_CONFIG')
14725+ if not python_config:
14726+ python_config = conf.find_program(
14727+ 'python-config-%s' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
14728+ var='PYTHON_CONFIG')
14729+
14730+ includes = []
14731+ if python_config:
14732+ for incstr in Utils.cmd_output("%s %s --includes" % (python, python_config)).strip().split():
14733+ # strip the -I or /I
14734+ if (incstr.startswith('-I')
14735+ or incstr.startswith('/I')):
14736+ incstr = incstr[2:]
14737+ # append include path, unless already given
14738+ if incstr not in includes:
14739+ includes.append(incstr)
14740+ conf.log.write("Include path for Python extensions "
14741+ "(found via python-config --includes): %r\n" % (includes,))
14742+ env['CPPPATH_PYEXT'] = includes
14743+ env['CPPPATH_PYEMBED'] = includes
14744+ else:
14745+ conf.log.write("Include path for Python extensions "
14746+ "(found via distutils module): %r\n" % (INCLUDEPY,))
14747+ env['CPPPATH_PYEXT'] = [INCLUDEPY]
14748+ env['CPPPATH_PYEMBED'] = [INCLUDEPY]
14749+
14750+ # Code using the Python API needs to be compiled with -fno-strict-aliasing
14751+ if env['CC_NAME'] == 'gcc':
14752+ env.append_value('CCFLAGS_PYEMBED', '-fno-strict-aliasing')
14753+ env.append_value('CCFLAGS_PYEXT', '-fno-strict-aliasing')
14754+ if env['CXX_NAME'] == 'gcc':
14755+ env.append_value('CXXFLAGS_PYEMBED', '-fno-strict-aliasing')
14756+ env.append_value('CXXFLAGS_PYEXT', '-fno-strict-aliasing')
14757+
14758+ # See if it compiles
14759+ conf.check(define_name='HAVE_PYTHON_H',
14760+ uselib='PYEMBED', fragment=FRAG_2,
14761+ errmsg='Could not find the python development headers', mandatory=mandatory)
14762+
14763+@conf
14764+def check_python_version(conf, minver=None):
14765+ """
14766+ Check if the python interpreter is found matching a given minimum version.
14767+	minver should be a tuple, e.g. to check for python >= 2.4.2 pass (2,4,2) as minver.
14768+
14769+ If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR'
14770+ (eg. '2.4') of the actual python version found, and PYTHONDIR is
14771+ defined, pointing to the site-packages directory appropriate for
14772+ this python version, where modules/packages/extensions should be
14773+ installed.
14774+ """
14775+ assert minver is None or isinstance(minver, tuple)
14776+ python = conf.env['PYTHON']
14777+ if not python:
14778+ conf.fatal('could not find the python executable')
14779+
14780+ # Get python version string
14781+ cmd = [python, "-c", "import sys\nfor x in sys.version_info: print(str(x))"]
14782+ debug('python: Running python command %r' % cmd)
14783+ proc = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE)
14784+ lines = proc.communicate()[0].split()
14785+ assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
14786+ pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
14787+
14788+ # compare python version with the minimum required
14789+ result = (minver is None) or (pyver_tuple >= minver)
14790+
14791+ if result:
14792+ # define useful environment variables
14793+ pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
14794+ conf.env['PYTHON_VERSION'] = pyver
14795+
14796+ if 'PYTHONDIR' in conf.environ:
14797+ pydir = conf.environ['PYTHONDIR']
14798+ else:
14799+ if sys.platform == 'win32':
14800+ (python_LIBDEST, pydir) = \
14801+ _get_python_variables(python,
14802+ ["get_config_var('LIBDEST')",
14803+ "get_python_lib(standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
14804+ ['from distutils.sysconfig import get_config_var, get_python_lib'])
14805+ else:
14806+ python_LIBDEST = None
14807+ (pydir,) = \
14808+ _get_python_variables(python,
14809+ ["get_python_lib(standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
14810+ ['from distutils.sysconfig import get_config_var, get_python_lib'])
14811+ if python_LIBDEST is None:
14812+ if conf.env['LIBDIR']:
14813+ python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
14814+ else:
14815+ python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)
14816+
14817+ if 'PYTHONARCHDIR' in conf.environ:
14818+ pyarchdir = conf.environ['PYTHONARCHDIR']
14819+ else:
14820+ (pyarchdir,) = _get_python_variables(python,
14821+ ["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
14822+ ['from distutils.sysconfig import get_config_var, get_python_lib'])
14823+ if not pyarchdir:
14824+ pyarchdir = pydir
14825+
14826+ if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
14827+ conf.define('PYTHONDIR', pydir)
14828+ conf.define('PYTHONARCHDIR', pyarchdir)
14829+
14830+ conf.env['PYTHONDIR'] = pydir
14831+
14832+ # Feedback
14833+ pyver_full = '.'.join(map(str, pyver_tuple[:3]))
14834+ if minver is None:
14835+ conf.check_message_custom('Python version', '', pyver_full)
14836+ else:
14837+ minver_str = '.'.join(map(str, minver))
14838+ conf.check_message('Python version', ">= %s" % minver_str, result, option=pyver_full)
14839+
14840+ if not result:
14841+ conf.fatal('The python version is too old (%r)' % pyver_full)
14842+
14843+@conf
14844+def check_python_module(conf, module_name):
14845+ """
14846+ Check if the selected python interpreter can import the given python module.
14847+ """
14848+ result = not Utils.pproc.Popen([conf.env['PYTHON'], "-c", "import %s" % module_name],
14849+ stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE).wait()
14850+ conf.check_message('Python module', module_name, result)
14851+ if not result:
14852+ conf.fatal('Could not find the python module %r' % module_name)
14853+
14854+def detect(conf):
14855+
14856+ if not conf.env.PYTHON:
14857+ conf.env.PYTHON = sys.executable
14858+
14859+ python = conf.find_program('python', var='PYTHON')
14860+ if not python:
14861+ conf.fatal('Could not find the path of the python executable')
14862+
14863+ v = conf.env
14864+
14865+ v['PYCMD'] = '"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
14866+ v['PYFLAGS'] = ''
14867+ v['PYFLAGS_OPT'] = '-O'
14868+
14869+ v['PYC'] = getattr(Options.options, 'pyc', 1)
14870+ v['PYO'] = getattr(Options.options, 'pyo', 1)
14871+
14872+def set_options(opt):
14873+ opt.add_option('--nopyc',
14874+ action='store_false',
14875+ default=1,
14876+ help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]',
14877+ dest = 'pyc')
14878+ opt.add_option('--nopyo',
14879+ action='store_false',
14880+ default=1,
14881+ help='Do not install optimised compiled .pyo files (configuration) [Default:install]',
14882+ dest='pyo')
14883+
14884diff --git a/buildtools/wafadmin/Tools/qt4.py b/buildtools/wafadmin/Tools/qt4.py
14885new file mode 100644
14886index 0000000..84d121a
14887--- /dev/null
14888+++ b/buildtools/wafadmin/Tools/qt4.py
14889@@ -0,0 +1,505 @@
14890+#!/usr/bin/env python
14891+# encoding: utf-8
14892+# Thomas Nagy, 2006 (ita)
14893+
14894+"""
14895+Qt4 support
14896+
14897+If QT4_ROOT is given (absolute path), the configuration will look in it first
14898+
14899+This module also demonstrates how to add tasks dynamically (when the build has started)
14900+"""
14901+
14902+try:
14903+ from xml.sax import make_parser
14904+ from xml.sax.handler import ContentHandler
14905+except ImportError:
14906+ has_xml = False
14907+ ContentHandler = object
14908+else:
14909+ has_xml = True
14910+
14911+import os, sys
14912+import ccroot, cxx
14913+import TaskGen, Task, Utils, Runner, Options, Node, Configure
14914+from TaskGen import taskgen, feature, after, extension
14915+from Logs import error
14916+from Constants import *
14917+
14918+MOC_H = ['.h', '.hpp', '.hxx', '.hh']
14919+EXT_RCC = ['.qrc']
14920+EXT_UI = ['.ui']
14921+EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
14922+
14923+class qxx_task(Task.Task):
14924+ "A cpp task that may create a moc task dynamically"
14925+
14926+ before = ['cxx_link', 'static_link']
14927+
14928+ def __init__(self, *k, **kw):
14929+ Task.Task.__init__(self, *k, **kw)
14930+ self.moc_done = 0
14931+
14932+ def scan(self):
14933+ (nodes, names) = ccroot.scan(self)
14934+		# for some reason (variants) the moc node may end up in the list of node deps
14935+ for x in nodes:
14936+ if x.name.endswith('.moc'):
14937+ nodes.remove(x)
14938+ names.append(x.relpath_gen(self.inputs[0].parent))
14939+ return (nodes, names)
14940+
14941+ def runnable_status(self):
14942+ if self.moc_done:
14943+ # if there is a moc task, delay the computation of the file signature
14944+ for t in self.run_after:
14945+ if not t.hasrun:
14946+ return ASK_LATER
14947+ # the moc file enters in the dependency calculation
14948+ # so we need to recompute the signature when the moc file is present
14949+ self.signature()
14950+ return Task.Task.runnable_status(self)
14951+ else:
14952+ # yes, really, there are people who generate cxx files
14953+ for t in self.run_after:
14954+ if not t.hasrun:
14955+ return ASK_LATER
14956+ self.add_moc_tasks()
14957+ return ASK_LATER
14958+
14959+ def add_moc_tasks(self):
14960+
14961+ node = self.inputs[0]
14962+ tree = node.__class__.bld
14963+
14964+ try:
14965+ # compute the signature once to know if there is a moc file to create
14966+ self.signature()
14967+ except KeyError:
14968+ # the moc file may be referenced somewhere else
14969+ pass
14970+ else:
14971+ # remove the signature, it must be recomputed with the moc task
14972+ delattr(self, 'cache_sig')
14973+
14974+ moctasks=[]
14975+ mocfiles=[]
14976+ variant = node.variant(self.env)
14977+ try:
14978+ tmp_lst = tree.raw_deps[self.unique_id()]
14979+ tree.raw_deps[self.unique_id()] = []
14980+ except KeyError:
14981+ tmp_lst = []
14982+ for d in tmp_lst:
14983+ if not d.endswith('.moc'): continue
14984+ # paranoid check
14985+ if d in mocfiles:
14986+ error("paranoia owns")
14987+ continue
14988+
14989+ # process that base.moc only once
14990+ mocfiles.append(d)
14991+
14992+			# find the extension (performed only when the .cpp has changed)
14993+ base2 = d[:-4]
14994+ for path in [node.parent] + self.generator.env['INC_PATHS']:
14995+ tree.rescan(path)
14996+ vals = getattr(Options.options, 'qt_header_ext', '') or MOC_H
14997+ for ex in vals:
14998+ h_node = path.find_resource(base2 + ex)
14999+ if h_node:
15000+ break
15001+ else:
15002+ continue
15003+ break
15004+ else:
15005+ raise Utils.WafError("no header found for %s which is a moc file" % str(d))
15006+
15007+ m_node = h_node.change_ext('.moc')
15008+ tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), m_node.name)] = h_node
15009+
15010+ # create the task
15011+ task = Task.TaskBase.classes['moc'](self.env, normal=0)
15012+ task.set_inputs(h_node)
15013+ task.set_outputs(m_node)
15014+
15015+ generator = tree.generator
15016+ generator.outstanding.insert(0, task)
15017+ generator.total += 1
15018+
15019+ moctasks.append(task)
15020+
15021+ # remove raw deps except the moc files to save space (optimization)
15022+ tmp_lst = tree.raw_deps[self.unique_id()] = mocfiles
15023+
15024+ # look at the file inputs, it is set right above
15025+ lst = tree.node_deps.get(self.unique_id(), ())
15026+ for d in lst:
15027+ name = d.name
15028+ if name.endswith('.moc'):
15029+ task = Task.TaskBase.classes['moc'](self.env, normal=0)
15030+ task.set_inputs(tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), name)]) # 1st element in a tuple
15031+ task.set_outputs(d)
15032+
15033+ generator = tree.generator
15034+ generator.outstanding.insert(0, task)
15035+ generator.total += 1
15036+
15037+ moctasks.append(task)
15038+
15039+ # simple scheduler dependency: run the moc task before others
15040+ self.run_after = moctasks
15041+ self.moc_done = 1
15042+
15043+ run = Task.TaskBase.classes['cxx'].__dict__['run']
15044+
15045+def translation_update(task):
15046+ outs = [a.abspath(task.env) for a in task.outputs]
15047+ outs = " ".join(outs)
15048+ lupdate = task.env['QT_LUPDATE']
15049+
15050+ for x in task.inputs:
15051+ file = x.abspath(task.env)
15052+ cmd = "%s %s -ts %s" % (lupdate, file, outs)
15053+ Utils.pprint('BLUE', cmd)
15054+ task.generator.bld.exec_command(cmd)
15055+
15056+class XMLHandler(ContentHandler):
15057+ def __init__(self):
15058+ self.buf = []
15059+ self.files = []
15060+ def startElement(self, name, attrs):
15061+ if name == 'file':
15062+ self.buf = []
15063+ def endElement(self, name):
15064+ if name == 'file':
15065+ self.files.append(''.join(self.buf))
15066+ def characters(self, cars):
15067+ self.buf.append(cars)
15068+
15069+def scan(self):
15070+ "add the dependency on the files referenced in the qrc"
15071+ node = self.inputs[0]
15072+ parser = make_parser()
15073+ curHandler = XMLHandler()
15074+ parser.setContentHandler(curHandler)
15075+ fi = open(self.inputs[0].abspath(self.env))
15076+ parser.parse(fi)
15077+ fi.close()
15078+
15079+ nodes = []
15080+ names = []
15081+ root = self.inputs[0].parent
15082+ for x in curHandler.files:
15083+ nd = root.find_resource(x)
15084+ if nd: nodes.append(nd)
15085+ else: names.append(x)
15086+
15087+ return (nodes, names)
15088+
15089+@extension(EXT_RCC)
15090+def create_rcc_task(self, node):
15091+ "hook for rcc files"
15092+ rcnode = node.change_ext('_rc.cpp')
15093+ rcctask = self.create_task('rcc', node, rcnode)
15094+ cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
15095+ self.compiled_tasks.append(cpptask)
15096+ return cpptask
15097+
15098+@extension(EXT_UI)
15099+def create_uic_task(self, node):
15100+ "hook for uic tasks"
15101+ uictask = self.create_task('ui4', node)
15102+ uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])]
15103+ return uictask
15104+
15105+class qt4_taskgen(cxx.cxx_taskgen):
15106+ def __init__(self, *k, **kw):
15107+ cxx.cxx_taskgen.__init__(self, *k, **kw)
15108+ self.features.append('qt4')
15109+
15110+@extension('.ts')
15111+def add_lang(self, node):
15112+ """add all the .ts file into self.lang"""
15113+ self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
15114+
15115+@feature('qt4')
15116+@after('apply_link')
15117+def apply_qt4(self):
15118+ if getattr(self, 'lang', None):
15119+ update = getattr(self, 'update', None)
15120+ lst=[]
15121+ trans=[]
15122+ for l in self.to_list(self.lang):
15123+
15124+ if not isinstance(l, Node.Node):
15125+ l = self.path.find_resource(l+'.ts')
15126+
15127+ t = self.create_task('ts2qm', l, l.change_ext('.qm'))
15128+ lst.append(t.outputs[0])
15129+
15130+ if update:
15131+ trans.append(t.inputs[0])
15132+
15133+ trans_qt4 = getattr(Options.options, 'trans_qt4', False)
15134+ if update and trans_qt4:
15135+ # we need the cpp files given, except the rcc task we create after
15136+ # FIXME may be broken
15137+ u = Task.TaskCmd(translation_update, self.env, 2)
15138+ u.inputs = [a.inputs[0] for a in self.compiled_tasks]
15139+ u.outputs = trans
15140+
15141+ if getattr(self, 'langname', None):
15142+ t = Task.TaskBase.classes['qm2rcc'](self.env)
15143+ t.set_inputs(lst)
15144+ t.set_outputs(self.path.find_or_declare(self.langname+'.qrc'))
15145+ t.path = self.path
15146+ k = create_rcc_task(self, t.outputs[0])
15147+ self.link_task.inputs.append(k.outputs[0])
15148+
15149+ self.env.append_value('MOC_FLAGS', self.env._CXXDEFFLAGS)
15150+ self.env.append_value('MOC_FLAGS', self.env._CXXINCFLAGS)
15151+
15152+@extension(EXT_QT4)
15153+def cxx_hook(self, node):
15154+ # create the compilation task: cpp or cc
15155+ try: obj_ext = self.obj_ext
15156+ except AttributeError: obj_ext = '_%d.o' % self.idx
15157+
15158+ task = self.create_task('qxx', node, node.change_ext(obj_ext))
15159+ self.compiled_tasks.append(task)
15160+ return task
15161+
15162+def process_qm2rcc(task):
15163+ outfile = task.outputs[0].abspath(task.env)
15164+ f = open(outfile, 'w')
15165+ f.write('<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n')
15166+ for k in task.inputs:
15167+ f.write(' <file>')
15168+ #f.write(k.name)
15169+ f.write(k.path_to_parent(task.path))
15170+ f.write('</file>\n')
15171+ f.write('</qresource>\n</RCC>')
15172+ f.close()
15173+
15174+b = Task.simple_task_type
15175+b('moc', '${QT_MOC} ${MOC_FLAGS} ${SRC} ${MOC_ST} ${TGT}', color='BLUE', vars=['QT_MOC', 'MOC_FLAGS'], shell=False)
15176+cls = b('rcc', '${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath(env)} ${RCC_ST} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', after="qm2rcc", shell=False)
15177+cls.scan = scan
15178+b('ui4', '${QT_UIC} ${SRC} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', shell=False)
15179+b('ts2qm', '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}', color='BLUE', before='qm2rcc', shell=False)
15180+
15181+Task.task_type_from_func('qm2rcc', vars=[], func=process_qm2rcc, color='BLUE', before='rcc', after='ts2qm')
15182+
15183+def detect_qt4(conf):
15184+ env = conf.env
15185+ opt = Options.options
15186+
15187+ qtdir = getattr(opt, 'qtdir', '')
15188+ qtbin = getattr(opt, 'qtbin', '')
15189+ qtlibs = getattr(opt, 'qtlibs', '')
15190+ useframework = getattr(opt, 'use_qt4_osxframework', True)
15191+
15192+ paths = []
15193+
15194+	# the path to qmake has been given explicitly
15195+ if qtbin:
15196+ paths = [qtbin]
15197+
15198+ # the qt directory has been given - we deduce the qt binary path
15199+ if not qtdir:
15200+ qtdir = conf.environ.get('QT4_ROOT', '')
15201+ qtbin = os.path.join(qtdir, 'bin')
15202+ paths = [qtbin]
15203+
15204+ # no qtdir, look in the path and in /usr/local/Trolltech
15205+ if not qtdir:
15206+ paths = os.environ.get('PATH', '').split(os.pathsep)
15207+ paths.append('/usr/share/qt4/bin/')
15208+ try:
15209+ lst = os.listdir('/usr/local/Trolltech/')
15210+ except OSError:
15211+ pass
15212+ else:
15213+ if lst:
15214+ lst.sort()
15215+ lst.reverse()
15216+
15217+ # keep the highest version
15218+ qtdir = '/usr/local/Trolltech/%s/' % lst[0]
15219+ qtbin = os.path.join(qtdir, 'bin')
15220+ paths.append(qtbin)
15221+
15222+ # at the end, try to find qmake in the paths given
15223+ # keep the one with the highest version
15224+ cand = None
15225+ prev_ver = ['4', '0', '0']
15226+ for qmk in ['qmake-qt4', 'qmake4', 'qmake']:
15227+ qmake = conf.find_program(qmk, path_list=paths)
15228+ if qmake:
15229+ try:
15230+ version = Utils.cmd_output([qmake, '-query', 'QT_VERSION']).strip()
15231+ except ValueError:
15232+ pass
15233+ else:
15234+ if version:
15235+ new_ver = version.split('.')
15236+ if new_ver > prev_ver:
15237+ cand = qmake
15238+ prev_ver = new_ver
15239+ if cand:
15240+ qmake = cand
15241+ else:
15242+ conf.fatal('could not find qmake for qt4')
15243+
15244+ conf.env.QMAKE = qmake
15245+ qtincludes = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_HEADERS']).strip()
15246+ qtdir = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
15247+ qtbin = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_BINS']).strip() + os.sep
15248+
15249+ if not qtlibs:
15250+ try:
15251+ qtlibs = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_LIBS']).strip() + os.sep
15252+ except ValueError:
15253+ qtlibs = os.path.join(qtdir, 'lib')
15254+
15255+ def find_bin(lst, var):
15256+ for f in lst:
15257+ ret = conf.find_program(f, path_list=paths)
15258+ if ret:
15259+ env[var]=ret
15260+ break
15261+
15262+ vars = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtWebKit Qt3Support".split()
15263+
15264+ find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
15265+ find_bin(['uic-qt4', 'uic'], 'QT_UIC')
15266+ if not env['QT_UIC']:
15267+ conf.fatal('cannot find the uic compiler for qt4')
15268+
15269+ try:
15270+ version = Utils.cmd_output(env['QT_UIC'] + " -version 2>&1").strip()
15271+ except ValueError:
15272+ conf.fatal('your uic compiler is for qt3, add uic for qt4 to your path')
15273+
15274+ version = version.replace('Qt User Interface Compiler ','')
15275+ version = version.replace('User Interface Compiler for Qt', '')
15276+ if version.find(" 3.") != -1:
15277+ conf.check_message('uic version', '(too old)', 0, option='(%s)'%version)
15278+ sys.exit(1)
15279+ conf.check_message('uic version', '', 1, option='(%s)'%version)
15280+
15281+ find_bin(['moc-qt4', 'moc'], 'QT_MOC')
15282+ find_bin(['rcc'], 'QT_RCC')
15283+ find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
15284+ find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')
15285+
15286+ env['UIC3_ST']= '%s -o %s'
15287+ env['UIC_ST'] = '%s -o %s'
15288+ env['MOC_ST'] = '-o'
15289+ env['ui_PATTERN'] = 'ui_%s.h'
15290+ env['QT_LRELEASE_FLAGS'] = ['-silent']
15291+
15292+ vars_debug = [a+'_debug' for a in vars]
15293+
15294+ try:
15295+ conf.find_program('pkg-config', var='pkgconfig', path_list=paths, mandatory=True)
15296+
15297+ except Configure.ConfigurationError:
15298+
15299+ for lib in vars_debug+vars:
15300+ uselib = lib.upper()
15301+
15302+ d = (lib.find('_debug') > 0) and 'd' or ''
15303+
15304+ # original author seems to prefer static to shared libraries
15305+ for (pat, kind) in ((conf.env.staticlib_PATTERN, 'STATIC'), (conf.env.shlib_PATTERN, '')):
15306+
15307+ conf.check_message_1('Checking for %s %s' % (lib, kind))
15308+
15309+ for ext in ['', '4']:
15310+ path = os.path.join(qtlibs, pat % (lib + d + ext))
15311+ if os.path.exists(path):
15312+ env.append_unique(kind + 'LIB_' + uselib, lib + d + ext)
15313+ conf.check_message_2('ok ' + path, 'GREEN')
15314+ break
15315+ path = os.path.join(qtbin, pat % (lib + d + ext))
15316+ if os.path.exists(path):
15317+ env.append_unique(kind + 'LIB_' + uselib, lib + d + ext)
15318+ conf.check_message_2('ok ' + path, 'GREEN')
15319+ break
15320+ else:
15321+ conf.check_message_2('not found', 'YELLOW')
15322+ continue
15323+ break
15324+
15325+ env.append_unique('LIBPATH_' + uselib, qtlibs)
15326+ env.append_unique('CPPPATH_' + uselib, qtincludes)
15327+ env.append_unique('CPPPATH_' + uselib, qtincludes + os.sep + lib)
15328+ else:
15329+ for i in vars_debug+vars:
15330+ try:
15331+ conf.check_cfg(package=i, args='--cflags --libs --silence-errors', path=conf.env.pkgconfig)
15332+ except ValueError:
15333+ pass
15334+
15335+ # the libpaths are set nicely, unfortunately they make really long command-lines
15336+ # remove the qtcore ones from qtgui, etc
15337+ def process_lib(vars_, coreval):
15338+ for d in vars_:
15339+ var = d.upper()
15340+ if var == 'QTCORE': continue
15341+
15342+ value = env['LIBPATH_'+var]
15343+ if value:
15344+ core = env[coreval]
15345+ accu = []
15346+ for lib in value:
15347+ if lib in core: continue
15348+ accu.append(lib)
15349+ env['LIBPATH_'+var] = accu
15350+
15351+ process_lib(vars, 'LIBPATH_QTCORE')
15352+ process_lib(vars_debug, 'LIBPATH_QTCORE_DEBUG')
15353+
15354+ # rpath if wanted
15355+ want_rpath = getattr(Options.options, 'want_rpath', 1)
15356+ if want_rpath:
15357+ def process_rpath(vars_, coreval):
15358+ for d in vars_:
15359+ var = d.upper()
15360+ value = env['LIBPATH_'+var]
15361+ if value:
15362+ core = env[coreval]
15363+ accu = []
15364+ for lib in value:
15365+ if var != 'QTCORE':
15366+ if lib in core:
15367+ continue
15368+ accu.append('-Wl,--rpath='+lib)
15369+ env['RPATH_'+var] = accu
15370+ process_rpath(vars, 'LIBPATH_QTCORE')
15371+ process_rpath(vars_debug, 'LIBPATH_QTCORE_DEBUG')
15372+
15373+ env['QTLOCALE'] = str(env['PREFIX'])+'/share/locale'
15374+
15375+def detect(conf):
15376+ detect_qt4(conf)
15377+
15378+def set_options(opt):
15379+ opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
15380+
15381+ opt.add_option('--header-ext',
15382+ type='string',
15383+ default='',
15384+ help='header extension for moc files',
15385+ dest='qt_header_ext')
15386+
15387+ for i in 'qtdir qtbin qtlibs'.split():
15388+ opt.add_option('--'+i, type='string', default='', dest=i)
15389+
15390+ if sys.platform == "darwin":
15391+ opt.add_option('--no-qt4-framework', action="store_false", help='do not use the framework version of Qt4 in OS X', dest='use_qt4_osxframework',default=True)
15392+
15393+ opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)
15394+
15395diff --git a/buildtools/wafadmin/Tools/ruby.py b/buildtools/wafadmin/Tools/ruby.py
15396new file mode 100644
15397index 0000000..d3b7569
15398--- /dev/null
15399+++ b/buildtools/wafadmin/Tools/ruby.py
15400@@ -0,0 +1,120 @@
15401+#!/usr/bin/env python
15402+# encoding: utf-8
15403+# daniel.svensson at purplescout.se 2008
15404+
15405+import os
15406+import Task, Options, Utils
15407+from TaskGen import before, feature, after
15408+from Configure import conf
15409+
15410+@feature('rubyext')
15411+@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars', 'apply_bundle')
15412+@after('default_cc', 'vars_target_cshlib')
15413+def init_rubyext(self):
15414+ self.default_install_path = '${ARCHDIR_RUBY}'
15415+ self.uselib = self.to_list(getattr(self, 'uselib', ''))
15416+ if not 'RUBY' in self.uselib:
15417+ self.uselib.append('RUBY')
15418+ if not 'RUBYEXT' in self.uselib:
15419+ self.uselib.append('RUBYEXT')
15420+
15421+@feature('rubyext')
15422+@before('apply_link')
15423+def apply_ruby_so_name(self):
15424+ self.env['shlib_PATTERN'] = self.env['rubyext_PATTERN']
15425+
15426+@conf
15427+def check_ruby_version(conf, minver=()):
15428+ """
15429+ Checks if ruby is installed.
15430+	If installed, the variable RUBY will be set in the environment.
15431+	The Ruby binary can be overridden with the --with-ruby-binary config option.
15432+ """
15433+
15434+ if Options.options.rubybinary:
15435+ conf.env.RUBY = Options.options.rubybinary
15436+ else:
15437+ conf.find_program("ruby", var="RUBY", mandatory=True)
15438+
15439+ ruby = conf.env.RUBY
15440+
15441+ try:
15442+ version = Utils.cmd_output([ruby, '-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
15443+ except:
15444+ conf.fatal('could not determine ruby version')
15445+ conf.env.RUBY_VERSION = version
15446+
15447+ try:
15448+ ver = tuple(map(int, version.split(".")))
15449+ except:
15450+ conf.fatal('unsupported ruby version %r' % version)
15451+
15452+ cver = ''
15453+ if minver:
15454+ if ver < minver:
15455+ conf.fatal('ruby is too old')
15456+ cver = ".".join([str(x) for x in minver])
15457+
15458+ conf.check_message('ruby', cver, True, version)
15459+
15460+@conf
15461+def check_ruby_ext_devel(conf):
15462+ if not conf.env.RUBY:
15463+ conf.fatal('ruby detection is required first')
15464+
15465+ if not conf.env.CC_NAME and not conf.env.CXX_NAME:
15466+ conf.fatal('load a c/c++ compiler first')
15467+
15468+ version = tuple(map(int, conf.env.RUBY_VERSION.split(".")))
15469+
15470+ def read_out(cmd):
15471+ return Utils.to_list(Utils.cmd_output([conf.env.RUBY, '-rrbconfig', '-e', cmd]))
15472+
15473+ def read_config(key):
15474+ return read_out('puts Config::CONFIG[%r]' % key)
15475+
15476+ ruby = conf.env['RUBY']
15477+ archdir = read_config('archdir')
15478+ cpppath = archdir
15479+ if version >= (1, 9, 0):
15480+ ruby_hdrdir = read_config('rubyhdrdir')
15481+ cpppath += ruby_hdrdir
15482+ cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]
15483+
15484+ conf.check(header_name='ruby.h', includes=cpppath, mandatory=True, errmsg='could not find ruby header file')
15485+
15486+ conf.env.LIBPATH_RUBYEXT = read_config('libdir')
15487+ conf.env.LIBPATH_RUBYEXT += archdir
15488+ conf.env.CPPPATH_RUBYEXT = cpppath
15489+ conf.env.CCFLAGS_RUBYEXT = read_config("CCDLFLAGS")
15490+ conf.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]
15491+
15492+ # ok this is really stupid, but the command and flags are combined.
15493+ # so we try to find the first argument...
15494+ flags = read_config('LDSHARED')
15495+ while flags and flags[0][0] != '-':
15496+ flags = flags[1:]
15497+
15498+ # we also want to strip out the deprecated ppc flags
15499+ if len(flags) > 1 and flags[1] == "ppc":
15500+ flags = flags[2:]
15501+
15502+ conf.env.LINKFLAGS_RUBYEXT = flags
15503+ conf.env.LINKFLAGS_RUBYEXT += read_config("LIBS")
15504+ conf.env.LINKFLAGS_RUBYEXT += read_config("LIBRUBYARG_SHARED")
15505+
15506+ if Options.options.rubyarchdir:
15507+ conf.env.ARCHDIR_RUBY = Options.options.rubyarchdir
15508+ else:
15509+ conf.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]
15510+
15511+ if Options.options.rubylibdir:
15512+ conf.env.LIBDIR_RUBY = Options.options.rubylibdir
15513+ else:
15514+ conf.env.LIBDIR_RUBY = read_config('sitelibdir')[0]
15515+
15516+def set_options(opt):
15517+ opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
15518+ opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
15519+ opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')
15520+
15521diff --git a/buildtools/wafadmin/Tools/suncc.py b/buildtools/wafadmin/Tools/suncc.py
15522new file mode 100644
15523index 0000000..b1a2aad
15524--- /dev/null
15525+++ b/buildtools/wafadmin/Tools/suncc.py
15526@@ -0,0 +1,76 @@
15527+#!/usr/bin/env python
15528+# encoding: utf-8
15529+# Thomas Nagy, 2006 (ita)
15530+# Ralf Habacker, 2006 (rh)
15531+
15532+import os, optparse
15533+import Utils, Options, Configure
15534+import ccroot, ar
15535+from Configure import conftest
15536+
15537+@conftest
15538+def find_scc(conf):
15539+ v = conf.env
15540+ cc = None
15541+ if v['CC']: cc = v['CC']
15542+ elif 'CC' in conf.environ: cc = conf.environ['CC']
15543+ #if not cc: cc = conf.find_program('gcc', var='CC')
15544+ if not cc: cc = conf.find_program('cc', var='CC')
15545+ if not cc: conf.fatal('suncc was not found')
15546+ cc = conf.cmd_to_list(cc)
15547+
15548+ try:
15549+ if not Utils.cmd_output(cc + ['-flags']):
15550+ conf.fatal('suncc %r was not found' % cc)
15551+ except ValueError:
15552+ conf.fatal('suncc -flags could not be executed')
15553+
15554+ v['CC'] = cc
15555+ v['CC_NAME'] = 'sun'
15556+
15557+@conftest
15558+def scc_common_flags(conf):
15559+ v = conf.env
15560+
15561+ # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
15562+
15563+ v['CC_SRC_F'] = ''
15564+ v['CC_TGT_F'] = ['-c', '-o', '']
15565+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
15566+
15567+ # linker
15568+ if not v['LINK_CC']: v['LINK_CC'] = v['CC']
15569+ v['CCLNK_SRC_F'] = ''
15570+ v['CCLNK_TGT_F'] = ['-o', ''] # solaris hack, separate the -o from the target
15571+
15572+ v['LIB_ST'] = '-l%s' # template for adding libs
15573+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
15574+ v['STATICLIB_ST'] = '-l%s'
15575+ v['STATICLIBPATH_ST'] = '-L%s'
15576+ v['CCDEFINES_ST'] = '-D%s'
15577+
15578+ v['SONAME_ST'] = '-Wl,-h -Wl,%s'
15579+ v['SHLIB_MARKER'] = '-Bdynamic'
15580+ v['STATICLIB_MARKER'] = '-Bstatic'
15581+
15582+ # program
15583+ v['program_PATTERN'] = '%s'
15584+
15585+ # shared library
15586+ v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC']
15587+ v['shlib_LINKFLAGS'] = ['-G']
15588+ v['shlib_PATTERN'] = 'lib%s.so'
15589+
15590+ # static lib
15591+ v['staticlib_LINKFLAGS'] = ['-Bstatic']
15592+ v['staticlib_PATTERN'] = 'lib%s.a'
15593+
15594+detect = '''
15595+find_scc
15596+find_cpp
15597+find_ar
15598+scc_common_flags
15599+cc_load_tools
15600+cc_add_flags
15601+link_add_flags
15602+'''
15603diff --git a/buildtools/wafadmin/Tools/suncxx.py b/buildtools/wafadmin/Tools/suncxx.py
15604new file mode 100644
15605index 0000000..8754b6c
15606--- /dev/null
15607+++ b/buildtools/wafadmin/Tools/suncxx.py
15608@@ -0,0 +1,75 @@
15609+#!/usr/bin/env python
15610+# encoding: utf-8
15611+# Thomas Nagy, 2006 (ita)
15612+# Ralf Habacker, 2006 (rh)
15613+
15614+import os, optparse
15615+import Utils, Options, Configure
15616+import ccroot, ar
15617+from Configure import conftest
15618+
15619+@conftest
15620+def find_sxx(conf):
15621+ v = conf.env
15622+ cc = None
15623+ if v['CXX']: cc = v['CXX']
15624+ elif 'CXX' in conf.environ: cc = conf.environ['CXX']
15625+ if not cc: cc = conf.find_program('c++', var='CXX')
15626+ if not cc: conf.fatal('sunc++ was not found')
15627+ cc = conf.cmd_to_list(cc)
15628+
15629+ try:
15630+ if not Utils.cmd_output(cc + ['-flags']):
15631+ conf.fatal('sunc++ %r was not found' % cc)
15632+ except ValueError:
15633+ conf.fatal('sunc++ -flags could not be executed')
15634+
15635+ v['CXX'] = cc
15636+ v['CXX_NAME'] = 'sun'
15637+
15638+@conftest
15639+def sxx_common_flags(conf):
15640+ v = conf.env
15641+
15642+ # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
15643+
15644+ v['CXX_SRC_F'] = ''
15645+ v['CXX_TGT_F'] = ['-c', '-o', '']
15646+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
15647+
15648+ # linker
15649+ if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
15650+ v['CXXLNK_SRC_F'] = ''
15651+ v['CXXLNK_TGT_F'] = ['-o', ''] # solaris hack, separate the -o from the target
15652+
15653+ v['LIB_ST'] = '-l%s' # template for adding libs
15654+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
15655+ v['STATICLIB_ST'] = '-l%s'
15656+ v['STATICLIBPATH_ST'] = '-L%s'
15657+ v['CXXDEFINES_ST'] = '-D%s'
15658+
15659+ v['SONAME_ST'] = '-Wl,-h -Wl,%s'
15660+ v['SHLIB_MARKER'] = '-Bdynamic'
15661+ v['STATICLIB_MARKER'] = '-Bstatic'
15662+
15663+ # program
15664+ v['program_PATTERN'] = '%s'
15665+
15666+ # shared library
15667+ v['shlib_CXXFLAGS'] = ['-Kpic', '-DPIC']
15668+ v['shlib_LINKFLAGS'] = ['-G']
15669+ v['shlib_PATTERN'] = 'lib%s.so'
15670+
15671+ # static lib
15672+ v['staticlib_LINKFLAGS'] = ['-Bstatic']
15673+ v['staticlib_PATTERN'] = 'lib%s.a'
15674+
15675+detect = '''
15676+find_sxx
15677+find_cpp
15678+find_ar
15679+sxx_common_flags
15680+cxx_load_tools
15681+cxx_add_flags
15682+link_add_flags
15683+'''
15684diff --git a/buildtools/wafadmin/Tools/tex.py b/buildtools/wafadmin/Tools/tex.py
15685new file mode 100644
15686index 0000000..2dd748b
15687--- /dev/null
15688+++ b/buildtools/wafadmin/Tools/tex.py
15689@@ -0,0 +1,251 @@
15690+#!/usr/bin/env python
15691+# encoding: utf-8
15692+# Thomas Nagy, 2006 (ita)
15693+
15694+"TeX/LaTeX/PDFLaTeX support"
15695+
15696+import os, re
15697+import Utils, TaskGen, Task, Runner, Build
15698+from TaskGen import feature, before
15699+from Logs import error, warn, debug
15700+
15701+re_tex = re.compile(r'\\(?P<type>include|input|import|bringin|lstinputlisting){(?P<file>[^{}]*)}', re.M)
15702+def scan(self):
15703+ node = self.inputs[0]
15704+ env = self.env
15705+
15706+ nodes = []
15707+ names = []
15708+ if not node: return (nodes, names)
15709+
15710+ code = Utils.readf(node.abspath(env))
15711+
15712+ curdirnode = self.curdirnode
15713+ abs = curdirnode.abspath()
15714+ for match in re_tex.finditer(code):
15715+ path = match.group('file')
15716+ if path:
15717+ for k in ['', '.tex', '.ltx']:
15718+ # add another loop for the tex include paths?
15719+ debug('tex: trying %s%s' % (path, k))
15720+ try:
15721+ os.stat(abs+os.sep+path+k)
15722+ except OSError:
15723+ continue
15724+ found = path+k
15725+ node = curdirnode.find_resource(found)
15726+ if node:
15727+ nodes.append(node)
15728+ else:
15729+ debug('tex: could not find %s' % path)
15730+ names.append(path)
15731+
15732+ debug("tex: found the following : %s and names %s" % (nodes, names))
15733+ return (nodes, names)
15734+
15735+latex_fun, _ = Task.compile_fun('latex', '${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
15736+pdflatex_fun, _ = Task.compile_fun('pdflatex', '${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
15737+bibtex_fun, _ = Task.compile_fun('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
15738+makeindex_fun, _ = Task.compile_fun('makeindex', '${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
15739+
15740+g_bibtex_re = re.compile('bibdata', re.M)
15741+def tex_build(task, command='LATEX'):
15742+ env = task.env
15743+ bld = task.generator.bld
15744+
15745+ if not env['PROMPT_LATEX']:
15746+ env.append_value('LATEXFLAGS', '-interaction=batchmode')
15747+ env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
15748+
15749+ fun = latex_fun
15750+ if command == 'PDFLATEX':
15751+ fun = pdflatex_fun
15752+
15753+ node = task.inputs[0]
15754+ reldir = node.bld_dir(env)
15755+
15756+ #lst = []
15757+ #for c in Utils.split_path(reldir):
15758+ # if c: lst.append('..')
15759+ #srcfile = os.path.join(*(lst + [node.srcpath(env)]))
15760+ #sr2 = os.path.join(*(lst + [node.parent.srcpath(env)]))
15761+ srcfile = node.abspath(env)
15762+ sr2 = node.parent.abspath() + os.pathsep + node.parent.abspath(env) + os.pathsep
15763+
15764+ aux_node = node.change_ext('.aux')
15765+ idx_node = node.change_ext('.idx')
15766+
15767+ nm = aux_node.name
15768+ docuname = nm[ : len(nm) - 4 ] # 4 is the size of ".aux"
15769+
15770+ # important, set the cwd for everybody
15771+ task.cwd = task.inputs[0].parent.abspath(task.env)
15772+
15773+
15774+ warn('first pass on %s' % command)
15775+
15776+ task.env.env = {'TEXINPUTS': sr2}
15777+ task.env.SRCFILE = srcfile
15778+ ret = fun(task)
15779+ if ret:
15780+ return ret
15781+
15782+ # look in the .aux file if there is a bibfile to process
15783+ try:
15784+ ct = Utils.readf(aux_node.abspath(env))
15785+ except (OSError, IOError):
15786+ error('error bibtex scan')
15787+ else:
15788+ fo = g_bibtex_re.findall(ct)
15789+
15790+ # there is a .aux file to process
15791+ if fo:
15792+ warn('calling bibtex')
15793+
15794+ task.env.env = {'BIBINPUTS': sr2, 'BSTINPUTS': sr2}
15795+ task.env.SRCFILE = docuname
15796+ ret = bibtex_fun(task)
15797+ if ret:
15798+ error('error when calling bibtex %s' % docuname)
15799+ return ret
15800+
15801+ # look on the filesystem if there is a .idx file to process
15802+ try:
15803+ idx_path = idx_node.abspath(env)
15804+ os.stat(idx_path)
15805+ except OSError:
15806+ error('error file.idx scan')
15807+ else:
15808+ warn('calling makeindex')
15809+
15810+ task.env.SRCFILE = idx_node.name
15811+ task.env.env = {}
15812+ ret = makeindex_fun(task)
15813+ if ret:
15814+ error('error when calling makeindex %s' % idx_path)
15815+ return ret
15816+
15817+
15818+ hash = ''
15819+ i = 0
15820+ while i < 10:
15821+		# guard against infinite loops - one never knows
15822+ i += 1
15823+
15824+ # watch the contents of file.aux
15825+ prev_hash = hash
15826+ try:
15827+ hash = Utils.h_file(aux_node.abspath(env))
15828+		except (OSError, IOError):
15829+			error('could not read the aux file -> %s' % aux_node.abspath(env))
15830+			pass
15831+
15832+ # debug
15833+		#print "hash is, ", hash, " ", prev_hash
15834+
15835+ # stop if file.aux does not change anymore
15836+ if hash and hash == prev_hash:
15837+ break
15838+
15839+ # run the command
15840+ warn('calling %s' % command)
15841+
15842+ task.env.env = {'TEXINPUTS': sr2 + os.pathsep}
15843+ task.env.SRCFILE = srcfile
15844+ ret = fun(task)
15845+ if ret:
15846+			error('error when calling %s %s' % (command, srcfile))
15847+ return ret
15848+
15849+ return None # ok
15850+
15851+latex_vardeps = ['LATEX', 'LATEXFLAGS']
15852+def latex_build(task):
15853+ return tex_build(task, 'LATEX')
15854+
15855+pdflatex_vardeps = ['PDFLATEX', 'PDFLATEXFLAGS']
15856+def pdflatex_build(task):
15857+ return tex_build(task, 'PDFLATEX')
15858+
15859+class tex_taskgen(TaskGen.task_gen):
15860+ def __init__(self, *k, **kw):
15861+ TaskGen.task_gen.__init__(self, *k, **kw)
15862+
15863+@feature('tex')
15864+@before('apply_core')
15865+def apply_tex(self):
15866+ if not getattr(self, 'type', None) in ['latex', 'pdflatex']:
15867+ self.type = 'pdflatex'
15868+
15869+ tree = self.bld
15870+ outs = Utils.to_list(getattr(self, 'outs', []))
15871+
15872+ # prompt for incomplete files (else the batchmode is used)
15873+ self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1)
15874+
15875+ deps_lst = []
15876+
15877+ if getattr(self, 'deps', None):
15878+ deps = self.to_list(self.deps)
15879+ for filename in deps:
15880+ n = self.path.find_resource(filename)
15881+ if not n in deps_lst: deps_lst.append(n)
15882+
15883+ self.source = self.to_list(self.source)
15884+ for filename in self.source:
15885+ base, ext = os.path.splitext(filename)
15886+
15887+ node = self.path.find_resource(filename)
15888+ if not node: raise Utils.WafError('cannot find %s' % filename)
15889+
15890+ if self.type == 'latex':
15891+ task = self.create_task('latex', node, node.change_ext('.dvi'))
15892+ elif self.type == 'pdflatex':
15893+ task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
15894+
15895+ task.env = self.env
15896+ task.curdirnode = self.path
15897+
15898+ # add the manual dependencies
15899+ if deps_lst:
15900+ variant = node.variant(self.env)
15901+ try:
15902+ lst = tree.node_deps[task.unique_id()]
15903+ for n in deps_lst:
15904+ if not n in lst:
15905+ lst.append(n)
15906+ except KeyError:
15907+ tree.node_deps[task.unique_id()] = deps_lst
15908+
15909+ if self.type == 'latex':
15910+ if 'ps' in outs:
15911+ tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
15912+ tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)}
15913+ if 'pdf' in outs:
15914+ tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
15915+ tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)}
15916+ elif self.type == 'pdflatex':
15917+ if 'ps' in outs:
15918+ self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
15919+ self.source = []
15920+
15921+def detect(conf):
15922+ v = conf.env
15923+ for p in 'tex latex pdflatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
15924+ conf.find_program(p, var=p.upper())
15925+ v[p.upper()+'FLAGS'] = ''
15926+ v['DVIPSFLAGS'] = '-Ppdf'
15927+
15928+b = Task.simple_task_type
15929+b('tex', '${TEX} ${TEXFLAGS} ${SRC}', color='BLUE', shell=False) # not used anywhere
15930+b('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRC}', color='BLUE', shell=False) # not used anywhere
15931+b('dvips', '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
15932+b('dvipdf', '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
15933+b('pdf2ps', '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}', color='BLUE', after="dvipdf pdflatex", shell=False)
15934+
15935+b = Task.task_type_from_func
15936+cls = b('latex', latex_build, vars=latex_vardeps)
15937+cls.scan = scan
15938+cls = b('pdflatex', pdflatex_build, vars=pdflatex_vardeps)
15939+cls.scan = scan
15940+
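# A minimal usage sketch for the tex tool above, assuming a conventional waf 1.5
# wscript; the file names and the configure/build layout are only illustrative:
#
#   def configure(conf):
#       conf.check_tool('tex')        # finds latex, pdflatex, bibtex, makeindex, ...
#
#   def build(bld):
#       bld(features='tex',
#           type='pdflatex',          # or 'latex'; 'pdflatex' is the default
#           source='report.tex',
#           outs='ps',                # post-process with pdf2ps (dvips/dvipdf for 'latex')
#           deps='chapter1.tex',      # manual dependencies picked up by apply_tex
#           prompt=0)                 # 0 runs the compiler in batchmode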
15941diff --git a/buildtools/wafadmin/Tools/unittestw.py b/buildtools/wafadmin/Tools/unittestw.py
15942new file mode 100644
15943index 0000000..0e30a51
15944--- /dev/null
15945+++ b/buildtools/wafadmin/Tools/unittestw.py
15946@@ -0,0 +1,310 @@
15947+#!/usr/bin/env python
15948+# encoding: utf-8
15949+# Carlos Rafael Giani, 2006
15950+
15951+"""
15952+Unit tests run in the shutdown() method, and for c/c++ programs
15953+
15954+The programs under test should NOT require command-line parameters in order to run
15955+
15956+In the shutdown method, add the following code:
15957+
15958+ >>> def shutdown():
15959+ ... ut = UnitTest.unit_test()
15960+ ... ut.run()
15961+ ... ut.print_results()
15962+
15963+
15964+Each object to use as a unit test must be a program and must have X{obj.unit_test=1}
15965+"""
15966+import os, sys
15967+import Build, TaskGen, Utils, Options, Logs, Task
15968+from TaskGen import before, after, feature
15969+from Constants import *
15970+
15971+class unit_test(object):
15972+ "Unit test representation"
15973+ def __init__(self):
15974+ self.returncode_ok = 0 # Unit test returncode considered OK. All returncodes differing from this one
15975+ # will cause the unit test to be marked as "FAILED".
15976+
15977+ # The following variables are filled with data by run().
15978+
15979+ # print_results() uses these for printing the unit test summary,
15980+ # but if there is need for direct access to the results,
15981+ # they can be retrieved here, after calling run().
15982+
15983+ self.num_tests_ok = 0 # Number of successful unit tests
15984+ self.num_tests_failed = 0 # Number of failed unit tests
15985+ self.num_tests_err = 0 # Tests that have not even run
15986+ self.total_num_tests = 0 # Total amount of unit tests
15987+ self.max_label_length = 0 # Maximum label length (pretty-print the output)
15988+
15989+ self.unit_tests = Utils.ordered_dict() # Unit test dictionary. Key: the label (unit test filename relative
15990+ # to the build dir), value: unit test filename with absolute path
15991+ self.unit_test_results = {} # Dictionary containing the unit test results.
15992+ # Key: the label, value: result (true = success false = failure)
15993+ self.unit_test_erroneous = {} # Dictionary indicating erroneous unit tests.
15994+ # Key: the label, value: true = unit test has an error false = unit test is ok
15995+ self.change_to_testfile_dir = False #True if the test file needs to be executed from the same dir
15996+ self.want_to_see_test_output = False #True to see the stdout from the testfile (for example check suites)
15997+ self.want_to_see_test_error = False #True to see the stderr from the testfile (for example check suites)
15998+ self.run_if_waf_does = 'check' #build was the old default
15999+
16000+ def run(self):
16001+ "Run the unit tests and gather results (note: no output here)"
16002+
16003+ self.num_tests_ok = 0
16004+ self.num_tests_failed = 0
16005+ self.num_tests_err = 0
16006+ self.total_num_tests = 0
16007+ self.max_label_length = 0
16008+
16009+ self.unit_tests = Utils.ordered_dict()
16010+ self.unit_test_results = {}
16011+ self.unit_test_erroneous = {}
16012+
16013+ ld_library_path = []
16014+
16015+ # If waf is not building, don't run anything
16016+ if not Options.commands[self.run_if_waf_does]: return
16017+
16018+ # Get the paths for the shared libraries, and obtain the unit tests to execute
16019+ for obj in Build.bld.all_task_gen:
16020+ try:
16021+ link_task = obj.link_task
16022+ except AttributeError:
16023+ pass
16024+ else:
16025+ lib_path = link_task.outputs[0].parent.abspath(obj.env)
16026+ if lib_path not in ld_library_path:
16027+ ld_library_path.append(lib_path)
16028+
16029+ unit_test = getattr(obj, 'unit_test', '')
16030+ if unit_test and 'cprogram' in obj.features:
16031+ try:
16032+ output = obj.path
16033+ filename = os.path.join(output.abspath(obj.env), obj.target)
16034+ srcdir = output.abspath()
16035+ label = os.path.join(output.bldpath(obj.env), obj.target)
16036+ self.max_label_length = max(self.max_label_length, len(label))
16037+ self.unit_tests[label] = (filename, srcdir)
16038+ except KeyError:
16039+ pass
16040+ self.total_num_tests = len(self.unit_tests)
16041+ # Now run the unit tests
16042+ Utils.pprint('GREEN', 'Running the unit tests')
16043+ count = 0
16044+ result = 1
16045+
16046+ for label in self.unit_tests.allkeys:
16047+ file_and_src = self.unit_tests[label]
16048+ filename = file_and_src[0]
16049+ srcdir = file_and_src[1]
16050+ count += 1
16051+ line = Build.bld.progress_line(count, self.total_num_tests, Logs.colors.GREEN, Logs.colors.NORMAL)
16052+ if Options.options.progress_bar and line:
16053+ sys.stderr.write(line)
16054+ sys.stderr.flush()
16055+ try:
16056+ kwargs = {}
16057+ kwargs['env'] = os.environ.copy()
16058+ if self.change_to_testfile_dir:
16059+ kwargs['cwd'] = srcdir
16060+ if not self.want_to_see_test_output:
16061+ kwargs['stdout'] = Utils.pproc.PIPE # PIPE for ignoring output
16062+ if not self.want_to_see_test_error:
16063+ kwargs['stderr'] = Utils.pproc.PIPE # PIPE for ignoring output
16064+ if ld_library_path:
16065+ v = kwargs['env']
16066+ def add_path(dct, path, var):
16067+ dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
16068+ if sys.platform == 'win32':
16069+ add_path(v, ld_library_path, 'PATH')
16070+ elif sys.platform == 'darwin':
16071+ add_path(v, ld_library_path, 'DYLD_LIBRARY_PATH')
16072+ add_path(v, ld_library_path, 'LD_LIBRARY_PATH')
16073+ else:
16074+ add_path(v, ld_library_path, 'LD_LIBRARY_PATH')
16075+
16076+ pp = Utils.pproc.Popen(filename, **kwargs)
16077+ (out, err) = pp.communicate() # uh, and the output is ignored?? - fortunately this is going to disappear
16078+
16079+ result = int(pp.returncode == self.returncode_ok)
16080+
16081+ if result:
16082+ self.num_tests_ok += 1
16083+ else:
16084+ self.num_tests_failed += 1
16085+
16086+ self.unit_test_results[label] = result
16087+ self.unit_test_erroneous[label] = 0
16088+ except OSError:
16089+ self.unit_test_erroneous[label] = 1
16090+ self.num_tests_err += 1
16091+ except KeyboardInterrupt:
16092+ pass
16093+ if Options.options.progress_bar: sys.stdout.write(Logs.colors.cursor_on)
16094+
16095+ def print_results(self):
16096+ "Pretty-prints a summary of all unit tests, along with some statistics"
16097+
16098+ # If waf is not building, don't output anything
16099+ if not Options.commands[self.run_if_waf_does]: return
16100+
16101+ p = Utils.pprint
16102+ # Early quit if no tests were performed
16103+ if self.total_num_tests == 0:
16104+ p('YELLOW', 'No unit tests present')
16105+ return
16106+
16107+ for label in self.unit_tests.allkeys:
16108+ filename = self.unit_tests[label]
16109+ err = 0
16110+ result = 0
16111+
16112+ try: err = self.unit_test_erroneous[label]
16113+ except KeyError: pass
16114+
16115+ try: result = self.unit_test_results[label]
16116+ except KeyError: pass
16117+
16118+ n = self.max_label_length - len(label)
16119+ if err: n += 4
16120+ elif result: n += 7
16121+ else: n += 3
16122+
16123+ line = '%s %s' % (label, '.' * n)
16124+
16125+ if err: p('RED', '%sERROR' % line)
16126+ elif result: p('GREEN', '%sOK' % line)
16127+ else: p('YELLOW', '%sFAILED' % line)
16128+
16129+ percentage_ok = float(self.num_tests_ok) / float(self.total_num_tests) * 100.0
16130+ percentage_failed = float(self.num_tests_failed) / float(self.total_num_tests) * 100.0
16131+ percentage_erroneous = float(self.num_tests_err) / float(self.total_num_tests) * 100.0
16132+
16133+ p('NORMAL', '''
16134+Successful tests: %i (%.1f%%)
16135+Failed tests: %i (%.1f%%)
16136+Erroneous tests: %i (%.1f%%)
16137+
16138+Total number of tests: %i
16139+''' % (self.num_tests_ok, percentage_ok, self.num_tests_failed, percentage_failed,
16140+ self.num_tests_err, percentage_erroneous, self.total_num_tests))
16141+ p('GREEN', 'Unit tests finished')
16142+
16143+
16144+############################################################################################
16145+
16146+"""
16147+New unit test system
16148+
16149+The targets with feature 'test' are executed after they are built
16150+bld(features='cprogram cc test', ...)
16151+
16152+To display the results:
16153+import UnitTest
16154+bld.add_post_fun(UnitTest.summary)
16155+"""
16156+
16157+import threading
16158+testlock = threading.Lock()
16159+
16160+def set_options(opt):
16161+ opt.add_option('--alltests', action='store_true', default=True, help='Exec all unit tests', dest='all_tests')
16162+
16163+@feature('test')
16164+@after('apply_link', 'vars_target_cprogram')
16165+def make_test(self):
16166+ if not 'cprogram' in self.features:
16167+ Logs.error('test cannot be executed %s' % self)
16168+ return
16169+
16170+ self.default_install_path = None
16171+ self.create_task('utest', self.link_task.outputs)
16172+
16173+def exec_test(self):
16174+
16175+ status = 0
16176+
16177+ variant = self.env.variant()
16178+
16179+ filename = self.inputs[0].abspath(self.env)
16180+ self.ut_exec = getattr(self, 'ut_exec', [filename])
16181+ if getattr(self.generator, 'ut_fun', None):
16182+ self.generator.ut_fun(self)
16183+
16184+ try:
16185+ fu = getattr(self.generator.bld, 'all_test_paths')
16186+ except AttributeError:
16187+ fu = os.environ.copy()
16188+ self.generator.bld.all_test_paths = fu
16189+
16190+ lst = []
16191+ for obj in self.generator.bld.all_task_gen:
16192+ link_task = getattr(obj, 'link_task', None)
16193+ if link_task and link_task.env.variant() == variant:
16194+ lst.append(link_task.outputs[0].parent.abspath(obj.env))
16195+
16196+ def add_path(dct, path, var):
16197+ dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
16198+
16199+ if sys.platform == 'win32':
16200+ add_path(fu, lst, 'PATH')
16201+ elif sys.platform == 'darwin':
16202+ add_path(fu, lst, 'DYLD_LIBRARY_PATH')
16203+ add_path(fu, lst, 'LD_LIBRARY_PATH')
16204+ else:
16205+ add_path(fu, lst, 'LD_LIBRARY_PATH')
16206+
16207+
16208+ cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath(self.env)
16209+ proc = Utils.pproc.Popen(self.ut_exec, cwd=cwd, env=fu, stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE)
16210+ (stdout, stderr) = proc.communicate()
16211+
16212+ tup = (filename, proc.returncode, stdout, stderr)
16213+ self.generator.utest_result = tup
16214+
16215+ testlock.acquire()
16216+ try:
16217+ bld = self.generator.bld
16218+ Logs.debug("ut: %r", tup)
16219+ try:
16220+ bld.utest_results.append(tup)
16221+ except AttributeError:
16222+ bld.utest_results = [tup]
16223+ finally:
16224+ testlock.release()
16225+
16226+cls = Task.task_type_from_func('utest', func=exec_test, color='PINK', ext_in='.bin')
16227+
16228+old = cls.runnable_status
16229+def test_status(self):
16230+ ret = old(self)
16231+ if ret == SKIP_ME and getattr(Options.options, 'all_tests', False):
16232+ return RUN_ME
16233+ return ret
16234+
16235+cls.runnable_status = test_status
16236+cls.quiet = 1
16237+
16238+def summary(bld):
16239+ lst = getattr(bld, 'utest_results', [])
16240+ if lst:
16241+ Utils.pprint('CYAN', 'execution summary')
16242+
16243+ total = len(lst)
16244+ tfail = len([x for x in lst if x[1]])
16245+
16246+ Utils.pprint('CYAN', ' tests that pass %d/%d' % (total-tfail, total))
16247+ for (f, code, out, err) in lst:
16248+ if not code:
16249+ Utils.pprint('CYAN', ' %s' % f)
16250+
16251+ Utils.pprint('CYAN', ' tests that fail %d/%d' % (tfail, total))
16252+ for (f, code, out, err) in lst:
16253+ if code:
16254+ Utils.pprint('CYAN', ' %s' % f)
16255+
16256+
16257diff --git a/buildtools/wafadmin/Tools/vala.py b/buildtools/wafadmin/Tools/vala.py
16258new file mode 100644
16259index 0000000..753ee8d
16260--- /dev/null
16261+++ b/buildtools/wafadmin/Tools/vala.py
16262@@ -0,0 +1,308 @@
16263+#!/usr/bin/env python
16264+# encoding: utf-8
16265+# Ali Sabil, 2007
16266+
16267+import os.path, shutil
16268+import Task, Runner, Utils, Logs, Build, Node, Options
16269+from TaskGen import extension, after, before
16270+
16271+EXT_VALA = ['.vala', '.gs']
16272+
16273+class valac_task(Task.Task):
16274+
16275+ vars = ("VALAC", "VALAC_VERSION", "VALAFLAGS")
16276+ before = ("cc", "cxx")
16277+
16278+ def run(self):
16279+ env = self.env
16280+ inputs = [a.srcpath(env) for a in self.inputs]
16281+ valac = env['VALAC']
16282+ vala_flags = env.get_flat('VALAFLAGS')
16283+ top_src = self.generator.bld.srcnode.abspath()
16284+ top_bld = self.generator.bld.srcnode.abspath(env)
16285+
16286+ if env['VALAC_VERSION'] > (0, 1, 6):
16287+ cmd = [valac, '-C', '--quiet', vala_flags]
16288+ else:
16289+ cmd = [valac, '-C', vala_flags]
16290+
16291+ if self.threading:
16292+ cmd.append('--thread')
16293+
16294+ if self.profile:
16295+ cmd.append('--profile=%s' % self.profile)
16296+
16297+ if self.target_glib:
16298+ cmd.append('--target-glib=%s' % self.target_glib)
16299+
16300+ features = self.generator.features
16301+
16302+ if 'cshlib' in features or 'cstaticlib' in features:
16303+ output_dir = self.outputs[0].bld_dir(env)
16304+ cmd.append('--library ' + self.target)
16305+ if env['VALAC_VERSION'] >= (0, 7, 0):
16306+ for x in self.outputs:
16307+ if x.name.endswith('.h'):
16308+ cmd.append('--header ' + x.bldpath(self.env))
16309+ cmd.append('--basedir ' + top_src)
16310+ cmd.append('-d ' + top_bld)
16311+ if env['VALAC_VERSION'] > (0, 7, 2) and hasattr(self, 'gir'):
16312+ cmd.append('--gir=%s.gir' % self.gir)
16313+
16314+ else:
16315+ output_dir = self.outputs[0].bld_dir(env)
16316+ cmd.append('-d %s' % output_dir)
16317+
16318+ for vapi_dir in self.vapi_dirs:
16319+ cmd.append('--vapidir=%s' % vapi_dir)
16320+
16321+ for package in self.packages:
16322+ cmd.append('--pkg %s' % package)
16323+
16324+ for package in self.packages_private:
16325+ cmd.append('--pkg %s' % package)
16326+
16327+ cmd.append(" ".join(inputs))
16328+ result = self.generator.bld.exec_command(" ".join(cmd))
16329+
16330+ if not 'cprogram' in features:
16331+ # generate the .deps file
16332+ if self.packages:
16333+ filename = os.path.join(self.generator.path.abspath(env), "%s.deps" % self.target)
16334+ deps = open(filename, 'w')
16335+ for package in self.packages:
16336+ deps.write(package + '\n')
16337+ deps.close()
16338+
16339+ # handle vala 0.1.6 who doesn't honor --directory for the generated .vapi
16340+ self._fix_output("../%s.vapi" % self.target)
16341+			# handle vala >= 0.1.7 which has a weird definition for --directory
16342+ self._fix_output("%s.vapi" % self.target)
16343+ # handle vala >= 0.2.0 who doesn't honor --directory for the generated .gidl
16344+ self._fix_output("%s.gidl" % self.target)
16345+ # handle vala >= 0.3.6 who doesn't honor --directory for the generated .gir
16346+ self._fix_output("%s.gir" % self.target)
16347+ if hasattr(self, 'gir'):
16348+ self._fix_output("%s.gir" % self.gir)
16349+
16350+ first = None
16351+ for node in self.outputs:
16352+ if not first:
16353+ first = node
16354+ else:
16355+ if first.parent.id != node.parent.id:
16356+ # issue #483
16357+ if env['VALAC_VERSION'] < (0, 7, 0):
16358+ shutil.move(first.parent.abspath(self.env) + os.sep + node.name, node.abspath(self.env))
16359+ return result
16360+
16361+ def install(self):
16362+ bld = self.generator.bld
16363+ features = self.generator.features
16364+
16365+ if self.attr("install_path") and ("cshlib" in features or "cstaticlib" in features):
16366+ headers_list = [o for o in self.outputs if o.suffix() == ".h"]
16367+ vapi_list = [o for o in self.outputs if (o.suffix() in (".vapi", ".deps"))]
16368+ gir_list = [o for o in self.outputs if o.suffix() == ".gir"]
16369+
16370+ for header in headers_list:
16371+ top_src = self.generator.bld.srcnode
16372+ package = self.env['PACKAGE']
16373+ try:
16374+ api_version = Utils.g_module.API_VERSION
16375+ except AttributeError:
16376+ version = Utils.g_module.VERSION.split(".")
16377+ if version[0] == "0":
16378+ api_version = "0." + version[1]
16379+ else:
16380+ api_version = version[0] + ".0"
16381+ install_path = '${INCLUDEDIR}/%s-%s/%s' % (package, api_version, header.relpath_gen(top_src))
16382+ bld.install_as(install_path, header, self.env)
16383+ bld.install_files('${DATAROOTDIR}/vala/vapi', vapi_list, self.env)
16384+ bld.install_files('${DATAROOTDIR}/gir-1.0', gir_list, self.env)
16385+
16386+ def _fix_output(self, output):
16387+ top_bld = self.generator.bld.srcnode.abspath(self.env)
16388+ try:
16389+ src = os.path.join(top_bld, output)
16390+ dst = self.generator.path.abspath (self.env)
16391+ shutil.move(src, dst)
16392+ except:
16393+ pass
16394+
16395+@extension(EXT_VALA)
16396+def vala_file(self, node):
16397+ valatask = getattr(self, "valatask", None)
16398+ # there is only one vala task and it compiles all vala files .. :-/
16399+ if not valatask:
16400+ valatask = self.create_task('valac')
16401+ self.valatask = valatask
16402+ self.includes = Utils.to_list(getattr(self, 'includes', []))
16403+ self.uselib = self.to_list(self.uselib)
16404+ valatask.packages = []
16405+ valatask.packages_private = Utils.to_list(getattr(self, 'packages_private', []))
16406+ valatask.vapi_dirs = []
16407+ valatask.target = self.target
16408+ valatask.threading = False
16409+ valatask.install_path = self.install_path
16410+ valatask.profile = getattr (self, 'profile', 'gobject')
16411+ valatask.target_glib = None #Deprecated
16412+
16413+ packages = Utils.to_list(getattr(self, 'packages', []))
16414+ vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
16415+ includes = []
16416+
16417+ if hasattr(self, 'uselib_local'):
16418+ local_packages = Utils.to_list(self.uselib_local)
16419+ seen = []
16420+ while len(local_packages) > 0:
16421+ package = local_packages.pop()
16422+ if package in seen:
16423+ continue
16424+ seen.append(package)
16425+
16426+ # check if the package exists
16427+ package_obj = self.name_to_obj(package)
16428+ if not package_obj:
16429+ raise Utils.WafError("object '%s' was not found in uselib_local (required by '%s')" % (package, self.name))
16430+
16431+ package_name = package_obj.target
16432+ package_node = package_obj.path
16433+ package_dir = package_node.relpath_gen(self.path)
16434+
16435+ for task in package_obj.tasks:
16436+ for output in task.outputs:
16437+ if output.name == package_name + ".vapi":
16438+ valatask.set_run_after(task)
16439+ if package_name not in packages:
16440+ packages.append(package_name)
16441+ if package_dir not in vapi_dirs:
16442+ vapi_dirs.append(package_dir)
16443+ if package_dir not in includes:
16444+ includes.append(package_dir)
16445+
16446+ if hasattr(package_obj, 'uselib_local'):
16447+ lst = self.to_list(package_obj.uselib_local)
16448+ lst.reverse()
16449+ local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages
16450+
16451+ valatask.packages = packages
16452+ for vapi_dir in vapi_dirs:
16453+ try:
16454+ valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
16455+ valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath(self.env))
16456+ except AttributeError:
16457+ Logs.warn("Unable to locate Vala API directory: '%s'" % vapi_dir)
16458+
16459+ self.includes.append(node.bld.srcnode.abspath())
16460+ self.includes.append(node.bld.srcnode.abspath(self.env))
16461+ for include in includes:
16462+ try:
16463+ self.includes.append(self.path.find_dir(include).abspath())
16464+ self.includes.append(self.path.find_dir(include).abspath(self.env))
16465+ except AttributeError:
16466+ Logs.warn("Unable to locate include directory: '%s'" % include)
16467+
16468+ if valatask.profile == 'gobject':
16469+ if hasattr(self, 'target_glib'):
16470+			Logs.warn ('target_glib on vala tasks is deprecated, use --vala-target-glib=MAJOR.MINOR from the vala tool options')
16471+
16472+ if getattr(Options.options, 'vala_target_glib', None):
16473+ valatask.target_glib = Options.options.vala_target_glib
16474+
16475+ if not 'GOBJECT' in self.uselib:
16476+ self.uselib.append('GOBJECT')
16477+
16478+ if hasattr(self, 'threading'):
16479+ if valatask.profile == 'gobject':
16480+ valatask.threading = self.threading
16481+ if not 'GTHREAD' in self.uselib:
16482+ self.uselib.append('GTHREAD')
16483+ else:
16484+ #Vala doesn't have threading support for dova nor posix
16485+ Logs.warn("Profile %s does not have threading support" % valatask.profile)
16486+
16487+ if hasattr(self, 'gir'):
16488+ valatask.gir = self.gir
16489+
16490+ env = valatask.env
16491+
16492+ output_nodes = []
16493+
16494+ c_node = node.change_ext('.c')
16495+ output_nodes.append(c_node)
16496+ self.allnodes.append(c_node)
16497+
16498+ if env['VALAC_VERSION'] < (0, 7, 0):
16499+ output_nodes.append(node.change_ext('.h'))
16500+ else:
16501+ if not 'cprogram' in self.features:
16502+ output_nodes.append(self.path.find_or_declare('%s.h' % self.target))
16503+
16504+ if not 'cprogram' in self.features:
16505+ output_nodes.append(self.path.find_or_declare('%s.vapi' % self.target))
16506+ if env['VALAC_VERSION'] > (0, 7, 2):
16507+ if hasattr(self, 'gir'):
16508+ output_nodes.append(self.path.find_or_declare('%s.gir' % self.gir))
16509+ elif env['VALAC_VERSION'] > (0, 3, 5):
16510+ output_nodes.append(self.path.find_or_declare('%s.gir' % self.target))
16511+ elif env['VALAC_VERSION'] > (0, 1, 7):
16512+ output_nodes.append(self.path.find_or_declare('%s.gidl' % self.target))
16513+ if valatask.packages:
16514+ output_nodes.append(self.path.find_or_declare('%s.deps' % self.target))
16515+
16516+ valatask.inputs.append(node)
16517+ valatask.outputs.extend(output_nodes)
16518+
16519+def detect(conf):
16520+ min_version = (0, 1, 6)
16521+ min_version_str = "%d.%d.%d" % min_version
16522+
16523+ valac = conf.find_program('valac', var='VALAC', mandatory=True)
16524+
16525+ if not conf.env["HAVE_GOBJECT"]:
16526+ pkg_args = {'package': 'gobject-2.0',
16527+ 'uselib_store': 'GOBJECT',
16528+ 'args': '--cflags --libs'}
16529+ if getattr(Options.options, 'vala_target_glib', None):
16530+ pkg_args['atleast_version'] = Options.options.vala_target_glib
16531+
16532+ conf.check_cfg(**pkg_args)
16533+
16534+ if not conf.env["HAVE_GTHREAD"]:
16535+ pkg_args = {'package': 'gthread-2.0',
16536+ 'uselib_store': 'GTHREAD',
16537+ 'args': '--cflags --libs'}
16538+ if getattr(Options.options, 'vala_target_glib', None):
16539+ pkg_args['atleast_version'] = Options.options.vala_target_glib
16540+
16541+ conf.check_cfg(**pkg_args)
16542+
16543+ try:
16544+ output = Utils.cmd_output(valac + " --version", silent=True)
16545+ version = output.split(' ', 1)[-1].strip().split(".")[0:3]
16546+ version = [int(x) for x in version]
16547+ valac_version = tuple(version)
16548+ except Exception:
16549+ valac_version = (0, 0, 0)
16550+
16551+ conf.check_message('program version',
16552+ 'valac >= ' + min_version_str,
16553+ valac_version >= min_version,
16554+ "%d.%d.%d" % valac_version)
16555+
16556+ conf.check_tool('gnu_dirs')
16557+
16558+ if valac_version < min_version:
16559+ conf.fatal("valac version too old to be used with this tool")
16560+ return
16561+
16562+ conf.env['VALAC_VERSION'] = valac_version
16563+ conf.env['VALAFLAGS'] = ''
16564+
16565+def set_options (opt):
16566+ valaopts = opt.add_option_group('Vala Compiler Options')
16567+ valaopts.add_option ('--vala-target-glib', default=None,
16568+ dest='vala_target_glib', metavar='MAJOR.MINOR',
16569+ help='Target version of glib for Vala GObject code generation')
16570+
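# A minimal usage sketch for the vala tool above, assuming a conventional waf 1.5
# wscript; the source, target and package names are placeholders:
#
#   def configure(conf):
#       conf.check_tool('vala')
#
#   def build(bld):
#       bld(features='cc cshlib',
#           source='foo.vala',
#           target='foo',
#           packages='gtk+-2.0',   # passed to valac as --pkg
#           vapi_dirs='vapi',      # passed as --vapidir
#           threading=True,        # adds --thread and the GTHREAD uselib
#           gir='Foo-1.0')         # emits a .gir with valac >= 0.7.2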
16571diff --git a/buildtools/wafadmin/Tools/winres.py b/buildtools/wafadmin/Tools/winres.py
16572new file mode 100644
16573index 0000000..2500d43
16574--- /dev/null
16575+++ b/buildtools/wafadmin/Tools/winres.py
16576@@ -0,0 +1,45 @@
16577+#!/usr/bin/env python
16578+# encoding: utf-8
16579+# Brant Young, 2007
16580+
16581+"This hook is called when the class cpp/cc task generator encounters a '.rc' file: X{.rc -> [.res|.rc.o]}"
16582+
16583+import os, sys, re
16584+import TaskGen, Task
16585+from Utils import quote_whitespace
16586+from TaskGen import extension
16587+
16588+EXT_WINRC = ['.rc']
16589+
16590+winrc_str = '${WINRC} ${_CPPDEFFLAGS} ${_CCDEFFLAGS} ${WINRCFLAGS} ${_CPPINCFLAGS} ${_CCINCFLAGS} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
16591+
16592+@extension(EXT_WINRC)
16593+def rc_file(self, node):
16594+ obj_ext = '.rc.o'
16595+ if self.env['WINRC_TGT_F'] == '/fo': obj_ext = '.res'
16596+
16597+ rctask = self.create_task('winrc', node, node.change_ext(obj_ext))
16598+ self.compiled_tasks.append(rctask)
16599+
16600+# create our action, for use with rc file
16601+Task.simple_task_type('winrc', winrc_str, color='BLUE', before='cc cxx', shell=False)
16602+
16603+def detect(conf):
16604+ v = conf.env
16605+
16606+ winrc = v['WINRC']
16607+ v['WINRC_TGT_F'] = '-o'
16608+ v['WINRC_SRC_F'] = '-i'
16609+ # find rc.exe
16610+ if not winrc:
16611+ if v['CC_NAME'] in ['gcc', 'cc', 'g++', 'c++']:
16612+ winrc = conf.find_program('windres', var='WINRC', path_list = v['PATH'])
16613+ elif v['CC_NAME'] == 'msvc':
16614+ winrc = conf.find_program('RC', var='WINRC', path_list = v['PATH'])
16615+ v['WINRC_TGT_F'] = '/fo'
16616+ v['WINRC_SRC_F'] = ''
16617+ if not winrc:
16618+ conf.fatal('winrc was not found!')
16619+
16620+ v['WINRCFLAGS'] = ''
16621+
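# A small usage sketch for the winres hook above; listing a .rc file among the
# sources of a cc/cxx target is enough to trigger it (names are placeholders):
#
#   def configure(conf):
#       conf.check_tool('winres')
#
#   def build(bld):
#       bld(features='cc cprogram',
#           source='main.c resources.rc',
#           target='myapp')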
16622diff --git a/buildtools/wafadmin/Tools/xlc.py b/buildtools/wafadmin/Tools/xlc.py
16623new file mode 100644
16624index 0000000..e33b7a1
16625--- /dev/null
16626+++ b/buildtools/wafadmin/Tools/xlc.py
16627@@ -0,0 +1,78 @@
16628+#!/usr/bin/env python
16629+# encoding: utf-8
16630+# Thomas Nagy, 2006-2008 (ita)
16631+# Ralf Habacker, 2006 (rh)
16632+# Yinon Ehrlich, 2009
16633+# Michael Kuhn, 2009
16634+
16635+import os, sys
16636+import Configure, Options, Utils
16637+import ccroot, ar
16638+from Configure import conftest
16639+
16640+@conftest
16641+def find_xlc(conf):
16642+ cc = conf.find_program(['xlc_r', 'xlc'], var='CC', mandatory=True)
16643+ cc = conf.cmd_to_list(cc)
16644+ conf.env.CC_NAME = 'xlc'
16645+ conf.env.CC = cc
16646+
16647+@conftest
16648+def find_cpp(conf):
16649+ v = conf.env
16650+ cpp = None
16651+ if v['CPP']: cpp = v['CPP']
16652+ elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
16653+ #if not cpp: cpp = v['CC']
16654+ v['CPP'] = cpp
16655+
16656+@conftest
16657+def xlc_common_flags(conf):
16658+ v = conf.env
16659+
16660+ # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
16661+ v['CCFLAGS_DEBUG'] = ['-g']
16662+ v['CCFLAGS_RELEASE'] = ['-O2']
16663+
16664+ v['CC_SRC_F'] = ''
16665+ v['CC_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
16666+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
16667+
16668+ # linker
16669+ if not v['LINK_CC']: v['LINK_CC'] = v['CC']
16670+ v['CCLNK_SRC_F'] = ''
16671+ v['CCLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
16672+
16673+ v['LIB_ST'] = '-l%s' # template for adding libs
16674+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
16675+ v['STATICLIB_ST'] = '-l%s'
16676+ v['STATICLIBPATH_ST'] = '-L%s'
16677+ v['RPATH_ST'] = '-Wl,-rpath,%s'
16678+ v['CCDEFINES_ST'] = '-D%s'
16679+
16680+ v['SONAME_ST'] = ''
16681+ v['SHLIB_MARKER'] = ''
16682+ v['STATICLIB_MARKER'] = ''
16683+ v['FULLSTATIC_MARKER'] = '-static'
16684+
16685+ # program
16686+ v['program_LINKFLAGS'] = ['-Wl,-brtl']
16687+ v['program_PATTERN'] = '%s'
16688+
16689+ # shared library
16690+	v['shlib_CCFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
16691+ v['shlib_LINKFLAGS'] = ['-G', '-Wl,-brtl,-bexpfull']
16692+ v['shlib_PATTERN'] = 'lib%s.so'
16693+
16694+ # static lib
16695+ v['staticlib_LINKFLAGS'] = ''
16696+ v['staticlib_PATTERN'] = 'lib%s.a'
16697+
16698+def detect(conf):
16699+ conf.find_xlc()
16700+ conf.find_cpp()
16701+ conf.find_ar()
16702+ conf.xlc_common_flags()
16703+ conf.cc_load_tools()
16704+ conf.cc_add_flags()
16705+ conf.link_add_flags()
16706diff --git a/buildtools/wafadmin/Tools/xlcxx.py b/buildtools/wafadmin/Tools/xlcxx.py
16707new file mode 100644
16708index 0000000..6e84662
16709--- /dev/null
16710+++ b/buildtools/wafadmin/Tools/xlcxx.py
16711@@ -0,0 +1,78 @@
16712+#!/usr/bin/env python
16713+# encoding: utf-8
16714+# Thomas Nagy, 2006 (ita)
16715+# Ralf Habacker, 2006 (rh)
16716+# Yinon Ehrlich, 2009
16717+# Michael Kuhn, 2009
16718+
16719+import os, sys
16720+import Configure, Options, Utils
16721+import ccroot, ar
16722+from Configure import conftest
16723+
16724+@conftest
16725+def find_xlcxx(conf):
16726+ cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX', mandatory=True)
16727+ cxx = conf.cmd_to_list(cxx)
16728+ conf.env.CXX_NAME = 'xlc++'
16729+ conf.env.CXX = cxx
16730+
16731+@conftest
16732+def find_cpp(conf):
16733+ v = conf.env
16734+ cpp = None
16735+ if v['CPP']: cpp = v['CPP']
16736+ elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
16737+ #if not cpp: cpp = v['CXX']
16738+ v['CPP'] = cpp
16739+
16740+@conftest
16741+def xlcxx_common_flags(conf):
16742+ v = conf.env
16743+
16744+ # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
16745+ v['CXXFLAGS_DEBUG'] = ['-g']
16746+ v['CXXFLAGS_RELEASE'] = ['-O2']
16747+
16748+ v['CXX_SRC_F'] = ''
16749+ v['CXX_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
16750+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
16751+
16752+ # linker
16753+ if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
16754+ v['CXXLNK_SRC_F'] = ''
16755+ v['CXXLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
16756+
16757+ v['LIB_ST'] = '-l%s' # template for adding libs
16758+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
16759+ v['STATICLIB_ST'] = '-l%s'
16760+ v['STATICLIBPATH_ST'] = '-L%s'
16761+ v['RPATH_ST'] = '-Wl,-rpath,%s'
16762+ v['CXXDEFINES_ST'] = '-D%s'
16763+
16764+ v['SONAME_ST'] = ''
16765+ v['SHLIB_MARKER'] = ''
16766+ v['STATICLIB_MARKER'] = ''
16767+ v['FULLSTATIC_MARKER'] = '-static'
16768+
16769+ # program
16770+ v['program_LINKFLAGS'] = ['-Wl,-brtl']
16771+ v['program_PATTERN'] = '%s'
16772+
16773+ # shared library
16774+	v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
16775+ v['shlib_LINKFLAGS'] = ['-G', '-Wl,-brtl,-bexpfull']
16776+ v['shlib_PATTERN'] = 'lib%s.so'
16777+
16778+ # static lib
16779+ v['staticlib_LINKFLAGS'] = ''
16780+ v['staticlib_PATTERN'] = 'lib%s.a'
16781+
16782+def detect(conf):
16783+ conf.find_xlcxx()
16784+ conf.find_cpp()
16785+ conf.find_ar()
16786+ conf.xlcxx_common_flags()
16787+ conf.cxx_load_tools()
16788+ conf.cxx_add_flags()
16789+ conf.link_add_flags()
16790diff --git a/buildtools/wafadmin/Utils.py b/buildtools/wafadmin/Utils.py
16791new file mode 100644
16792index 0000000..41dad57
16793--- /dev/null
16794+++ b/buildtools/wafadmin/Utils.py
16795@@ -0,0 +1,726 @@
16796+#!/usr/bin/env python
16797+# encoding: utf-8
16798+# Thomas Nagy, 2005 (ita)
16799+
16800+"""
16801+Utilities, the stable ones are the following:
16802+
16803+* h_file: compute a unique value for a file (hash), it uses
16804+ the module fnv if it is installed (see waf/utils/fnv & http://code.google.com/p/waf/wiki/FAQ)
16805+ else, md5 (see the python docs)
16806+
16807+ For large projects (projects with more than 15000 files) or slow hard disks and filesystems (HFS)
16808+ it is possible to use a hashing based on the path and the size (may give broken cache results)
16809+ The method h_file MUST raise an OSError if the file is a folder
16810+
16811+ import stat
16812+ def h_file(filename):
16813+ st = os.stat(filename)
16814+		if stat.S_ISDIR(st[stat.ST_MODE]): raise OSError('not a file')
16815+ m = Utils.md5()
16816+ m.update(str(st.st_mtime))
16817+ m.update(str(st.st_size))
16818+ m.update(filename)
16819+ return m.digest()
16820+
16821+ To replace the function in your project, use something like this:
16822+ import Utils
16823+ Utils.h_file = h_file
16824+
16825+* h_list
16826+* h_fun
16827+* get_term_cols
16828+* ordered_dict
16829+
16830+"""
16831+
16832+import os, sys, imp, string, errno, traceback, inspect, re, shutil, datetime, gc
16833+
16834+# In python 3.0 we can get rid of all this
16835+try: from UserDict import UserDict
16836+except ImportError: from collections import UserDict
16837+if sys.hexversion >= 0x2060000 or os.name == 'java':
16838+ import subprocess as pproc
16839+else:
16840+ import pproc
16841+import Logs
16842+from Constants import *
16843+
16844+try:
16845+ from collections import deque
16846+except ImportError:
16847+ class deque(list):
16848+ def popleft(self):
16849+ return self.pop(0)
16850+
16851+is_win32 = sys.platform == 'win32'
16852+
16853+try:
16854+ # defaultdict in python 2.5
16855+ from collections import defaultdict as DefaultDict
16856+except ImportError:
16857+ class DefaultDict(dict):
16858+ def __init__(self, default_factory):
16859+ super(DefaultDict, self).__init__()
16860+ self.default_factory = default_factory
16861+ def __getitem__(self, key):
16862+ try:
16863+ return super(DefaultDict, self).__getitem__(key)
16864+ except KeyError:
16865+ value = self.default_factory()
16866+ self[key] = value
16867+ return value
16868+
16869+class WafError(Exception):
16870+ def __init__(self, *args):
16871+ self.args = args
16872+ try:
16873+ self.stack = traceback.extract_stack()
16874+ except:
16875+ pass
16876+ Exception.__init__(self, *args)
16877+ def __str__(self):
16878+ return str(len(self.args) == 1 and self.args[0] or self.args)
16879+
16880+class WscriptError(WafError):
16881+ def __init__(self, message, wscript_file=None):
16882+ if wscript_file:
16883+ self.wscript_file = wscript_file
16884+ self.wscript_line = None
16885+ else:
16886+ try:
16887+ (self.wscript_file, self.wscript_line) = self.locate_error()
16888+ except:
16889+ (self.wscript_file, self.wscript_line) = (None, None)
16890+
16891+ msg_file_line = ''
16892+ if self.wscript_file:
16893+ msg_file_line = "%s:" % self.wscript_file
16894+ if self.wscript_line:
16895+ msg_file_line += "%s:" % self.wscript_line
16896+ err_message = "%s error: %s" % (msg_file_line, message)
16897+ WafError.__init__(self, err_message)
16898+
16899+ def locate_error(self):
16900+ stack = traceback.extract_stack()
16901+ stack.reverse()
16902+ for frame in stack:
16903+ file_name = os.path.basename(frame[0])
16904+ is_wscript = (file_name == WSCRIPT_FILE or file_name == WSCRIPT_BUILD_FILE)
16905+ if is_wscript:
16906+ return (frame[0], frame[1])
16907+ return (None, None)
16908+
16909+indicator = is_win32 and '\x1b[A\x1b[K%s%s%s\r' or '\x1b[K%s%s%s\r'
16910+
16911+try:
16912+ from fnv import new as md5
16913+ import Constants
16914+ Constants.SIG_NIL = 'signofnv'
16915+
16916+ def h_file(filename):
16917+ m = md5()
16918+ try:
16919+ m.hfile(filename)
16920+ x = m.digest()
16921+ if x is None: raise OSError("not a file")
16922+ return x
16923+ except SystemError:
16924+			raise OSError("not a file: " + filename)
16925+
16926+except ImportError:
16927+ try:
16928+ try:
16929+ from hashlib import md5
16930+ except ImportError:
16931+ from md5 import md5
16932+
16933+ def h_file(filename):
16934+ f = open(filename, 'rb')
16935+ m = md5()
16936+ while (filename):
16937+ filename = f.read(100000)
16938+ m.update(filename)
16939+ f.close()
16940+ return m.digest()
16941+ except ImportError:
16942+ # portability fixes may be added elsewhere (although, md5 should be everywhere by now)
16943+ md5 = None
16944+
16945+class ordered_dict(UserDict):
16946+ def __init__(self, dict = None):
16947+ self.allkeys = []
16948+ UserDict.__init__(self, dict)
16949+
16950+ def __delitem__(self, key):
16951+ self.allkeys.remove(key)
16952+ UserDict.__delitem__(self, key)
16953+
16954+ def __setitem__(self, key, item):
16955+ if key not in self.allkeys: self.allkeys.append(key)
16956+ UserDict.__setitem__(self, key, item)
16957+
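# ordered_dict preserves insertion order through its .allkeys list (this is what
# unittestw.py iterates over); a small illustration:
#
#   d = ordered_dict()
#   d['b'] = 1
#   d['a'] = 2
#   for k in d.allkeys:    # yields 'b' then 'a'
#       print d[k]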
16958+def exec_command(s, **kw):
16959+ if 'log' in kw:
16960+ kw['stdout'] = kw['stderr'] = kw['log']
16961+ del(kw['log'])
16962+ kw['shell'] = isinstance(s, str)
16963+
16964+ try:
16965+ proc = pproc.Popen(s, **kw)
16966+ return proc.wait()
16967+ except OSError:
16968+ return -1
16969+
16970+if is_win32:
16971+ def exec_command(s, **kw):
16972+ if 'log' in kw:
16973+ kw['stdout'] = kw['stderr'] = kw['log']
16974+ del(kw['log'])
16975+ kw['shell'] = isinstance(s, str)
16976+
16977+ if len(s) > 2000:
16978+ startupinfo = pproc.STARTUPINFO()
16979+ startupinfo.dwFlags |= pproc.STARTF_USESHOWWINDOW
16980+ kw['startupinfo'] = startupinfo
16981+
16982+ try:
16983+ if 'stdout' not in kw:
16984+ kw['stdout'] = pproc.PIPE
16985+ kw['stderr'] = pproc.PIPE
16986+ kw['universal_newlines'] = True
16987+ proc = pproc.Popen(s,**kw)
16988+ (stdout, stderr) = proc.communicate()
16989+ Logs.info(stdout)
16990+ if stderr:
16991+ Logs.error(stderr)
16992+ return proc.returncode
16993+ else:
16994+ proc = pproc.Popen(s,**kw)
16995+ return proc.wait()
16996+ except OSError:
16997+ return -1
16998+
16999+listdir = os.listdir
17000+if is_win32:
17001+ def listdir_win32(s):
17002+ if re.match('^[A-Za-z]:$', s):
17003+ # os.path.isdir fails if s contains only the drive name... (x:)
17004+ s += os.sep
17005+ if not os.path.isdir(s):
17006+ e = OSError()
17007+ e.errno = errno.ENOENT
17008+ raise e
17009+ return os.listdir(s)
17010+ listdir = listdir_win32
17011+
17012+def waf_version(mini = 0x010000, maxi = 0x100000):
17013+ "Halts if the waf version is wrong"
17014+ ver = HEXVERSION
17015+ try: min_val = mini + 0
17016+ except TypeError: min_val = int(mini.replace('.', '0'), 16)
17017+
17018+ if min_val > ver:
17019+ Logs.error("waf version should be at least %s (%s found)" % (mini, ver))
17020+ sys.exit(1)
17021+
17022+ try: max_val = maxi + 0
17023+ except TypeError: max_val = int(maxi.replace('.', '0'), 16)
17024+
17025+ if max_val < ver:
17026+ Logs.error("waf version should be at most %s (%s found)" % (maxi, ver))
17027+ sys.exit(1)
17028+
17029+def python_24_guard():
17030+ if sys.hexversion < 0x20400f0 or sys.hexversion >= 0x3000000:
17031+ raise ImportError("Waf requires Python >= 2.3 but the raw source requires Python 2.4, 2.5 or 2.6")
17032+
17033+def ex_stack():
17034+ exc_type, exc_value, tb = sys.exc_info()
17035+ if Logs.verbose > 1:
17036+ exc_lines = traceback.format_exception(exc_type, exc_value, tb)
17037+ return ''.join(exc_lines)
17038+ return str(exc_value)
17039+
17040+def to_list(sth):
17041+ if isinstance(sth, str):
17042+ return sth.split()
17043+ else:
17044+ return sth
17045+
17046+g_loaded_modules = {}
17047+"index modules by absolute path"
17048+
17049+g_module=None
17050+"the main module is special"
17051+
17052+def load_module(file_path, name=WSCRIPT_FILE):
17053+ "this function requires an absolute path"
17054+ try:
17055+ return g_loaded_modules[file_path]
17056+ except KeyError:
17057+ pass
17058+
17059+ module = imp.new_module(name)
17060+
17061+ try:
17062+ code = readf(file_path, m='rU')
17063+ except (IOError, OSError):
17064+ raise WscriptError('Could not read the file %r' % file_path)
17065+
17066+ module.waf_hash_val = code
17067+
17068+ dt = os.path.dirname(file_path)
17069+ sys.path.insert(0, dt)
17070+ try:
17071+ exec(compile(code, file_path, 'exec'), module.__dict__)
17072+ except Exception:
17073+ exc_type, exc_value, tb = sys.exc_info()
17074+ raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), file_path)
17075+ sys.path.remove(dt)
17076+
17077+ g_loaded_modules[file_path] = module
17078+
17079+ return module
17080+
17081+def set_main_module(file_path):
17082+ "Load custom options, if defined"
17083+ global g_module
17084+ g_module = load_module(file_path, 'wscript_main')
17085+ g_module.root_path = file_path
17086+
17087+ try:
17088+ g_module.APPNAME
17089+ except:
17090+ g_module.APPNAME = 'noname'
17091+ try:
17092+ g_module.VERSION
17093+ except:
17094+ g_module.VERSION = '1.0'
17095+
17096+ # note: to register the module globally, use the following:
17097+ # sys.modules['wscript_main'] = g_module
17098+
17099+def to_hashtable(s):
17100+ "used for importing env files"
17101+ tbl = {}
17102+ lst = s.split('\n')
17103+ for line in lst:
17104+ if not line: continue
17105+ mems = line.split('=')
17106+ tbl[mems[0]] = mems[1]
17107+ return tbl
17108+
17109+def get_term_cols():
17110+ "console width"
17111+ return 80
17112+try:
17113+ import struct, fcntl, termios
17114+except ImportError:
17115+ pass
17116+else:
17117+ if Logs.got_tty:
17118+ def myfun():
17119+ dummy_lines, cols = struct.unpack("HHHH", \
17120+ fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ , \
17121+ struct.pack("HHHH", 0, 0, 0, 0)))[:2]
17122+ return cols
17123+ # we actually try the function once to see if it is suitable
17124+ try:
17125+ myfun()
17126+ except:
17127+ pass
17128+ else:
17129+ get_term_cols = myfun
17130+
17131+rot_idx = 0
17132+rot_chr = ['\\', '|', '/', '-']
17133+"the rotation character in the progress bar"
17134+
17135+
17136+def split_path(path):
17137+ return path.split('/')
17138+
17139+def split_path_cygwin(path):
17140+ if path.startswith('//'):
17141+ ret = path.split('/')[2:]
17142+ ret[0] = '/' + ret[0]
17143+ return ret
17144+ return path.split('/')
17145+
17146+re_sp = re.compile('[/\\\\]')
17147+def split_path_win32(path):
17148+ if path.startswith('\\\\'):
17149+ ret = re.split(re_sp, path)[2:]
17150+ ret[0] = '\\' + ret[0]
17151+ return ret
17152+ return re.split(re_sp, path)
17153+
17154+if sys.platform == 'cygwin':
17155+ split_path = split_path_cygwin
17156+elif is_win32:
17157+ split_path = split_path_win32
17158+
17159+def copy_attrs(orig, dest, names, only_if_set=False):
17160+ for a in to_list(names):
17161+ u = getattr(orig, a, ())
17162+ if u or not only_if_set:
17163+ setattr(dest, a, u)
17164+
17165+def def_attrs(cls, **kw):
17166+ '''
17167+ set attributes for class.
17168+ @param cls [any class]: the class to update the given attributes in.
17169+	@param kw [dictionary]: dictionary of attribute names and values.
17170+
17171+	if the given class does not already have one (or more) of these attributes, add the attribute with its value to the class.
17172+ '''
17173+ for k, v in kw.iteritems():
17174+ if not hasattr(cls, k):
17175+ setattr(cls, k, v)
17176+
17177+def quote_define_name(path):
17178+ fu = re.compile("[^a-zA-Z0-9]").sub("_", path)
17179+ fu = fu.upper()
17180+ return fu
17181+
17182+def quote_whitespace(path):
17183+ return (path.strip().find(' ') > 0 and '"%s"' % path or path).replace('""', '"')
17184+
17185+def trimquotes(s):
17186+ if not s: return ''
17187+ s = s.rstrip()
17188+ if s[0] == "'" and s[-1] == "'": return s[1:-1]
17189+ return s
17190+
17191+def h_list(lst):
17192+ m = md5()
17193+ m.update(str(lst))
17194+ return m.digest()
17195+
17196+def h_fun(fun):
17197+ try:
17198+ return fun.code
17199+ except AttributeError:
17200+ try:
17201+ h = inspect.getsource(fun)
17202+ except IOError:
17203+ h = "nocode"
17204+ try:
17205+ fun.code = h
17206+ except AttributeError:
17207+ pass
17208+ return h
17209+
17210+def pprint(col, str, label='', sep='\n'):
17211+ "print messages in color"
17212+ sys.stderr.write("%s%s%s %s%s" % (Logs.colors(col), str, Logs.colors.NORMAL, label, sep))
17213+
17214+def check_dir(dir):
17215+	"""If a folder doesn't exist, create it."""
17216+ try:
17217+ os.stat(dir)
17218+ except OSError:
17219+ try:
17220+ os.makedirs(dir)
17221+ except OSError, e:
17222+ raise WafError("Cannot create folder '%s' (original error: %s)" % (dir, e))
17223+
17224+def cmd_output(cmd, **kw):
17225+
17226+ silent = False
17227+ if 'silent' in kw:
17228+ silent = kw['silent']
17229+ del(kw['silent'])
17230+
17231+ if 'e' in kw:
17232+ tmp = kw['e']
17233+ del(kw['e'])
17234+ kw['env'] = tmp
17235+
17236+ kw['shell'] = isinstance(cmd, str)
17237+ kw['stdout'] = pproc.PIPE
17238+ if silent:
17239+ kw['stderr'] = pproc.PIPE
17240+
17241+ try:
17242+ p = pproc.Popen(cmd, **kw)
17243+ output = p.communicate()[0]
17244+ except OSError, e:
17245+ raise ValueError(str(e))
17246+
17247+ if p.returncode:
17248+ if not silent:
17249+ msg = "command execution failed: %s -> %r" % (cmd, str(output))
17250+ raise ValueError(msg)
17251+ output = ''
17252+ return output
17253+
17254+reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
17255+def subst_vars(expr, params):
17256+ "substitute ${PREFIX}/bin in /usr/local/bin"
17257+ def repl_var(m):
17258+ if m.group(1):
17259+ return '\\'
17260+ if m.group(2):
17261+ return '$'
17262+ try:
17263+ # environments may contain lists
17264+ return params.get_flat(m.group(3))
17265+ except AttributeError:
17266+ return params[m.group(3)]
17267+ return reg_subst.sub(repl_var, expr)
17268+
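# Two small illustrations of subst_vars(); the params argument may be a plain
# dict or an Environment providing get_flat():
#
#   subst_vars('${PREFIX}/bin', {'PREFIX': '/usr/local'})   # -> '/usr/local/bin'
#   subst_vars('$$PREFIX', {})                               # -> '$PREFIX' ($$ escapes $)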
17269+def unversioned_sys_platform_to_binary_format(unversioned_sys_platform):
17270+ "infers the binary format from the unversioned_sys_platform name."
17271+
17272+ if unversioned_sys_platform in ('linux', 'freebsd', 'netbsd', 'openbsd', 'sunos', 'gnu'):
17273+ return 'elf'
17274+ elif unversioned_sys_platform == 'darwin':
17275+ return 'mac-o'
17276+ elif unversioned_sys_platform in ('win32', 'cygwin', 'uwin', 'msys'):
17277+ return 'pe'
17278+ # TODO we assume all other operating systems are elf, which is not true.
17279+ # we may set this to 'unknown' and have ccroot and other tools handle the case "gracefully" (whatever that means).
17280+ return 'elf'
17281+
17282+def unversioned_sys_platform():
17283+ """returns an unversioned name from sys.platform.
17284+	sys.platform is not very well defined and depends directly on the python source tree.
17285+ The version appended to the names is unreliable as it's taken from the build environment at the time python was built,
17286+ i.e., it's possible to get freebsd7 on a freebsd8 system.
17287+ So we remove the version from the name, except for special cases where the os has a stupid name like os2 or win32.
17288+ Some possible values of sys.platform are, amongst others:
17289+ aix3 aix4 atheos beos5 darwin freebsd2 freebsd3 freebsd4 freebsd5 freebsd6 freebsd7
17290+ generic gnu0 irix5 irix6 linux2 mac netbsd1 next3 os2emx riscos sunos5 unixware7
17291+ Investigating the python source tree may reveal more values.
17292+ """
17293+ s = sys.platform
17294+ if s == 'java':
17295+ # The real OS is hidden under the JVM.
17296+ from java.lang import System
17297+ s = System.getProperty('os.name')
17298+ # see http://lopica.sourceforge.net/os.html for a list of possible values
17299+ if s == 'Mac OS X':
17300+ return 'darwin'
17301+ elif s.startswith('Windows '):
17302+ return 'win32'
17303+ elif s == 'OS/2':
17304+ return 'os2'
17305+ elif s == 'HP-UX':
17306+ return 'hpux'
17307+ elif s in ('SunOS', 'Solaris'):
17308+ return 'sunos'
17309+ else: s = s.lower()
17310+ if s == 'win32' or s.endswith('os2') and s != 'sunos2': return s
17311+ return re.split('\d+$', s)[0]
17312+
17313+#@deprecated('use unversioned_sys_platform instead')
17314+def detect_platform():
17315+ """this function has been in the Utils module for some time.
17316+ It's hard to guess what people have used it for.
17317+	It seems its goal is to return an unversioned sys.platform, but it does not handle all platforms.
17318+ For example, the version is not removed on freebsd and netbsd, amongst others.
17319+ """
17320+ s = sys.platform
17321+
17322+ # known POSIX
17323+ for x in 'cygwin linux irix sunos hpux aix darwin gnu'.split():
17324+ # sys.platform may be linux2
17325+ if s.find(x) >= 0:
17326+ return x
17327+
17328+ # unknown POSIX
17329+ if os.name in 'posix java os2'.split():
17330+ return os.name
17331+
17332+ return s
17333+
17334+def load_tool(tool, tooldir=None):
17335+ '''
17336+ load_tool: import a Python module, optionally using several directories.
17337+ @param tool [string]: name of tool to import.
17338+ @param tooldir [list]: directories to look for the tool.
17339+ @return: the loaded module.
17340+
17341+ Warning: this function is not thread-safe: plays with sys.path,
17342+ so must run in sequence.
17343+ '''
17344+ if tooldir:
17345+ assert isinstance(tooldir, list)
17346+ sys.path = tooldir + sys.path
17347+ else:
17348+ tooldir = []
17349+ try:
17350+ return __import__(tool)
17351+ finally:
17352+ for dt in tooldir:
17353+ sys.path.remove(dt)
17354+
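# e.g. importing a tool module shipped in a local directory (a sketch; the
# directory below is only an illustration, and tooldir must be a list when given):
#
#   gcc_tool = load_tool('gcc', tooldir=['buildtools/wafadmin/Tools'])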
17355+def readf(fname, m='r'):
17356+	"get the contents of a file as a string"
17357+ f = open(fname, m)
17358+ try:
17359+ txt = f.read()
17360+ finally:
17361+ f.close()
17362+ return txt
17363+
17364+def nada(*k, **kw):
17365+ """A function that does nothing"""
17366+ pass
17367+
17368+def diff_path(top, subdir):
17369+ """difference between two absolute paths"""
17370+ top = os.path.normpath(top).replace('\\', '/').split('/')
17371+ subdir = os.path.normpath(subdir).replace('\\', '/').split('/')
17372+ if len(top) == len(subdir): return ''
17373+ diff = subdir[len(top) - len(subdir):]
17374+ return os.path.join(*diff)
17375+
17376+class Context(object):
17377+ """A base class for commands to be executed from Waf scripts"""
17378+
17379+ def set_curdir(self, dir):
17380+ self.curdir_ = dir
17381+
17382+ def get_curdir(self):
17383+ try:
17384+ return self.curdir_
17385+ except AttributeError:
17386+ self.curdir_ = os.getcwd()
17387+ return self.get_curdir()
17388+
17389+ curdir = property(get_curdir, set_curdir)
17390+
17391+ def recurse(self, dirs, name=''):
17392+		"""The function for calling scripts from folders: it tries to call wscript_<function_name>,
17393+		and if that file does not exist, it calls the method <function_name> from a file named wscript.
17394+		The dirs argument can be a list of folders or a string containing space-separated folder paths.
17395+ """
17396+ if not name:
17397+ name = inspect.stack()[1][3]
17398+
17399+ if isinstance(dirs, str):
17400+ dirs = to_list(dirs)
17401+
17402+ for x in dirs:
17403+ if os.path.isabs(x):
17404+ nexdir = x
17405+ else:
17406+ nexdir = os.path.join(self.curdir, x)
17407+
17408+ base = os.path.join(nexdir, WSCRIPT_FILE)
17409+ file_path = base + '_' + name
17410+
17411+ try:
17412+ txt = readf(file_path, m='rU')
17413+ except (OSError, IOError):
17414+ try:
17415+ module = load_module(base)
17416+ except OSError:
17417+ raise WscriptError('No such script %s' % base)
17418+
17419+ try:
17420+ f = module.__dict__[name]
17421+ except KeyError:
17422+ raise WscriptError('No function %s defined in %s' % (name, base))
17423+
17424+ if getattr(self.__class__, 'pre_recurse', None):
17425+ self.pre_recurse(f, base, nexdir)
17426+ old = self.curdir
17427+ self.curdir = nexdir
17428+ try:
17429+ f(self)
17430+ finally:
17431+ self.curdir = old
17432+ if getattr(self.__class__, 'post_recurse', None):
17433+ self.post_recurse(module, base, nexdir)
17434+ else:
17435+ dc = {'ctx': self}
17436+ if getattr(self.__class__, 'pre_recurse', None):
17437+ dc = self.pre_recurse(txt, file_path, nexdir)
17438+ old = self.curdir
17439+ self.curdir = nexdir
17440+ try:
17441+ try:
17442+ exec(compile(txt, file_path, 'exec'), dc)
17443+ except Exception:
17444+ exc_type, exc_value, tb = sys.exc_info()
17445+ raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), base)
17446+ finally:
17447+ self.curdir = old
17448+ if getattr(self.__class__, 'post_recurse', None):
17449+ self.post_recurse(txt, file_path, nexdir)
17450+
17451+if is_win32:
17452+ old = shutil.copy2
17453+ def copy2(src, dst):
17454+ old(src, dst)
17455+		shutil.copystat(src, dst)
17456+ setattr(shutil, 'copy2', copy2)
17457+
17458+def zip_folder(dir, zip_file_name, prefix):
17459+ """
17460+	prefix is the leading directory prepended to each entry stored in the archive
17461+ """
17462+ import zipfile
17463+ zip = zipfile.ZipFile(zip_file_name, 'w', compression=zipfile.ZIP_DEFLATED)
17464+ base = os.path.abspath(dir)
17465+
17466+ if prefix:
17467+ if prefix[-1] != os.sep:
17468+ prefix += os.sep
17469+
17470+ n = len(base)
17471+ for root, dirs, files in os.walk(base):
17472+ for f in files:
17473+ archive_name = prefix + root[n:] + os.sep + f
17474+ zip.write(root + os.sep + f, archive_name, zipfile.ZIP_DEFLATED)
17475+ zip.close()
17476+
17477+def get_elapsed_time(start):
17478+ "Format a time delta (datetime.timedelta) using the format DdHhMmS.MSs"
17479+ delta = datetime.datetime.now() - start
17480+ # cast to int necessary for python 3.0
17481+ days = int(delta.days)
17482+ hours = int(delta.seconds / 3600)
17483+ minutes = int((delta.seconds - hours * 3600) / 60)
17484+ seconds = delta.seconds - hours * 3600 - minutes * 60 \
17485+ + float(delta.microseconds) / 1000 / 1000
17486+ result = ''
17487+ if days:
17488+ result += '%dd' % days
17489+ if days or hours:
17490+ result += '%dh' % hours
17491+ if days or hours or minutes:
17492+ result += '%dm' % minutes
17493+ return '%s%.3fs' % (result, seconds)
17494+
17495+if os.name == 'java':
17496+ # For Jython (they should really fix the inconsistency)
17497+ try:
17498+ gc.disable()
17499+ gc.enable()
17500+ except NotImplementedError:
17501+ gc.disable = gc.enable
17502+
17503+def run_once(fun):
17504+ """
17505+ decorator, make a function cache its results, use like this:
17506+
17507+ @run_once
17508+ def foo(k):
17509+ return 345*2343
17510+ """
17511+ cache = {}
17512+ def wrap(k):
17513+ try:
17514+ return cache[k]
17515+ except KeyError:
17516+ ret = fun(k)
17517+ cache[k] = ret
17518+ return ret
17519+ wrap.__cache__ = cache
17520+ return wrap
17521+
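The recurse() method above is how these waf scripts walk a source tree: when called from a function named build, it first looks for a file named wscript_build in each listed directory and executes it directly, and only if that file is missing does it import the directory's wscript module and call its build() function. A rough sketch of a layout this supports (directory names are illustrative, not taken from this tree):

    project/wscript            defines configure(conf) and build(bld); build() calls bld.recurse('src')
    project/src/wscript_build  executed directly, with the calling context exposed as 'ctx' by default

If src/wscript_build did not exist, waf would instead load src/wscript and call the build() function defined there.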
17522diff --git a/buildtools/wafadmin/__init__.py b/buildtools/wafadmin/__init__.py
17523new file mode 100644
17524index 0000000..01273cf
17525--- /dev/null
17526+++ b/buildtools/wafadmin/__init__.py
17527@@ -0,0 +1,3 @@
17528+#!/usr/bin/env python
17529+# encoding: utf-8
17530+# Thomas Nagy, 2005 (ita)
17531diff --git a/buildtools/wafadmin/ansiterm.py b/buildtools/wafadmin/ansiterm.py
17532new file mode 100644
17533index 0000000..720b79c
17534--- /dev/null
17535+++ b/buildtools/wafadmin/ansiterm.py
17536@@ -0,0 +1,236 @@
17537+import sys, os
17538+try:
17539+ if (not sys.stderr.isatty()) or (not sys.stdout.isatty()):
17540+ raise ValueError('not a tty')
17541+
17542+ from ctypes import *
17543+
17544+ class COORD(Structure):
17545+ _fields_ = [("X", c_short), ("Y", c_short)]
17546+
17547+ class SMALL_RECT(Structure):
17548+ _fields_ = [("Left", c_short), ("Top", c_short), ("Right", c_short), ("Bottom", c_short)]
17549+
17550+ class CONSOLE_SCREEN_BUFFER_INFO(Structure):
17551+ _fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_short), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)]
17552+
17553+ class CONSOLE_CURSOR_INFO(Structure):
17554+ _fields_ = [('dwSize',c_ulong), ('bVisible', c_int)]
17555+
17556+ sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
17557+ csinfo = CONSOLE_CURSOR_INFO()
17558+ hconsole = windll.kernel32.GetStdHandle(-11)
17559+ windll.kernel32.GetConsoleScreenBufferInfo(hconsole, byref(sbinfo))
17560+ if sbinfo.Size.X < 10 or sbinfo.Size.Y < 10: raise Exception('small console')
17561+ windll.kernel32.GetConsoleCursorInfo(hconsole, byref(csinfo))
17562+except Exception:
17563+ pass
17564+else:
17565+ import re, threading
17566+
17567+ to_int = lambda number, default: number and int(number) or default
17568+ wlock = threading.Lock()
17569+
17570+ STD_OUTPUT_HANDLE = -11
17571+ STD_ERROR_HANDLE = -12
17572+
17573+ class AnsiTerm(object):
17574+ def __init__(self):
17575+ self.hconsole = windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
17576+ self.cursor_history = []
17577+ self.orig_sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
17578+ self.orig_csinfo = CONSOLE_CURSOR_INFO()
17579+ windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self.orig_sbinfo))
17580+ windll.kernel32.GetConsoleCursorInfo(hconsole, byref(self.orig_csinfo))
17581+
17582+
17583+ def screen_buffer_info(self):
17584+ sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
17585+ windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(sbinfo))
17586+ return sbinfo
17587+
17588+ def clear_line(self, param):
17589+ mode = param and int(param) or 0
17590+ sbinfo = self.screen_buffer_info()
17591+ if mode == 1: # Clear from beginning of line to cursor position
17592+ line_start = COORD(0, sbinfo.CursorPosition.Y)
17593+ line_length = sbinfo.Size.X
17594+ elif mode == 2: # Clear entire line
17595+ line_start = COORD(sbinfo.CursorPosition.X, sbinfo.CursorPosition.Y)
17596+ line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
17597+ else: # Clear from cursor position to end of line
17598+ line_start = sbinfo.CursorPosition
17599+ line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
17600+ chars_written = c_int()
17601+ windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_char(' '), line_length, line_start, byref(chars_written))
17602+ windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))
17603+
17604+ def clear_screen(self, param):
17605+ mode = to_int(param, 0)
17606+ sbinfo = self.screen_buffer_info()
17607+ if mode == 1: # Clear from beginning of screen to cursor position
17608+ clear_start = COORD(0, 0)
17609+ clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y
17610+ elif mode == 2: # Clear entire screen and return cursor to home
17611+ clear_start = COORD(0, 0)
17612+ clear_length = sbinfo.Size.X * sbinfo.Size.Y
17613+ windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
17614+ else: # Clear from cursor position to end of screen
17615+ clear_start = sbinfo.CursorPosition
17616+ clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
17617+ chars_written = c_int()
17618+ windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_char(' '), clear_length, clear_start, byref(chars_written))
17619+ windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
17620+
17621+ def push_cursor(self, param):
17622+ sbinfo = self.screen_buffer_info()
17623+ self.cursor_history.append(sbinfo.CursorPosition)
17624+
17625+ def pop_cursor(self, param):
17626+ if self.cursor_history:
17627+ old_pos = self.cursor_history.pop()
17628+ windll.kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)
17629+
17630+ def set_cursor(self, param):
17631+ x, sep, y = param.partition(';')
17632+ x = to_int(x, 1) - 1
17633+ y = to_int(y, 1) - 1
17634+ sbinfo = self.screen_buffer_info()
17635+ new_pos = COORD(
17636+ min(max(0, x), sbinfo.Size.X),
17637+ min(max(0, y), sbinfo.Size.Y)
17638+ )
17639+ windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
17640+
17641+ def set_column(self, param):
17642+ x = to_int(param, 1) - 1
17643+ sbinfo = self.screen_buffer_info()
17644+ new_pos = COORD(
17645+ min(max(0, x), sbinfo.Size.X),
17646+ sbinfo.CursorPosition.Y
17647+ )
17648+ windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
17649+
17650+ def move_cursor(self, x_offset=0, y_offset=0):
17651+ sbinfo = self.screen_buffer_info()
17652+ new_pos = COORD(
17653+ min(max(0, sbinfo.CursorPosition.X + x_offset), sbinfo.Size.X),
17654+ min(max(0, sbinfo.CursorPosition.Y + y_offset), sbinfo.Size.Y)
17655+ )
17656+ windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
17657+
17658+ def move_up(self, param):
17659+ self.move_cursor(y_offset = -to_int(param, 1))
17660+
17661+ def move_down(self, param):
17662+ self.move_cursor(y_offset = to_int(param, 1))
17663+
17664+ def move_left(self, param):
17665+ self.move_cursor(x_offset = -to_int(param, 1))
17666+
17667+ def move_right(self, param):
17668+ self.move_cursor(x_offset = to_int(param, 1))
17669+
17670+ def next_line(self, param):
17671+ sbinfo = self.screen_buffer_info()
17672+ self.move_cursor(
17673+ x_offset = -sbinfo.CursorPosition.X,
17674+ y_offset = to_int(param, 1)
17675+ )
17676+
17677+ def prev_line(self, param):
17678+ sbinfo = self.screen_buffer_info()
17679+ self.move_cursor(
17680+ x_offset = -sbinfo.CursorPosition.X,
17681+ y_offset = -to_int(param, 1)
17682+ )
17683+
17684+ escape_to_color = { (0, 30): 0x0, #black
17685+ (0, 31): 0x4, #red
17686+ (0, 32): 0x2, #green
17687+ (0, 33): 0x4+0x2, #dark yellow
17688+ (0, 34): 0x1, #blue
17689+ (0, 35): 0x1+0x4, #purple
17690+ (0, 36): 0x2+0x4, #cyan
17691+ (0, 37): 0x1+0x2+0x4, #grey
17692+ (1, 30): 0x1+0x2+0x4, #dark gray
17693+ (1, 31): 0x4+0x8, #red
17694+ (1, 32): 0x2+0x8, #light green
17695+ (1, 33): 0x4+0x2+0x8, #yellow
17696+ (1, 34): 0x1+0x8, #light blue
17697+ (1, 35): 0x1+0x4+0x8, #light purple
17698+ (1, 36): 0x1+0x2+0x8, #light cyan
17699+ (1, 37): 0x1+0x2+0x4+0x8, #white
17700+ }
17701+
17702+ def set_color(self, param):
17703+ cols = param.split(';')
17704+ attr = self.orig_sbinfo.Attributes
17705+ for c in cols:
17706+ c = to_int(c, 0)
17707+ if c in range(30,38):
17708+ attr = (attr & 0xf0) | (self.escape_to_color.get((0,c), 0x7))
17709+ elif c in range(40,48):
17710+ attr = (attr & 0x0f) | (self.escape_to_color.get((0,c), 0x7) << 8)
17711+ elif c in range(90,98):
17712+ attr = (attr & 0xf0) | (self.escape_to_color.get((1,c-60), 0x7))
17713+ elif c in range(100,108):
17714+ attr = (attr & 0x0f) | (self.escape_to_color.get((1,c-60), 0x7) << 8)
17715+ elif c == 1:
17716+ attr |= 0x08
17717+ windll.kernel32.SetConsoleTextAttribute(self.hconsole, attr)
17718+
17719+ def show_cursor(self,param):
17720+ csinfo.bVisible = 1
17721+ windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
17722+
17723+ def hide_cursor(self,param):
17724+ csinfo.bVisible = 0
17725+ windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
17726+
17727+ ansi_command_table = {
17728+ 'A': move_up,
17729+ 'B': move_down,
17730+ 'C': move_right,
17731+ 'D': move_left,
17732+ 'E': next_line,
17733+ 'F': prev_line,
17734+ 'G': set_column,
17735+ 'H': set_cursor,
17736+ 'f': set_cursor,
17737+ 'J': clear_screen,
17738+ 'K': clear_line,
17739+ 'h': show_cursor,
17740+ 'l': hide_cursor,
17741+ 'm': set_color,
17742+ 's': push_cursor,
17743+ 'u': pop_cursor,
17744+ }
17745+ # Match either the escape sequence or text not containing escape sequence
17746+ ansi_tokans = re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
17747+ def write(self, text):
17748+ try:
17749+ wlock.acquire()
17750+ for param, cmd, txt in self.ansi_tokans.findall(text):
17751+ if cmd:
17752+ cmd_func = self.ansi_command_table.get(cmd)
17753+ if cmd_func:
17754+ cmd_func(self, param)
17755+ else:
17756+ chars_written = c_int()
17757+ if isinstance(txt, unicode):
17758+ windll.kernel32.WriteConsoleW(self.hconsole, txt, len(txt), byref(chars_written), None)
17759+ else:
17760+ windll.kernel32.WriteConsoleA(self.hconsole, txt, len(txt), byref(chars_written), None)
17761+ finally:
17762+ wlock.release()
17763+
17764+ def flush(self):
17765+ pass
17766+
17767+ def isatty(self):
17768+ return True
17769+
17770+ sys.stderr = sys.stdout = AnsiTerm()
17771+ os.environ['TERM'] = 'vt100'
17772+
17773diff --git a/buildtools/wafadmin/pproc.py b/buildtools/wafadmin/pproc.py
17774new file mode 100644
17775index 0000000..cb15178
17776--- /dev/null
17777+++ b/buildtools/wafadmin/pproc.py
17778@@ -0,0 +1,620 @@
17779+# borrowed from python 2.5.2c1
17780+# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
17781+# Licensed to PSF under a Contributor Agreement.
17782+
17783+import sys
17784+mswindows = (sys.platform == "win32")
17785+
17786+import os
17787+import types
17788+import traceback
17789+import gc
17790+
17791+class CalledProcessError(Exception):
17792+ def __init__(self, returncode, cmd):
17793+ self.returncode = returncode
17794+ self.cmd = cmd
17795+ def __str__(self):
17796+ return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
17797+
17798+if mswindows:
17799+ import threading
17800+ import msvcrt
17801+ if 0:
17802+ import pywintypes
17803+ from win32api import GetStdHandle, STD_INPUT_HANDLE, \
17804+ STD_OUTPUT_HANDLE, STD_ERROR_HANDLE
17805+ from win32api import GetCurrentProcess, DuplicateHandle, \
17806+ GetModuleFileName, GetVersion
17807+ from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE
17808+ from win32pipe import CreatePipe
17809+ from win32process import CreateProcess, STARTUPINFO, \
17810+ GetExitCodeProcess, STARTF_USESTDHANDLES, \
17811+ STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE
17812+ from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0
17813+ else:
17814+ from _subprocess import *
17815+ class STARTUPINFO:
17816+ dwFlags = 0
17817+ hStdInput = None
17818+ hStdOutput = None
17819+ hStdError = None
17820+ wShowWindow = 0
17821+ class pywintypes:
17822+ error = IOError
17823+else:
17824+ import select
17825+ import errno
17826+ import fcntl
17827+ import pickle
17828+
17829+__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"]
17830+
17831+try:
17832+ MAXFD = os.sysconf("SC_OPEN_MAX")
17833+except:
17834+ MAXFD = 256
17835+
17836+try:
17837+ False
17838+except NameError:
17839+ False = 0
17840+ True = 1
17841+
17842+_active = []
17843+
17844+def _cleanup():
17845+ for inst in _active[:]:
17846+ if inst.poll(_deadstate=sys.maxint) >= 0:
17847+ try:
17848+ _active.remove(inst)
17849+ except ValueError:
17850+ pass
17851+
17852+PIPE = -1
17853+STDOUT = -2
17854+
17855+
17856+def call(*popenargs, **kwargs):
17857+ return Popen(*popenargs, **kwargs).wait()
17858+
17859+def check_call(*popenargs, **kwargs):
17860+ retcode = call(*popenargs, **kwargs)
17861+ cmd = kwargs.get("args")
17862+ if cmd is None:
17863+ cmd = popenargs[0]
17864+ if retcode:
17865+ raise CalledProcessError(retcode, cmd)
17866+ return retcode
17867+
17868+
17869+def list2cmdline(seq):
17870+ result = []
17871+ needquote = False
17872+ for arg in seq:
17873+ bs_buf = []
17874+
17875+ if result:
17876+ result.append(' ')
17877+
17878+ needquote = (" " in arg) or ("\t" in arg) or arg == ""
17879+ if needquote:
17880+ result.append('"')
17881+
17882+ for c in arg:
17883+ if c == '\\':
17884+ bs_buf.append(c)
17885+ elif c == '"':
17886+ result.append('\\' * len(bs_buf)*2)
17887+ bs_buf = []
17888+ result.append('\\"')
17889+ else:
17890+ if bs_buf:
17891+ result.extend(bs_buf)
17892+ bs_buf = []
17893+ result.append(c)
17894+
17895+ if bs_buf:
17896+ result.extend(bs_buf)
17897+
17898+ if needquote:
17899+ result.extend(bs_buf)
17900+ result.append('"')
17901+
17902+ return ''.join(result)
17903+
17904+class Popen(object):
17905+ def __init__(self, args, bufsize=0, executable=None,
17906+ stdin=None, stdout=None, stderr=None,
17907+ preexec_fn=None, close_fds=False, shell=False,
17908+ cwd=None, env=None, universal_newlines=False,
17909+ startupinfo=None, creationflags=0):
17910+ _cleanup()
17911+
17912+ self._child_created = False
17913+ if not isinstance(bufsize, (int, long)):
17914+ raise TypeError("bufsize must be an integer")
17915+
17916+ if mswindows:
17917+ if preexec_fn is not None:
17918+ raise ValueError("preexec_fn is not supported on Windows platforms")
17919+ if close_fds:
17920+ raise ValueError("close_fds is not supported on Windows platforms")
17921+ else:
17922+ if startupinfo is not None:
17923+ raise ValueError("startupinfo is only supported on Windows platforms")
17924+ if creationflags != 0:
17925+ raise ValueError("creationflags is only supported on Windows platforms")
17926+
17927+ self.stdin = None
17928+ self.stdout = None
17929+ self.stderr = None
17930+ self.pid = None
17931+ self.returncode = None
17932+ self.universal_newlines = universal_newlines
17933+
17934+ (p2cread, p2cwrite,
17935+ c2pread, c2pwrite,
17936+ errread, errwrite) = self._get_handles(stdin, stdout, stderr)
17937+
17938+ self._execute_child(args, executable, preexec_fn, close_fds,
17939+ cwd, env, universal_newlines,
17940+ startupinfo, creationflags, shell,
17941+ p2cread, p2cwrite,
17942+ c2pread, c2pwrite,
17943+ errread, errwrite)
17944+
17945+ if mswindows:
17946+ if stdin is None and p2cwrite is not None:
17947+ os.close(p2cwrite)
17948+ p2cwrite = None
17949+ if stdout is None and c2pread is not None:
17950+ os.close(c2pread)
17951+ c2pread = None
17952+ if stderr is None and errread is not None:
17953+ os.close(errread)
17954+ errread = None
17955+
17956+ if p2cwrite:
17957+ self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
17958+ if c2pread:
17959+ if universal_newlines:
17960+ self.stdout = os.fdopen(c2pread, 'rU', bufsize)
17961+ else:
17962+ self.stdout = os.fdopen(c2pread, 'rb', bufsize)
17963+ if errread:
17964+ if universal_newlines:
17965+ self.stderr = os.fdopen(errread, 'rU', bufsize)
17966+ else:
17967+ self.stderr = os.fdopen(errread, 'rb', bufsize)
17968+
17969+
17970+ def _translate_newlines(self, data):
17971+ data = data.replace("\r\n", "\n")
17972+ data = data.replace("\r", "\n")
17973+ return data
17974+
17975+
17976+ def __del__(self, sys=sys):
17977+ if not self._child_created:
17978+ return
17979+ self.poll(_deadstate=sys.maxint)
17980+ if self.returncode is None and _active is not None:
17981+ _active.append(self)
17982+
17983+
17984+ def communicate(self, input=None):
17985+ if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
17986+ stdout = None
17987+ stderr = None
17988+ if self.stdin:
17989+ if input:
17990+ self.stdin.write(input)
17991+ self.stdin.close()
17992+ elif self.stdout:
17993+ stdout = self.stdout.read()
17994+ elif self.stderr:
17995+ stderr = self.stderr.read()
17996+ self.wait()
17997+ return (stdout, stderr)
17998+
17999+ return self._communicate(input)
18000+
18001+
18002+ if mswindows:
18003+ def _get_handles(self, stdin, stdout, stderr):
18004+ if stdin is None and stdout is None and stderr is None:
18005+ return (None, None, None, None, None, None)
18006+
18007+ p2cread, p2cwrite = None, None
18008+ c2pread, c2pwrite = None, None
18009+ errread, errwrite = None, None
18010+
18011+ if stdin is None:
18012+ p2cread = GetStdHandle(STD_INPUT_HANDLE)
18013+ if p2cread is not None:
18014+ pass
18015+ elif stdin is None or stdin == PIPE:
18016+ p2cread, p2cwrite = CreatePipe(None, 0)
18017+ p2cwrite = p2cwrite.Detach()
18018+ p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0)
18019+ elif isinstance(stdin, int):
18020+ p2cread = msvcrt.get_osfhandle(stdin)
18021+ else:
18022+ p2cread = msvcrt.get_osfhandle(stdin.fileno())
18023+ p2cread = self._make_inheritable(p2cread)
18024+
18025+ if stdout is None:
18026+ c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE)
18027+ if c2pwrite is not None:
18028+ pass
18029+ elif stdout is None or stdout == PIPE:
18030+ c2pread, c2pwrite = CreatePipe(None, 0)
18031+ c2pread = c2pread.Detach()
18032+ c2pread = msvcrt.open_osfhandle(c2pread, 0)
18033+ elif isinstance(stdout, int):
18034+ c2pwrite = msvcrt.get_osfhandle(stdout)
18035+ else:
18036+ c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
18037+ c2pwrite = self._make_inheritable(c2pwrite)
18038+
18039+ if stderr is None:
18040+ errwrite = GetStdHandle(STD_ERROR_HANDLE)
18041+ if errwrite is not None:
18042+ pass
18043+ elif stderr is None or stderr == PIPE:
18044+ errread, errwrite = CreatePipe(None, 0)
18045+ errread = errread.Detach()
18046+ errread = msvcrt.open_osfhandle(errread, 0)
18047+ elif stderr == STDOUT:
18048+ errwrite = c2pwrite
18049+ elif isinstance(stderr, int):
18050+ errwrite = msvcrt.get_osfhandle(stderr)
18051+ else:
18052+ errwrite = msvcrt.get_osfhandle(stderr.fileno())
18053+ errwrite = self._make_inheritable(errwrite)
18054+
18055+ return (p2cread, p2cwrite,
18056+ c2pread, c2pwrite,
18057+ errread, errwrite)
18058+ def _make_inheritable(self, handle):
18059+ return DuplicateHandle(GetCurrentProcess(), handle, GetCurrentProcess(), 0, 1, DUPLICATE_SAME_ACCESS)
18060+
18061+ def _find_w9xpopen(self):
18062+ w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), "w9xpopen.exe")
18063+ if not os.path.exists(w9xpopen):
18064+ w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), "w9xpopen.exe")
18065+ if not os.path.exists(w9xpopen):
18066+ raise RuntimeError("Cannot locate w9xpopen.exe, which is needed for Popen to work with your shell or platform.")
18067+ return w9xpopen
18068+
18069+ def _execute_child(self, args, executable, preexec_fn, close_fds,
18070+ cwd, env, universal_newlines,
18071+ startupinfo, creationflags, shell,
18072+ p2cread, p2cwrite,
18073+ c2pread, c2pwrite,
18074+ errread, errwrite):
18075+
18076+ if not isinstance(args, types.StringTypes):
18077+ args = list2cmdline(args)
18078+
18079+ if startupinfo is None:
18080+ startupinfo = STARTUPINFO()
18081+ if None not in (p2cread, c2pwrite, errwrite):
18082+ startupinfo.dwFlags |= STARTF_USESTDHANDLES
18083+ startupinfo.hStdInput = p2cread
18084+ startupinfo.hStdOutput = c2pwrite
18085+ startupinfo.hStdError = errwrite
18086+
18087+ if shell:
18088+ startupinfo.dwFlags |= STARTF_USESHOWWINDOW
18089+ startupinfo.wShowWindow = SW_HIDE
18090+ comspec = os.environ.get("COMSPEC", "cmd.exe")
18091+ args = comspec + " /c " + args
18092+ if (GetVersion() >= 0x80000000L or
18093+ os.path.basename(comspec).lower() == "command.com"):
18094+ w9xpopen = self._find_w9xpopen()
18095+ args = '"%s" %s' % (w9xpopen, args)
18096+ creationflags |= CREATE_NEW_CONSOLE
18097+
18098+ try:
18099+ hp, ht, pid, tid = CreateProcess(executable, args, None, None, 1, creationflags, env, cwd, startupinfo)
18100+ except pywintypes.error, e:
18101+ raise WindowsError(*e.args)
18102+
18103+ self._child_created = True
18104+ self._handle = hp
18105+ self.pid = pid
18106+ ht.Close()
18107+
18108+ if p2cread is not None:
18109+ p2cread.Close()
18110+ if c2pwrite is not None:
18111+ c2pwrite.Close()
18112+ if errwrite is not None:
18113+ errwrite.Close()
18114+
18115+
18116+ def poll(self, _deadstate=None):
18117+ if self.returncode is None:
18118+ if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
18119+ self.returncode = GetExitCodeProcess(self._handle)
18120+ return self.returncode
18121+
18122+
18123+ def wait(self):
18124+ if self.returncode is None:
18125+ obj = WaitForSingleObject(self._handle, INFINITE)
18126+ self.returncode = GetExitCodeProcess(self._handle)
18127+ return self.returncode
18128+
18129+ def _readerthread(self, fh, buffer):
18130+ buffer.append(fh.read())
18131+
18132+ def _communicate(self, input):
18133+ stdout = None
18134+ stderr = None
18135+
18136+ if self.stdout:
18137+ stdout = []
18138+ stdout_thread = threading.Thread(target=self._readerthread, args=(self.stdout, stdout))
18139+ stdout_thread.setDaemon(True)
18140+ stdout_thread.start()
18141+ if self.stderr:
18142+ stderr = []
18143+ stderr_thread = threading.Thread(target=self._readerthread, args=(self.stderr, stderr))
18144+ stderr_thread.setDaemon(True)
18145+ stderr_thread.start()
18146+
18147+ if self.stdin:
18148+ if input is not None:
18149+ self.stdin.write(input)
18150+ self.stdin.close()
18151+
18152+ if self.stdout:
18153+ stdout_thread.join()
18154+ if self.stderr:
18155+ stderr_thread.join()
18156+
18157+ if stdout is not None:
18158+ stdout = stdout[0]
18159+ if stderr is not None:
18160+ stderr = stderr[0]
18161+
18162+ if self.universal_newlines and hasattr(file, 'newlines'):
18163+ if stdout:
18164+ stdout = self._translate_newlines(stdout)
18165+ if stderr:
18166+ stderr = self._translate_newlines(stderr)
18167+
18168+ self.wait()
18169+ return (stdout, stderr)
18170+
18171+ else:
18172+ def _get_handles(self, stdin, stdout, stderr):
18173+ p2cread, p2cwrite = None, None
18174+ c2pread, c2pwrite = None, None
18175+ errread, errwrite = None, None
18176+
18177+ if stdin is None:
18178+ pass
18179+ elif stdin == PIPE:
18180+ p2cread, p2cwrite = os.pipe()
18181+ elif isinstance(stdin, int):
18182+ p2cread = stdin
18183+ else:
18184+ p2cread = stdin.fileno()
18185+
18186+ if stdout is None:
18187+ pass
18188+ elif stdout == PIPE:
18189+ c2pread, c2pwrite = os.pipe()
18190+ elif isinstance(stdout, int):
18191+ c2pwrite = stdout
18192+ else:
18193+ c2pwrite = stdout.fileno()
18194+
18195+ if stderr is None:
18196+ pass
18197+ elif stderr == PIPE:
18198+ errread, errwrite = os.pipe()
18199+ elif stderr == STDOUT:
18200+ errwrite = c2pwrite
18201+ elif isinstance(stderr, int):
18202+ errwrite = stderr
18203+ else:
18204+ errwrite = stderr.fileno()
18205+
18206+ return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite)
18207+
18208+ def _set_cloexec_flag(self, fd):
18209+ try:
18210+ cloexec_flag = fcntl.FD_CLOEXEC
18211+ except AttributeError:
18212+ cloexec_flag = 1
18213+
18214+ old = fcntl.fcntl(fd, fcntl.F_GETFD)
18215+ fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag)
18216+
18217+ def _close_fds(self, but):
18218+ for i in xrange(3, MAXFD):
18219+ if i == but:
18220+ continue
18221+ try:
18222+ os.close(i)
18223+ except:
18224+ pass
18225+
18226+ def _execute_child(self, args, executable, preexec_fn, close_fds,
18227+ cwd, env, universal_newlines, startupinfo, creationflags, shell,
18228+ p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite):
18229+
18230+ if isinstance(args, types.StringTypes):
18231+ args = [args]
18232+ else:
18233+ args = list(args)
18234+
18235+ if shell:
18236+ args = ["/bin/sh", "-c"] + args
18237+
18238+ if executable is None:
18239+ executable = args[0]
18240+
18241+ errpipe_read, errpipe_write = os.pipe()
18242+ self._set_cloexec_flag(errpipe_write)
18243+
18244+ gc_was_enabled = gc.isenabled()
18245+ gc.disable()
18246+ try:
18247+ self.pid = os.fork()
18248+ except:
18249+ if gc_was_enabled:
18250+ gc.enable()
18251+ raise
18252+ self._child_created = True
18253+ if self.pid == 0:
18254+ try:
18255+ if p2cwrite:
18256+ os.close(p2cwrite)
18257+ if c2pread:
18258+ os.close(c2pread)
18259+ if errread:
18260+ os.close(errread)
18261+ os.close(errpipe_read)
18262+
18263+ if p2cread:
18264+ os.dup2(p2cread, 0)
18265+ if c2pwrite:
18266+ os.dup2(c2pwrite, 1)
18267+ if errwrite:
18268+ os.dup2(errwrite, 2)
18269+
18270+ if p2cread and p2cread not in (0,):
18271+ os.close(p2cread)
18272+ if c2pwrite and c2pwrite not in (p2cread, 1):
18273+ os.close(c2pwrite)
18274+ if errwrite and errwrite not in (p2cread, c2pwrite, 2):
18275+ os.close(errwrite)
18276+
18277+ if close_fds:
18278+ self._close_fds(but=errpipe_write)
18279+
18280+ if cwd is not None:
18281+ os.chdir(cwd)
18282+
18283+ if preexec_fn:
18284+ apply(preexec_fn)
18285+
18286+ if env is None:
18287+ os.execvp(executable, args)
18288+ else:
18289+ os.execvpe(executable, args, env)
18290+
18291+ except:
18292+ exc_type, exc_value, tb = sys.exc_info()
18293+ exc_lines = traceback.format_exception(exc_type, exc_value, tb)
18294+ exc_value.child_traceback = ''.join(exc_lines)
18295+ os.write(errpipe_write, pickle.dumps(exc_value))
18296+
18297+ os._exit(255)
18298+
18299+ if gc_was_enabled:
18300+ gc.enable()
18301+ os.close(errpipe_write)
18302+ if p2cread and p2cwrite:
18303+ os.close(p2cread)
18304+ if c2pwrite and c2pread:
18305+ os.close(c2pwrite)
18306+ if errwrite and errread:
18307+ os.close(errwrite)
18308+
18309+ data = os.read(errpipe_read, 1048576)
18310+ os.close(errpipe_read)
18311+ if data != "":
18312+ os.waitpid(self.pid, 0)
18313+ child_exception = pickle.loads(data)
18314+ raise child_exception
18315+
18316+ def _handle_exitstatus(self, sts):
18317+ if os.WIFSIGNALED(sts):
18318+ self.returncode = -os.WTERMSIG(sts)
18319+ elif os.WIFEXITED(sts):
18320+ self.returncode = os.WEXITSTATUS(sts)
18321+ else:
18322+ raise RuntimeError("Unknown child exit status!")
18323+
18324+ def poll(self, _deadstate=None):
18325+ if self.returncode is None:
18326+ try:
18327+ pid, sts = os.waitpid(self.pid, os.WNOHANG)
18328+ if pid == self.pid:
18329+ self._handle_exitstatus(sts)
18330+ except os.error:
18331+ if _deadstate is not None:
18332+ self.returncode = _deadstate
18333+ return self.returncode
18334+
18335+ def wait(self):
18336+ if self.returncode is None:
18337+ pid, sts = os.waitpid(self.pid, 0)
18338+ self._handle_exitstatus(sts)
18339+ return self.returncode
18340+
18341+ def _communicate(self, input):
18342+ read_set = []
18343+ write_set = []
18344+ stdout = None
18345+ stderr = None
18346+
18347+ if self.stdin:
18348+ self.stdin.flush()
18349+ if input:
18350+ write_set.append(self.stdin)
18351+ else:
18352+ self.stdin.close()
18353+ if self.stdout:
18354+ read_set.append(self.stdout)
18355+ stdout = []
18356+ if self.stderr:
18357+ read_set.append(self.stderr)
18358+ stderr = []
18359+
18360+ input_offset = 0
18361+ while read_set or write_set:
18362+ rlist, wlist, xlist = select.select(read_set, write_set, [])
18363+
18364+ if self.stdin in wlist:
18365+ bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512))
18366+ input_offset += bytes_written
18367+ if input_offset >= len(input):
18368+ self.stdin.close()
18369+ write_set.remove(self.stdin)
18370+
18371+ if self.stdout in rlist:
18372+ data = os.read(self.stdout.fileno(), 1024)
18373+ if data == "":
18374+ self.stdout.close()
18375+ read_set.remove(self.stdout)
18376+ stdout.append(data)
18377+
18378+ if self.stderr in rlist:
18379+ data = os.read(self.stderr.fileno(), 1024)
18380+ if data == "":
18381+ self.stderr.close()
18382+ read_set.remove(self.stderr)
18383+ stderr.append(data)
18384+
18385+ if stdout is not None:
18386+ stdout = ''.join(stdout)
18387+ if stderr is not None:
18388+ stderr = ''.join(stderr)
18389+
18390+ if self.universal_newlines and hasattr(file, 'newlines'):
18391+ if stdout:
18392+ stdout = self._translate_newlines(stdout)
18393+ if stderr:
18394+ stderr = self._translate_newlines(stderr)
18395+
18396+ self.wait()
18397+ return (stdout, stderr)
18398+
18399diff --git a/buildtools/wafadmin/py3kfixes.py b/buildtools/wafadmin/py3kfixes.py
18400new file mode 100644
18401index 0000000..2f3c9c2
18402--- /dev/null
18403+++ b/buildtools/wafadmin/py3kfixes.py
18404@@ -0,0 +1,130 @@
18405+#!/usr/bin/env python
18406+# encoding: utf-8
18407+# Thomas Nagy, 2009 (ita)
18408+
18409+"""
18410+Fixes for py3k go here
18411+"""
18412+
18413+import os
18414+
18415+all_modifs = {}
18416+
18417+def modif(dir, name, fun):
18418+ if name == '*':
18419+ lst = []
18420+ for y in '. Tools 3rdparty'.split():
18421+ for x in os.listdir(os.path.join(dir, y)):
18422+ if x.endswith('.py'):
18423+ lst.append(y + os.sep + x)
18424+ #lst = [y + os.sep + x for x in os.listdir(os.path.join(dir, y)) for y in '. Tools 3rdparty'.split() if x.endswith('.py')]
18425+ for x in lst:
18426+ modif(dir, x, fun)
18427+ return
18428+
18429+ filename = os.path.join(dir, name)
18430+ f = open(filename, 'r')
18431+ txt = f.read()
18432+ f.close()
18433+
18434+ txt = fun(txt)
18435+
18436+ f = open(filename, 'w')
18437+ f.write(txt)
18438+ f.close()
18439+
18440+def subst(filename):
18441+ def do_subst(fun):
18442+ global all_modifs
18443+ try:
18444+ all_modifs[filename] += fun
18445+ except KeyError:
18446+ all_modifs[filename] = [fun]
18447+ return fun
18448+ return do_subst
18449+
18450+@subst('Constants.py')
18451+def r1(code):
18452+ code = code.replace("'iluvcuteoverload'", "b'iluvcuteoverload'")
18453+ code = code.replace("ABI=7", "ABI=37")
18454+ return code
18455+
18456+@subst('Tools/ccroot.py')
18457+def r2(code):
18458+ code = code.replace("p.stdin.write('\\n')", "p.stdin.write(b'\\n')")
18459+ code = code.replace('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")')
18460+ return code
18461+
18462+@subst('Utils.py')
18463+def r3(code):
18464+ code = code.replace("m.update(str(lst))", "m.update(str(lst).encode())")
18465+ code = code.replace('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")')
18466+ return code
18467+
18468+@subst('ansiterm.py')
18469+def r33(code):
18470+ code = code.replace('unicode', 'str')
18471+ return code
18472+
18473+@subst('Task.py')
18474+def r4(code):
18475+ code = code.replace("up(self.__class__.__name__)", "up(self.__class__.__name__.encode())")
18476+ code = code.replace("up(self.env.variant())", "up(self.env.variant().encode())")
18477+ code = code.replace("up(x.parent.abspath())", "up(x.parent.abspath().encode())")
18478+ code = code.replace("up(x.name)", "up(x.name.encode())")
18479+ code = code.replace('class TaskBase(object):\n\t__metaclass__=store_task_type', 'import binascii\n\nclass TaskBase(object, metaclass=store_task_type):')
18480+ code = code.replace('keys=self.cstr_groups.keys()', 'keys=list(self.cstr_groups.keys())')
18481+ code = code.replace("sig.encode('hex')", 'binascii.hexlify(sig)')
18482+ code = code.replace("os.path.join(Options.cache_global,ssig)", "os.path.join(Options.cache_global,ssig.decode())")
18483+ return code
18484+
18485+@subst('Build.py')
18486+def r5(code):
18487+ code = code.replace("cPickle.dump(data,file,-1)", "cPickle.dump(data,file)")
18488+ code = code.replace('for node in src_dir_node.childs.values():', 'for node in list(src_dir_node.childs.values()):')
18489+ return code
18490+
18491+@subst('*')
18492+def r6(code):
18493+ code = code.replace('xrange', 'range')
18494+ code = code.replace('iteritems', 'items')
18495+ code = code.replace('maxint', 'maxsize')
18496+ code = code.replace('iterkeys', 'keys')
18497+ code = code.replace('Error,e:', 'Error as e:')
18498+ code = code.replace('Exception,e:', 'Exception as e:')
18499+ return code
18500+
18501+@subst('TaskGen.py')
18502+def r7(code):
18503+ code = code.replace('class task_gen(object):\n\t__metaclass__=register_obj', 'class task_gen(object, metaclass=register_obj):')
18504+ return code
18505+
18506+@subst('Tools/python.py')
18507+def r8(code):
18508+ code = code.replace('proc.communicate()[0]', 'proc.communicate()[0].decode("utf-8")')
18509+ return code
18510+
18511+@subst('Tools/glib2.py')
18512+def r9(code):
18513+ code = code.replace('f.write(c)', 'f.write(c.encode("utf-8"))')
18514+ return code
18515+
18516+@subst('Tools/config_c.py')
18517+def r10(code):
18518+ code = code.replace("key=kw['success']", "key=kw['success']\n\t\t\t\ttry:\n\t\t\t\t\tkey=key.decode('utf-8')\n\t\t\t\texcept:\n\t\t\t\t\tpass")
18519+ code = code.replace('out=str(out)','out=out.decode("utf-8")')
18520+ code = code.replace('err=str(err)','err=err.decode("utf-8")')
18521+ return code
18522+
18523+@subst('Tools/d.py')
18524+def r11(code):
18525+ code = code.replace('ret.strip()', 'ret.strip().decode("utf-8")')
18526+ return code
18527+
18528+def fixdir(dir):
18529+ global all_modifs
18530+ for k in all_modifs:
18531+ for v in all_modifs[k]:
18532+ modif(os.path.join(dir, 'wafadmin'), k, v)
18533+ #print('substitutions finished')
18534+
diff --git a/meta-oe/recipes-connectivity/samba/samba-basic.inc b/meta-oe/recipes-connectivity/samba/samba-basic.inc
new file mode 100644
index 000000000..e9f891f8c
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-basic.inc
@@ -0,0 +1,55 @@
1SRC_URI += "file://config-lfs.patch \
2 file://quota.patch;striplevel=0 \
3"
4
5EXTRA_OECONF += "\
6 --enable-nss-wrapper \
7 --without-ads \
8 --with-winbind \
9 --without-ldap \
10 --without-krb5"
11
12PACKAGES =+ "libwbclient libwinbind libwinbind-dbg libnss-winbind winbind winbind-dbg libnetapi libtdb libsmbsharemodes libsmbclient libsmbclient-dev cifs cifs-doc swat"
13
14FILES_winbind-dbg = "${libdir}/idmap/.debug/*.so \
15 ${libdir}/security/.debug/pam_winbind.so \
16"
17
18FILES_${PN} += "${libdir}/vfs/*.so \
19 ${libdir}/charset/*.so \
20 ${libdir}/*.dat \
21 ${libdir}/auth/*.so \
22 ${libdir}/security/pam_smbpass.so \
23"
24
25FILES_${PN}-dbg += "${libdir}/vfs/.debug/*.so \
26 ${libdir}/charset/.debug/*.so \
27 ${libdir}/auth/.debug/*.so \
28 ${libdir}/security/.debug/pam_smbpass.so \
29"
30
31FILES_libwbclient = "${libdir}/libwbclient.so.*"
32FILES_libnetapi = "${libdir}/libnetapi.so.*"
33FILES_libsmbsharemodes = "${libdir}/libsmbsharemodes.so.*"
34FILES_libtdb = "${libdir}/libtdb.so.*"
35FILES_cifs = "${base_sbindir}/mount.cifs ${base_sbindir}/umount.cifs"
36FILES_cifs-doc = "${mandir}/man8/mount.cifs.8 ${mandir}/man8/umount.cifs.8"
37FILES_libsmbclient = "${libdir}/libsmbclient.so.*"
38FILES_libsmbclient-dev = "${libdir}/libsmbclient.so ${includedir}"
39FILES_winbind = "${sbindir}/winbindd \
40 ${bindir}/wbinfo \
41 ${bindir}/ntlm_auth \
42 ${sysconfdir}/init.d/winbind \
43 ${systemd_unitdir}/system/winbind.service \
44"
45FILES_libwinbind = "${libdir}/idmap/*.so \
46 ${libdir}/pdb \
47 ${libdir}/gpext \
48 ${libdir}/perfcount \
49 ${libdir}/security/pam_winbind.so \
50"
51
52FILES_libnss-winbind = "${libdir}/libnss_*${SOLIBS} \
53 ${libdir}/nss_info"
54
55FILES_swat = "${sbindir}/swat ${datadir}/swat ${libdir}/*.msg"
diff --git a/meta-oe/recipes-connectivity/samba/samba.inc b/meta-oe/recipes-connectivity/samba/samba.inc
new file mode 100644
index 000000000..aef8aaca1
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba.inc
@@ -0,0 +1,159 @@
1SECTION = "console/network"
2LICENSE = "GPL-3.0"
3DEPENDS = "readline virtual/libiconv zlib popt"
4
5SAMBA_MIRROR = "http://samba.org/samba/ftp"
6
7MIRRORS += "\
8${SAMBA_MIRROR} http://mirror.internode.on.net/pub/samba \n \
9${SAMBA_MIRROR} http://www.mirrorservice.org/sites/ftp.samba.org \n \
10"
11
12SRC_URI = "${SAMBA_MIRROR}/stable/samba-${PV}.tar.gz \
13 file://volatiles.03_samba \
14 file://smb.conf \
15 file://init.samba \
16 file://init.winbind \
17 file://tdb.pc \
18 file://nmb.service \
19 file://smb.service \
20 file://winbind.service \
21"
22
23S = "${WORKDIR}/samba-${PV}/source"
24
25inherit autotools-brokensep update-rc.d systemd
26
27SYSTEMD_PACKAGES = "${PN} winbind"
28SYSTEMD_SERVICE_${PN} = "nmb.service smb.service"
29SYSTEMD_SERVICE_winbind = "winbind.service"
30
31SAMBAMMAP = "no"
32SAMBAMMAP_libc-glibc = "yes"
33
34# The file system settings --foodir=dirfoo are overridden unconditionally
35# in the samba config by --with-foodir=dirfoo - even if the --with is not
36# specified! Fix that here. Set the privatedir to /etc/samba/private.
37EXTRA_OECONF='--disable-cups \
38 --with-readline=${STAGING_LIBDIR}/.. \
39 --with-libiconv=${STAGING_LIBDIR}/.. \
40 --without-automount \
41 --with-configdir=${sysconfdir}/samba \
42 --with-privatedir=${sysconfdir}/samba/private \
43 --with-lockdir=${localstatedir}/lock \
44 --with-piddir=${localstatedir}/run \
45 --with-logfilebase=${localstatedir}/log/samba \
46 --libdir=${libdir} \
47 --with-mandir=${mandir} \
48 --with-swatdir=${datadir}/swat \
49 --with-aio-support \
50 --with-winbind \
51 --with-wbclient \
52 --without-acl-support \
53 --disable-avahi \
54 samba_cv_struct_timespec=yes \
55 libreplace_cv_HAVE_MMAP=${SAMBAMMAP}'
56
57PACKAGECONFIG = "${@base_contains('DISTRO_FEATURES', 'pam', 'pam', '', d)}"
58PACKAGECONFIG[pam] = "--with-pam,--without-pam,libpam"
59PACKAGECONFIG[fam] = "--enable-fam,--disable-fam,gamin"
60PACKAGECONFIG[talloc] = "--enable-external-libtalloc --with-libtalloc, --disable-external-libtalloc --without-libtalloc, talloc"
61
62INITSCRIPT_PACKAGES = "samba winbind"
63INITSCRIPT_NAME_samba = "samba"
64INITSCRIPT_NAME_winbind = "winbind"
65# No dependencies, goes in at level 20 (NOTE: take care with the
66# level, later levels put the shutdown later too - see the links
67# in rc6.d, the shutdown must precede network shutdown).
68INITSCRIPT_PARAMS = "defaults"
69CONFFILES_${PN} = "${sysconfdir}/samba/smb.conf"
70
71do_configure_prepend () {
72 ./script/mkversion.sh
73 if [ ! -e acinclude.m4 ]; then
74 touch aclocal.m4
75 cat aclocal.m4 > acinclude.m4
76 fi
77}
78
79do_compile () {
80 oe_runmake proto_exists
81 base_do_compile
82}
83
84do_install_append() {
85 install -d ${D}${libdir}/pkgconfig/
86 cp ${WORKDIR}/tdb.pc ${D}${libdir}/pkgconfig/
87 mv ${D}${libdir}/libsmbclient.so ${D}${libdir}/libsmbclient.so.0 || true
88 ln -sf libsmbclient.so.0 ${D}${libdir}/libsmbclient.so
89 mkdir -p ${D}${base_sbindir}
90 rm -f ${D}${bindir}/*.old
91 rm -f ${D}${sbindir}/*.old
92 [ -f ${D}${sbindir}/mount.cifs ] && mv ${D}${sbindir}/mount.cifs ${D}${base_sbindir}/
93 [ -f ${D}${sbindir}/umount.cifs ] && mv ${D}${sbindir}/umount.cifs ${D}${base_sbindir}/
94
95 # This is needed for < 3.2.4
96 rm -f ${D}${sbindir}/mount.smbfs ${D}${base_sbindir}/mount.smbfs
97 if [ -f ${D}${bindir}/smbmount ]; then
98 ln -sf ${bindir}/smbmount ${D}${base_sbindir}/mount.smb
99 ln -sf ${bindir}/smbmount ${D}${base_sbindir}/mount.smbfs
100 fi
101
102 install -D -m 755 ${WORKDIR}/init.samba ${D}${sysconfdir}/init.d/samba
103 install -D -m 755 ${WORKDIR}/init.winbind ${D}${sysconfdir}/init.d/winbind
104 install -D -m 644 ${WORKDIR}/smb.conf ${D}${sysconfdir}/samba/smb.conf
105 install -D -m 644 ${WORKDIR}/volatiles.03_samba ${D}${sysconfdir}/default/volatiles/volatiles.03_samba
106 install -d ${D}/var/log/samba
107 install -d ${D}/var/spool/samba
108
109 # Install other stuff not installed by "make install"
110 if [ -d ${WORKDIR}/${PN}-${PV}/nsswitch ]; then
111 install -m 0644 ${WORKDIR}/${PN}-${PV}/nsswitch/libnss_winbind.so ${D}${libdir}/libnss_winbind.so.2
112 install -m 0644 ${WORKDIR}/${PN}-${PV}/nsswitch/libnss_wins.so ${D}${libdir}/libnss_wins.so.2
113 fi
114
115 rmdir --ignore-fail-on-non-empty ${D}${base_sbindir}
116 sed -i -e '1s,#!.*perl,#!${USRBINPATH}/env perl,' ${D}${bindir}/findsmb
117
118 # usershare mount place
119 mkdir -p ${D}${localstatedir}/lib/samba/usershares
120
121 # Remove sysinit script if sysvinit is not in DISTRO_FEATURES
122 if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'false', 'true', d)}; then
123 rm -rf ${D}${sysconfdir}/init.d/
124 fi
125
126 install -d ${D}${systemd_unitdir}/system
127 for i in nmb smb winbind; do
128 install -m 0644 ${WORKDIR}/$i.service ${D}${systemd_unitdir}/system
129 done
130 sed -e 's,@BASE_BINDIR@,${base_bindir},g' \
131 -e 's,@SBINDIR@,${sbindir},g' \
132 -i ${D}${systemd_unitdir}/system/*.service
133
134 if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
135 install -d ${D}${sysconfdir}/tmpfiles.d
136 echo "d ${localstatedir}/log/samba 0755 root root -" \
137 > ${D}${sysconfdir}/tmpfiles.d/99-${BPN}.conf
138 fi
139}
140
141DEPENDS_append = " ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'systemd-systemctl-native', '', d)}"
142pkg_postinst_${PN} () {
143 if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd sysvinit', 'true', 'false', d)}; then
144 if [ -n "$D" ]; then
145 OPTS="--root=$D"
146 fi
147 systemctl $OPTS mask samba.service
148 fi
149}
150
151pkg_postinst_libnss-winbind () {
152 # add wins to the list of resolvers
153 ns=$D${sysconfdir}/nsswitch.conf
154 if ! grep "hosts:.*wins" $ns > /dev/null; then
155 hosts="`grep '^hosts:' $ns`"
156 hosts=`echo "$hosts" | sed 's/\[/\\\\[/g; s/\]/\\\\]/g'`
157 sed -i "s/$hosts/$hosts wins/" "$ns"
158 fi
159}
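For illustration, the pkg_postinst_libnss-winbind hook above only appends "wins" to the hosts line of the target's nsswitch.conf. Assuming a typical default file, the guarded sed edit turns

    hosts:      files dns

into

    hosts:      files dns wins

and is skipped on later runs because the grep for "hosts:.*wins" already matches.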
diff --git a/meta-oe/recipes-connectivity/samba/samba/0001-PIDL-fix-parsing-linemarkers-in-preprocessor-output.patch b/meta-oe/recipes-connectivity/samba/samba/0001-PIDL-fix-parsing-linemarkers-in-preprocessor-output.patch
new file mode 100644
index 000000000..586867d8f
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/0001-PIDL-fix-parsing-linemarkers-in-preprocessor-output.patch
@@ -0,0 +1,68 @@
1Upstream-Status: Backport
2
3From b07ade6e6fcc8b844cf1fb8b6038617281c4c2d0 Mon Sep 17 00:00:00 2001
4From: Alexander Bokovoy <ab@samba.org>
5Date: Wed, 6 Feb 2013 10:17:57 +0200
6Subject: [PATCH] PIDL: fix parsing linemarkers in preprocessor output
7
8Commit b07ade6e6fcc8b844cf1fb8b6038617281c4c2d0 v3-6-stable
9
10When PIDL calls out to C preprocessor to expand IDL files
11and parse the output, it filters out linemarkers and line control
12information as described in http://gcc.gnu.org/onlinedocs/cpp/Preprocessor-Output.html
13and http://gcc.gnu.org/onlinedocs/cpp/Line-Control.html#Line-Control
14
15With gcc 4.8 stdc-predef.h is included automatically and linemarker for the
16file has extended flags that PIDL couldn't parse ('system header that needs to
17be extern "C" protected for C++')
18
19Thanks to Jakub Jelinek <jakub@redhat.com> for explanation of the linemarker format.
20
21Fixes https://bugzilla.redhat.com/show_bug.cgi?id=906517
22
23Reviewed-by: Andreas Schneider <asn@samba.org>
24(cherry picked from commit 6ba7ab5c14801aecae96373d5a9db7ab82957526)
25
26Signed-off-by: Andreas Schneider <asn@samba.org>
27
28Fix bug #9636 - pidl can't parse new linemarkers in preprocessor output.
29(cherry picked from commit 643571470f2e4cd2f58bd60ac7189abb826d33cc)
30(cherry picked from commit b5a8afd6550e9091d169d3010751913bb483fc4b)
31---
32 pidl/idl.yp | 4 +++-
33 pidl/lib/Parse/Pidl/IDL.pm | 4 +++-
34 2 files changed, 6 insertions(+), 2 deletions(-)
35
36diff --git a/pidl/idl.yp b/pidl/idl.yp
37index b5c5185..c8a65f6 100644
38--- a/pidl/idl.yp
39+++ b/pidl/idl.yp
40@@ -610,7 +610,9 @@ again:
41
42 for ($parser->YYData->{INPUT}) {
43 if (/^\#/) {
44- if (s/^\# (\d+) \"(.*?)\"( \d+|)//) {
45+ # Linemarker format is described at
46+ # http://gcc.gnu.org/onlinedocs/cpp/Preprocessor-Output.html
47+ if (s/^\# (\d+) \"(.*?)\"(( \d+){1,4}|)//) {
48 $parser->YYData->{LINE} = $1-1;
49 $parser->YYData->{FILE} = $2;
50 goto again;
51diff --git a/pidl/lib/Parse/Pidl/IDL.pm b/pidl/lib/Parse/Pidl/IDL.pm
52index d4820ff..6927c89 100644
53--- a/pidl/lib/Parse/Pidl/IDL.pm
54+++ b/pidl/lib/Parse/Pidl/IDL.pm
55@@ -2576,7 +2576,9 @@ again:
56
57 for ($parser->YYData->{INPUT}) {
58 if (/^\#/) {
59- if (s/^\# (\d+) \"(.*?)\"( \d+|)//) {
60+ # Linemarker format is described at
61+ # http://gcc.gnu.org/onlinedocs/cpp/Preprocessor-Output.html
62+ if (s/^\# (\d+) \"(.*?)\"(( \d+){1,4}|)//) {
63 $parser->YYData->{LINE} = $1-1;
64 $parser->YYData->{FILE} = $2;
65 goto again;
66--
671.7.5.4
68
diff --git a/meta-oe/recipes-connectivity/samba/samba/Managing-Samba.txt b/meta-oe/recipes-connectivity/samba/samba/Managing-Samba.txt
new file mode 100644
index 000000000..01f759282
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/Managing-Samba.txt
@@ -0,0 +1,40 @@
1This device is running a bare-bones Samba server which allows easy
2transfer of files and directories between any networked desktop PC and
3your networked PDA.
4
5Since it is generally a bad idea to allow everyone read and write access
6to your PDA, you will have to configure at least one user to get access to
7any shared folder.
8
9How to create a Samba user with password:
10
11- If you haven't already created a non-root user, do so now:
12 root@poodle:/usr/bin# adduser testuser
13 Changing password for testuser
14 Enter the new password (minimum of 5, maximum of 8 characters)
15 Please use a combination of upper and lower case letters and numbers.
16 Enter new password:
17 Bad password: too short.
18
19 Warning: weak password (continuing).
20 Re-enter new password:
21 Password changed.
22 root@poodle:/usr/bin#
23
24- Note that the password you entered will _not_ be your samba password.
25 Samba uses its own password database.
26
27- Add a Samba password for your user:
28 root@poodle:/usr/bin# smbpasswd -a testuser
29 New SMB password:
30 Retype new SMB password:
31 Added user testuser.
32 root@poodle:/usr/bin#
33
34- After you have added your new samba user, you'll have to restart the samba
35 server by running "/etc/init.d/samba restart" or by rebooting the device.
36
37- Use the newly created username / password combination to access your network
38 shares. Please note that the Samba username must also exist as a unix username!
39
40
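To give the newly created account something to connect to, the usual companion step is a share definition in /etc/samba/smb.conf. A minimal sketch (share name and path are placeholders, adjust to your layout) could be:

    [documents]
        path = /home/testuser/documents
        valid users = testuser
        read only = no

followed by the same "/etc/init.d/samba restart" mentioned above.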
diff --git a/meta-oe/recipes-connectivity/samba/samba/cifs.patch b/meta-oe/recipes-connectivity/samba/samba/cifs.patch
new file mode 100644
index 000000000..ee6dab567
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/cifs.patch
@@ -0,0 +1,10 @@
1--- source/client/mount.cifs.c.old 2004-11-28 02:33:52.000000000 +1030
2+++ source/client/mount.cifs.c 2004-11-28 02:33:59.000000000 +1030
3@@ -36,6 +36,7 @@
4 #include <string.h>
5 #include <mntent.h>
6 #include <fcntl.h>
7+#include <linux/limits.h>
8
9 #define MOUNT_CIFS_VERSION_MAJOR "1"
10 #define MOUNT_CIFS_VERSION_MINOR "2"
diff --git a/meta-oe/recipes-connectivity/samba/samba/config-h.patch b/meta-oe/recipes-connectivity/samba/samba/config-h.patch
new file mode 100644
index 000000000..eeb22684e
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/config-h.patch
@@ -0,0 +1,12 @@
1diff -urN source.old//include/config.h.in source//include/config.h.in
2--- source.old//include/config.h.in 2008-11-20 14:45:04.000000000 +0000
3+++ source//include/config.h.in 2008-11-30 21:04:17.990008933 +0000
4@@ -2672,7 +2672,7 @@
5 #undef USE_SETEUID
6
7 /* Whether setresuid() is available */
8-#undef USE_SETRESUID
9+#define USE_SETRESUID 1
10
11 /* Whether setreuid() is available */
12 #undef USE_SETREUID
diff --git a/meta-oe/recipes-connectivity/samba/samba/config-lfs.patch b/meta-oe/recipes-connectivity/samba/samba/config-lfs.patch
new file mode 100644
index 000000000..b37ed690c
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/config-lfs.patch
@@ -0,0 +1,47 @@
1Cache the check for Linux LFS support, so it can be prepopulated from
2the site cache for configure variables for cross-compiling. Without this,
3samba gets the idea that it can use dirent64 and friends without defining
4the flags it needs to get them, such as _GNU_SOURCE and _LARGEFILE64_SOURCE.
5
6Symptoms of getting the configuration wrong on cross-compile include
7warnings such as
8
9 smbd/trans2.c: In function `get_lanman2_dir_entry':
10 smbd/trans2.c:1065: warning: right shift count >= width of type
11
12and errors like
13
14 smbd/vfs.c:630: error: dereferencing pointer to incomplete type
15
16(when trying to dereference dirent64.)
17
18--- source/configure.in.orig 2005-05-29 14:46:18.000000000 -0700
19+++ source/configure.in 2005-05-29 14:51:57.000000000 -0700
20@@ -588,7 +588,7 @@
21 # Tests for linux LFS support. Need kernel 2.4 and glibc2.2 or greater support.
22 #
23 *linux*)
24- AC_MSG_CHECKING([for LFS support])
25+ AC_CACHE_CHECK([for LFS support], samba_cv_LINUX_LFS_SUPPORT,[
26 old_CPPFLAGS="$CPPFLAGS"
27 CPPFLAGS="-D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64 -D_GNU_SOURCE $CPPFLAGS"
28 AC_TRY_RUN([
29@@ -627,15 +627,14 @@
30 exit(1);
31 #endif
32 }
33-], [LINUX_LFS_SUPPORT=yes], [LINUX_LFS_SUPPORT=no], [LINUX_LFS_SUPPORT=cross])
34- CPPFLAGS="$old_CPPFLAGS"
35- if test x$LINUX_LFS_SUPPORT = xyes ; then
36+], [samba_cv_LINUX_LFS_SUPPORT=yes], [samba_cv_LINUX_LFS_SUPPORT=no], [samba_cv_LINUX_LFS_SUPPORT=cross])
37+ CPPFLAGS="$old_CPPFLAGS"])
38+ if test x"$samba_cv_LINUX_LFS_SUPPORT" = x"yes" ; then
39 CPPFLAGS="-D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64 -D_GNU_SOURCE $CPPFLAGS"
40 AC_DEFINE(_LARGEFILE64_SOURCE, 1, [Whether to enable large file support])
41 AC_DEFINE(_FILE_OFFSET_BITS, 64, [File offset bits])
42 AC_DEFINE(_GNU_SOURCE, 1, [Whether to use GNU libc extensions])
43 fi
44- AC_MSG_RESULT([$LINUX_LFS_SUPPORT])
45 ;;
46
47 #
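Because the result is now cached under the samba_cv_ prefix, a cross build can preseed it instead of running the test program. One hypothetical way, mirroring the samba_cv_struct_timespec=yes entry already passed through EXTRA_OECONF in samba.inc, is to provide the value in the configure environment or a site file:

    samba_cv_LINUX_LFS_SUPPORT=yes

With the cache variable preset, configure skips the run test and still defines _LARGEFILE64_SOURCE, _FILE_OFFSET_BITS=64 and _GNU_SOURCE as the patched block does.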
diff --git a/meta-oe/recipes-connectivity/samba/samba/configure-3.3.0.patch b/meta-oe/recipes-connectivity/samba/samba/configure-3.3.0.patch
new file mode 100644
index 000000000..19fb9864d
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/configure-3.3.0.patch
@@ -0,0 +1,85 @@
1diff -urN source.old//configure source//configure
2--- source.old//configure 2009-01-27 07:53:11.000000000 +0000
3+++ source//configure 2009-01-31 21:07:25.811887936 +0000
4@@ -43860,13 +43860,7 @@
5 *linux*)
6 # glibc <= 2.3.2 has a broken getgrouplist
7 if test "$cross_compiling" = yes; then
8- { { $as_echo "$as_me:$LINENO: error: in \`$ac_pwd':" >&5
9-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
10-{ { $as_echo "$as_me:$LINENO: error: cannot run test program while cross compiling
11-See \`config.log' for more details." >&5
12-$as_echo "$as_me: error: cannot run test program while cross compiling
13-See \`config.log' for more details." >&2;}
14- { (exit 1); exit 1; }; }; }
15+linux_getgrouplist_ok=no
16 else
17 cat >conftest.$ac_ext <<_ACEOF
18 /* confdefs.h. */
19diff -urN source.old//configure.in source//configure.in
20--- source.old//configure.in 2009-01-26 13:56:34.000000000 +0000
21+++ source//configure.in 2009-01-31 21:04:39.051889949 +0000
22@@ -280,6 +280,8 @@
23 fi
24 AC_SUBST(BROKEN_CC)
25
26+AC_TRY_COMPILE([],[(void)sizeof(char[-1])],AC_MSG_ERROR([configure's compilation assert doesn't work with $CC]))
27+
28 dnl Check if the C compiler understands -Werror
29 AC_CACHE_CHECK([that the C compiler understands -Werror],samba_cv_HAVE_Werror, [
30 AC_TRY_RUN_STRICT([
31@@ -330,25 +332,11 @@
32 # a runtime test is needed here
33 AC_SUBST(PIDL_ARGS)
34 AC_CACHE_CHECK([that the C compiler understands negative enum values],samba_cv_CC_NEGATIVE_ENUM_VALUES, [
35- AC_TRY_RUN(
36+ AC_TRY_COMPILE([],
37 [
38- #include <stdio.h>
39 enum negative_values { NEGATIVE_VALUE = 0xFFFFFFFF };
40- int main(void) {
41- enum negative_values v1 = NEGATIVE_VALUE;
42- unsigned v2 = NEGATIVE_VALUE;
43-
44- if (v1 != 0xFFFFFFFF) {
45- printf("%u != 0xFFFFFFFF\n", v1);
46- return 1;
47- }
48- if (v2 != 0xFFFFFFFF) {
49- printf("%u != 0xFFFFFFFF\n", v2);
50- return 1;
51- }
52-
53- return 0;
54- }
55+ (void)sizeof(char[1-2*( (unsigned)NEGATIVE_VALUE != 0xFFFFFFFF)]);
56+ (void)sizeof(char[1-2*((enum negative_values)NEGATIVE_VALUE != 0xFFFFFFFF)]);
57 ],
58 samba_cv_CC_NEGATIVE_ENUM_VALUES=yes,samba_cv__CC_NEGATIVE_ENUM_VALUES=no)])
59 if test x"$samba_cv_CC_NEGATIVE_ENUM_VALUES" != x"yes"; then
60@@ -1224,22 +1212,12 @@
61 case "$host_os" in
62 *linux*)
63 # glibc <= 2.3.2 has a broken getgrouplist
64- AC_TRY_RUN([
65-#include <unistd.h>
66+ AC_TRY_COMPILE([
67 #include <sys/utsname.h>
68-main() {
69- /* glibc up to 2.3 has a broken getgrouplist */
70+],[
71 #if defined(__GLIBC__) && defined(__GLIBC_MINOR__)
72- int libc_major = __GLIBC__;
73- int libc_minor = __GLIBC_MINOR__;
74-
75- if (libc_major < 2)
76- exit(1);
77- if ((libc_major == 2) && (libc_minor <= 3))
78- exit(1);
79+ (void)sizeof(char[1-2*(__GLIBC__ < 2 || __GLIBC__ == 2 && __GLIBC_MINOR__ <= 3)]);
80 #endif
81- exit(0);
82-}
83 ], [linux_getgrouplist_ok=yes], [linux_getgrouplist_ok=no])
84 if test x"$linux_getgrouplist_ok" = x"yes"; then
85 AC_DEFINE(HAVE_GETGROUPLIST, 1, [Have good getgrouplist])
diff --git a/meta-oe/recipes-connectivity/samba/samba/init.samba b/meta-oe/recipes-connectivity/samba/samba/init.samba
new file mode 100644
index 000000000..6a44ac468
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/init.samba
@@ -0,0 +1,58 @@
1#! /bin/sh
2#
3# This is an init script for openembedded
4# Copy it to /etc/init.d/samba and type
5# > update-rc.d samba defaults 60
6#
7
8
9smbd=/usr/sbin/smbd
10test -x "$smbd" || exit 0
11nmbd=/usr/sbin/nmbd
12test -x "$nmbd" || exit 0
13
14
15case "$1" in
16 start)
17 echo -n "Starting Samba: smbd"
18 start-stop-daemon --start --quiet --exec $smbd
19 echo -n " nmbd"
20 start-stop-daemon --start --quiet --exec $nmbd
21 echo "."
22 ;;
23 stop)
24 echo -n "Stopping Samba: smbd"
25 start-stop-daemon --stop --quiet --pidfile /var/run/smbd.pid
26 echo -n " nmbd"
27 start-stop-daemon --stop --quiet --pidfile /var/run/nmbd.pid
28 echo "."
29 ;;
30 reload|force-reload)
31 start-stop-daemon --stop --quiet --signal 1 --exec $smbd
32 start-stop-daemon --stop --quiet --signal 1 --exec $nmbd
33 ;;
34 restart)
35 echo -n "Stopping Samba: smbd"
36 start-stop-daemon --stop --quiet --pidfile /var/run/smbd.pid
37 echo -n " nmbd"
38 start-stop-daemon --stop --quiet --pidfile /var/run/nmbd.pid
39 echo ""
40 echo -n "Waiting for samba processes to die off"
41 for i in 1 2 3 ;
42 do
43 sleep 1
44 echo -n "."
45 done
46 echo ""
47 echo -n "Starting Samba: smbd"
48 start-stop-daemon --start --quiet --exec $smbd
49 echo -n " nmbd"
50 start-stop-daemon --start --quiet --exec $nmbd
51 echo "."
52 ;;
53 *)
54 echo "Usage: /etc/init.d/samba {start|stop|reload|restart|force-reload}"
55 exit 1
56esac
57
58exit 0
diff --git a/meta-oe/recipes-connectivity/samba/samba/init.winbind b/meta-oe/recipes-connectivity/samba/samba/init.winbind
new file mode 100644
index 000000000..53de2eec6
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/init.winbind
@@ -0,0 +1,38 @@
1#! /bin/sh
2
3### BEGIN INIT INFO
4# Provides: winbind
5# Required-Start: $network $remote_fs $syslog
6# Required-Stop: $network $remote_fs $syslog
7# Should-Start: samba
8# Default-Start: 2 3 4 5
9# Default-Stop: 0 1 6
10# Short-Description: start Winbind daemon
11### END INIT INFO
12
13winbind=/usr/sbin/winbindd
14test -x "$winbind" || exit 0
15
16case "$1" in
17 start)
18 echo -n "Starting Winbind... "
19 start-stop-daemon --start --quiet --exec $winbind
20 echo "done"
21 ;;
22 stop)
23 echo -n "Stopping Winbind... "
24 start-stop-daemon --stop --quiet --pidfile /var/run/winbind.pid
25 echo "done"
26 ;;
27 reload|force-reload)
28 start-stop-daemon --stop --quiet --signal 1 --exec $winbind
29 ;;
30 restart)
31 $0 stop && sleep 2 && $0 start
32 ;;
33 *)
34 echo "Usage: /etc/init.d/winbind {start|stop|reload|restart|force-reload}"
35 exit 1
36esac
37
38exit 0
diff --git a/meta-oe/recipes-connectivity/samba/samba/mtab.patch b/meta-oe/recipes-connectivity/samba/samba/mtab.patch
new file mode 100644
index 000000000..2ee8ba094
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/mtab.patch
@@ -0,0 +1,11 @@
1diff -urN source/client/mtab.c samba-3.2.7//source/client/mtab.c
2--- source/client/mtab.c 2008-12-19 13:57:33.000000000 +0000
3+++ source/client/mtab.c 2009-01-09 23:14:00.717671075 +0000
4@@ -32,6 +32,7 @@
5 #include <errno.h>
6 #include <stdio.h>
7 #include <sys/time.h>
8+#include <sys/stat.h>
9 #include <time.h>
10 #include <fcntl.h>
11 #include <mntent.h>
diff --git a/meta-oe/recipes-connectivity/samba/samba/nmb.service b/meta-oe/recipes-connectivity/samba/samba/nmb.service
new file mode 100644
index 000000000..91b997533
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/nmb.service
@@ -0,0 +1,12 @@
1[Unit]
2Description=Samba NMB Daemon
3After=syslog.target network.target
4
5[Service]
6Type=forking
7PIDFile=/var/run/nmbd.pid
8ExecStart=@SBINDIR@/nmbd
9ExecReload=@BASE_BINDIR@/kill -HUP $MAINPID
10
11[Install]
12WantedBy=multi-user.target
diff --git a/meta-oe/recipes-connectivity/samba/samba/quota.patch b/meta-oe/recipes-connectivity/samba/samba/quota.patch
new file mode 100644
index 000000000..6f42ff868
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/quota.patch
@@ -0,0 +1,11 @@
1--- lib/sysquotas_4A.c.old 2005-07-03 17:16:00.000000000 +0200
2+++ lib/sysquotas_4A.c 2005-07-03 17:10:09.000000000 +0200
3@@ -28,6 +28,8 @@
4 /* long quotactl(int cmd, char *special, qid_t id, caddr_t addr) */
5 /* this is used by: HPUX,IRIX */
6
7+ #define _LINUX_QUOTA_VERSION 1
8+
9 #ifdef HAVE_SYS_TYPES_H
10 #include <sys/types.h>
11 #endif
diff --git a/meta-oe/recipes-connectivity/samba/samba/smb.conf b/meta-oe/recipes-connectivity/samba/samba/smb.conf
new file mode 100644
index 000000000..f07e3e4ec
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/smb.conf
@@ -0,0 +1,266 @@
1# This is the main Samba configuration file. You should read the
2# smb.conf(5) manual page in order to understand the options listed
3# here. Samba has a huge number of configurable options (perhaps too
4# many!) most of which are not shown in this example
5#
6# For a step by step guide on installing, configuring and using samba,
7# read the Samba-HOWTO-Collection. This may be obtained from:
8# http://www.samba.org/samba/docs/Samba-HOWTO-Collection.pdf
9#
10# Many working examples of smb.conf files can be found in the
11# Samba-Guide which is generated daily and can be downloaded from:
12# http://www.samba.org/samba/docs/Samba-Guide.pdf
13#
14# Any line which starts with a ; (semi-colon) or a # (hash)
15# is a comment and is ignored. In this example we will use a #
16# for commentary and a ; for parts of the config file that you
17# may wish to enable
18#
19# NOTE: Whenever you modify this file you should run the command "testparm"
20# to check that you have not made any basic syntactic errors.
21#
22#======================= Global Settings =====================================
23[global]
24
25# workgroup = NT-Domain-Name or Workgroup-Name, eg: MIDEARTH
26 workgroup = MYGROUP
27
28# server string is the equivalent of the NT Description field
29 server string = Samba Server
30
31# Security mode. Defines in which mode Samba will operate. Possible
32# values are share, user, server, domain and ads. Most people will want
33# user level security. See the Samba-HOWTO-Collection for details.
34 security = user
35
36# This option is important for security. It allows you to restrict
37# connections to machines which are on your local network. The
38# following example restricts access to two C class networks and
39# the "loopback" interface. For more examples of the syntax see
40# the smb.conf man page
41; hosts allow = 192.168.1. 192.168.2. 127.
42
43# If you want to automatically load your printer list rather
44# than setting them up individually then you'll need this
45 load printers = yes
46
47# you may wish to override the location of the printcap file
48; printcap name = /etc/printcap
49
50# on SystemV system setting printcap name to lpstat should allow
51# you to automatically obtain a printer list from the SystemV spool
52# system
53; printcap name = lpstat
54
55# It should not be necessary to specify the print system type unless
56# it is non-standard. Currently supported print systems include:
57# bsd, cups, sysv, plp, lprng, aix, hpux, qnx
58; printing = cups
59
60# Uncomment this if you want a guest account, you must add this to /etc/passwd
61# otherwise the user "nobody" is used
62; guest account = pcguest
63
64# this tells Samba to use a separate log file for each machine
65# that connects
66 log file = /var/log/samba/log.%m
67
68# Put a cap on the size of the log files (in Kb).
69 max log size = 50
70
71# Use password server option only with security = server
72# The argument list may include:
73# password server = My_PDC_Name [My_BDC_Name] [My_Next_BDC_Name]
74# or to auto-locate the domain controller/s
75# password server = *
76; password server = <NT-Server-Name>
77
78# Use the realm option only with security = ads
79# Specifies the Active Directory realm the host is part of
80; realm = MY_REALM
81
82# Backend to store user information in. New installations should
83# use either tdbsam or ldapsam. smbpasswd is available for backwards
84# compatibility. tdbsam requires no further configuration.
85; passdb backend = tdbsam
86
87# Using the following line enables you to customise your configuration
88# on a per machine basis. The %m gets replaced with the netbios name
89# of the machine that is connecting.
90# Note: Consider carefully the location in the configuration file of
91# this line. The included file is read at that point.
92; include = /usr/local/samba/lib/smb.conf.%m
93
94# Configure Samba to use multiple interfaces
95# If you have multiple network interfaces then you must list them
96# here. See the man page for details.
97; interfaces = 192.168.12.2/24 192.168.13.2/24
98
99# Browser Control Options:
100# set local master to no if you don't want Samba to become a master
101# browser on your network. Otherwise the normal election rules apply
102; local master = no
103
104# OS Level determines the precedence of this server in master browser
105# elections. The default value should be reasonable
106; os level = 33
107
108# Domain Master specifies Samba to be the Domain Master Browser. This
109# allows Samba to collate browse lists between subnets. Don't use this
110# if you already have a Windows NT domain controller doing this job
111; domain master = yes
112
113# Preferred Master causes Samba to force a local browser election on startup
114# and gives it a slightly higher chance of winning the election
115; preferred master = yes
116
117# Enable this if you want Samba to be a domain logon server for
118# Windows95 workstations.
119; domain logons = yes
120
121# if you enable domain logons then you may want a per-machine or
122# per user logon script
123# run a specific logon batch file per workstation (machine)
124; logon script = %m.bat
125# run a specific logon batch file per username
126; logon script = %U.bat
127
128# Where to store roving profiles (only for Win95 and WinNT)
129# %L substitutes for this server's netbios name, %U is username
130# You must uncomment the [Profiles] share below
131; logon path = \\%L\Profiles\%U
132
133# Windows Internet Name Serving Support Section:
134# WINS Support - Tells the NMBD component of Samba to enable its WINS Server
135; wins support = yes
136
137# WINS Server - Tells the NMBD components of Samba to be a WINS Client
138# Note: Samba can be either a WINS Server, or a WINS Client, but NOT both
139; wins server = w.x.y.z
140
141# WINS Proxy - Tells Samba to answer name resolution queries on
142# behalf of a non-WINS-capable client; for this to work there must be
143# at least one WINS Server on the network. The default is NO.
144; wins proxy = yes
145
146# DNS Proxy - tells Samba whether or not to try to resolve NetBIOS names
147# via DNS nslookups. The default is NO.
148 dns proxy = no
149
150# These scripts are used on a domain controller or stand-alone
151# machine to add or delete corresponding unix accounts
152; add user script = /usr/sbin/useradd %u
153; add group script = /usr/sbin/groupadd %g
154; add machine script = /usr/sbin/adduser -n -g machines -c Machine -d /dev/null -s /bin/false %u
155; delete user script = /usr/sbin/userdel %u
156; delete user from group script = /usr/sbin/deluser %u %g
157; delete group script = /usr/sbin/groupdel %g
158
159
160#============================ Share Definitions ==============================
161[homes]
162 comment = Home Directories
163 browseable = yes
164 writable = yes
165
166# Un-comment the following and create the netlogon directory for Domain Logons
167; [netlogon]
168; comment = Network Logon Service
169; path = /usr/local/samba/lib/netlogon
170; guest ok = yes
171; writable = no
172; share modes = no
173
174
175# Un-comment the following to provide a specific roving profile share
176# the default is to use the user's home directory
177;[Profiles]
178; path = /usr/local/samba/profiles
179; browseable = no
180; guest ok = yes
181
182
183# NOTE: If you have a BSD-style print system there is no need to
184# specifically define each individual printer
185[printers]
186 comment = All Printers
187 path = /var/spool/samba
188 browseable = no
189# Set public = yes to allow user 'guest account' to print
190 guest ok = no
191 writable = no
192 printable = yes
193
194# This one is useful for people to share files
195;[tmp]
196; comment = Temporary file space
197; path = /tmp
198; read only = no
199; public = yes
200
201# A publicly accessible directory, but read only, except for people in
202# the "staff" group
203;[public]
204; comment = Public Stuff
205; path = /home/samba
206; public = yes
207; writable = yes
208; printable = no
209; write list = @staff
210
211# Other examples.
212#
213# A private printer, usable only by fred. Spool data will be placed in fred's
214# home directory. Note that fred must have write access to the spool directory,
215# wherever it is.
216;[fredsprn]
217; comment = Fred's Printer
218; valid users = fred
219; path = /homes/fred
220; printer = freds_printer
221; public = no
222# A private directory, usable only by fred. Note that fred requires write
223# access to the directory.
224;[fredsdir]
225; comment = Fred's Service
226; path = /usr/somewhere/private
227; valid users = fred
228; public = no
229; writable = yes
230; printable = no
231
232# a service which has a different directory for each machine that connects
233# this allows you to tailor configurations to incoming machines. You could
234# also use the %U option to tailor it by user name.
235# The %m gets replaced with the machine name that is connecting.
236;[pchome]
237; comment = PC Directories
238; path = /usr/pc/%m
239; public = no
240; writable = yes
241
242# A publicly accessible directory, read/write to all users. Note that all files
243# created in the directory by users will be owned by the default user, so
244# any user with access can delete any other user's files. Obviously this
245# directory must be writable by the default user. Another user could of course
246# be specified, in which case all files would be owned by that user instead.
247;[public]
248; path = /usr/somewhere/else/public
249; public = yes
250; only guest = yes
251; writable = yes
252; printable = no
253
254# The following two entries demonstrate how to share a directory so that two
255# users can place files there that will be owned by the specific users. In this
256# setup, the directory should be writable by both users and should have the
257# sticky bit set on it to prevent abuse. Obviously this could be extended to
258# as many users as required.
259;[myshare]
260; comment = Mary's and Fred's stuff
261; path = /usr/somewhere/shared
262; valid users = mary fred
263; public = no
264; writable = yes
265; printable = no
266; create mask = 0765
diff --git a/meta-oe/recipes-connectivity/samba/samba/smb.service b/meta-oe/recipes-connectivity/samba/samba/smb.service
new file mode 100644
index 000000000..bc0707a34
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/smb.service
@@ -0,0 +1,13 @@
1[Unit]
2Description=Samba SMB Daemon
3After=syslog.target network.target nmb.service winbind.service
4
5[Service]
6Type=forking
7PIDFile=/var/run/smbd.pid
8LimitNOFILE=16384
9ExecStart=@SBINDIR@/smbd
10ExecReload=@BASE_BINDIR@/kill -HUP $MAINPID
11
12[Install]
13WantedBy=multi-user.target
diff --git a/meta-oe/recipes-connectivity/samba/samba/tdb.pc b/meta-oe/recipes-connectivity/samba/samba/tdb.pc
new file mode 100644
index 000000000..6307a20a0
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/tdb.pc
@@ -0,0 +1,11 @@
1prefix=/usr
2exec_prefix=/usr
3libdir=/usr/lib
4includedir=/usr/include
5
6Name: tdb
7Description: A trivial database
8Version: 1.1.2
9Libs: -L${libdir} -ltdb
10Cflags: -I${includedir}
11URL: http://tdb.samba.org/
diff --git a/meta-oe/recipes-connectivity/samba/samba/tdbheaderfix.patch b/meta-oe/recipes-connectivity/samba/samba/tdbheaderfix.patch
new file mode 100644
index 000000000..e37c9b0d5
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/tdbheaderfix.patch
@@ -0,0 +1,14 @@
1Index: source/lib/tdb/include/tdb.h
2===================================================================
3--- source.orig/lib/tdb/include/tdb.h 2009-05-17 17:42:41.000000000 +0100
4+++ source/lib/tdb/include/tdb.h 2009-05-17 17:42:46.000000000 +0100
5@@ -1,6 +1,9 @@
6 #ifndef __TDB_H__
7 #define __TDB_H__
8
9+#include <signal.h>
10+
11+
12 /*
13 Unix SMB/CIFS implementation.
14
diff --git a/meta-oe/recipes-connectivity/samba/samba/volatiles.03_samba b/meta-oe/recipes-connectivity/samba/samba/volatiles.03_samba
new file mode 100644
index 000000000..469bc9e7b
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/volatiles.03_samba
@@ -0,0 +1,2 @@
1# <type> <owner> <group> <mode> <path> <linksource>
2d root root 0755 /var/log/samba none
diff --git a/meta-oe/recipes-connectivity/samba/samba/winbind.service b/meta-oe/recipes-connectivity/samba/samba/winbind.service
new file mode 100644
index 000000000..bff6fb8e1
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba/winbind.service
@@ -0,0 +1,12 @@
1[Unit]
2Description=Samba Winbind Daemon
3After=syslog.target network.target nmb.service
4
5[Service]
6Type=forking
7PIDFile=/var/run/winbindd.pid
8ExecStart=@SBINDIR@/winbindd
9ExecReload=@BASE_BINDIR@/kill -HUP $MAINPID
10
11[Install]
12WantedBy=multi-user.target
diff --git a/meta-oe/recipes-connectivity/samba/samba_3.6.24.bb b/meta-oe/recipes-connectivity/samba/samba_3.6.24.bb
new file mode 100644
index 000000000..8860da088
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba_3.6.24.bb
@@ -0,0 +1,67 @@
1require samba.inc
2require samba-basic.inc
3LICENSE = "GPLv3"
4LIC_FILES_CHKSUM = "file://../COPYING;md5=d32239bcb673463ab874e80d47fae504"
5
6SRC_URI += "\
7 file://config-h.patch \
8 file://documentation.patch;patchdir=.. \
9 file://documentation2.patch;patchdir=.. \
10 file://fhs-filespaths.patch;patchdir=.. \
11 file://installswat.sh.patch;patchdir=.. \
12 file://pam-examples.patch;patchdir=.. \
13 file://smbclient-pager.patch;patchdir=.. \
14 file://undefined-symbols.patch;patchdir=.. \
15 file://usershare.patch;patchdir=.. \
16 file://smbtar-bashism.patch;patchdir=.. \
17 file://dont-build-VFS-examples.patch;patchdir=.. \
18 file://bug_221618_precise-64bit-prototype.patch;patchdir=.. \
19 file://bug_598313_upstream_7499-nss_wins-dont-clobber-daemons-logs.patch;patchdir=.. \
20 file://bug_387266_upstream_4104_mention-kerberos-in-smbspool-manpage.patch;patchdir=.. \
21 file://bug_604768_upstream_7826_drop-using-samba-link.patch;patchdir=.. \
22 file://bug_604768_upstream_7826_fix-WHATSNEW-link.patch;patchdir=.. \
23 file://waf-as-source.patch;patchdir=.. \
24 file://smbtorture-manpage.patch;patchdir=.. \
25 file://libutil_drop_AI_ADDRCONFIG.patch;patchdir=.. \
26 file://shadow_copy2_backport.patch;patchdir=.. \
27 file://only_export_public_symbols.patch;patchdir=.. \
28 file://configure-disable-getaddrinfo-cross.patch;patchdir=.. \
29 file://configure-disable-core_pattern-cross-check.patch;patchdir=.. \
30 file://configure-libunwind.patch;patchdir=.. \
31"
32SRC_URI[md5sum] = "d98425c0c2b73e08f048d31ffc727fb0"
33SRC_URI[sha256sum] = "11d0bd04b734731970259efc6692b8e749ff671a9b56d8cc5fa98c192ab234a7"
34
35S = "${WORKDIR}/samba-${PV}/source3"
36
37PACKAGECONFIG ??= ""
38PACKAGECONFIG[libunwind] = "--enable-libunwind,--disable-libunwind,libunwind"
39
40EXTRA_OECONF += "\
41 ac_cv_path_PYTHON=/not/exist \
42 ac_cv_path_PYTHON_CONFIG=/not/exist \
43 SMB_BUILD_CC_NEGATIVE_ENUM_VALUES=yes \
44 samba_cv_CC_NEGATIVE_ENUM_VALUES=yes \
45 linux_getgrouplist_ok=no \
46 samba_cv_HAVE_BROKEN_GETGROUPS=no \
47 samba_cv_HAVE_FTRUNCATE_EXTEND=yes \
48 samba_cv_have_setresuid=yes \
49 samba_cv_have_setresgid=yes \
50 samba_cv_HAVE_WRFILE_KEYTAB=yes \
51 samba_cv_linux_getgrouplist_ok=yes \
52"
53
54do_configure() {
55 gnu-configize --force
56 oe_runconf
57}
58
59do_compile () {
60 base_do_compile
61}
62
63do_install_append() {
64 rmdir "${D}${localstatedir}/lock"
65 rmdir "${D}${localstatedir}/run"
66 rmdir --ignore-fail-on-non-empty "${D}${localstatedir}"
67}
diff --git a/meta-oe/recipes-connectivity/ser2net/ser2net_2.9.1.bb b/meta-oe/recipes-connectivity/ser2net/ser2net_2.9.1.bb
new file mode 100644
index 000000000..8fe6a6f9c
--- /dev/null
+++ b/meta-oe/recipes-connectivity/ser2net/ser2net_2.9.1.bb
@@ -0,0 +1,15 @@
1SUMMARY = "A serial to network proxy"
2SECTION = "console/network"
3HOMEPAGE = "http://sourceforge.net/projects/ser2net/"
4
5LICENSE = "GPLv2"
6LIC_FILES_CHKSUM = "file://COPYING;md5=bae3019b4c6dc4138c217864bd04331f"
7
8SRC_URI = "${SOURCEFORGE_MIRROR}/project/ser2net/ser2net/ser2net-${PV}.tar.gz"
9
10SRC_URI[md5sum] = "80011ac0e60bbdcb65f1d7a86251e3f3"
11SRC_URI[sha256sum] = "fdee1e69903cf409bdc6f32403a566cbc6006aa9e2a4d6f8f12b90dfd5ca0d0e"
12
13inherit autotools pkgconfig
14
15BBCLASSEXTEND += "native nativesdk"
diff --git a/meta-oe/recipes-connectivity/soft66/files/fix-ar.patch b/meta-oe/recipes-connectivity/soft66/files/fix-ar.patch
new file mode 100644
index 000000000..bf1ffdfc6
--- /dev/null
+++ b/meta-oe/recipes-connectivity/soft66/files/fix-ar.patch
@@ -0,0 +1,13 @@
1---
2 configure.ac | 1 +
3 1 file changed, 1 insertion(+)
4
5--- git.orig/configure.ac
6+++ git/configure.ac
7@@ -7,5 +7,6 @@ AC_PROG_CC
8 AC_CONFIG_HEADERS([config.h])
9 AC_CONFIG_FILES([Makefile lib/Makefile tools/Makefile])
10 PKG_CHECK_MODULES([FTDI], [libftdi >= 0.13])
11 AC_OUTPUT
12 AM_PROG_CC_C_O
13+AM_PROG_AR
diff --git a/meta-oe/recipes-connectivity/soft66/soft66_git.bb b/meta-oe/recipes-connectivity/soft66/soft66_git.bb
new file mode 100644
index 000000000..33ca2b568
--- /dev/null
+++ b/meta-oe/recipes-connectivity/soft66/soft66_git.bb
@@ -0,0 +1,20 @@
1SUMMARY = "Library and tools for Soft66ADD and related SDR radio receivers"
2LICENSE = "GPLv3 LGPLv3+"
3LIC_FILES_CHKSUM = "file://COPYING;md5=f27defe1e96c2e1ecd4e0c9be8967949 \
4 file://COPYING.LESSER;md5=e6a600fd5e1d9cbde2d983680233ad02 "
5
6PNBLACKLIST[soft66] ?= "BROKEN: depends on broken libftdi"
7
8DEPENDS = "libftdi"
9
10PV = "0.1.3+gitr${SRCPV}"
11PR = "r1"
12
13SRCREV = "a1dab25e73896c90c98227ac8055f227b830d512"
14SRC_URI = "git://home.horsten.com/soft66 \
15file://fix-ar.patch"
16
17S = "${WORKDIR}/git"
18
19inherit autotools pkgconfig
20
diff --git a/meta-oe/recipes-connectivity/umip/umip_1.0.bb b/meta-oe/recipes-connectivity/umip/umip_1.0.bb
new file mode 100644
index 000000000..76901abf7
--- /dev/null
+++ b/meta-oe/recipes-connectivity/umip/umip_1.0.bb
@@ -0,0 +1,18 @@
1SUMMARY = "Mobile IPv6 and NEMO for Linux"
2DESCRIPTION = "UMIP is an open source implementation of Mobile IPv6 and NEMO \
3Basic Support for Linux. It is released under the GPLv2 license. It supports \
4the following IETF RFC: RFC6275 (Mobile IPv6), RFC3963 (NEMO), RFC3776 and \
5RFC4877 (IPsec and IKEv2)."
6HOMEPAGE = "http://umip.org/"
7SECTION = "System Environment/Base"
8LICENSE = "GPLv2"
9LIC_FILES_CHKSUM = "file://COPYING;md5=073dc31ccb2ebed70db54f1e8aeb4c33"
10DEPENDS = "rpm indent-native"
11
12SRC_URI = "git://git.umip.org/umip.git"
13SRCREV = "428974c2d0d8e75a2750a3ab0488708c5dfdd8e3"
14
15S = "${WORKDIR}/git"
16EXTRA_OECONF = "--enable-vt"
17
18inherit autotools-brokensep
diff --git a/meta-oe/recipes-connectivity/usbmuxd/usbmuxd_git.bb b/meta-oe/recipes-connectivity/usbmuxd/usbmuxd_git.bb
new file mode 100644
index 000000000..52bf811e3
--- /dev/null
+++ b/meta-oe/recipes-connectivity/usbmuxd/usbmuxd_git.bb
@@ -0,0 +1,24 @@
1DESCRIPTION = "This daemon is in charge of multiplexing connections over USB to an iPhone or iPod touch."
2LICENSE = "GPLv3 & GPLv2 & LGPLv2.1"
3LIC_FILES_CHKSUM = "file://COPYING.GPLv2;md5=ebb5c50ab7cab4baeffba14977030c07 \
4 file://COPYING.GPLv3;md5=d32239bcb673463ab874e80d47fae504 \
5 file://COPYING.LGPLv2.1;md5=6ab17b41640564434dda85c06b7124f7"
6
7DEPENDS = "udev libusb1"
8
9inherit cmake pkgconfig gitpkgv
10
11PKGV = "${GITPKGVTAG}"
12
13SRCREV = "919587580c5e77f3936f3432115d2e10c7bac7c5"
14SRC_URI = "git://git.sukimashita.com/usbmuxd.git;protocol=http"
15
16S = "${WORKDIR}/git"
17
18FILES_${PN} += "${base_libdir}/udev/rules.d/"
19
20# fix usbmuxd installing files to /usr/lib64 on 64bit hosts:
21EXTRA_OECMAKE = "-DLIB_SUFFIX=${@d.getVar('baselib', True).replace('lib', '')}"
22
23PACKAGECONFIG ??= ""
24PACKAGECONFIG[plist] = "-DWANT_PLIST=1,-DWANT_PLIST=0,libplist"
diff --git a/meta-oe/recipes-connectivity/wvdial/wvdial/typo_pon.wvdial.1.patch b/meta-oe/recipes-connectivity/wvdial/wvdial/typo_pon.wvdial.1.patch
new file mode 100644
index 000000000..eec5a5d64
--- /dev/null
+++ b/meta-oe/recipes-connectivity/wvdial/wvdial/typo_pon.wvdial.1.patch
@@ -0,0 +1,20 @@
1Remove warnings found by lintian
2Last-Update: 2011-01-09
3Index: wvdial-1.61/pon.wvdial.1
4===================================================================
5--- wvdial-1.61.orig/pon.wvdial.1 2011-01-09 21:33:03.000000000 +0300
6+++ wvdial-1.61/pon.wvdial.1 2011-01-09 21:33:15.000000000 +0300
7@@ -8,13 +8,11 @@
8 .SH DESCRIPTION
9 .B pon.wvdial
10 .br
11-.TR
12 .B poff.wvdial
13 .br
14 .RS
15 Replacement scripts for pon and poff.
16 .RE
17-\."
18 .SH SEE ALSO
19 .BR wvdial (1),
20 .BR pon (1),
diff --git a/meta-oe/recipes-connectivity/wvdial/wvdial_1.61.bb b/meta-oe/recipes-connectivity/wvdial/wvdial_1.61.bb
new file mode 100644
index 000000000..f7adf4c9f
--- /dev/null
+++ b/meta-oe/recipes-connectivity/wvdial/wvdial_1.61.bb
@@ -0,0 +1,30 @@
1HOMEPAGE = "http://www.alumnit.ca/wiki/?WvDial"
2DESCRIPTION = "WvDial is a program that makes it easy to connect your Linux workstation to the Internet."
3
4LICENSE = "LGPLv2"
5LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=55ca817ccb7d5b5b66355690e9abc605"
6
7DEPENDS = "wvstreams"
8RDEPENDS_${PN} = "ppp"
9
10SRC_URI = "http://${BPN}.googlecode.com/files/${BP}.tar.bz2 \
11 file://typo_pon.wvdial.1.patch \
12 "
13
14SRC_URI[md5sum] = "37e9a2d664effe4efd44c0e1a20136de"
15SRC_URI[sha256sum] = "99906d9560cbdbc97e1855e7b0a7169f1e11983be3ac539140423f09debced82"
16
17EXTRA_OEMAKE = ""
18export WVLINK="${LD}"
19
20PARALLEL_MAKE = ""
21
22BUILD_CPPFLAGS += "-I${STAGING_INCDIR}/wvstreams"
23
24do_configure() {
25 sed -i 's/LDFLAGS+=-luniconf/LIBS+=-luniconf/' ${S}/Makefile
26}
27
28do_install() {
29 oe_runmake prefix=${D}/usr PPPDIR=${D}/etc/ppp/peers install
30}
diff --git a/meta-oe/recipes-connectivity/wvdial/wvstreams/04_signed_request.diff b/meta-oe/recipes-connectivity/wvdial/wvstreams/04_signed_request.diff
new file mode 100644
index 000000000..5ab633bc3
--- /dev/null
+++ b/meta-oe/recipes-connectivity/wvdial/wvstreams/04_signed_request.diff
@@ -0,0 +1,13 @@
1Index: wvstreams-4.6/crypto/wvx509.cc
2===================================================================
3--- wvstreams-4.6.orig/crypto/wvx509.cc 2009-07-29 11:58:16.000000000 -0400
4+++ wvstreams-4.6/crypto/wvx509.cc 2009-07-29 11:58:43.000000000 -0400
5@@ -325,7 +325,7 @@
6 }
7
8 int verify_result = X509_REQ_verify(certreq, pk);
9- if (verify_result == 0)
10+ if (verify_result == 0 || verify_result == -1)
11 {
12 debug(WvLog::Warning, "Self signed request failed");
13 X509_REQ_free(certreq);
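For context on the one-line change above: X509_REQ_verify() follows the usual OpenSSL convention of returning a positive value when the signature verifies, 0 when it does not, and a negative value when an error prevents verification altogether; testing only for 0 lets the error case fall through as success. A minimal sketch of the stricter check the patch moves towards (hypothetical helper name, not code from wvstreams):

/* Illustrative sketch: treat anything other than a positive result from
 * X509_REQ_verify() as a failed self-signed request. */
#include <openssl/x509.h>

static int request_verifies(X509_REQ *req, EVP_PKEY *pubkey)
{
    int rc = X509_REQ_verify(req, pubkey);
    return rc > 0;  /* 0 = bad signature, negative = verification error */
}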
diff --git a/meta-oe/recipes-connectivity/wvdial/wvstreams/05_gcc.diff b/meta-oe/recipes-connectivity/wvdial/wvstreams/05_gcc.diff
new file mode 100644
index 000000000..8e4fd0329
--- /dev/null
+++ b/meta-oe/recipes-connectivity/wvdial/wvstreams/05_gcc.diff
@@ -0,0 +1,41 @@
1Index: wvstreams-4.6.1/crypto/wvx509.cc
2===================================================================
3--- wvstreams-4.6.1.orig/crypto/wvx509.cc 2011-05-20 00:02:38.119136584 +0200
4+++ wvstreams-4.6.1/crypto/wvx509.cc 2011-05-20 00:02:26.035136589 +0200
5@@ -1157,7 +1157,7 @@
6
7 if (ext)
8 {
9- X509V3_EXT_METHOD *method = X509V3_EXT_get(ext);
10+ X509V3_EXT_METHOD *method = (X509V3_EXT_METHOD *)X509V3_EXT_get(ext);
11 if (!method)
12 {
13 WvDynBuf buf;
14Index: wvstreams-4.6.1/ipstreams/wvunixdgsocket.cc
15===================================================================
16--- wvstreams-4.6.1.orig/ipstreams/wvunixdgsocket.cc 2011-05-20 00:02:38.391136584 +0200
17+++ wvstreams-4.6.1/ipstreams/wvunixdgsocket.cc 2011-05-20 00:02:35.283136585 +0200
18@@ -1,8 +1,6 @@
19 #include "wvunixdgsocket.h"
20-#ifdef MACOS
21 #include <sys/types.h>
22 #include <sys/stat.h>
23-#endif
24
25 WvUnixDGSocket::WvUnixDGSocket(WvStringParm filename, bool _server, int perms)
26 : socketfile(filename)
27Index: wvstreams-4.6.1/streams/wvatomicfile.cc
28===================================================================
29--- wvstreams-4.6.1.orig/streams/wvatomicfile.cc 2011-05-20 00:02:38.223136584 +0200
30+++ wvstreams-4.6.1/streams/wvatomicfile.cc 2011-05-20 00:02:31.619136587 +0200
31@@ -10,10 +10,7 @@
32 #include "wvatomicfile.h"
33 #include "wvfileutils.h"
34 #include "wvstrutils.h"
35-
36-#ifdef MACOS
37 #include <sys/stat.h>
38-#endif
39
40 WvAtomicFile::WvAtomicFile(WvStringParm filename, int flags, mode_t create_mode)
41 : tmp_file(WvString::null)
diff --git a/meta-oe/recipes-connectivity/wvdial/wvstreams/06_gcc-4.7.diff b/meta-oe/recipes-connectivity/wvdial/wvstreams/06_gcc-4.7.diff
new file mode 100644
index 000000000..a75067a10
--- /dev/null
+++ b/meta-oe/recipes-connectivity/wvdial/wvstreams/06_gcc-4.7.diff
@@ -0,0 +1,18 @@
1Description: Fix FTBFS with gcc-4.7
2 Small header include change. This is borderline cosmetic, but still needed
3 to prevent the FTBFS.
4Author: Paul Tagliamonte <paultag@ubuntu.com>
5Origin: vendor
6Bug-Debian: http://bugs.debian.org/667418
7Last-Update: 2012-04-13
8
9--- wvstreams-4.6.1.orig/utils/wvuid.cc
10+++ wvstreams-4.6.1/utils/wvuid.cc
11@@ -33,6 +33,7 @@ wvuid_t wvgetuid()
12
13 #else // not WIN32
14
15+#include <unistd.h>
16
17 WvString wv_username_from_uid(wvuid_t uid)
18 {
diff --git a/meta-oe/recipes-connectivity/wvdial/wvstreams/07_buildflags.diff b/meta-oe/recipes-connectivity/wvdial/wvstreams/07_buildflags.diff
new file mode 100644
index 000000000..ec99dcd36
--- /dev/null
+++ b/meta-oe/recipes-connectivity/wvdial/wvstreams/07_buildflags.diff
@@ -0,0 +1,32 @@
1Index: b/gen-cc
2===================================================================
3--- a/gen-cc
4+++ b/gen-cc
5@@ -15,6 +15,11 @@
6 shift
7 shift
8
9+ echo $CC \$MODE -o \$BASE.o \$BASE.$EXT \\
10+ -MMD -MF \$DEPFILE -MP -MQ \$BASE.o \\
11+ $CPPFLAGS \\
12+ $CFLAGS \\
13+ "\$@"
14 $CC \$MODE -o \$BASE.o \$BASE.$EXT \\
15 -MMD -MF \$DEPFILE -MP -MQ \$BASE.o \\
16 $CPPFLAGS \\
17Index: b/wvrules-posix.mk
18===================================================================
19--- a/wvrules-posix.mk
20+++ b/wvrules-posix.mk
21@@ -35,11 +35,6 @@
22 # Default compiler we use for linking
23 WVLINK_CC = $(CXX)
24
25-ifneq ("$(enable_optimization)", "no")
26- CXXFLAGS+=-O2
27- CFLAGS+=-O2
28-endif
29-
30 ifneq ("$(enable_warnings)", "no")
31 CXXFLAGS+=-Wall -Woverloaded-virtual
32 CFLAGS+=-Wall
diff --git a/meta-oe/recipes-connectivity/wvdial/wvstreams_4.6.1.bb b/meta-oe/recipes-connectivity/wvdial/wvstreams_4.6.1.bb
new file mode 100644
index 000000000..8a77b5bec
--- /dev/null
+++ b/meta-oe/recipes-connectivity/wvdial/wvstreams_4.6.1.bb
@@ -0,0 +1,45 @@
1HOMEPAGE = "http://alumnit.ca/wiki/index.php?page=WvStreams"
2SUMMARY = "WvStreams is a network programming library in C++"
3
4LICENSE = "LGPLv2"
5LIC_FILES_CHKSUM = "file://LICENSE;md5=55ca817ccb7d5b5b66355690e9abc605"
6
7DEPENDS = "zlib openssl (>= 0.9.8) dbus readline"
8
9SRC_URI = "http://${BPN}.googlecode.com/files/${BP}.tar.gz \
10 file://04_signed_request.diff \
11 file://05_gcc.diff \
12 file://06_gcc-4.7.diff \
13 file://07_buildflags.diff \
14 "
15
16SRC_URI[md5sum] = "2760dac31a43d452a19a3147bfde571c"
17SRC_URI[sha256sum] = "8403f5fbf83aa9ac0c6ce15d97fd85607488152aa84e007b7d0621b8ebc07633"
18
19inherit autotools-brokensep pkgconfig
20
21PARALLEL_MAKE = ""
22
23LDFLAGS_append = " -Wl,-rpath-link,${CROSS_DIR}/${TARGET_SYS}/lib"
24
25EXTRA_OECONF = " --without-tcl --without-qt --without-pam --without-valgrind"
26
27PACKAGES_prepend = "libuniconf libuniconf-dbg "
28PACKAGES_prepend = "uniconfd uniconfd-dbg "
29PACKAGES_prepend = "libwvstreams-base libwvstreams-base-dbg "
30PACKAGES_prepend = "libwvstreams-extras libwvstreams-extras-dbg "
31PACKAGES_prepend = "${PN}-valgrind "
32
33FILES_libuniconf = "${libdir}/libuniconf.so.*"
34FILES_libuniconf-dbg = "${libdir}/.debug/libuniconf.so.*"
35
36FILES_uniconfd = "${sbindir}/uniconfd ${sysconfdir}/uniconf.conf ${localstatedir}/uniconf"
37FILES_uniconfd-dbg = "${sbindir}/.debug/uniconfd"
38
39FILES_libwvstreams-base = "${libdir}/libwvutils.so.*"
40FILES_libwvstreams-base-dbg = "${libdir}/.debug/libwvutils.so.*"
41
42FILES_libwvstreams-extras = "${libdir}/libwvbase.so.* ${libdir}/libwvstreams.so.*"
43FILES_libwvstreams-extras-dbg = "${libdir}/.debug/libwvbase.so.* ${libdir}/.debug/libwvstreams.so.*"
44
45FILES_${PN}-valgrind = "${libdir}/valgrind/wvstreams.supp"
diff --git a/meta-oe/recipes-connectivity/zeroc-ice/zeroc-ice-3.5.1/0002-Modify-Makefile-for-cross-compile.patch b/meta-oe/recipes-connectivity/zeroc-ice/zeroc-ice-3.5.1/0002-Modify-Makefile-for-cross-compile.patch
new file mode 100644
index 000000000..43096d2bc
--- /dev/null
+++ b/meta-oe/recipes-connectivity/zeroc-ice/zeroc-ice-3.5.1/0002-Modify-Makefile-for-cross-compile.patch
@@ -0,0 +1,292 @@
1Upstream-Status: Inappropriate
2
3This patch lets you build Ice with OpenEmbedded. I doubt you could do
4a regular build after applying this patch.
5
6From bc622ce74fa03a935278d21736a5a251466e1798 Mon Sep 17 00:00:00 2001
7From: Tom Rondeau <tom@trondeau.com>
8Date: Wed, 16 Apr 2014 14:34:51 -0400
9Subject: [PATCH] Modify Makefiles for cross compile
10
11---
12 config/Make.common.rules | 20 +++++++------
13 cpp/Makefile | 9 +++---
14 cpp/config/Make.rules | 32 ++++++++++++--------
15 cpp/config/Make.rules.Linux | 18 ++---------
16 cpp/src/IceStorm/FreezeDB/Makefile | 2 +-
17 py/config/Make.rules | 58 +++++++++++++++++++-----------------
18 6 files changed, 70 insertions(+), 69 deletions(-)
19
20diff --git a/config/Make.common.rules b/config/Make.common.rules
21index d7b1d59..a3fb17e 100644
22--- a/config/Make.common.rules
23+++ b/config/Make.common.rules
24@@ -65,9 +65,9 @@ ifeq ($(UNAME),Linux)
25 #
26 # Some Linux distributions like Debian/Ubuntu don't use /usr/lib64.
27 #
28- ifeq ($(shell test -d /usr/lib64 && echo 0),0)
29- lp64suffix = 64
30- endif
31+ #ifeq ($(shell test -d /usr/lib64 && echo 0),0)
32+ # lp64suffix = 64
33+ #endif
34 ifeq ($(LP64),)
35 LP64 = yes
36 endif
37@@ -244,12 +244,13 @@ else
38 slicedir = $(ice_dir)/slice
39 endif
40
41-ifeq ($(prefix), /usr)
42- install_slicedir = /usr/share/Ice-$(VERSION)/slice
43-else
44- install_slicedir = $(prefix)/slice
45-endif
46+#ifeq ($(prefix), /usr)
47+# install_slicedir = /usr/share/Ice-$(VERSION)/slice
48+#else
49+# install_slicedir = $(prefix)/slice
50+#endif
51
52+install_slicedir = $(prefix)/slice
53 #
54 # Set environment variables for the Slice translator.
55 #
56@@ -265,7 +266,8 @@ ifneq ($(ice_dir), /usr)
57 endif
58
59 ifeq ($(UNAME),Linux)
60- export LD_LIBRARY_PATH := $(ice_lib_dir):$(LD_LIBRARY_PATH)
61+ #export LD_LIBRARY_PATH := $(ice_lib_dir):$(LD_LIBRARY_PATH)
62+ export LD_LIBRARY_PATH := $(ICE_HOME)/lib:$(LD_LIBRARY_PATH)
63 endif
64
65 ifeq ($(UNAME),SunOS)
66diff --git a/cpp/Makefile b/cpp/Makefile
67index a68f113..1f44f57 100644
68--- a/cpp/Makefile
69+++ b/cpp/Makefile
70@@ -11,11 +11,12 @@ top_srcdir = .
71
72 include $(top_srcdir)/config/Make.rules
73
74-SUBDIRS = config src include test
75+#SUBDIRS = config src include test
76+SUBDIRS = config src include
77
78-ifeq ($(shell uname | grep MINGW),)
79-SUBDIRS := $(SUBDIRS) demo
80-endif
81+#ifeq ($(shell uname | grep MINGW),)
82+#SUBDIRS := $(SUBDIRS) demo
83+#endif
84
85 INSTALL_SUBDIRS = $(install_bindir) $(install_libdir) $(install_includedir) \
86 $(install_configdir) $(install_mandir)
87diff --git a/cpp/config/Make.rules b/cpp/config/Make.rules
88index 37461ae..197c5e8 100644
89--- a/cpp/config/Make.rules
90+++ b/cpp/config/Make.rules
91@@ -175,11 +175,12 @@ headerdir = $(top_srcdir)/include
92 # includedir is not handled the same as bindir and libdir
93 # because it is used in the .depend files
94 #
95-ifdef ice_src_dist
96- includedir = $(top_srcdir)/include
97-else
98- includedir = $(ice_dir)/include
99-endif
100+#ifdef ice_src_dist
101+# includedir = $(top_srcdir)/include
102+#else
103+# includedir = $(ice_dir)/include
104+#endif
105+includedir = $(top_srcdir)/include
106
107 #
108 # Platform specific definitions
109@@ -277,14 +278,17 @@ ICECPPFLAGS = -I$(slicedir)
110 SLICE2CPPFLAGS = $(ICECPPFLAGS)
111
112 ifeq ($(ice_dir), /usr)
113- LDFLAGS = $(LDPLATFORMFLAGS) $(CXXFLAGS)
114+ LDFLAGS += $(LDPLATFORMFLAGS) $(CXXFLAGS)
115 else
116 CPPFLAGS += -I$(includedir)
117- ifdef ice_src_dist
118- LDFLAGS = $(LDPLATFORMFLAGS) $(CXXFLAGS) -L$(libdir)
119- else
120- LDFLAGS = $(LDPLATFORMFLAGS) $(CXXFLAGS) -L$(ice_dir)/$(libsubdir)$(cpp11suffix)
121- endif
122+# We must always build using the libraries in the source tree, the host's are obviously
123+# not what we want for the target
124+ LDFLAGS += $(LDPLATFORMFLAGS) $(CXXFLAGS) -L$(libdir) $(call rpathlink,$(libdir))
125+# ifdef ice_src_dist
126+# LDFLAGS = $(LDPLATFORMFLAGS) $(CXXFLAGS) -L$(libdir)
127+# else
128+# LDFLAGS = $(LDPLATFORMFLAGS) $(CXXFLAGS) -L$(ice_dir)/$(libsubdir)$(cpp11suffix)
129+# endif
130 endif
131
132 ifeq ($(FLEX_NOLINE),yes)
133@@ -313,8 +317,10 @@ endif
134
135 ifdef ice_src_dist
136 SLICEPARSERLIB = $(libdir)/$(call mklibfilename,Slice,$(VERSION))
137- SLICE2CPP = $(bindir)/slice2cpp
138- SLICE2FREEZE = $(bindir)/slice2freeze
139+# SLICE2CPP = $(bindir)/slice2cpp
140+# SLICE2FREEZE = $(bindir)/slice2freeze
141+ SLICE2CPP = $(ICE_HOME)/bin/slice2cpp
142+ SLICE2FREEZE = $(ICE_HOME)/bin/slice2freeze
143 else
144 SLICEPARSERLIB = $(ice_dir)/$(libsubdir)$(cpp11suffix)/$(call mklibfilename,Slice,$(VERSION))
145 SLICE2CPP = $(ice_dir)/$(binsubdir)$(cpp11suffix)/slice2cpp
146diff --git a/cpp/config/Make.rules.Linux b/cpp/config/Make.rules.Linux
147index 5d5717c..8363c6e 100644
148--- a/cpp/config/Make.rules.Linux
149+++ b/cpp/config/Make.rules.Linux
150@@ -31,7 +31,7 @@ ifeq ($(CXX),c++)
151 CXX = g++
152 endif
153
154-ifeq ($(CXX),g++)
155+#ifeq ($(CXX),g++)
156
157 ifneq ($(SUSE_i586),)
158 CXXARCHFLAGS += -march=i586
159@@ -71,14 +71,6 @@ ifeq ($(CXX),g++)
160 CXXARCHFLAGS += -mtune=v8 -pipe -Wno-deprecated -DICE_USE_MUTEX_SHARED
161 endif
162
163- ifeq ($(MACHINE),x86_64)
164- ifeq ($(LP64),yes)
165- CXXARCHFLAGS += -m64
166- else
167- CXXARCHFLAGS += -m32
168- endif
169- endif
170-
171 CXXFLAGS = $(CXXARCHFLAGS) -Wall -Werror -pthread
172
173 ifneq ($(GENPIC),no)
174@@ -102,15 +94,11 @@ ifeq ($(CXX),g++)
175
176 rpathlink = -Wl,-rpath-link,$(1)
177
178- ifneq ($(embedded_runpath_prefix),)
179- LDPLATFORMFLAGS = -Wl,--enable-new-dtags -Wl,-rpath,$(runpath_libdir)
180- else
181- LDPLATFORMFLAGS = -Wl,--enable-new-dtags
182- endif
183+ LDPLATFORMFLAGS = -Wl,--enable-new-dtags -Wl,-rpath,../../../lib
184
185 LDPLATFORMFLAGS += -rdynamic
186
187-endif
188+#endif
189
190 ifeq ($(CXX),icpc)
191 $(warning ===================================================================)
192diff --git a/cpp/src/IceStorm/FreezeDB/Makefile b/cpp/src/IceStorm/FreezeDB/Makefile
193index 7c844b7..cf15cb1 100644
194--- a/cpp/src/IceStorm/FreezeDB/Makefile
195+++ b/cpp/src/IceStorm/FreezeDB/Makefile
196@@ -66,7 +66,7 @@ $(libdir)/$(LIBNAME): $(libdir)/$(SONAME)
197
198 $(MIGRATE): $(MOBJS)
199 rm -f $@
200- $(CXX) $(LDFLAGS) -o $@ $(MOBJS) $(DB_RPATH_LINK) -lIceStormService -lIceStorm -lFreeze $(LIBS)
201+ $(CXX) $(LDFLAGS) -o $@ $(MOBJS) $(DB_RPATH_LINK) -lIceStormService -lIceStorm -lFreeze $(LIBS) -ldb_cxx
202
203 # The slice2freeze rules are structured like this to avoid issues with
204 # parallel make.
205diff --git a/py/config/Make.rules b/py/config/Make.rules
206index 43ce01b..1349342 100644
207--- a/py/config/Make.rules
208+++ b/py/config/Make.rules
209@@ -92,21 +92,23 @@ ifeq ($(shell test -f $(top_srcdir)/config/Make.rules.$(UNAME) && echo 0),0)
210 include $(top_srcdir)/config/Make.rules.$(UNAME)
211 else
212 include $(top_srcdir)/../cpp/config/Make.rules.$(UNAME)
213-endif
214+endif
215
216 libdir = $(top_srcdir)/python
217-ifneq ($(prefix), /usr)
218-install_pythondir = $(prefix)/python
219-install_libdir = $(prefix)/python
220-else
221- ifeq ($(shell test -d $(prefix)/$(libsubdir)/$(PYTHON_VERSION)/dist-packages && echo 0),0)
222- install_pythondir = $(prefix)/$(libsubdir)/$(PYTHON_VERSION)/dist-packages
223- install_libdir = $(prefix)/$(libsubdir)/$(PYTHON_VERSION)/dist-packages
224- else
225- install_pythondir = $(prefix)/$(libsubdir)/$(PYTHON_VERSION)/site-packages
226- install_libdir = $(prefix)/$(libsubdir)/$(PYTHON_VERSION)/site-packages
227- endif
228-endif
229+#ifneq ($(prefix), /usr)
230+#install_pythondir = $(prefix)/python
231+#install_libdir = $(prefix)/python
232+#else
233+# ifeq ($(shell test -d $(prefix)/$(libsubdir)/$(PYTHON_VERSION)/dist-packages && echo 0),0)
234+# install_pythondir = $(prefix)/$(libsubdir)/$(PYTHON_VERSION)/dist-packages
235+# install_libdir = $(prefix)/$(libsubdir)/$(PYTHON_VERSION)/dist-packages
236+# else
237+# install_pythondir = $(prefix)/$(libsubdir)/$(PYTHON_VERSION)/site-packages
238+# install_libdir = $(prefix)/$(libsubdir)/$(PYTHON_VERSION)/site-packages
239+# endif
240+#endif
241+install_pythondir = $(prefix)/$(libsubdir)/$(PYTHON_VERSION)/site-packages
242+install_libdir = $(prefix)/$(libsubdir)/$(PYTHON_VERSION)/site-packages
243
244 ifeq ($(UNAME),SunOS)
245 ifeq ($(LP64),yes)
246@@ -115,19 +117,21 @@ ifeq ($(UNAME),SunOS)
247 endif
248 endif
249
250-ifdef ice_src_dist
251- ifeq ($(ice_cpp_dir), $(ice_dir)/cpp)
252- ICE_LIB_DIR = -L$(ice_cpp_dir)/lib
253- else
254- ICE_LIB_DIR = -L$(ice_cpp_dir)/$(libsubdir)
255- endif
256- ICE_LIB_DIR = -L$(ice_cpp_dir)/lib
257- ICE_FLAGS = -I$(ice_cpp_dir)/include
258-endif
259-ifdef ice_bin_dist
260- ICE_LIB_DIR = -L$(ice_dir)/$(libsubdir)
261- ICE_FLAGS = -I$(ice_dir)/include
262-endif
263+#ifdef ice_src_dist
264+# ifeq ($(ice_cpp_dir), $(ice_dir)/cpp)
265+# ICE_LIB_DIR = -L$(ice_cpp_dir)/lib
266+# else
267+# ICE_LIB_DIR = -L$(ice_cpp_dir)/$(libsubdir)
268+# endif
269+# ICE_LIB_DIR = -L$(ice_cpp_dir)/lib
270+# ICE_FLAGS = -I$(ice_cpp_dir)/include
271+#endif
272+#ifdef ice_bin_dist
273+# ICE_LIB_DIR = -L$(ice_dir)/$(libsubdir)
274+# ICE_FLAGS = -I$(ice_dir)/include
275+#endif
276+ICE_LIB_DIR = -L$(top_srcdir)/../cpp/lib
277+ICE_FLAGS = -I$(ice_cpp_dir)/include
278 ICE_LIBS = $(ICE_LIB_DIR) -lIce -lSlice -lIceUtil
279
280 ifneq ($(embedded_runpath_prefix),)
281@@ -137,7 +141,7 @@ endif
282 CPPFLAGS =
283 ICECPPFLAGS = -I$(slicedir)
284 SLICE2PYFLAGS = $(ICECPPFLAGS)
285-LDFLAGS = $(LDPLATFORMFLAGS) $(CXXFLAGS) -L$(libdir)
286+LDFLAGS += $(LDPLATFORMFLAGS) $(CXXFLAGS) -L$(libdir)
287
288 ifdef ice_src_dist
289 ifeq ($(ice_cpp_dir), $(ice_dir)/cpp)
290--
2911.7.9.5
292
diff --git a/meta-oe/recipes-connectivity/zeroc-ice/zeroc-ice_3.5.1.bb b/meta-oe/recipes-connectivity/zeroc-ice/zeroc-ice_3.5.1.bb
new file mode 100644
index 000000000..2b6f56ed4
--- /dev/null
+++ b/meta-oe/recipes-connectivity/zeroc-ice/zeroc-ice_3.5.1.bb
@@ -0,0 +1,87 @@
1DESCRIPTION = "The Internet Communications Engine"
2LICENSE = "GPLv2"
3LIC_FILES_CHKSUM = "file://ICE_LICENSE;md5=3dc3037023cc2ae6b2c5b995da529515"
4DEPENDS = "bzip2 expat openssl python db mcpp"
5DEPENDS_prepend_class-target = "zeroc-ice-native "
6
7SRC_URI = "http://www.zeroc.com/download/Ice/3.5/Ice-${PV}.tar.gz \
8 file://0002-Modify-Makefile-for-cross-compile.patch \
9 "
10SRC_URI[md5sum] = "f00c59983cc904bca977133c0a9b3e80"
11SRC_URI[sha256sum] = "989e51194c6adadbd156da3288e37bad847b93b3b876502e83033b70493af392"
12
13#| MapDb.cpp: In constructor 'Freeze::MapDb::MapDb(const ConnectionIPtr&, const string&, const string&, const string&, const KeyCompareBasePtr&, const std::vector<IceUtil::Handle<Freeze::MapIndexBase> >&, bool)':
14#| MapDb.cpp:138:46: error: call of overloaded 'set_bt_compare(int (*)(DB*, const DBT*, const DBT*))' is ambiguous
15#| MapDb.cpp:138:46: note: candidates are:
16#| /home/jenkins/oe/world/shr-core/tmp-eglibc/sysroots/x86_64-linux/usr/include/db_cxx.h:272:14: note: virtual int Db::set_bt_compare(bt_compare_fcn_type) <near match>
17#| /home/jenkins/oe/world/shr-core/tmp-eglibc/sysroots/x86_64-linux/usr/include/db_cxx.h:272:14: note: no known conversion for argument 1 from 'int (*)(DB*, const DBT*, const DBT*) {aka int (*)(__db*, const __db_dbt*, const __db_dbt*)}' to 'bt_compare_fcn_type {aka int (*)(__db*, const __db_dbt*, const __db_dbt*, long unsigned int*)}'
18#| /home/jenkins/oe/world/shr-core/tmp-eglibc/sysroots/x86_64-linux/usr/include/db_cxx.h:273:14: note: virtual int Db::set_bt_compare(int (*)(Db*, const Dbt*, const Dbt*, size_t*)) <near match>
19#| /home/jenkins/oe/world/shr-core/tmp-eglibc/sysroots/x86_64-linux/usr/include/db_cxx.h:273:14: note: no known conversion for argument 1 from 'int (*)(DB*, const DBT*, const DBT*) {aka int (*)(__db*, const __db_dbt*, const __db_dbt*)}' to 'int (*)(Db*, const Dbt*, const Dbt*, size_t*) {aka int (*)(Db*, const Dbt*, const Dbt*, long unsigned int*)}'
20#| make[3]: *** [MapDb.o] Error 1
21PNBLACKLIST[zeroc-ice] ?= "BROKEN: not compatible with default db version"
22
23S = "${WORKDIR}/Ice-${PV}"
24
25inherit python-dir pkgconfig
26
27export PYTHON_VERSION = "python2.7"
28
29do_configure() {
30 :
31}
32
33do_compile_prepend_class-target () {
34 export PYTHON_INCLUDE_DIR="${STAGING_INCDIR}/${PYTHON_DIR}"
35 export PYTHON_LIB_DIR="${STAGING_LIBDIR}/${PYTHON_DIR}"
36 export ICE_HOME="${STAGING_DIR_NATIVE}/usr"
37}
38
39do_compile_prepend_class-nativesdk () {
40 export PYTHON_INCLUDE_DIR="${STAGING_INCDIR}/${PYTHON_DIR}"
41 export PYTHON_LIB_DIR="${STAGING_LIBDIR}/${PYTHON_DIR}"
42 export ICE_HOME="${STAGING_DIR_NATIVE}/usr"
43}
44
45do_compile_prepend_class-native () {
46 export PYTHON_INCLUDE_DIR="${STAGING_INCDIR}/${PYTHON_DIR}"
47 export PYTHON_LIB_DIR="${STAGING_LIBDIR}/${PYTHON_DIR}"
48 export ICE_HOME="${S}/cpp"
49}
50
51do_compile() {
52 oe_runmake -C ${S} cpp
53 oe_runmake -C ${S} py
54}
55
56do_install_prepend_class-target () {
57 export PYTHON_INCLUDE_DIR="${STAGING_INCDIR}/${PYTHON_DIR}"
58 export PYTHON_LIB_DIR="${STAGING_LIBDIR}/${PYTHON_DIR}"
59 export ICE_HOME="${STAGING_DIR_NATIVE}/usr"
60}
61
62do_install_prepend_class-nativesdk () {
63 export PYTHON_INCLUDE_DIR="${STAGING_INCDIR}/${PYTHON_DIR}"
64 export PYTHON_LIB_DIR="${STAGING_LIBDIR}/${PYTHON_DIR}"
65 export ICE_HOME="${STAGING_DIR_NATIVE}/usr"
66}
67
68do_install_prepend_class-native () {
69 export ICE_HOME="${S}/cpp"
70}
71
72do_install() {
73 oe_runmake -C ${S}/cpp prefix=${D}${prefix} install install-common
74 oe_runmake -C ${S}/py prefix=${D}${prefix} install
75}
76
77PACKAGES += "${PN}-python ${PN}-python-dev ${PN}-python-dbg"
78
79FILES_${PN}-doc += "${prefix}/man/man1"
80FILES_${PN} += "${prefix}/*LICENSE ${libdir}/ImportKey.class ${prefix}/RELEASE_NOTES ${prefix}/CHANGES"
81FILES_${PN}-dev += "${includedir} ${prefix}/slice ${prefix}/config"
82FILES_${PN}-python-dev = "${PYTHON_SITEPACKAGES_DIR}/IcePy.so"
83FILES_${PN}-python = "${PYTHON_SITEPACKAGES_DIR}/*.py ${PYTHON_SITEPACKAGES_DIR}/IcePy.so.* ${PYTHON_SITEPACKAGES_DIR}/Ice*/*.py"
84FILES_${PN}-python-dbg = "${PYTHON_SITEPACKAGES_DIR}/.debug"
85FILES_${PN}-dev += "${bindir}/slice* ${datadir}/Ice-${PV}/slice/*"
86
87BBCLASSEXTEND = "native nativesdk"
diff --git a/meta-oe/recipes-connectivity/zeromq/cppzmq_git.bb b/meta-oe/recipes-connectivity/zeromq/cppzmq_git.bb
new file mode 100644
index 000000000..34099766c
--- /dev/null
+++ b/meta-oe/recipes-connectivity/zeromq/cppzmq_git.bb
@@ -0,0 +1,19 @@
1DESCRIPTION = "C++ bindings for ZeroMQ"
2HOMEPAGE = "http://www.zeromq.org"
3LICENSE = "MIT"
4LIC_FILES_CHKSUM = "file://LICENSE;md5=db174eaf7b55a34a7c89551197f66e94"
5DEPENDS = "zeromq"
6SRCREV = "ee47ae4cddc304741526c9bb2035f98c3274e0ec"
7
8SRC_URI = "git://github.com/zeromq/cppzmq.git"
9
10S = "${WORKDIR}/git"
11
12do_install () {
13 install -d ${D}/usr/include
14 install -m 0755 ${S}/zmq.hpp ${D}/usr/include/
15}
16
17PACKAGES = "${PN}-dev"
18
19RDEPENDS_${PN}-dev = "zeromq-dev"
diff --git a/meta-oe/recipes-connectivity/zeromq/files/run-ptest b/meta-oe/recipes-connectivity/zeromq/files/run-ptest
new file mode 100755
index 000000000..48b9cd9fc
--- /dev/null
+++ b/meta-oe/recipes-connectivity/zeromq/files/run-ptest
@@ -0,0 +1,10 @@
1#!/bin/sh
2
3cd tests
4for i in `ls *`; do
5	if ./$i ; then
6 echo "PASS: $i"
7 else
8 echo "FAIL: $i"
9 fi
10done
diff --git a/meta-oe/recipes-connectivity/zeromq/zeromq_4.0.4.bb b/meta-oe/recipes-connectivity/zeromq/zeromq_4.0.4.bb
new file mode 100644
index 000000000..80907fa7c
--- /dev/null
+++ b/meta-oe/recipes-connectivity/zeromq/zeromq_4.0.4.bb
@@ -0,0 +1,24 @@
1DESCRIPTION = "ZeroMQ looks like an embeddable networking library but acts like a concurrency framework"
2HOMEPAGE = "http://www.zeromq.org"
3LICENSE = "LGPLv3+"
4LIC_FILES_CHKSUM = "file://COPYING.LESSER;md5=d5311495d952062e0e4fbba39cbf3de1"
5
6SRC_URI = "http://download.zeromq.org/zeromq-${PV}.tar.gz \
7 file://run-ptest \
8 "
9SRC_URI[md5sum] = "f3c3defbb5ef6cc000ca65e529fdab3b"
10SRC_URI[sha256sum] = "1ef71d46e94f33e27dd5a1661ed626cd39be4d2d6967792a275040e34457d399"
11
12S = "${WORKDIR}/zeromq-${PV}"
13
14#Uncomment to choose polling system manually. valid values are kqueue, epoll, devpoll, poll or select
15#EXTRA_OECONF += "--with-poller=kqueue"
16#CFLAGS_append += "-O0"
17#CXXFLAGS_append += "-O0"
18
19inherit autotools ptest
20
21do_install_ptest () {
22 install -d ${D}${PTEST_PATH}/tests
23 install -m 0755 ${B}/tests/.libs/* ${D}${PTEST_PATH}/tests
24}