path: root/meta-oe/recipes-connectivity/samba/samba-3.6.8/waf-as-source.patch
Diffstat (limited to 'meta-oe/recipes-connectivity/samba/samba-3.6.8/waf-as-source.patch')
-rw-r--r--   meta-oe/recipes-connectivity/samba/samba-3.6.8/waf-as-source.patch   18538
1 file changed, 18538 insertions, 0 deletions
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.8/waf-as-source.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.8/waf-as-source.patch
new file mode 100644
index 000000000..3ba7d23ae
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.8/waf-as-source.patch
@@ -0,0 +1,18538 @@
1Description: Include waf as an extracted source directory, rather than as a one-in-a-file script.
2Author: Jelmer Vernooij <jelmer@samba.org>
3Bug-Debian: http://bugs.debian.org/654499
4Forwarded: yes
5Bug: https://bugzilla.samba.org/show_bug.cgi?id=8923
6
7diff --git a/buildtools/README b/buildtools/README
8new file mode 100644
9index 0000000..eab0382
10--- /dev/null
11+++ b/buildtools/README
12@@ -0,0 +1,12 @@
13+See http://code.google.com/p/waf/ for more information on waf
14+
15+You can get a svn copy of the upstream source with:
16+
17+ svn checkout http://waf.googlecode.com/svn/trunk/ waf-read-only
18+
19+Samba currently uses waf 1.5, which can be found at:
20+
21+ http://waf.googlecode.com/svn/branches/waf-1.5
22+
23+To update the current copy of waf, use the update-waf.sh script in this
24+directory.
25diff --git a/buildtools/bin/README b/buildtools/bin/README
26deleted file mode 100644
27index 9ef8a1f..0000000
28--- a/buildtools/bin/README
29+++ /dev/null
30@@ -1,16 +0,0 @@
31-This copy of waf-svn is taken from the git mirror of waf
32-at:
33-
34- git://git.samba.org/tridge/waf-svn.git
35-
36-using the waf-samba branch
37-
38-It was built using the command:
39-
40- ./waf-light --zip-type=gz --make-waf
41-
42-See http://code.google.com/p/waf/ for more information on waf
43-
44-You can get a svn copy of the upstream source with:
45-
46- svn checkout http://waf.googlecode.com/svn/trunk/ waf-read-only
47diff --git a/buildtools/bin/waf-svn b/buildtools/bin/waf-svn
48deleted file mode 100755
49index 6d54d5f..0000000
50Binary files a/buildtools/bin/waf-svn and /dev/null differ
51diff --git a/buildtools/update-waf.sh b/buildtools/update-waf.sh
52new file mode 100755
53index 0000000..bb3a4bf
54--- /dev/null
55+++ b/buildtools/update-waf.sh
56@@ -0,0 +1,13 @@
57+#!/bin/sh
58+# Update our copy of waf
59+
60+TARGETDIR="`dirname $0`"
61+WORKDIR="`mktemp -d`"
62+
63+mkdir -p "$WORKDIR"
64+
65+svn checkout http://waf.googlecode.com/svn/branches/waf-1.5/wafadmin "$WORKDIR/wafadmin"
66+
67+rsync -C -avz --delete "$WORKDIR/wafadmin/" "$TARGETDIR/wafadmin/"
68+
69+rm -rf "$WORKDIR"
70diff --git a/buildtools/wafadmin/3rdparty/ParallelDebug.py b/buildtools/wafadmin/3rdparty/ParallelDebug.py
71new file mode 100644
72index 0000000..9d0493e
73--- /dev/null
74+++ b/buildtools/wafadmin/3rdparty/ParallelDebug.py
75@@ -0,0 +1,299 @@
76+#! /usr/bin/env python
77+# encoding: utf-8
78+# Thomas Nagy, 2007-2010 (ita)
79+
80+"""
81+debugging helpers for parallel compilation; outputs
82+an svg file in the build directory
83+"""
84+
85+import os, time, sys, threading
86+try: from Queue import Queue
87+except: from queue import Queue
88+import Runner, Options, Utils, Task, Logs
89+from Constants import *
90+
91+#import random
92+#random.seed(100)
93+
94+def set_options(opt):
95+ opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
96+ help='title for the svg diagram', dest='dtitle')
97+ opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=1000, dest='dwidth')
98+ opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
99+ opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
100+ opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
101+
102+# red #ff4d4d
103+# green #4da74d
104+# lila #a751ff
105+
106+color2code = {
107+ 'GREEN' : '#4da74d',
108+ 'YELLOW' : '#fefe44',
109+ 'PINK' : '#a751ff',
110+ 'RED' : '#cc1d1d',
111+ 'BLUE' : '#6687bb',
112+ 'CYAN' : '#34e2e2',
113+
114+}
115+
116+mp = {}
117+info = [] # list of (text,color)
118+
119+def map_to_color(name):
120+ if name in mp:
121+ return mp[name]
122+ try:
123+ cls = Task.TaskBase.classes[name]
124+ except KeyError:
125+ return color2code['RED']
126+ if cls.color in mp:
127+ return mp[cls.color]
128+ if cls.color in color2code:
129+ return color2code[cls.color]
130+ return color2code['RED']
131+
132+def loop(self):
133+ while 1:
134+ tsk=Runner.TaskConsumer.ready.get()
135+ tsk.master.set_running(1, id(threading.currentThread()), tsk)
136+ Runner.process_task(tsk)
137+ tsk.master.set_running(-1, id(threading.currentThread()), tsk)
138+Runner.TaskConsumer.loop = loop
139+
140+
141+old_start = Runner.Parallel.start
142+def do_start(self):
143+ print Options.options
144+ try:
145+ Options.options.dband
146+ except AttributeError:
147+ raise ValueError('use def options(opt): opt.load("parallel_debug")!')
148+
149+ self.taskinfo = Queue()
150+ old_start(self)
151+ process_colors(self)
152+Runner.Parallel.start = do_start
153+
154+def set_running(self, by, i, tsk):
155+ self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by) )
156+Runner.Parallel.set_running = set_running
157+
158+def name2class(name):
159+ return name.replace(' ', '_').replace('.', '_')
160+
161+def process_colors(producer):
162+ # first, cast the parameters
163+ tmp = []
164+ try:
165+ while True:
166+ tup = producer.taskinfo.get(False)
167+ tmp.append(list(tup))
168+ except:
169+ pass
170+
171+ try:
172+ ini = float(tmp[0][2])
173+ except:
174+ return
175+
176+ if not info:
177+ seen = []
178+ for x in tmp:
179+ name = x[3]
180+ if not name in seen:
181+ seen.append(name)
182+ else:
183+ continue
184+
185+ info.append((name, map_to_color(name)))
186+ info.sort(key=lambda x: x[0])
187+
188+ thread_count = 0
189+ acc = []
190+ for x in tmp:
191+ thread_count += x[6]
192+ acc.append("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count))
193+ f = open('pdebug.dat', 'w')
194+ #Utils.write('\n'.join(acc))
195+ f.write('\n'.join(acc))
196+
197+ tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]
198+
199+ st = {}
200+ for l in tmp:
201+ if not l[0] in st:
202+ st[l[0]] = len(st.keys())
203+ tmp = [ [st[lst[0]]] + lst[1:] for lst in tmp ]
204+ THREAD_AMOUNT = len(st.keys())
205+
206+ st = {}
207+ for l in tmp:
208+ if not l[1] in st:
209+ st[l[1]] = len(st.keys())
210+ tmp = [ [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ]
211+
212+
213+ BAND = Options.options.dband
214+
215+ seen = {}
216+ acc = []
217+ for x in range(len(tmp)):
218+ line = tmp[x]
219+ id = line[1]
220+
221+ if id in seen:
222+ continue
223+ seen[id] = True
224+
225+ begin = line[2]
226+ thread_id = line[0]
227+ for y in range(x + 1, len(tmp)):
228+ line = tmp[y]
229+ if line[1] == id:
230+ end = line[2]
231+ #print id, thread_id, begin, end
232+ #acc.append( ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
233+ acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3]) )
234+ break
235+
236+ if Options.options.dmaxtime < 0.1:
237+ gwidth = 1
238+ for x in tmp:
239+ m = BAND * x[2]
240+ if m > gwidth:
241+ gwidth = m
242+ else:
243+ gwidth = BAND * Options.options.dmaxtime
244+
245+ ratio = float(Options.options.dwidth) / gwidth
246+ gwidth = Options.options.dwidth
247+
248+ gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)
249+
250+ out = []
251+
252+ out.append("""<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>
253+<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.0//EN\"
254+\"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd\">
255+<svg xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" version=\"1.0\"
256+ x=\"%r\" y=\"%r\" width=\"%r\" height=\"%r\"
257+ id=\"svg602\" xml:space=\"preserve\">
258+
259+<style type='text/css' media='screen'>
260+ g.over rect { stroke:#FF0000; fill-opacity:0.4 }
261+</style>
262+
263+<script type='text/javascript'><![CDATA[
264+ var svg = document.getElementsByTagName('svg')[0];
265+ var svgNS = svg.getAttribute('xmlns');
266+ svg.addEventListener('mouseover',function(e){
267+ var g = e.target.parentNode;
268+ var x = document.getElementById('r_'+g.id);
269+ if (x) {
270+ g.setAttribute('class', g.getAttribute('class')+' over');
271+ x.setAttribute('class', x.getAttribute('class')+' over');
272+ showInfo(e, g.id);
273+ }
274+ },false);
275+ svg.addEventListener('mouseout',function(e){
276+ var g = e.target.parentNode;
277+ var x = document.getElementById('r_'+g.id);
278+ if (x) {
279+ g.setAttribute('class',g.getAttribute('class').replace(' over',''));
280+ x.setAttribute('class',x.getAttribute('class').replace(' over',''));
281+ hideInfo(e);
282+ }
283+ },false);
284+
285+function showInfo(evt, txt) {
286+ tooltip = document.getElementById('tooltip');
287+
288+ var t = document.getElementById('tooltiptext');
289+ t.firstChild.data = txt;
290+
291+ var x = evt.clientX+10;
292+ if (x > 200) { x -= t.getComputedTextLength() + 16; }
293+ var y = evt.clientY+30;
294+ tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
295+ tooltip.setAttributeNS(null,"visibility","visible");
296+
297+ var r = document.getElementById('tooltiprect');
298+ r.setAttribute('width', t.getComputedTextLength()+6)
299+}
300+
301+
302+function hideInfo(evt) {
303+ tooltip = document.getElementById('tooltip');
304+ tooltip.setAttributeNS(null,"visibility","hidden");
305+}
306+
307+]]></script>
308+
309+<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
310+<rect
311+ x='%r' y='%r'
312+ width='%r' height='%r' z-index='10'
313+ style=\"font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;\"
314+ />\n
315+
316+""" % (0, 0, gwidth + 4, gheight + 4, 0, 0, gwidth + 4, gheight + 4))
317+
318+ # main title
319+ if Options.options.dtitle:
320+ out.append("""<text x="%d" y="%d" style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">%s</text>
321+""" % (gwidth/2, gheight - 5, Options.options.dtitle))
322+
323+ # the rectangles
324+ groups = {}
325+ for (x, y, w, h, clsname) in acc:
326+ try:
327+ groups[clsname].append((x, y, w, h))
328+ except:
329+ groups[clsname] = [(x, y, w, h)]
330+
331+ for cls in groups:
332+
333+ out.append("<g id='%s'>\n" % name2class(cls))
334+
335+ for (x, y, w, h) in groups[cls]:
336+ out.append(""" <rect
337+ x='%r' y='%r'
338+ width='%r' height='%r' z-index='11'
339+ style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
340+ />\n""" % (2 + x*ratio, 2 + y, w*ratio, h, map_to_color(cls)))
341+
342+ out.append("</g>\n")
343+
344+ # output the caption
345+ cnt = THREAD_AMOUNT
346+
347+ for (text, color) in info:
348+ # caption box
349+ b = BAND/2
350+ out.append("""<g id='r_%s'><rect
351+ x='%r' y='%r'
352+ width='%r' height='%r'
353+ style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
354+ />\n""" % (name2class(text), 2 + BAND, 5 + (cnt + 0.5) * BAND, b, b, color))
355+
356+ # caption text
357+ out.append("""<text
358+ style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
359+ x="%r" y="%d">%s</text></g>\n""" % (2 + 2 * BAND, 5 + (cnt + 0.5) * BAND + 10, text))
360+ cnt += 1
361+
362+ out.append("""
363+<g transform="translate(0,0)" visibility="hidden" id="tooltip">
364+ <rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
365+ <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;"> </text>
366+</g>""")
367+
368+ out.append("\n</svg>")
369+
370+ #node = producer.bld.path.make_node('pdebug.svg')
371+ f = open('pdebug.svg', 'w')
372+ f.write("".join(out))
373+
374+
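A minimal wscript sketch of how the ParallelDebug tool above might be enabled (waf 1.5 style); the tooldir, source names and option values are illustrative assumptions, not taken from the patch:

    # hypothetical wscript
    def set_options(opt):
        # loading the tool imports it, which applies the Runner/Parallel monkey-patches
        # and registers the --dtitle/--dwidth/--dtime/--dband/--dmaxtime options above
        opt.tool_options('ParallelDebug', tooldir='buildtools/wafadmin/3rdparty')

    def configure(conf):
        conf.check_tool('gcc')

    def build(bld):
        bld(features='cc cprogram', source='main.c', target='demo')

After a parallel run such as "waf build -j8 --dtitle='demo'", process_colors() writes pdebug.dat and pdebug.svg (both opened with relative paths).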
375diff --git a/buildtools/wafadmin/3rdparty/batched_cc.py b/buildtools/wafadmin/3rdparty/batched_cc.py
376new file mode 100644
377index 0000000..8e31074
378--- /dev/null
379+++ b/buildtools/wafadmin/3rdparty/batched_cc.py
380@@ -0,0 +1,183 @@
381+#!/usr/bin/env python
382+# encoding: utf-8
383+# Thomas Nagy, 2006 (ita)
384+
385+"""
386+Batched builds - compile faster
387+instead of compiling object files one by one, c/c++ compilers are often able to compile at once:
388+cc -c ../file1.c ../file2.c ../file3.c
389+
390+Files are output in the directory from which the compiler is invoked, and dependencies are more difficult
391+to track (do not run the command on all source files if only one file changes)
392+
393+As such, we act as if the files were compiled one by one, but no command is actually run:
394+each cc/cpp Task is replaced by a TaskSlave.
395+A new task called TaskMaster collects the signatures from each slave and works out the command line
396+to run.
397+
398+To set this up, the method ccroot::create_task is replaced by a new version; to enable batched builds
399+it is only necessary to import this module in the configuration (no other change required)
400+"""
401+
402+MAX_BATCH = 50
403+MAXPARALLEL = False
404+
405+EXT_C = ['.c', '.cc', '.cpp', '.cxx']
406+
407+import os, threading
408+import TaskGen, Task, ccroot, Build, Logs
409+from TaskGen import extension, feature, before
410+from Constants import *
411+
412+cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} -c ${SRCLST}'
413+cc_fun = Task.compile_fun_noshell('batched_cc', cc_str)[0]
414+
415+cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} -c ${SRCLST}'
416+cxx_fun = Task.compile_fun_noshell('batched_cxx', cxx_str)[0]
417+
418+count = 70000
419+class batch_task(Task.Task):
420+ color = 'RED'
421+
422+ after = 'cc cxx'
423+ before = 'cc_link cxx_link static_link'
424+
425+ def __str__(self):
426+ return '(batch compilation for %d slaves)\n' % len(self.slaves)
427+
428+ def __init__(self, *k, **kw):
429+ Task.Task.__init__(self, *k, **kw)
430+ self.slaves = []
431+ self.inputs = []
432+ self.hasrun = 0
433+
434+ global count
435+ count += 1
436+ self.idx = count
437+
438+ def add_slave(self, slave):
439+ self.slaves.append(slave)
440+ self.set_run_after(slave)
441+
442+ def runnable_status(self):
443+ for t in self.run_after:
444+ if not t.hasrun:
445+ return ASK_LATER
446+
447+ for t in self.slaves:
448+ #if t.executed:
449+ if t.hasrun != SKIPPED:
450+ return RUN_ME
451+
452+ return SKIP_ME
453+
454+ def run(self):
455+ outputs = []
456+ self.outputs = []
457+
458+ srclst = []
459+ slaves = []
460+ for t in self.slaves:
461+ if t.hasrun != SKIPPED:
462+ slaves.append(t)
463+ srclst.append(t.inputs[0].abspath(self.env))
464+
465+ self.env.SRCLST = srclst
466+ self.cwd = slaves[0].inputs[0].parent.abspath(self.env)
467+
468+ env = self.env
469+ app = env.append_unique
470+ cpppath_st = env['CPPPATH_ST']
471+ env._CCINCFLAGS = env.CXXINCFLAGS = []
472+
473+ # local flags come first
474+ # set the user-defined includes paths
475+ for i in env['INC_PATHS']:
476+ app('_CCINCFLAGS', cpppath_st % i.abspath())
477+ app('_CXXINCFLAGS', cpppath_st % i.abspath())
478+ app('_CCINCFLAGS', cpppath_st % i.abspath(env))
479+ app('_CXXINCFLAGS', cpppath_st % i.abspath(env))
480+
481+ # set the library include paths
482+ for i in env['CPPPATH']:
483+ app('_CCINCFLAGS', cpppath_st % i)
484+ app('_CXXINCFLAGS', cpppath_st % i)
485+
486+ if self.slaves[0].__class__.__name__ == 'cc':
487+ ret = cc_fun(self)
488+ else:
489+ ret = cxx_fun(self)
490+
491+ if ret:
492+ return ret
493+
494+ for t in slaves:
495+ t.old_post_run()
496+
497+from TaskGen import extension, feature, after
498+
499+import cc, cxx
500+def wrap(fun):
501+ def foo(self, node):
502+ # we cannot control the extension, this sucks
503+ self.obj_ext = '.o'
504+
505+ task = fun(self, node)
506+ if not getattr(self, 'masters', None):
507+ self.masters = {}
508+ self.allmasters = []
509+
510+ if not node.parent.id in self.masters:
511+ m = self.masters[node.parent.id] = self.master = self.create_task('batch')
512+ self.allmasters.append(m)
513+ else:
514+ m = self.masters[node.parent.id]
515+ if len(m.slaves) > MAX_BATCH:
516+ m = self.masters[node.parent.id] = self.master = self.create_task('batch')
517+ self.allmasters.append(m)
518+
519+ m.add_slave(task)
520+ return task
521+ return foo
522+
523+c_hook = wrap(cc.c_hook)
524+extension(cc.EXT_CC)(c_hook)
525+
526+cxx_hook = wrap(cxx.cxx_hook)
527+extension(cxx.EXT_CXX)(cxx_hook)
528+
529+
530+@feature('cprogram', 'cshlib', 'cstaticlib')
531+@after('apply_link')
532+def link_after_masters(self):
533+ if getattr(self, 'allmasters', None):
534+ for m in self.allmasters:
535+ self.link_task.set_run_after(m)
536+
537+for c in ['cc', 'cxx']:
538+ t = Task.TaskBase.classes[c]
539+ def run(self):
540+ pass
541+
542+ def post_run(self):
543+ #self.executed=1
544+ pass
545+
546+ def can_retrieve_cache(self):
547+ if self.old_can_retrieve_cache():
548+ for m in self.generator.allmasters:
549+ try:
550+ m.slaves.remove(self)
551+ except ValueError:
552+ pass #this task wasn't included in that master
553+ return 1
554+ else:
555+ return None
556+
557+ setattr(t, 'oldrun', t.__dict__['run'])
558+ setattr(t, 'run', run)
559+ setattr(t, 'old_post_run', t.post_run)
560+ setattr(t, 'post_run', post_run)
561+ setattr(t, 'old_can_retrieve_cache', t.can_retrieve_cache)
562+ setattr(t, 'can_retrieve_cache', can_retrieve_cache)
563+
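As the docstring notes, enabling batched compilation only requires importing the module during configuration; a minimal sketch (tool directory and file names are assumptions):

    # hypothetical wscript
    def configure(conf):
        conf.check_tool('gcc')
        # loading the tool imports it, which wraps the cc/cxx extension hooks and
        # replaces run/post_run on the cc and cxx task classes
        conf.check_tool('batched_cc', tooldir='buildtools/wafadmin/3rdparty')

    def build(bld):
        bld(features='cc cprogram', source='a.c b.c c.c', target='app')

The three objects are then produced by a single batch task that invokes the compiler once with all the sources, roughly MAX_BATCH (50) files per invocation.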
564diff --git a/buildtools/wafadmin/3rdparty/boost.py b/buildtools/wafadmin/3rdparty/boost.py
565new file mode 100644
566index 0000000..e690a4e
567--- /dev/null
568+++ b/buildtools/wafadmin/3rdparty/boost.py
569@@ -0,0 +1,343 @@
570+#!/usr/bin/env python
571+# encoding: utf-8
572+#
573+# partially based on boost.py written by Gernot Vormayr
574+# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
575+# modified by Bjoern Michaelsen, 2008
576+# modified by Luca Fossati, 2008
577+# rewritten for waf 1.5.1, Thomas Nagy, 2008
578+#
579+#def set_options(opt):
580+# opt.tool_options('boost')
581+# # ...
582+#
583+#def configure(conf):
584+# # ... (e.g. conf.check_tool('g++'))
585+# conf.check_tool('boost')
586+# conf.check_boost(lib='signals filesystem', static='onlystatic', score_version=(-1000, 1000), tag_minscore=1000)
587+#
588+#def build(bld):
589+# bld(source='main.c', target='bar', uselib="BOOST BOOST_SYSTEM")
590+#
591+#ISSUES:
592+# * find_includes should be called only once!
593+# * support mandatory
594+
595+######## boost update ###########
596+## ITA: * the method get_boost_version_number does work
597+## * the rest of the code has not really been tried
598+# * make certain a demo is provided (in demos/adv for example)
599+
600+# TODO: bad and underdocumented code -> boost.py will be removed in waf 1.6 to be rewritten later
601+
602+import os.path, glob, types, re, sys
603+import Configure, config_c, Options, Utils, Logs
604+from Logs import warn, debug
605+from Configure import conf
606+
607+boost_code = '''
608+#include <iostream>
609+#include <boost/version.hpp>
610+int main() { std::cout << BOOST_VERSION << std::endl; }
611+'''
612+
613+boost_libpath = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
614+boost_cpppath = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
615+
616+STATIC_NOSTATIC = 'nostatic'
617+STATIC_BOTH = 'both'
618+STATIC_ONLYSTATIC = 'onlystatic'
619+
620+is_versiontag = re.compile('^\d+_\d+_?\d*$')
621+is_threadingtag = re.compile('^mt$')
622+is_abitag = re.compile('^[sgydpn]+$')
623+is_toolsettag = re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|vc|mgw|qcc|sun|vacpp)\d*$')
624+is_pythontag=re.compile('^py[0-9]{2}$')
625+
626+def set_options(opt):
627+ opt.add_option('--boost-includes', type='string', default='', dest='boostincludes', help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35')
628+ opt.add_option('--boost-libs', type='string', default='', dest='boostlibs', help='path to the directory where the boost libs are e.g. /usr/local/lib')
629+
630+def string_to_version(s):
631+ version = s.split('.')
632+ if len(version) < 3: return 0
633+ return int(version[0])*100000 + int(version[1])*100 + int(version[2])
634+
635+def version_string(version):
636+ major = version / 100000
637+ minor = version / 100 % 1000
638+ minor_minor = version % 100
639+ if minor_minor == 0:
640+ return "%d_%d" % (major, minor)
641+ else:
642+ return "%d_%d_%d" % (major, minor, minor_minor)
643+
644+def libfiles(lib, pattern, lib_paths):
645+ result = []
646+ for lib_path in lib_paths:
647+ libname = pattern % ('boost_%s[!_]*' % lib)
648+ result += glob.glob(os.path.join(lib_path, libname))
649+ return result
650+
651+@conf
652+def get_boost_version_number(self, dir):
653+ """silently retrieve the boost version number"""
654+ try:
655+ return self.run_c_code(compiler='cxx', code=boost_code, includes=dir, execute=1, env=self.env.copy(), type='cprogram', compile_mode='cxx', compile_filename='test.cpp')
656+ except Configure.ConfigurationError, e:
657+ return -1
658+
659+def set_default(kw, var, val):
660+ if not var in kw:
661+ kw[var] = val
662+
663+def tags_score(tags, kw):
664+ """
665+ checks library tags
666+
667+ see http://www.boost.org/doc/libs/1_35_0/more/getting_started/unix-variants.html 6.1
668+ """
669+ score = 0
670+ needed_tags = {
671+ 'threading': kw['tag_threading'],
672+ 'abi': kw['tag_abi'],
673+ 'toolset': kw['tag_toolset'],
674+ 'version': kw['tag_version'],
675+ 'python': kw['tag_python']
676+ }
677+
678+ if kw['tag_toolset'] is None:
679+ v = kw['env']
680+ toolset = v['CXX_NAME']
681+ if v['CXX_VERSION']:
682+ version_no = v['CXX_VERSION'].split('.')
683+ toolset += version_no[0]
684+ if len(version_no) > 1:
685+ toolset += version_no[1]
686+ needed_tags['toolset'] = toolset
687+
688+ found_tags = {}
689+ for tag in tags:
690+ if is_versiontag.match(tag): found_tags['version'] = tag
691+ if is_threadingtag.match(tag): found_tags['threading'] = tag
692+ if is_abitag.match(tag): found_tags['abi'] = tag
693+ if is_toolsettag.match(tag): found_tags['toolset'] = tag
694+ if is_pythontag.match(tag): found_tags['python'] = tag
695+
696+ for tagname in needed_tags.iterkeys():
697+ if needed_tags[tagname] is not None and tagname in found_tags:
698+ if re.compile(needed_tags[tagname]).match(found_tags[tagname]):
699+ score += kw['score_' + tagname][0]
700+ else:
701+ score += kw['score_' + tagname][1]
702+ return score
703+
704+@conf
705+def validate_boost(self, kw):
706+ ver = kw.get('version', '')
707+
708+ for x in 'min_version max_version version'.split():
709+ set_default(kw, x, ver)
710+
711+ set_default(kw, 'lib', '')
712+ kw['lib'] = Utils.to_list(kw['lib'])
713+
714+ set_default(kw, 'env', self.env)
715+
716+ set_default(kw, 'libpath', boost_libpath)
717+ set_default(kw, 'cpppath', boost_cpppath)
718+
719+ for x in 'tag_threading tag_version tag_toolset'.split():
720+ set_default(kw, x, None)
721+ set_default(kw, 'tag_abi', '^[^d]*$')
722+
723+ set_default(kw, 'python', str(sys.version_info[0]) + str(sys.version_info[1]) )
724+ set_default(kw, 'tag_python', '^py' + kw['python'] + '$')
725+
726+ set_default(kw, 'score_threading', (10, -10))
727+ set_default(kw, 'score_abi', (10, -10))
728+ set_default(kw, 'score_python', (10,-10))
729+ set_default(kw, 'score_toolset', (1, -1))
730+ set_default(kw, 'score_version', (100, -100))
731+
732+ set_default(kw, 'score_min', 0)
733+ set_default(kw, 'static', STATIC_NOSTATIC)
734+ set_default(kw, 'found_includes', False)
735+ set_default(kw, 'min_score', 0)
736+
737+ set_default(kw, 'errmsg', 'not found')
738+ set_default(kw, 'okmsg', 'ok')
739+
740+@conf
741+def find_boost_includes(self, kw):
742+ """
743+ check every path in kw['cpppath'] for subdir
744+ that either starts with boost- or is named boost.
745+
746+ Then the version is checked and selected accordingly to
747+ min_version/max_version. The highest possible version number is
748+ selected!
749+
750+ If no versiontag is set the versiontag is set accordingly to the
751+ selected library and CPPPATH_BOOST is set.
752+ """
753+ boostPath = getattr(Options.options, 'boostincludes', '')
754+ if boostPath:
755+ boostPath = [os.path.normpath(os.path.expandvars(os.path.expanduser(boostPath)))]
756+ else:
757+ boostPath = Utils.to_list(kw['cpppath'])
758+
759+ min_version = string_to_version(kw.get('min_version', ''))
760+ max_version = string_to_version(kw.get('max_version', '')) or (sys.maxint - 1)
761+
762+ version = 0
763+ for include_path in boostPath:
764+ boost_paths = [p for p in glob.glob(os.path.join(include_path, 'boost*')) if os.path.isdir(p)]
765+ debug('BOOST Paths: %r' % boost_paths)
766+ for path in boost_paths:
767+ pathname = os.path.split(path)[-1]
768+ ret = -1
769+ if pathname == 'boost':
770+ path = include_path
771+ ret = self.get_boost_version_number(path)
772+ elif pathname.startswith('boost-'):
773+ ret = self.get_boost_version_number(path)
774+ ret = int(ret)
775+
776+ if ret != -1 and ret >= min_version and ret <= max_version and ret > version:
777+ boost_path = path
778+ version = ret
779+ if not version:
780+ self.fatal('boost headers not found! (required version min: %s max: %s)'
781+ % (kw['min_version'], kw['max_version']))
782+ return False
783+
784+ found_version = version_string(version)
785+ versiontag = '^' + found_version + '$'
786+ if kw['tag_version'] is None:
787+ kw['tag_version'] = versiontag
788+ elif kw['tag_version'] != versiontag:
789+ warn('boost header version %r and tag_version %r do not match!' % (versiontag, kw['tag_version']))
790+ env = self.env
791+ env['CPPPATH_BOOST'] = boost_path
792+ env['BOOST_VERSION'] = found_version
793+ self.found_includes = 1
794+ ret = 'Version %s (%s)' % (found_version, boost_path)
795+ return ret
796+
797+@conf
798+def find_boost_library(self, lib, kw):
799+
800+ def find_library_from_list(lib, files):
801+ lib_pattern = re.compile('.*boost_(.*?)\..*')
802+ result = (None, None)
803+ resultscore = kw['min_score'] - 1
804+ for file in files:
805+ m = lib_pattern.search(file, 1)
806+ if m:
807+ libname = m.group(1)
808+ libtags = libname.split('-')[1:]
809+ currentscore = tags_score(libtags, kw)
810+ if currentscore > resultscore:
811+ result = (libname, file)
812+ resultscore = currentscore
813+ return result
814+
815+ lib_paths = getattr(Options.options, 'boostlibs', '')
816+ if lib_paths:
817+ lib_paths = [os.path.normpath(os.path.expandvars(os.path.expanduser(lib_paths)))]
818+ else:
819+ lib_paths = Utils.to_list(kw['libpath'])
820+
821+ v = kw.get('env', self.env)
822+
823+ (libname, file) = (None, None)
824+ if kw['static'] in [STATIC_NOSTATIC, STATIC_BOTH]:
825+ st_env_prefix = 'LIB'
826+ files = libfiles(lib, v['shlib_PATTERN'], lib_paths)
827+ (libname, file) = find_library_from_list(lib, files)
828+ if libname is None and kw['static'] in [STATIC_ONLYSTATIC, STATIC_BOTH]:
829+ st_env_prefix = 'STATICLIB'
830+ staticLibPattern = v['staticlib_PATTERN']
831+ if self.env['CC_NAME'] == 'msvc':
832+ staticLibPattern = 'lib' + staticLibPattern
833+ files = libfiles(lib, staticLibPattern, lib_paths)
834+ (libname, file) = find_library_from_list(lib, files)
835+ if libname is not None:
836+ v['LIBPATH_BOOST_' + lib.upper()] = [os.path.split(file)[0]]
837+ if self.env['CC_NAME'] == 'msvc' and os.path.splitext(file)[1] == '.lib':
838+ v[st_env_prefix + '_BOOST_' + lib.upper()] = ['libboost_'+libname]
839+ else:
840+ v[st_env_prefix + '_BOOST_' + lib.upper()] = ['boost_'+libname]
841+ return
842+ self.fatal('lib boost_' + lib + ' not found!')
843+
844+@conf
845+def check_boost(self, *k, **kw):
846+ """
847+ This should be the main entry point
848+
849+- min_version
850+- max_version
851+- version
852+- include_path
853+- lib_path
854+- lib
855+- toolsettag - None or a regexp
856+- threadingtag - None or a regexp
857+- abitag - None or a regexp
858+- versiontag - WARNING: you should rather use version or min_version/max_version
859+- static - look for static libs (values:
860+ 'nostatic' or STATIC_NOSTATIC - ignore static libs (default)
861+ 'both' or STATIC_BOTH - find static libs, too
862+ 'onlystatic' or STATIC_ONLYSTATIC - find only static libs
863+- score_version
864+- score_abi
865+- scores_threading
866+- score_toolset
867+ * the scores are tuples (match_score, nomatch_score)
868+ match_score is the added to the score if the tag is matched
869+ nomatch_score is added when a tag is found and does not match
870+- min_score
871+ """
872+
873+ if not self.env['CXX']:
874+ self.fatal('load a c++ compiler tool first, for example conf.check_tool("g++")')
875+ self.validate_boost(kw)
876+ ret = None
877+ try:
878+ if not kw.get('found_includes', None):
879+ self.check_message_1(kw.get('msg_includes', 'boost headers'))
880+ ret = self.find_boost_includes(kw)
881+
882+ except Configure.ConfigurationError, e:
883+ if 'errmsg' in kw:
884+ self.check_message_2(kw['errmsg'], 'YELLOW')
885+ if 'mandatory' in kw:
886+ if Logs.verbose > 1:
887+ raise
888+ else:
889+ self.fatal('the configuration failed (see %r)' % self.log.name)
890+ else:
891+ if 'okmsg' in kw:
892+ self.check_message_2(kw.get('okmsg_includes', ret))
893+
894+ for lib in kw['lib']:
895+ self.check_message_1('library boost_'+lib)
896+ try:
897+ self.find_boost_library(lib, kw)
898+ except Configure.ConfigurationError, e:
899+ ret = False
900+ if 'errmsg' in kw:
901+ self.check_message_2(kw['errmsg'], 'YELLOW')
902+ if 'mandatory' in kw:
903+ if Logs.verbose > 1:
904+ raise
905+ else:
906+ self.fatal('the configuration failed (see %r)' % self.log.name)
907+ else:
908+ if 'okmsg' in kw:
909+ self.check_message_2(kw['okmsg'])
910+
911+ return ret
912+
913diff --git a/buildtools/wafadmin/3rdparty/fluid.py b/buildtools/wafadmin/3rdparty/fluid.py
914new file mode 100644
915index 0000000..117edef
916--- /dev/null
917+++ b/buildtools/wafadmin/3rdparty/fluid.py
918@@ -0,0 +1,27 @@
919+#!/usr/bin/python
920+# encoding: utf-8
921+# Grygoriy Fuchedzhy 2009
922+
923+"""
924+Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjunction with the 'cxx' feature.
925+"""
926+
927+import Task
928+from TaskGen import extension
929+
930+Task.simple_task_type('fluid', '${FLUID} -c -o ${TGT[0].abspath(env)} -h ${TGT[1].abspath(env)} ${SRC}', 'BLUE', shell=False, ext_out='.cxx')
931+
932+@extension('.fl')
933+def fluid(self, node):
934+ """add the .fl to the source list; the cxx file generated will be compiled when possible"""
935+ cpp = node.change_ext('.cpp')
936+ hpp = node.change_ext('.hpp')
937+ self.create_task('fluid', node, [cpp, hpp])
938+
939+ if 'cxx' in self.features:
940+ self.allnodes.append(cpp)
941+
942+def detect(conf):
943+ fluid = conf.find_program('fluid', var='FLUID', mandatory=True)
944+ conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True)
945+
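A minimal wscript sketch for the 'fluid' feature described above (tooldir and file names are assumptions):

    # hypothetical wscript
    def configure(conf):
        conf.check_tool('g++')
        conf.check_tool('fluid', tooldir='buildtools/wafadmin/3rdparty')  # runs detect() above

    def build(bld):
        # ui.fl is turned into ui.cpp/ui.hpp by the fluid task; because 'cxx' is in the
        # features, the generated ui.cpp is appended to the sources to compile
        bld(features='cxx cprogram', source='ui.fl main.cpp', target='gui', uselib='FLTK')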
946diff --git a/buildtools/wafadmin/3rdparty/gccdeps.py b/buildtools/wafadmin/3rdparty/gccdeps.py
947new file mode 100644
948index 0000000..6600c9c
949--- /dev/null
950+++ b/buildtools/wafadmin/3rdparty/gccdeps.py
951@@ -0,0 +1,128 @@
952+#!/usr/bin/env python
953+# encoding: utf-8
954+# Thomas Nagy, 2008-2010 (ita)
955+
956+"""
957+Execute the tasks with gcc -MD, read the dependencies from the .d file
958+and prepare the dependency calculation for the next run
959+"""
960+
961+import os, re, threading
962+import Task, Logs, Utils, preproc
963+from TaskGen import before, after, feature
964+
965+lock = threading.Lock()
966+
967+preprocessor_flag = '-MD'
968+
969+@feature('cc')
970+@before('apply_core')
971+def add_mmd_cc(self):
972+ if self.env.get_flat('CCFLAGS').find(preprocessor_flag) < 0:
973+ self.env.append_value('CCFLAGS', preprocessor_flag)
974+
975+@feature('cxx')
976+@before('apply_core')
977+def add_mmd_cxx(self):
978+ if self.env.get_flat('CXXFLAGS').find(preprocessor_flag) < 0:
979+ self.env.append_value('CXXFLAGS', preprocessor_flag)
980+
981+def scan(self):
982+ "the scanner does not do anything initially"
983+ nodes = self.generator.bld.node_deps.get(self.unique_id(), [])
984+ names = []
985+ return (nodes, names)
986+
987+re_o = re.compile("\.o$")
988+re_src = re.compile("^(\.\.)[\\/](.*)$")
989+
990+def post_run(self):
991+ # The following code is executed by threads, it is not safe, so a lock is needed...
992+
993+ if getattr(self, 'cached', None):
994+ return Task.Task.post_run(self)
995+
996+ name = self.outputs[0].abspath(self.env)
997+ name = re_o.sub('.d', name)
998+ txt = Utils.readf(name)
999+ #os.unlink(name)
1000+
1001+ txt = txt.replace('\\\n', '')
1002+
1003+ lst = txt.strip().split(':')
1004+ val = ":".join(lst[1:])
1005+ val = val.split()
1006+
1007+ nodes = []
1008+ bld = self.generator.bld
1009+
1010+ f = re.compile("^("+self.env.variant()+"|\.\.)[\\/](.*)$")
1011+ for x in val:
1012+ if os.path.isabs(x):
1013+
1014+ if not preproc.go_absolute:
1015+ continue
1016+
1017+ lock.acquire()
1018+ try:
1019+ node = bld.root.find_resource(x)
1020+ finally:
1021+ lock.release()
1022+ else:
1023+ g = re.search(re_src, x)
1024+ if g:
1025+ x = g.group(2)
1026+ lock.acquire()
1027+ try:
1028+ node = bld.bldnode.parent.find_resource(x)
1029+ finally:
1030+ lock.release()
1031+ else:
1032+ g = re.search(f, x)
1033+ if g:
1034+ x = g.group(2)
1035+ lock.acquire()
1036+ try:
1037+ node = bld.srcnode.find_resource(x)
1038+ finally:
1039+ lock.release()
1040+
1041+ if id(node) == id(self.inputs[0]):
1042+ # ignore the source file, it is already in the dependencies
1043+ # this way, successful config tests may be retrieved from the cache
1044+ continue
1045+
1046+ if not node:
1047+ raise ValueError('could not find %r for %r' % (x, self))
1048+ else:
1049+ nodes.append(node)
1050+
1051+ Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
1052+
1053+ bld.node_deps[self.unique_id()] = nodes
1054+ bld.raw_deps[self.unique_id()] = []
1055+
1056+ try:
1057+ del self.cache_sig
1058+ except:
1059+ pass
1060+
1061+ Task.Task.post_run(self)
1062+
1063+import Constants, Utils
1064+def sig_implicit_deps(self):
1065+ try:
1066+ return Task.Task.sig_implicit_deps(self)
1067+ except Utils.WafError:
1068+ return Constants.SIG_NIL
1069+
1070+for name in 'cc cxx'.split():
1071+ try:
1072+ cls = Task.TaskBase.classes[name]
1073+ except KeyError:
1074+ pass
1075+ else:
1076+ cls.post_run = post_run
1077+ cls.scan = scan
1078+ cls.sig_implicit_deps = sig_implicit_deps
1079+
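A sketch of how the gccdeps tool above might be enabled, with an illustration of the .d file it parses (tooldir and paths are assumptions):

    # hypothetical wscript
    def configure(conf):
        conf.check_tool('gcc')
        conf.check_tool('gccdeps', tooldir='buildtools/wafadmin/3rdparty')

    # add_mmd_cc/add_mmd_cxx then append -MD to CCFLAGS/CXXFLAGS, so compiling
    # main.c also writes main.d, for example:
    #   main.o: main.c include/app.h /usr/include/stdio.h
    # post_run() above drops the target before the colon, maps each remaining path
    # to a Node and stores the list in bld.node_deps for the next run, replacing the
    # regex-based preprocessor scan.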
1080diff --git a/buildtools/wafadmin/3rdparty/go.py b/buildtools/wafadmin/3rdparty/go.py
1081new file mode 100644
1082index 0000000..2d8df0d
1083--- /dev/null
1084+++ b/buildtools/wafadmin/3rdparty/go.py
1085@@ -0,0 +1,111 @@
1086+#!/usr/bin/env python
1087+# encoding: utf-8
1088+# go.py - Waf tool for the Go programming language
1089+# By: Tom Wambold <tom5760@gmail.com>
1090+
1091+import platform, os
1092+
1093+import Task
1094+import Utils
1095+from TaskGen import feature, extension, after
1096+
1097+Task.simple_task_type('gocompile', '${GOC} ${GOCFLAGS} -o ${TGT} ${SRC}', shell=False)
1098+Task.simple_task_type('gopack', '${GOP} grc ${TGT} ${SRC}', shell=False)
1099+Task.simple_task_type('golink', '${GOL} ${GOLFLAGS} -o ${TGT} ${SRC}', shell=False)
1100+
1101+def detect(conf):
1102+
1103+ def set_def(var, val):
1104+ if not conf.env[var]:
1105+ conf.env[var] = val
1106+
1107+ goarch = os.getenv("GOARCH")
1108+
1109+ if goarch == '386':
1110+ set_def('GO_PLATFORM', 'i386')
1111+ elif goarch == 'amd64':
1112+ set_def('GO_PLATFORM', 'x86_64')
1113+ elif goarch == 'arm':
1114+ set_def('GO_PLATFORM', 'arm')
1115+ else:
1116+ set_def('GO_PLATFORM', platform.machine())
1117+
1118+ if conf.env.GO_PLATFORM == 'x86_64':
1119+ set_def('GO_COMPILER', '6g')
1120+ set_def('GO_LINKER', '6l')
1121+ set_def('GO_EXTENSION', '.6')
1122+ elif conf.env.GO_PLATFORM in ['i386', 'i486', 'i586', 'i686']:
1123+ set_def('GO_COMPILER', '8g')
1124+ set_def('GO_LINKER', '8l')
1125+ set_def('GO_EXTENSION', '.8')
1126+ elif conf.env.GO_PLATFORM == 'arm':
1127+ set_def('GO_COMPILER', '5g')
1128+ set_def('GO_LINKER', '5l')
1129+ set_def('GO_EXTENSION', '.5')
1130+
1131+ if not (conf.env.GO_COMPILER or conf.env.GO_LINKER or conf.env.GO_EXTENSION):
1132+ raise conf.fatal('Unsupported platform ' + platform.machine())
1133+
1134+ set_def('GO_PACK', 'gopack')
1135+ set_def('GO_PACK_EXTENSION', '.a')
1136+
1137+ conf.find_program(conf.env.GO_COMPILER, var='GOC', mandatory=True)
1138+ conf.find_program(conf.env.GO_LINKER, var='GOL', mandatory=True)
1139+ conf.find_program(conf.env.GO_PACK, var='GOP', mandatory=True)
1140+ conf.find_program('cgo', var='CGO', mandatory=True)
1141+
1142+@extension('.go')
1143+def compile_go(self, node):
1144+ try:
1145+ self.go_nodes.append(node)
1146+ except AttributeError:
1147+ self.go_nodes = [node]
1148+
1149+@feature('go')
1150+@after('apply_core')
1151+def apply_compile_go(self):
1152+ try:
1153+ nodes = self.go_nodes
1154+ except AttributeError:
1155+ self.go_compile_task = None
1156+ else:
1157+ self.go_compile_task = self.create_task('gocompile',
1158+ nodes,
1159+ [self.path.find_or_declare(self.target + self.env.GO_EXTENSION)])
1160+
1161+@feature('gopackage', 'goprogram')
1162+@after('apply_compile_go')
1163+def apply_goinc(self):
1164+ if not getattr(self, 'go_compile_task', None):
1165+ return
1166+
1167+ names = self.to_list(getattr(self, 'uselib_local', []))
1168+ for name in names:
1169+ obj = self.name_to_obj(name)
1170+ if not obj:
1171+ raise Utils.WafError('object %r was not found in uselib_local '
1172+ '(required by %r)' % (name, self.name))
1173+ obj.post()
1174+ self.go_compile_task.set_run_after(obj.go_package_task)
1175+ self.go_compile_task.dep_nodes.extend(obj.go_package_task.outputs)
1176+ self.env.append_unique('GOCFLAGS', '-I' + obj.path.abspath(obj.env))
1177+ self.env.append_unique('GOLFLAGS', '-L' + obj.path.abspath(obj.env))
1178+
1179+@feature('gopackage')
1180+@after('apply_goinc')
1181+def apply_gopackage(self):
1182+ self.go_package_task = self.create_task('gopack',
1183+ self.go_compile_task.outputs[0],
1184+ self.path.find_or_declare(self.target + self.env.GO_PACK_EXTENSION))
1185+ self.go_package_task.set_run_after(self.go_compile_task)
1186+ self.go_package_task.dep_nodes.extend(self.go_compile_task.outputs)
1187+
1188+@feature('goprogram')
1189+@after('apply_goinc')
1190+def apply_golink(self):
1191+ self.go_link_task = self.create_task('golink',
1192+ self.go_compile_task.outputs[0],
1193+ self.path.find_or_declare(self.target))
1194+ self.go_link_task.set_run_after(self.go_compile_task)
1195+ self.go_link_task.dep_nodes.extend(self.go_compile_task.outputs)
1196+
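A minimal wscript sketch using the go features defined above (target and package names are placeholders):

    # hypothetical wscript
    def configure(conf):
        conf.check_tool('go', tooldir='buildtools/wafadmin/3rdparty')

    def build(bld):
        # 'gopackage' runs gocompile then gopack; 'goprogram' runs gocompile then golink
        bld(features='go gopackage', source='util.go', target='util')
        bld(features='go goprogram', source='main.go', target='hello', uselib_local='util')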
1197diff --git a/buildtools/wafadmin/3rdparty/lru_cache.py b/buildtools/wafadmin/3rdparty/lru_cache.py
1198new file mode 100644
1199index 0000000..5b00abc
1200--- /dev/null
1201+++ b/buildtools/wafadmin/3rdparty/lru_cache.py
1202@@ -0,0 +1,97 @@
1203+#! /usr/bin/env python
1204+# encoding: utf-8
1205+# Thomas Nagy 2011
1206+
1207+import os, shutil, re
1208+import Options, Build, Logs
1209+
1210+"""
1211+Apply a least recently used policy to the Waf cache.
1212+
1213+For performance reasons, it is called after the build is complete.
1214+
1215+We assume that the folders are written atomically
1216+
1217+Do export WAFCACHE=/tmp/foo-xyz where xyz represents the cache size in megabytes
1218+If missing, the default cache size will be set to 10GB
1219+"""
1220+
1221+re_num = re.compile('[a-zA-Z_]+(\d+)')
1222+
1223+CACHESIZE = 10*1024*1024*1024 # in bytes
1224+CLEANRATIO = 0.8
1225+DIRSIZE = 4096
1226+
1227+def compile(self):
1228+ if Options.cache_global and not Options.options.nocache:
1229+ try:
1230+ os.makedirs(Options.cache_global)
1231+ except:
1232+ pass
1233+
1234+ try:
1235+ self.raw_compile()
1236+ finally:
1237+ if Options.cache_global and not Options.options.nocache:
1238+ self.sweep()
1239+
1240+def sweep(self):
1241+ global CACHESIZE
1242+ CACHEDIR = Options.cache_global
1243+
1244+ # get the cache max size from the WAFCACHE filename
1245+ re_num = re.compile('[a-zA-Z_]+(\d+)')
1246+ val = re_num.sub('\\1', os.path.basename(Options.cache_global))
1247+ try:
1248+ CACHESIZE = int(val)
1249+ except:
1250+ pass
1251+
1252+ # map folder names to timestamps
1253+ flist = {}
1254+ for x in os.listdir(CACHEDIR):
1255+ j = os.path.join(CACHEDIR, x)
1256+ if os.path.isdir(j) and len(x) == 32: # dir names are md5 hexdigests
1257+ flist[x] = [os.stat(j).st_mtime, 0]
1258+
1259+ for (x, v) in flist.items():
1260+ cnt = DIRSIZE # each entry takes 4kB
1261+ d = os.path.join(CACHEDIR, x)
1262+ for k in os.listdir(d):
1263+ cnt += os.stat(os.path.join(d, k)).st_size
1264+ flist[x][1] = cnt
1265+
1266+ total = sum([x[1] for x in flist.values()])
1267+ Logs.debug('lru: Cache size is %r' % total)
1268+
1269+ if total >= CACHESIZE:
1270+ Logs.debug('lru: Trimming the cache since %r > %r' % (total, CACHESIZE))
1271+
1272+ # make a list to sort the folders by timestamp
1273+ lst = [(p, v[0], v[1]) for (p, v) in flist.items()]
1274+ lst.sort(key=lambda x: x[1]) # sort by timestamp
1275+ lst.reverse()
1276+
1277+ while total >= CACHESIZE * CLEANRATIO:
1278+ (k, t, s) = lst.pop()
1279+ p = os.path.join(CACHEDIR, k)
1280+ v = p + '.del'
1281+ try:
1282+ os.rename(p, v)
1283+ except:
1284+ # someone already did it
1285+ pass
1286+ else:
1287+ try:
1288+ shutil.rmtree(v)
1289+ except:
1290+ # this should not happen, but who knows?
1291+ Logs.warn('If you ever see this message, report it (%r)' % v)
1292+ total -= s
1293+ del flist[k]
1294+ Logs.debug('lru: Total at the end %r' % total)
1295+
1296+Build.BuildContext.raw_compile = Build.BuildContext.compile
1297+Build.BuildContext.compile = compile
1298+Build.BuildContext.sweep = sweep
1299+
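A sketch of how the LRU policy above might be activated; the WAFCACHE value and tooldir are assumptions, only the convention of trailing digits comes from sweep(), which extracts them into CACHESIZE:

    # hypothetical usage
    #   export WAFCACHE=/tmp/wafcache_5000000000
    def configure(conf):
        conf.check_tool('gcc')
        conf.check_tool('lru_cache', tooldir='buildtools/wafadmin/3rdparty')
    # after each build, sweep() sums the per-folder sizes and removes the oldest
    # cache entries until the total drops below CLEANRATIO (80%) of CACHESIZE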
1300diff --git a/buildtools/wafadmin/3rdparty/paranoid.py b/buildtools/wafadmin/3rdparty/paranoid.py
1301new file mode 100644
1302index 0000000..ead64ea
1303--- /dev/null
1304+++ b/buildtools/wafadmin/3rdparty/paranoid.py
1305@@ -0,0 +1,35 @@
1306+#!/usr/bin/env python
1307+# encoding: utf-8
1308+# ita 2010
1309+
1310+import Logs, Utils, Build, Task
1311+
1312+def say(txt):
1313+ Logs.warn("^o^: %s" % txt)
1314+
1315+try:
1316+ ret = Utils.cmd_output('which cowsay 2> /dev/null').strip()
1317+except Exception, e:
1318+ pass
1319+else:
1320+ def say(txt):
1321+ f = Utils.cmd_output([ret, txt])
1322+ Utils.pprint('PINK', f)
1323+
1324+say('you make the errors, we detect them')
1325+
1326+def check_task_classes(self):
1327+ for x in Task.TaskBase.classes:
1328+ if isinstance(x, Task.Task):
1329+ if not getattr(cls, 'ext_in', None) or getattr(cls, 'before', None):
1330+ say('class %s has no precedence constraints (ext_in/before)')
1331+ if not getattr(cls, 'ext_out', None) or getattr(cls, 'after', None):
1332+ say('class %s has no precedence constraints (ext_out/after)')
1333+
1334+comp = Build.BuildContext.compile
1335+def compile(self):
1336+ if not getattr(self, 'magic', None):
1337+ check_task_classes(self)
1338+ return comp(self)
1339+Build.BuildContext.compile = compile
1340+
1341diff --git a/buildtools/wafadmin/3rdparty/swig.py b/buildtools/wafadmin/3rdparty/swig.py
1342new file mode 100644
1343index 0000000..c0a4108
1344--- /dev/null
1345+++ b/buildtools/wafadmin/3rdparty/swig.py
1346@@ -0,0 +1,190 @@
1347+#! /usr/bin/env python
1348+# encoding: UTF-8
1349+# Petar Forai
1350+# Thomas Nagy 2008
1351+
1352+import re
1353+import Task, Utils, Logs
1354+from TaskGen import extension
1355+from Configure import conf
1356+import preproc
1357+
1358+"""
1359+Welcome to the hell of adding tasks dynamically
1360+
1361+swig interface files may be created at runtime, the module name may be unknown in advance
1362+
1363+rev 5859 is much simpler
1364+"""
1365+
1366+SWIG_EXTS = ['.swig', '.i']
1367+
1368+swig_str = '${SWIG} ${SWIGFLAGS} ${_CCINCFLAGS} ${_CXXINCFLAGS} ${_CCDEFFLAGS} ${_CXXDEFFLAGS} ${SRC}'
1369+cls = Task.simple_task_type('swig', swig_str, color='BLUE', ext_in='.i .h', ext_out='.o .c .cxx', shell=False)
1370+
1371+def runnable_status(self):
1372+ for t in self.run_after:
1373+ if not t.hasrun:
1374+ return ASK_LATER
1375+
1376+ if not getattr(self, 'init_outputs', None):
1377+ self.init_outputs = True
1378+ if not getattr(self, 'module', None):
1379+ # search the module name
1380+ txt = self.inputs[0].read(self.env)
1381+ m = re_module.search(txt)
1382+ if not m:
1383+ raise ValueError("could not find the swig module name")
1384+ self.module = m.group(1)
1385+
1386+ swig_c(self)
1387+
1388+ # add the language-specific output files as nodes
1389+ # call funs in the dict swig_langs
1390+ for x in self.env['SWIGFLAGS']:
1391+ # obtain the language
1392+ x = x[1:]
1393+ try:
1394+ fun = swig_langs[x]
1395+ except KeyError:
1396+ pass
1397+ else:
1398+ fun(self)
1399+
1400+ return Task.Task.runnable_status(self)
1401+setattr(cls, 'runnable_status', runnable_status)
1402+
1403+re_module = re.compile('%module(?:\s*\(.*\))?\s+(.+)', re.M)
1404+
1405+re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
1406+re_2 = re.compile('%include "(.*)"', re.M)
1407+re_3 = re.compile('#include "(.*)"', re.M)
1408+
1409+def scan(self):
1410+ "scan for swig dependencies, climb the .i files"
1411+ env = self.env
1412+
1413+ lst_src = []
1414+
1415+ seen = []
1416+ to_see = [self.inputs[0]]
1417+
1418+ while to_see:
1419+ node = to_see.pop(0)
1420+ if node.id in seen:
1421+ continue
1422+ seen.append(node.id)
1423+ lst_src.append(node)
1424+
1425+ # read the file
1426+ code = node.read(env)
1427+ code = preproc.re_nl.sub('', code)
1428+ code = preproc.re_cpp.sub(preproc.repl, code)
1429+
1430+ # find .i files and project headers
1431+ names = re_2.findall(code) + re_3.findall(code)
1432+ for n in names:
1433+ for d in self.generator.env.INC_PATHS + [node.parent]:
1434+ u = d.find_resource(n)
1435+ if u:
1436+ to_see.append(u)
1437+ break
1438+ else:
1439+ Logs.warn('could not find %r' % n)
1440+
1441+ # list of nodes this one depends on, and module name if present
1442+ if Logs.verbose:
1443+ Logs.debug('deps: deps for %s: %s' % (str(self), str(lst_src)))
1444+ return (lst_src, [])
1445+cls.scan = scan
1446+
1447+# provide additional language processing
1448+swig_langs = {}
1449+def swig(fun):
1450+ swig_langs[fun.__name__.replace('swig_', '')] = fun
1451+
1452+def swig_c(self):
1453+ ext = '.swigwrap_%d.c' % self.generator.idx
1454+ flags = self.env['SWIGFLAGS']
1455+ if '-c++' in flags:
1456+ ext += 'xx'
1457+ out_node = self.inputs[0].parent.find_or_declare(self.module + ext)
1458+
1459+ try:
1460+ if '-c++' in flags:
1461+ fun = self.generator.cxx_hook
1462+ else:
1463+ fun = self.generator.c_hook
1464+ except AttributeError:
1465+ raise Utils.WafError('No c%s compiler was found to process swig files' % ('-c++' in flags and '++' or ''))
1466+
1467+ task = fun(out_node)
1468+ task.set_run_after(self)
1469+
1470+ ge = self.generator.bld.generator
1471+ ge.outstanding.insert(0, task)
1472+ ge.total += 1
1473+
1474+ try:
1475+ ltask = self.generator.link_task
1476+ except AttributeError:
1477+ pass
1478+ else:
1479+ ltask.inputs.append(task.outputs[0])
1480+
1481+ self.outputs.append(out_node)
1482+
1483+ if not '-o' in self.env['SWIGFLAGS']:
1484+ self.env.append_value('SWIGFLAGS', '-o')
1485+ self.env.append_value('SWIGFLAGS', self.outputs[0].abspath(self.env))
1486+
1487+@swig
1488+def swig_python(tsk):
1489+ tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.py'))
1490+
1491+@swig
1492+def swig_ocaml(tsk):
1493+ tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.ml'))
1494+ tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.mli'))
1495+
1496+@extension(SWIG_EXTS)
1497+def i_file(self, node):
1498+ # the task instance
1499+ tsk = self.create_task('swig')
1500+ tsk.set_inputs(node)
1501+ tsk.module = getattr(self, 'swig_module', None)
1502+
1503+ flags = self.to_list(getattr(self, 'swig_flags', []))
1504+ self.env.append_value('SWIGFLAGS', flags)
1505+
1506+ if not '-outdir' in flags:
1507+ flags.append('-outdir')
1508+ flags.append(node.parent.abspath(self.env))
1509+
1510+@conf
1511+def check_swig_version(conf, minver=None):
1512+ """Check for a minimum swig version like conf.check_swig_version('1.3.28')
1513+ or conf.check_swig_version((1,3,28)) """
1514+ reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
1515+
1516+ swig_out = Utils.cmd_output('%s -version' % conf.env['SWIG'])
1517+
1518+ swigver = [int(s) for s in reg_swig.findall(swig_out)[0].split('.')]
1519+ if isinstance(minver, basestring):
1520+ minver = [int(s) for s in minver.split(".")]
1521+ if isinstance(minver, tuple):
1522+ minver = [int(s) for s in minver]
1523+ result = (minver is None) or (minver[:3] <= swigver[:3])
1524+ swigver_full = '.'.join(map(str, swigver))
1525+ if result:
1526+ conf.env['SWIG_VERSION'] = swigver_full
1527+ minver_str = '.'.join(map(str, minver))
1528+ if minver is None:
1529+ conf.check_message_custom('swig version', '', swigver_full)
1530+ else:
1531+ conf.check_message('swig version', '>= %s' % (minver_str,), result, option=swigver_full)
1532+ return result
1533+
1534+def detect(conf):
1535+ swig = conf.find_program('swig', var='SWIG', mandatory=True)
1536+
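A minimal wscript sketch for the swig tool above; apart from the swig_flags attribute read by i_file(), the feature set and file names are assumptions about a typical Python binding:

    # hypothetical wscript
    def configure(conf):
        conf.check_tool('g++ python')
        conf.check_tool('swig', tooldir='buildtools/wafadmin/3rdparty')
        conf.check_swig_version('1.3.28')

    def build(bld):
        # api.i is handled by i_file() above; the generated wrapper is compiled and
        # linked into the extension because the generator also has cxx/cshlib features
        bld(features='cxx cshlib pyext', source='api.i glue.cpp', target='_api',
            swig_flags='-c++ -python')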
1537diff --git a/buildtools/wafadmin/3rdparty/valadoc.py b/buildtools/wafadmin/3rdparty/valadoc.py
1538new file mode 100644
1539index 0000000..d0a9fe8
1540--- /dev/null
1541+++ b/buildtools/wafadmin/3rdparty/valadoc.py
1542@@ -0,0 +1,113 @@
1543+#! /usr/bin/env python
1544+# encoding: UTF-8
1545+# Nicolas Joseph 2009
1546+
1547+from fnmatch import fnmatchcase
1548+import os, os.path, re, stat
1549+import Task, Utils, Node, Constants
1550+from TaskGen import feature, extension, after
1551+from Logs import debug, warn, error
1552+
1553+VALADOC_STR = '${VALADOC}'
1554+
1555+class valadoc_task(Task.Task):
1556+
1557+ vars = ['VALADOC', 'VALADOCFLAGS']
1558+ color = 'BLUE'
1559+ after = 'cxx_link cc_link'
1560+ quiet = True
1561+
1562+ output_dir = ''
1563+ doclet = ''
1564+ package_name = ''
1565+ package_version = ''
1566+ files = []
1567+ protected = True
1568+ private = False
1569+ inherit = False
1570+ deps = False
1571+ enable_non_null_experimental = False
1572+ force = False
1573+
1574+ def runnable_status(self):
1575+ return True
1576+
1577+ def run(self):
1578+ if self.env['VALADOC']:
1579+ if not self.env['VALADOCFLAGS']:
1580+ self.env['VALADOCFLAGS'] = ''
1581+ cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
1582+ cmd.append ('-o %s' % self.output_dir)
1583+ if getattr(self, 'doclet', None):
1584+ cmd.append ('--doclet %s' % self.doclet)
1585+ cmd.append ('--package-name %s' % self.package_name)
1586+ if getattr(self, 'version', None):
1587+ cmd.append ('--package-version %s' % self.package_version)
1588+ if getattr(self, 'packages', None):
1589+ for package in self.packages:
1590+ cmd.append ('--pkg %s' % package)
1591+ if getattr(self, 'vapi_dirs', None):
1592+ for vapi_dir in self.vapi_dirs:
1593+ cmd.append ('--vapidir %s' % vapi_dir)
1594+ if not getattr(self, 'protected', None):
1595+ cmd.append ('--no-protected')
1596+ if getattr(self, 'private', None):
1597+ cmd.append ('--private')
1598+ if getattr(self, 'inherit', None):
1599+ cmd.append ('--inherit')
1600+ if getattr(self, 'deps', None):
1601+ cmd.append ('--deps')
1602+ if getattr(self, 'enable_non_null_experimental', None):
1603+ cmd.append ('--enable-non-null-experimental')
1604+ if getattr(self, 'force', None):
1605+ cmd.append ('--force')
1606+ cmd.append (' '.join ([x.relpath_gen (self.generator.bld.bldnode) for x in self.files]))
1607+ return self.generator.bld.exec_command(' '.join(cmd))
1608+ else:
1609+ error ('You must install valadoc <http://live.gnome.org/Valadoc> to generate the API documentation')
1610+ return -1
1611+
1612+@feature('valadoc')
1613+def process_valadoc(self):
1614+ task = getattr(self, 'task', None)
1615+ if not task:
1616+ task = self.create_task('valadoc')
1617+ self.task = task
1618+ if getattr(self, 'output_dir', None):
1619+ task.output_dir = self.output_dir
1620+ else:
1621+ Utils.WafError('no output directory')
1622+ if getattr(self, 'doclet', None):
1623+ task.doclet = self.doclet
1624+ else:
1625+ Utils.WafError('no doclet directory')
1626+ if getattr(self, 'package_name', None):
1627+ task.package_name = self.package_name
1628+ else:
1629+ Utils.WafError('no package name')
1630+ if getattr(self, 'package_version', None):
1631+ task.package_version = self.package_version
1632+ if getattr(self, 'packages', None):
1633+ task.packages = Utils.to_list(self.packages)
1634+ if getattr(self, 'vapi_dirs', None):
1635+ task.vapi_dirs = Utils.to_list(self.vapi_dirs)
1636+ if getattr(self, 'files', None):
1637+ task.files = self.files
1638+ else:
1639+ Utils.WafError('no input file')
1640+ if getattr(self, 'protected', None):
1641+ task.protected = self.protected
1642+ if getattr(self, 'private', None):
1643+ task.private = self.private
1644+ if getattr(self, 'inherit', None):
1645+ task.inherit = self.inherit
1646+ if getattr(self, 'deps', None):
1647+ task.deps = self.deps
1648+ if getattr(self, 'enable_non_null_experimental', None):
1649+ task.enable_non_null_experimental = self.enable_non_null_experimental
1650+ if getattr(self, 'force', None):
1651+ task.force = self.force
1652+
1653+def detect(conf):
1654+ conf.find_program('valadoc', var='VALADOC', mandatory=False)
1655+
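A sketch of the 'valadoc' feature above; every attribute value here is a placeholder, only the attribute names come from process_valadoc():

    # hypothetical wscript
    def configure(conf):
        conf.check_tool('valadoc', tooldir='buildtools/wafadmin/3rdparty')

    def build(bld):
        bld(features='valadoc',
            output_dir='apidocs', doclet='html',
            package_name='mylib', package_version='0.1',
            packages='glib-2.0', vapi_dirs='vapi',
            files=[bld.path.find_resource('mylib.vala')])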
1656diff --git a/buildtools/wafadmin/Build.py b/buildtools/wafadmin/Build.py
1657new file mode 100644
1658index 0000000..8e7c72c
1659--- /dev/null
1660+++ b/buildtools/wafadmin/Build.py
1661@@ -0,0 +1,1033 @@
1662+#!/usr/bin/env python
1663+# encoding: utf-8
1664+# Thomas Nagy, 2005 (ita)
1665+
1666+"""
1667+Dependency tree holder
1668+
1669+The class Build holds all the info related to a build:
1670+* file system representation (tree of Node instances)
1671+* various cached objects (task signatures, file scan results, ..)
1672+
1673+There is only one Build object at a time (bld singleton)
1674+"""
1675+
1676+import os, sys, errno, re, glob, gc, datetime, shutil
1677+try: import cPickle
1678+except: import pickle as cPickle
1679+import Runner, TaskGen, Node, Scripting, Utils, Environment, Task, Logs, Options
1680+from Logs import debug, error, info
1681+from Constants import *
1682+
1683+SAVED_ATTRS = 'root srcnode bldnode node_sigs node_deps raw_deps task_sigs id_nodes'.split()
1684+"Build class members to save"
1685+
1686+bld = None
1687+"singleton - safe to use when Waf is not used as a library"
1688+
1689+class BuildError(Utils.WafError):
1690+ def __init__(self, b=None, t=[]):
1691+ self.bld = b
1692+ self.tasks = t
1693+ self.ret = 1
1694+ Utils.WafError.__init__(self, self.format_error())
1695+
1696+ def format_error(self):
1697+ lst = ['Build failed:']
1698+ for tsk in self.tasks:
1699+ txt = tsk.format_error()
1700+ if txt: lst.append(txt)
1701+ sep = ' '
1702+ if len(lst) > 2:
1703+ sep = '\n'
1704+ return sep.join(lst)
1705+
1706+def group_method(fun):
1707+ """
1708+ sets a build context method to execute after the current group has finished executing
1709+ this is useful for installing build files:
1710+ * calling install_files/install_as will fail if called too early
1711+ * people do not want to define install method in their task classes
1712+
1713+ TODO: try it
1714+ """
1715+ def f(*k, **kw):
1716+ if not k[0].is_install:
1717+ return False
1718+
1719+ postpone = True
1720+ if 'postpone' in kw:
1721+ postpone = kw['postpone']
1722+ del kw['postpone']
1723+
1724+ # TODO waf 1.6 in theory there should be no reference to the TaskManager internals here
1725+ if postpone:
1726+ m = k[0].task_manager
1727+ if not m.groups: m.add_group()
1728+ m.groups[m.current_group].post_funs.append((fun, k, kw))
1729+ if not 'cwd' in kw:
1730+ kw['cwd'] = k[0].path
1731+ else:
1732+ fun(*k, **kw)
1733+ return f
1734+
1735+class BuildContext(Utils.Context):
1736+ "holds the dependency tree"
1737+ def __init__(self):
1738+
1739+ # not a singleton, but provided for compatibility
1740+ global bld
1741+ bld = self
1742+
1743+ self.task_manager = Task.TaskManager()
1744+
1745+ # instead of hashing the nodes, we assign them a unique id when they are created
1746+ self.id_nodes = 0
1747+ self.idx = {}
1748+
1749+ # map names to environments, the 'default' must be defined
1750+ self.all_envs = {}
1751+
1752+ # ======================================= #
1753+ # code for reading the scripts
1754+
1755+ # project build directory - do not reset() from load_dirs()
1756+ self.bdir = ''
1757+
1758+ # the current directory from which the code is run
1759+		# the folder changes every time a wscript is read
1760+ self.path = None
1761+
1762+ # Manual dependencies.
1763+ self.deps_man = Utils.DefaultDict(list)
1764+
1765+ # ======================================= #
1766+ # cache variables
1767+
1768+ # local cache for absolute paths - cache_node_abspath[variant][node]
1769+ self.cache_node_abspath = {}
1770+
1771+ # list of folders that are already scanned
1772+ # so that we do not need to stat them one more time
1773+ self.cache_scanned_folders = {}
1774+
1775+ # list of targets to uninstall for removing the empty folders after uninstalling
1776+ self.uninstall = []
1777+
1778+ # ======================================= #
1779+ # tasks and objects
1780+
1781+ # build dir variants (release, debug, ..)
1782+ for v in 'cache_node_abspath task_sigs node_deps raw_deps node_sigs'.split():
1783+ var = {}
1784+ setattr(self, v, var)
1785+
1786+ self.cache_dir_contents = {}
1787+
1788+ self.all_task_gen = []
1789+ self.task_gen_cache_names = {}
1790+ self.cache_sig_vars = {}
1791+ self.log = None
1792+
1793+ self.root = None
1794+ self.srcnode = None
1795+ self.bldnode = None
1796+
1797+ # bind the build context to the nodes in use
1798+ # this means better encapsulation and no build context singleton
1799+ class node_class(Node.Node):
1800+ pass
1801+ self.node_class = node_class
1802+ self.node_class.__module__ = "Node"
1803+ self.node_class.__name__ = "Nodu"
1804+ self.node_class.bld = self
1805+
1806+ self.is_install = None
1807+
1808+ def __copy__(self):
1809+ "nodes are not supposed to be copied"
1810+ raise Utils.WafError('build contexts are not supposed to be cloned')
1811+
1812+ def load(self):
1813+ "load the cache from the disk"
1814+ try:
1815+ env = Environment.Environment(os.path.join(self.cachedir, 'build.config.py'))
1816+ except (IOError, OSError):
1817+ pass
1818+ else:
1819+ if env['version'] < HEXVERSION:
1820+ raise Utils.WafError('Version mismatch! reconfigure the project')
1821+ for t in env['tools']:
1822+ self.setup(**t)
1823+
1824+ try:
1825+ gc.disable()
1826+ f = data = None
1827+
1828+ Node.Nodu = self.node_class
1829+
1830+ try:
1831+ f = open(os.path.join(self.bdir, DBFILE), 'rb')
1832+ except (IOError, EOFError):
1833+ # handle missing file/empty file
1834+ pass
1835+
1836+ try:
1837+ if f: data = cPickle.load(f)
1838+ except AttributeError:
1839+ # handle file of an old Waf version
1840+ # that has an attribute which no longer exist
1841+ # (e.g. AttributeError: 'module' object has no attribute 'BuildDTO')
1842+ if Logs.verbose > 1: raise
1843+
1844+ if data:
1845+ for x in SAVED_ATTRS: setattr(self, x, data[x])
1846+ else:
1847+ debug('build: Build cache loading failed')
1848+
1849+ finally:
1850+ if f: f.close()
1851+ gc.enable()
1852+
1853+ def save(self):
1854+ "store the cache on disk, see self.load"
1855+ gc.disable()
1856+ self.root.__class__.bld = None
1857+
1858+ # some people are very nervous with ctrl+c so we have to make a temporary file
1859+ Node.Nodu = self.node_class
1860+ db = os.path.join(self.bdir, DBFILE)
1861+ file = open(db + '.tmp', 'wb')
1862+ data = {}
1863+ for x in SAVED_ATTRS: data[x] = getattr(self, x)
1864+ cPickle.dump(data, file, -1)
1865+ file.close()
1866+
1867+ # do not use shutil.move
1868+ try: os.unlink(db)
1869+ except OSError: pass
1870+ os.rename(db + '.tmp', db)
1871+ self.root.__class__.bld = self
1872+ gc.enable()
1873+
1874+ # ======================================= #
1875+
1876+ def clean(self):
1877+ debug('build: clean called')
1878+
1879+ # does not clean files created during the configuration
1880+ precious = set([])
1881+ for env in self.all_envs.values():
1882+ for x in env[CFG_FILES]:
1883+ node = self.srcnode.find_resource(x)
1884+ if node:
1885+ precious.add(node.id)
1886+
1887+ def clean_rec(node):
1888+ for x in list(node.childs.keys()):
1889+ nd = node.childs[x]
1890+
1891+ tp = nd.id & 3
1892+ if tp == Node.DIR:
1893+ clean_rec(nd)
1894+ elif tp == Node.BUILD:
1895+ if nd.id in precious: continue
1896+ for env in self.all_envs.values():
1897+ try: os.remove(nd.abspath(env))
1898+ except OSError: pass
1899+ node.childs.__delitem__(x)
1900+
1901+ clean_rec(self.srcnode)
1902+
1903+ for v in 'node_sigs node_deps task_sigs raw_deps cache_node_abspath'.split():
1904+ setattr(self, v, {})
1905+
1906+ def compile(self):
1907+		"""The cache file is not written if nothing was built at all (build is up to date)"""
1908+ debug('build: compile called')
1909+
1910+ """
1911+ import cProfile, pstats
1912+ cProfile.run("import Build\nBuild.bld.flush()", 'profi.txt')
1913+ p = pstats.Stats('profi.txt')
1914+ p.sort_stats('cumulative').print_stats(80)
1915+ """
1916+ self.flush()
1917+ #"""
1918+
1919+ self.generator = Runner.Parallel(self, Options.options.jobs)
1920+
1921+ def dw(on=True):
1922+ if Options.options.progress_bar:
1923+ if on: sys.stderr.write(Logs.colors.cursor_on)
1924+ else: sys.stderr.write(Logs.colors.cursor_off)
1925+
1926+ debug('build: executor starting')
1927+
1928+ back = os.getcwd()
1929+ os.chdir(self.bldnode.abspath())
1930+
1931+ try:
1932+ try:
1933+ dw(on=False)
1934+ self.generator.start()
1935+ except KeyboardInterrupt:
1936+ dw()
1937+ # if self.generator.processed != 1: TODO
1938+ self.save()
1939+ raise
1940+ except Exception:
1941+ dw()
1942+ # do not store anything, for something bad happened
1943+ raise
1944+ else:
1945+ dw()
1946+ #if self.generator.processed != 1: TODO
1947+ self.save()
1948+
1949+ if self.generator.error:
1950+ raise BuildError(self, self.task_manager.tasks_done)
1951+
1952+ finally:
1953+ os.chdir(back)
1954+
1955+ def install(self):
1956+ "this function is called for both install and uninstall"
1957+ debug('build: install called')
1958+
1959+ self.flush()
1960+
1961+ # remove empty folders after uninstalling
1962+ if self.is_install < 0:
1963+ lst = []
1964+ for x in self.uninstall:
1965+ dir = os.path.dirname(x)
1966+ if not dir in lst: lst.append(dir)
1967+ lst.sort()
1968+ lst.reverse()
1969+
1970+ nlst = []
1971+ for y in lst:
1972+ x = y
1973+ while len(x) > 4:
1974+ if not x in nlst: nlst.append(x)
1975+ x = os.path.dirname(x)
1976+
1977+ nlst.sort()
1978+ nlst.reverse()
1979+ for x in nlst:
1980+ try: os.rmdir(x)
1981+ except OSError: pass
1982+
1983+ def new_task_gen(self, *k, **kw):
1984+ if self.task_gen_cache_names:
1985+ self.task_gen_cache_names = {}
1986+
1987+ kw['bld'] = self
1988+ if len(k) == 0:
1989+ ret = TaskGen.task_gen(*k, **kw)
1990+ else:
1991+ cls_name = k[0]
1992+
1993+ try: cls = TaskGen.task_gen.classes[cls_name]
1994+ except KeyError: raise Utils.WscriptError('%s is not a valid task generator -> %s' %
1995+ (cls_name, [x for x in TaskGen.task_gen.classes]))
1996+ ret = cls(*k, **kw)
1997+ return ret
1998+
1999+ def __call__(self, *k, **kw):
2000+ if self.task_gen_cache_names:
2001+ self.task_gen_cache_names = {}
2002+
2003+ kw['bld'] = self
2004+ return TaskGen.task_gen(*k, **kw)
2005+
2006+ def load_envs(self):
2007+ try:
2008+ lst = Utils.listdir(self.cachedir)
2009+ except OSError, e:
2010+ if e.errno == errno.ENOENT:
2011+ raise Utils.WafError('The project was not configured: run "waf configure" first!')
2012+ else:
2013+ raise
2014+
2015+ if not lst:
2016+ raise Utils.WafError('The cache directory is empty: reconfigure the project')
2017+
2018+ for file in lst:
2019+ if file.endswith(CACHE_SUFFIX):
2020+ env = Environment.Environment(os.path.join(self.cachedir, file))
2021+ name = file[:-len(CACHE_SUFFIX)]
2022+
2023+ self.all_envs[name] = env
2024+
2025+ self.init_variants()
2026+
2027+ for env in self.all_envs.values():
2028+ for f in env[CFG_FILES]:
2029+ newnode = self.path.find_or_declare(f)
2030+ try:
2031+ hash = Utils.h_file(newnode.abspath(env))
2032+ except (IOError, AttributeError):
2033+ error("cannot find "+f)
2034+ hash = SIG_NIL
2035+ self.node_sigs[env.variant()][newnode.id] = hash
2036+
2037+ # TODO: hmmm, these nodes are removed from the tree when calling rescan()
2038+ self.bldnode = self.root.find_dir(self.bldnode.abspath())
2039+ self.path = self.srcnode = self.root.find_dir(self.srcnode.abspath())
2040+ self.cwd = self.bldnode.abspath()
2041+
2042+ def setup(self, tool, tooldir=None, funs=None):
2043+ "setup tools for build process"
2044+ if isinstance(tool, list):
2045+ for i in tool: self.setup(i, tooldir)
2046+ return
2047+
2048+ if not tooldir: tooldir = Options.tooldir
2049+
2050+ module = Utils.load_tool(tool, tooldir)
2051+ if hasattr(module, "setup"): module.setup(self)
2052+
2053+ def init_variants(self):
2054+ debug('build: init variants')
2055+
2056+ lstvariants = []
2057+ for env in self.all_envs.values():
2058+ if not env.variant() in lstvariants:
2059+ lstvariants.append(env.variant())
2060+ self.lst_variants = lstvariants
2061+
2062+ debug('build: list of variants is %r', lstvariants)
2063+
2064+ for name in lstvariants+[0]:
2065+ for v in 'node_sigs cache_node_abspath'.split():
2066+ var = getattr(self, v)
2067+ if not name in var:
2068+ var[name] = {}
2069+
2070+ # ======================================= #
2071+ # node and folder handling
2072+
2073+ # this should be the main entry point
2074+ def load_dirs(self, srcdir, blddir, load_cache=1):
2075+		"this function should be the start of everything"
2076+
2077+ assert(os.path.isabs(srcdir))
2078+ assert(os.path.isabs(blddir))
2079+
2080+ self.cachedir = os.path.join(blddir, CACHE_DIR)
2081+
2082+ if srcdir == blddir:
2083+ raise Utils.WafError("build dir must be different from srcdir: %s <-> %s " % (srcdir, blddir))
2084+
2085+ self.bdir = blddir
2086+
2087+ # try to load the cache file, if it does not exist, nothing happens
2088+ self.load()
2089+
2090+ if not self.root:
2091+ Node.Nodu = self.node_class
2092+ self.root = Node.Nodu('', None, Node.DIR)
2093+
2094+ if not self.srcnode:
2095+ self.srcnode = self.root.ensure_dir_node_from_path(srcdir)
2096+ debug('build: srcnode is %s and srcdir %s', self.srcnode.name, srcdir)
2097+
2098+ self.path = self.srcnode
2099+
2100+ # create this build dir if necessary
2101+ try: os.makedirs(blddir)
2102+ except OSError: pass
2103+
2104+ if not self.bldnode:
2105+ self.bldnode = self.root.ensure_dir_node_from_path(blddir)
2106+
2107+ self.init_variants()
2108+
2109+ def rescan(self, src_dir_node):
2110+ """
2111+		look at the contents of a (folder) node and update its list of childs
2112+
2113+ The intent is to perform the following steps
2114+ * remove the nodes for the files that have disappeared
2115+ * remove the signatures for the build files that have disappeared
2116+ * cache the results of os.listdir
2117+ * create the build folder equivalent (mkdir) for each variant
2118+ src/bar -> build/default/src/bar, build/release/src/bar
2119+
2120+ when a folder in the source directory is removed, we do not check recursively
2121+ to remove the unused nodes. To do that, call 'waf clean' and build again.
2122+ """
2123+
2124+ # do not rescan over and over again
2125+ # TODO use a single variable in waf 1.6
2126+ if self.cache_scanned_folders.get(src_dir_node.id, None): return
2127+ self.cache_scanned_folders[src_dir_node.id] = True
2128+
2129+ # TODO remove in waf 1.6
2130+ if hasattr(self, 'repository'): self.repository(src_dir_node)
2131+
2132+ if not src_dir_node.name and sys.platform == 'win32':
2133+ # the root has no name, contains drive letters, and cannot be listed
2134+ return
2135+
2136+
2137+ # first, take the case of the source directory
2138+ parent_path = src_dir_node.abspath()
2139+ try:
2140+ lst = set(Utils.listdir(parent_path))
2141+ except OSError:
2142+ lst = set([])
2143+
2144+ # TODO move this at the bottom
2145+ self.cache_dir_contents[src_dir_node.id] = lst
2146+
2147+ # hash the existing source files, remove the others
2148+ cache = self.node_sigs[0]
2149+ for x in src_dir_node.childs.values():
2150+ if x.id & 3 != Node.FILE: continue
2151+ if x.name in lst:
2152+ try:
2153+ cache[x.id] = Utils.h_file(x.abspath())
2154+ except IOError:
2155+ raise Utils.WafError('The file %s is not readable or has become a dir' % x.abspath())
2156+ else:
2157+ try: del cache[x.id]
2158+ except KeyError: pass
2159+
2160+ del src_dir_node.childs[x.name]
2161+
2162+
2163+ # first obtain the differences between srcnode and src_dir_node
2164+ h1 = self.srcnode.height()
2165+ h2 = src_dir_node.height()
2166+
2167+ lst = []
2168+ child = src_dir_node
2169+ while h2 > h1:
2170+ lst.append(child.name)
2171+ child = child.parent
2172+ h2 -= 1
2173+ lst.reverse()
2174+
2175+ # list the files in the build dirs
2176+ try:
2177+ for variant in self.lst_variants:
2178+ sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
2179+ self.listdir_bld(src_dir_node, sub_path, variant)
2180+ except OSError:
2181+
2182+ # listdir failed, remove the build node signatures for all variants
2183+ for node in src_dir_node.childs.values():
2184+ if node.id & 3 != Node.BUILD:
2185+ continue
2186+
2187+ for dct in self.node_sigs.values():
2188+ if node.id in dct:
2189+ dct.__delitem__(node.id)
2190+
2191+ # the policy is to avoid removing nodes representing directories
2192+ src_dir_node.childs.__delitem__(node.name)
2193+
2194+ for variant in self.lst_variants:
2195+ sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
2196+ try:
2197+ os.makedirs(sub_path)
2198+ except OSError:
2199+ pass
2200+
2201+ # ======================================= #
2202+ def listdir_src(self, parent_node):
2203+ """do not use, kept for compatibility"""
2204+ pass
2205+
2206+ def remove_node(self, node):
2207+ """do not use, kept for compatibility"""
2208+ pass
2209+
2210+ def listdir_bld(self, parent_node, path, variant):
2211+ """in this method we do not add timestamps but we remove them
2212+ when the files no longer exist (file removed in the build dir)"""
2213+
2214+ i_existing_nodes = [x for x in parent_node.childs.values() if x.id & 3 == Node.BUILD]
2215+
2216+ lst = set(Utils.listdir(path))
2217+ node_names = set([x.name for x in i_existing_nodes])
2218+ remove_names = node_names - lst
2219+
2220+ # remove the stamps of the build nodes that no longer exist on the filesystem
2221+ ids_to_remove = [x.id for x in i_existing_nodes if x.name in remove_names]
2222+ cache = self.node_sigs[variant]
2223+ for nid in ids_to_remove:
2224+ if nid in cache:
2225+ cache.__delitem__(nid)
2226+
2227+ def get_env(self):
2228+ return self.env_of_name('default')
2229+ def set_env(self, name, val):
2230+ self.all_envs[name] = val
2231+
2232+ env = property(get_env, set_env)
2233+
2234+ def add_manual_dependency(self, path, value):
2235+ if isinstance(path, Node.Node):
2236+ node = path
2237+ elif os.path.isabs(path):
2238+ node = self.root.find_resource(path)
2239+ else:
2240+ node = self.path.find_resource(path)
2241+ self.deps_man[node.id].append(value)
2242+
2243+ def launch_node(self):
2244+ """return the launch directory as a node"""
2245+		# p_ln is kind of private, but kept public just in case
2246+ try:
2247+ return self.p_ln
2248+ except AttributeError:
2249+ self.p_ln = self.root.find_dir(Options.launch_dir)
2250+ return self.p_ln
2251+
2252+ def glob(self, pattern, relative=True):
2253+ "files matching the pattern, seen from the current folder"
2254+ path = self.path.abspath()
2255+ files = [self.root.find_resource(x) for x in glob.glob(path+os.sep+pattern)]
2256+ if relative:
2257+ files = [x.path_to_parent(self.path) for x in files if x]
2258+ else:
2259+ files = [x.abspath() for x in files if x]
2260+ return files
2261+
2262+ ## the following methods are candidates for the stable apis ##
2263+
2264+ def add_group(self, *k):
2265+ self.task_manager.add_group(*k)
2266+
2267+ def set_group(self, *k, **kw):
2268+ self.task_manager.set_group(*k, **kw)
2269+
2270+ def hash_env_vars(self, env, vars_lst):
2271+ """hash environment variables
2272+ ['CXX', ..] -> [env['CXX'], ..] -> md5()"""
2273+
2274+ # ccroot objects use the same environment for building the .o at once
2275+ # the same environment and the same variables are used
2276+
2277+ idx = str(id(env)) + str(vars_lst)
2278+ try: return self.cache_sig_vars[idx]
2279+ except KeyError: pass
2280+
2281+ lst = [str(env[a]) for a in vars_lst]
2282+ ret = Utils.h_list(lst)
2283+ debug('envhash: %r %r', ret, lst)
2284+
2285+ # next time
2286+ self.cache_sig_vars[idx] = ret
2287+ return ret
2288+
2289+ def name_to_obj(self, name, env):
2290+ """retrieve a task generator from its name or its target name
2291+ remember that names must be unique"""
2292+ cache = self.task_gen_cache_names
2293+ if not cache:
2294+ # create the index lazily
2295+ for x in self.all_task_gen:
2296+ vt = x.env.variant() + '_'
2297+ if x.name:
2298+ cache[vt + x.name] = x
2299+ else:
2300+ if isinstance(x.target, str):
2301+ target = x.target
2302+ else:
2303+ target = ' '.join(x.target)
2304+ v = vt + target
2305+ if not cache.get(v, None):
2306+ cache[v] = x
2307+ return cache.get(env.variant() + '_' + name, None)
2308+
2309+ def flush(self, all=1):
2310+ """tell the task generators to create the tasks"""
2311+
2312+ self.ini = datetime.datetime.now()
2313+ # force the initialization of the mapping name->object in flush
2314+ # name_to_obj can be used in userland scripts, in that case beware of incomplete mapping
2315+ self.task_gen_cache_names = {}
2316+ self.name_to_obj('', self.env)
2317+
2318+ debug('build: delayed operation TaskGen.flush() called')
2319+
2320+ if Options.options.compile_targets:
2321+ debug('task_gen: posting objects %r listed in compile_targets', Options.options.compile_targets)
2322+
2323+ mana = self.task_manager
2324+ to_post = []
2325+ min_grp = 0
2326+
2327+ # ensure the target names exist, fail before any post()
2328+ target_objects = Utils.DefaultDict(list)
2329+ for target_name in Options.options.compile_targets.split(','):
2330+ # trim target_name (handle cases when the user added spaces to targets)
2331+ target_name = target_name.strip()
2332+ for env in self.all_envs.values():
2333+ tg = self.name_to_obj(target_name, env)
2334+ if tg:
2335+ target_objects[target_name].append(tg)
2336+
2337+ m = mana.group_idx(tg)
2338+ if m > min_grp:
2339+ min_grp = m
2340+ to_post = [tg]
2341+ elif m == min_grp:
2342+ to_post.append(tg)
2343+
2344+ if not target_name in target_objects and all:
2345+ raise Utils.WafError("target '%s' does not exist" % target_name)
2346+
2347+ debug('group: Forcing up to group %s for target %s', mana.group_name(min_grp), Options.options.compile_targets)
2348+
2349+ # post all the task generators in previous groups
2350+ for i in xrange(len(mana.groups)):
2351+ mana.current_group = i
2352+ if i == min_grp:
2353+ break
2354+ g = mana.groups[i]
2355+ debug('group: Forcing group %s', mana.group_name(g))
2356+ for t in g.tasks_gen:
2357+ debug('group: Posting %s', t.name or t.target)
2358+ t.post()
2359+
2360+ # then post the task generators listed in compile_targets in the last group
2361+ for t in to_post:
2362+ t.post()
2363+
2364+ else:
2365+ debug('task_gen: posting objects (normal)')
2366+ ln = self.launch_node()
2367+ # if the build is started from the build directory, do as if it was started from the top-level
2368+ # for the pretty-printing (Node.py), the two lines below cannot be moved to Build::launch_node
2369+ if ln.is_child_of(self.bldnode) or not ln.is_child_of(self.srcnode):
2370+ ln = self.srcnode
2371+
2372+ # if the project file is located under the source directory, build all targets by default
2373+ # else 'waf configure build' does nothing
2374+ proj_node = self.root.find_dir(os.path.split(Utils.g_module.root_path)[0])
2375+ if proj_node.id != self.srcnode.id:
2376+ ln = self.srcnode
2377+
2378+ for i in xrange(len(self.task_manager.groups)):
2379+ g = self.task_manager.groups[i]
2380+ self.task_manager.current_group = i
2381+ if Logs.verbose:
2382+ groups = [x for x in self.task_manager.groups_names if id(self.task_manager.groups_names[x]) == id(g)]
2383+ name = groups and groups[0] or 'unnamed'
2384+ Logs.debug('group: group', name)
2385+ for tg in g.tasks_gen:
2386+ if not tg.path.is_child_of(ln):
2387+ continue
2388+ if Logs.verbose:
2389+ Logs.debug('group: %s' % tg)
2390+ tg.post()
2391+
2392+ def env_of_name(self, name):
2393+ try:
2394+ return self.all_envs[name]
2395+ except KeyError:
2396+ error('no such environment: '+name)
2397+ return None
2398+
2399+ def progress_line(self, state, total, col1, col2):
2400+ n = len(str(total))
2401+
2402+ Utils.rot_idx += 1
2403+ ind = Utils.rot_chr[Utils.rot_idx % 4]
2404+
2405+ ini = self.ini
2406+
2407+ pc = (100.*state)/total
2408+ eta = Utils.get_elapsed_time(ini)
2409+ fs = "[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s][" % (n, n, ind)
2410+ left = fs % (state, total, col1, pc, col2)
2411+ right = '][%s%s%s]' % (col1, eta, col2)
2412+
2413+ cols = Utils.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2)
2414+ if cols < 7: cols = 7
2415+
2416+ ratio = int((cols*state)/total) - 1
2417+
2418+ bar = ('='*ratio+'>').ljust(cols)
2419+ msg = Utils.indicator % (left, bar, right)
2420+
2421+ return msg
2422+
2423+
2424+ # do_install is not used anywhere
2425+ def do_install(self, src, tgt, chmod=O644):
2426+ """returns true if the file was effectively installed or uninstalled, false otherwise"""
2427+ if self.is_install > 0:
2428+ if not Options.options.force:
2429+ # check if the file is already there to avoid a copy
2430+ try:
2431+ st1 = os.stat(tgt)
2432+ st2 = os.stat(src)
2433+ except OSError:
2434+ pass
2435+ else:
2436+ # same size and identical timestamps -> make no copy
2437+ if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
2438+ return False
2439+
2440+ srclbl = src.replace(self.srcnode.abspath(None)+os.sep, '')
2441+ info("* installing %s as %s" % (srclbl, tgt))
2442+
2443+ # following is for shared libs and stale inodes (-_-)
2444+ try: os.remove(tgt)
2445+ except OSError: pass
2446+
2447+ try:
2448+ shutil.copy2(src, tgt)
2449+ os.chmod(tgt, chmod)
2450+ except IOError:
2451+ try:
2452+ os.stat(src)
2453+ except (OSError, IOError):
2454+ error('File %r does not exist' % src)
2455+ raise Utils.WafError('Could not install the file %r' % tgt)
2456+ return True
2457+
2458+ elif self.is_install < 0:
2459+ info("* uninstalling %s" % tgt)
2460+
2461+ self.uninstall.append(tgt)
2462+
2463+ try:
2464+ os.remove(tgt)
2465+ except OSError, e:
2466+ if e.errno != errno.ENOENT:
2467+ if not getattr(self, 'uninstall_error', None):
2468+ self.uninstall_error = True
2469+ Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
2470+ if Logs.verbose > 1:
2471+ Logs.warn('could not remove %s (error code %r)' % (e.filename, e.errno))
2472+ return True
2473+
2474+ red = re.compile(r"^([A-Za-z]:)?[/\\\\]*")
2475+ def get_install_path(self, path, env=None):
2476+ "installation path prefixed by the destdir, the variables like in '${PREFIX}/bin' are substituted"
2477+ if not env: env = self.env
2478+ destdir = env.get_destdir()
2479+ path = path.replace('/', os.sep)
2480+ destpath = Utils.subst_vars(path, env)
2481+ if destdir:
2482+ destpath = os.path.join(destdir, self.red.sub('', destpath))
2483+ return destpath
2484+
2485+ def install_dir(self, path, env=None):
2486+ """
2487+ create empty folders for the installation (very rarely used)
2488+ """
2489+ if env:
2490+ assert isinstance(env, Environment.Environment), "invalid parameter"
2491+ else:
2492+ env = self.env
2493+
2494+ if not path:
2495+ return []
2496+
2497+ destpath = self.get_install_path(path, env)
2498+
2499+ if self.is_install > 0:
2500+ info('* creating %s' % destpath)
2501+ Utils.check_dir(destpath)
2502+ elif self.is_install < 0:
2503+ info('* removing %s' % destpath)
2504+ self.uninstall.append(destpath + '/xxx') # yes, ugly
2505+
2506+ def install_files(self, path, files, env=None, chmod=O644, relative_trick=False, cwd=None):
2507+ """To install files only after they have been built, put the calls in a method named
2508+ post_build on the top-level wscript
2509+
2510+ The files must be a list and contain paths as strings or as Nodes
2511+
2512+ The relative_trick flag can be set to install folders, use bld.path.ant_glob() with it
2513+ """
2514+ if env:
2515+ assert isinstance(env, Environment.Environment), "invalid parameter"
2516+ else:
2517+ env = self.env
2518+
2519+ if not path: return []
2520+
2521+ if not cwd:
2522+ cwd = self.path
2523+
2524+ if isinstance(files, str) and '*' in files:
2525+ gl = cwd.abspath() + os.sep + files
2526+ lst = glob.glob(gl)
2527+ else:
2528+ lst = Utils.to_list(files)
2529+
2530+ if not getattr(lst, '__iter__', False):
2531+ lst = [lst]
2532+
2533+ destpath = self.get_install_path(path, env)
2534+
2535+ Utils.check_dir(destpath)
2536+
2537+ installed_files = []
2538+ for filename in lst:
2539+ if isinstance(filename, str) and os.path.isabs(filename):
2540+ alst = Utils.split_path(filename)
2541+ destfile = os.path.join(destpath, alst[-1])
2542+ else:
2543+ if isinstance(filename, Node.Node):
2544+ nd = filename
2545+ else:
2546+ nd = cwd.find_resource(filename)
2547+ if not nd:
2548+ raise Utils.WafError("Unable to install the file %r (not found in %s)" % (filename, cwd))
2549+
2550+ if relative_trick:
2551+ destfile = os.path.join(destpath, filename)
2552+ Utils.check_dir(os.path.dirname(destfile))
2553+ else:
2554+ destfile = os.path.join(destpath, nd.name)
2555+
2556+ filename = nd.abspath(env)
2557+
2558+ if self.do_install(filename, destfile, chmod):
2559+ installed_files.append(destfile)
2560+ return installed_files
2561+
2562+ def install_as(self, path, srcfile, env=None, chmod=O644, cwd=None):
2563+ """
2564+ srcfile may be a string or a Node representing the file to install
2565+
2566+ returns True if the file was effectively installed, False otherwise
2567+ """
2568+ if env:
2569+ assert isinstance(env, Environment.Environment), "invalid parameter"
2570+ else:
2571+ env = self.env
2572+
2573+ if not path:
2574+ raise Utils.WafError("where do you want to install %r? (%r?)" % (srcfile, path))
2575+
2576+ if not cwd:
2577+ cwd = self.path
2578+
2579+ destpath = self.get_install_path(path, env)
2580+
2581+ dir, name = os.path.split(destpath)
2582+ Utils.check_dir(dir)
2583+
2584+ # the source path
2585+ if isinstance(srcfile, Node.Node):
2586+ src = srcfile.abspath(env)
2587+ else:
2588+ src = srcfile
2589+ if not os.path.isabs(srcfile):
2590+ node = cwd.find_resource(srcfile)
2591+ if not node:
2592+ raise Utils.WafError("Unable to install the file %r (not found in %s)" % (srcfile, cwd))
2593+ src = node.abspath(env)
2594+
2595+ return self.do_install(src, destpath, chmod)
2596+
2597+ def symlink_as(self, path, src, env=None, cwd=None):
2598+ """example: bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3') """
2599+
2600+ if sys.platform == 'win32':
2601+ # well, this *cannot* work
2602+ return
2603+
2604+ if not path:
2605+ raise Utils.WafError("where do you want to install %r? (%r?)" % (src, path))
2606+
2607+ tgt = self.get_install_path(path, env)
2608+
2609+ dir, name = os.path.split(tgt)
2610+ Utils.check_dir(dir)
2611+
2612+ if self.is_install > 0:
2613+ link = False
2614+ if not os.path.islink(tgt):
2615+ link = True
2616+ elif os.readlink(tgt) != src:
2617+ link = True
2618+
2619+ if link:
2620+ try: os.remove(tgt)
2621+ except OSError: pass
2622+
2623+ info('* symlink %s (-> %s)' % (tgt, src))
2624+ os.symlink(src, tgt)
2625+ return 0
2626+
2627+ else: # UNINSTALL
2628+ try:
2629+ info('* removing %s' % (tgt))
2630+ os.remove(tgt)
2631+ return 0
2632+ except OSError:
2633+ return 1
2634+
2635+ def exec_command(self, cmd, **kw):
2636+ # 'runner' zone is printed out for waf -v, see wafadmin/Options.py
2637+ debug('runner: system command -> %s', cmd)
2638+ if self.log:
2639+ self.log.write('%s\n' % cmd)
2640+ kw['log'] = self.log
2641+ try:
2642+ if not kw.get('cwd', None):
2643+ kw['cwd'] = self.cwd
2644+ except AttributeError:
2645+ self.cwd = kw['cwd'] = self.bldnode.abspath()
2646+ return Utils.exec_command(cmd, **kw)
2647+
2648+ def printout(self, s):
2649+ f = self.log or sys.stderr
2650+ f.write(s)
2651+ f.flush()
2652+
2653+ def add_subdirs(self, dirs):
2654+ self.recurse(dirs, 'build')
2655+
2656+ def pre_recurse(self, name_or_mod, path, nexdir):
2657+ if not hasattr(self, 'oldpath'):
2658+ self.oldpath = []
2659+ self.oldpath.append(self.path)
2660+ self.path = self.root.find_dir(nexdir)
2661+ return {'bld': self, 'ctx': self}
2662+
2663+ def post_recurse(self, name_or_mod, path, nexdir):
2664+ self.path = self.oldpath.pop()
2665+
2666+ ###### user-defined behaviour
2667+
2668+ def pre_build(self):
2669+ if hasattr(self, 'pre_funs'):
2670+ for m in self.pre_funs:
2671+ m(self)
2672+
2673+ def post_build(self):
2674+ if hasattr(self, 'post_funs'):
2675+ for m in self.post_funs:
2676+ m(self)
2677+
2678+ def add_pre_fun(self, meth):
2679+ try: self.pre_funs.append(meth)
2680+ except AttributeError: self.pre_funs = [meth]
2681+
2682+ def add_post_fun(self, meth):
2683+ try: self.post_funs.append(meth)
2684+ except AttributeError: self.post_funs = [meth]
2685+
2686+ def use_the_magic(self):
2687+ Task.algotype = Task.MAXPARALLEL
2688+ Task.file_deps = Task.extract_deps
2689+ self.magic = True
2690+
2691+ install_as = group_method(install_as)
2692+ install_files = group_method(install_files)
2693+ symlink_as = group_method(symlink_as)
2694+
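
The three install helpers defined above (install_files, install_as, symlink_as) are wrapped by group_method, so calls made from a wscript's build() function are queued and only executed once the current task group has finished. A minimal usage sketch, not part of the patch: the target paths and file names are invented, and '${PREFIX}' is resolved by get_install_path() through Utils.subst_vars():

# illustrative only, a hypothetical wscript build()
def build(bld):
	bld.install_files('${PREFIX}/share/doc/demo', 'README NEWS')
	bld.install_as('${PREFIX}/etc/demo.conf', 'demo.conf.sample')
	# mirrors the symlink_as docstring example; a no-op on win32
	bld.symlink_as('${PREFIX}/lib/libdemo.so', 'libdemo.so.1.2.3')
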
2695diff --git a/buildtools/wafadmin/Configure.py b/buildtools/wafadmin/Configure.py
2696new file mode 100644
2697index 0000000..35b4e51
2698--- /dev/null
2699+++ b/buildtools/wafadmin/Configure.py
2700@@ -0,0 +1,444 @@
2701+#!/usr/bin/env python
2702+# encoding: utf-8
2703+# Thomas Nagy, 2005-2008 (ita)
2704+
2705+"""
2706+Configuration system
2707+
2708+A configuration instance is created when "waf configure" is called, it is used to:
2709+* create data dictionaries (Environment instances)
2710+* store the list of modules to import
2711+
2712+The old model (copied from Scons) was to store logic (mapping file extensions to functions)
2713+along with the data. In Waf a way was found to separate that logic by adding an indirection
2714+layer (storing the names in the Environment instances)
2715+
2716+In the new model, the logic is more object-oriented, and the user scripts provide the
2717+logic. The data files (Environments) must contain configuration data only (flags, ..).
2718+
2719+Note: the c/c++ related code is in the module config_c
2720+"""
2721+
2722+import os, shlex, sys, time
2723+try: import cPickle
2724+except ImportError: import pickle as cPickle
2725+import Environment, Utils, Options, Logs
2726+from Logs import warn
2727+from Constants import *
2728+
2729+try:
2730+ from urllib import request
2731+except:
2732+ from urllib import urlopen
2733+else:
2734+ urlopen = request.urlopen
2735+
2736+conf_template = '''# project %(app)s configured on %(now)s by
2737+# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
2738+# using %(args)s
2739+#
2740+'''
2741+
2742+class ConfigurationError(Utils.WscriptError):
2743+ pass
2744+
2745+autoconfig = False
2746+"reconfigure the project automatically"
2747+
2748+def find_file(filename, path_list):
2749+ """find a file in a list of paths
2750+ @param filename: name of the file to search for
2751+ @param path_list: list of directories to search
2752+	@return: the first occurrence of filename or '' if filename could not be found
2753+"""
2754+ for directory in Utils.to_list(path_list):
2755+ if os.path.exists(os.path.join(directory, filename)):
2756+ return directory
2757+ return ''
2758+
2759+def find_program_impl(env, filename, path_list=[], var=None, environ=None):
2760+ """find a program in folders path_lst, and sets env[var]
2761+ @param env: environment
2762+ @param filename: name of the program to search for
2763+ @param path_list: list of directories to search for filename
2764+ @param var: environment value to be checked for in env or os.environ
2765+ @return: either the value that is referenced with [var] in env or os.environ
2766+	or the first occurrence of filename or '' if filename could not be found
2767+"""
2768+
2769+ if not environ:
2770+ environ = os.environ
2771+
2772+ try: path_list = path_list.split()
2773+ except AttributeError: pass
2774+
2775+ if var:
2776+ if env[var]: return env[var]
2777+ if var in environ: env[var] = environ[var]
2778+
2779+ if not path_list: path_list = environ.get('PATH', '').split(os.pathsep)
2780+
2781+ ext = (Options.platform == 'win32') and '.exe,.com,.bat,.cmd' or ''
2782+ for y in [filename+x for x in ext.split(',')]:
2783+ for directory in path_list:
2784+ x = os.path.join(directory, y)
2785+ if os.path.isfile(x):
2786+ if var: env[var] = x
2787+ return x
2788+ return ''
2789+
2790+class ConfigurationContext(Utils.Context):
2791+ tests = {}
2792+ error_handlers = []
2793+ def __init__(self, env=None, blddir='', srcdir=''):
2794+ self.env = None
2795+ self.envname = ''
2796+
2797+ self.environ = dict(os.environ)
2798+
2799+ self.line_just = 40
2800+
2801+ self.blddir = blddir
2802+ self.srcdir = srcdir
2803+ self.all_envs = {}
2804+
2805+ # curdir: necessary for recursion
2806+ self.cwd = self.curdir = os.getcwd()
2807+
2808+ self.tools = [] # tools loaded in the configuration, and that will be loaded when building
2809+
2810+ self.setenv(DEFAULT)
2811+
2812+ self.lastprog = ''
2813+
2814+ self.hash = 0
2815+ self.files = []
2816+
2817+ self.tool_cache = []
2818+
2819+ if self.blddir:
2820+ self.post_init()
2821+
2822+ def post_init(self):
2823+
2824+ self.cachedir = os.path.join(self.blddir, CACHE_DIR)
2825+
2826+ path = os.path.join(self.blddir, WAF_CONFIG_LOG)
2827+ try: os.unlink(path)
2828+ except (OSError, IOError): pass
2829+
2830+ try:
2831+ self.log = open(path, 'w')
2832+ except (OSError, IOError):
2833+ self.fatal('could not open %r for writing' % path)
2834+
2835+ app = Utils.g_module.APPNAME
2836+ if app:
2837+ ver = getattr(Utils.g_module, 'VERSION', '')
2838+ if ver:
2839+ app = "%s (%s)" % (app, ver)
2840+
2841+ now = time.ctime()
2842+ pyver = sys.hexversion
2843+ systype = sys.platform
2844+ args = " ".join(sys.argv)
2845+ wafver = WAFVERSION
2846+ abi = ABI
2847+ self.log.write(conf_template % vars())
2848+
2849+ def __del__(self):
2850+ """cleanup function: close config.log"""
2851+
2852+		# may be run by the gc, not always after initialization
2853+ if hasattr(self, 'log') and self.log:
2854+ self.log.close()
2855+
2856+ def fatal(self, msg):
2857+ raise ConfigurationError(msg)
2858+
2859+ def check_tool(self, input, tooldir=None, funs=None):
2860+ "load a waf tool"
2861+
2862+ tools = Utils.to_list(input)
2863+ if tooldir: tooldir = Utils.to_list(tooldir)
2864+ for tool in tools:
2865+ tool = tool.replace('++', 'xx')
2866+ if tool == 'java': tool = 'javaw'
2867+ if tool.lower() == 'unittest': tool = 'unittestw'
2868+ # avoid loading the same tool more than once with the same functions
2869+ # used by composite projects
2870+
2871+ mag = (tool, id(self.env), funs)
2872+ if mag in self.tool_cache:
2873+ continue
2874+ self.tool_cache.append(mag)
2875+
2876+ module = None
2877+ try:
2878+ module = Utils.load_tool(tool, tooldir)
2879+ except Exception, e:
2880+ ex = e
2881+ if Options.options.download:
2882+ _3rdparty = os.path.normpath(Options.tooldir[0] + os.sep + '..' + os.sep + '3rdparty')
2883+
2884+ # try to download the tool from the repository then
2885+ # the default is set to false
2886+ for x in Utils.to_list(Options.remote_repo):
2887+ for sub in ['branches/waf-%s/wafadmin/3rdparty' % WAFVERSION, 'trunk/wafadmin/3rdparty']:
2888+ url = '/'.join((x, sub, tool + '.py'))
2889+ try:
2890+ web = urlopen(url)
2891+ if web.getcode() != 200:
2892+ continue
2893+ except Exception, e:
2894+ # on python3 urlopen throws an exception
2895+ continue
2896+ else:
2897+ loc = None
2898+ try:
2899+ loc = open(_3rdparty + os.sep + tool + '.py', 'wb')
2900+ loc.write(web.read())
2901+ web.close()
2902+ finally:
2903+ if loc:
2904+ loc.close()
2905+ Logs.warn('downloaded %s from %s' % (tool, url))
2906+ try:
2907+ module = Utils.load_tool(tool, tooldir)
2908+ except:
2909+ Logs.warn('module %s from %s is unusable' % (tool, url))
2910+ try:
2911+ os.unlink(_3rdparty + os.sep + tool + '.py')
2912+ except:
2913+ pass
2914+ continue
2915+ else:
2916+ break
2917+
2918+ if not module:
2919+ Logs.error('Could not load the tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
2920+ raise ex
2921+ else:
2922+ Logs.error('Could not load the tool %r in %r (try the --download option?):\n%s' % (tool, sys.path, e))
2923+ raise ex
2924+
2925+ if funs is not None:
2926+ self.eval_rules(funs)
2927+ else:
2928+ func = getattr(module, 'detect', None)
2929+ if func:
2930+ if type(func) is type(find_file): func(self)
2931+ else: self.eval_rules(func)
2932+
2933+ self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
2934+
2935+ def sub_config(self, k):
2936+ "executes the configure function of a wscript module"
2937+ self.recurse(k, name='configure')
2938+
2939+ def pre_recurse(self, name_or_mod, path, nexdir):
2940+ return {'conf': self, 'ctx': self}
2941+
2942+ def post_recurse(self, name_or_mod, path, nexdir):
2943+ if not autoconfig:
2944+ return
2945+ self.hash = hash((self.hash, getattr(name_or_mod, 'waf_hash_val', name_or_mod)))
2946+ self.files.append(path)
2947+
2948+ def store(self, file=''):
2949+ "save the config results into the cache file"
2950+ if not os.path.isdir(self.cachedir):
2951+ os.makedirs(self.cachedir)
2952+
2953+ if not file:
2954+ file = open(os.path.join(self.cachedir, 'build.config.py'), 'w')
2955+ file.write('version = 0x%x\n' % HEXVERSION)
2956+ file.write('tools = %r\n' % self.tools)
2957+ file.close()
2958+
2959+ if not self.all_envs:
2960+ self.fatal('nothing to store in the configuration context!')
2961+ for key in self.all_envs:
2962+ tmpenv = self.all_envs[key]
2963+ tmpenv.store(os.path.join(self.cachedir, key + CACHE_SUFFIX))
2964+
2965+ def set_env_name(self, name, env):
2966+ "add a new environment called name"
2967+ self.all_envs[name] = env
2968+ return env
2969+
2970+ def retrieve(self, name, fromenv=None):
2971+ "retrieve an environment called name"
2972+ try:
2973+ env = self.all_envs[name]
2974+ except KeyError:
2975+ env = Environment.Environment()
2976+ env['PREFIX'] = os.path.abspath(os.path.expanduser(Options.options.prefix))
2977+ self.all_envs[name] = env
2978+ else:
2979+ if fromenv: warn("The environment %s may have been configured already" % name)
2980+ return env
2981+
2982+ def setenv(self, name):
2983+ "enable the environment called name"
2984+ self.env = self.retrieve(name)
2985+ self.envname = name
2986+
2987+ def add_os_flags(self, var, dest=None):
2988+ # do not use 'get' to make certain the variable is not defined
2989+ try: self.env.append_value(dest or var, Utils.to_list(self.environ[var]))
2990+ except KeyError: pass
2991+
2992+ def check_message_1(self, sr):
2993+ self.line_just = max(self.line_just, len(sr))
2994+ for x in ('\n', self.line_just * '-', '\n', sr, '\n'):
2995+ self.log.write(x)
2996+ Utils.pprint('NORMAL', "%s :" % sr.ljust(self.line_just), sep='')
2997+
2998+ def check_message_2(self, sr, color='GREEN'):
2999+ self.log.write(sr)
3000+ self.log.write('\n')
3001+ Utils.pprint(color, sr)
3002+
3003+ def check_message(self, th, msg, state, option=''):
3004+ sr = 'Checking for %s %s' % (th, msg)
3005+ self.check_message_1(sr)
3006+ p = self.check_message_2
3007+ if state: p('ok ' + str(option))
3008+ else: p('not found', 'YELLOW')
3009+
3010+ # FIXME remove in waf 1.6
3011+ # the parameter 'option' is not used (kept for compatibility)
3012+ def check_message_custom(self, th, msg, custom, option='', color='PINK'):
3013+ sr = 'Checking for %s %s' % (th, msg)
3014+ self.check_message_1(sr)
3015+ self.check_message_2(custom, color)
3016+
3017+ def msg(self, msg, result, color=None):
3018+ """Prints a configuration message 'Checking for xxx: ok'"""
3019+ self.start_msg('Checking for ' + msg)
3020+
3021+ if not isinstance(color, str):
3022+ color = result and 'GREEN' or 'YELLOW'
3023+
3024+ self.end_msg(result, color)
3025+
3026+ def start_msg(self, msg):
3027+ try:
3028+ if self.in_msg:
3029+ return
3030+ except:
3031+ self.in_msg = 0
3032+ self.in_msg += 1
3033+
3034+ self.line_just = max(self.line_just, len(msg))
3035+ for x in ('\n', self.line_just * '-', '\n', msg, '\n'):
3036+ self.log.write(x)
3037+ Utils.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='')
3038+
3039+ def end_msg(self, result, color):
3040+ self.in_msg -= 1
3041+ if self.in_msg:
3042+ return
3043+
3044+ if not color:
3045+ color = 'GREEN'
3046+ if result == True:
3047+ msg = 'ok'
3048+ elif result == False:
3049+ msg = 'not found'
3050+ color = 'YELLOW'
3051+ else:
3052+ msg = str(result)
3053+
3054+ self.log.write(msg)
3055+ self.log.write('\n')
3056+ Utils.pprint(color, msg)
3057+
3058+ def find_program(self, filename, path_list=[], var=None, mandatory=False):
3059+ "wrapper that adds a configuration message"
3060+
3061+ ret = None
3062+ if var:
3063+ if self.env[var]:
3064+ ret = self.env[var]
3065+ elif var in os.environ:
3066+ ret = os.environ[var]
3067+
3068+ if not isinstance(filename, list): filename = [filename]
3069+ if not ret:
3070+ for x in filename:
3071+ ret = find_program_impl(self.env, x, path_list, var, environ=self.environ)
3072+ if ret: break
3073+
3074+ self.check_message_1('Checking for program %s' % ' or '.join(filename))
3075+ self.log.write(' find program=%r paths=%r var=%r\n -> %r\n' % (filename, path_list, var, ret))
3076+ if ret:
3077+ Utils.pprint('GREEN', str(ret))
3078+ else:
3079+ Utils.pprint('YELLOW', 'not found')
3080+ if mandatory:
3081+ self.fatal('The program %r is required' % filename)
3082+
3083+ if var:
3084+ self.env[var] = ret
3085+ return ret
3086+
3087+ def cmd_to_list(self, cmd):
3088+ "commands may be written in pseudo shell like 'ccache g++'"
3089+ if isinstance(cmd, str) and cmd.find(' '):
3090+ try:
3091+ os.stat(cmd)
3092+ except OSError:
3093+ return shlex.split(cmd)
3094+ else:
3095+ return [cmd]
3096+ return cmd
3097+
3098+ def __getattr__(self, name):
3099+ r = self.__class__.__dict__.get(name, None)
3100+ if r: return r
3101+ if name and name.startswith('require_'):
3102+
3103+ for k in ['check_', 'find_']:
3104+ n = name.replace('require_', k)
3105+ ret = self.__class__.__dict__.get(n, None)
3106+ if ret:
3107+ def run(*k, **kw):
3108+ r = ret(self, *k, **kw)
3109+ if not r:
3110+ self.fatal('requirement failure')
3111+ return r
3112+ return run
3113+ self.fatal('No such method %r' % name)
3114+
3115+ def eval_rules(self, rules):
3116+ self.rules = Utils.to_list(rules)
3117+ for x in self.rules:
3118+ f = getattr(self, x)
3119+ if not f: self.fatal("No such method '%s'." % x)
3120+ try:
3121+ f()
3122+ except Exception, e:
3123+ ret = self.err_handler(x, e)
3124+ if ret == BREAK:
3125+ break
3126+ elif ret == CONTINUE:
3127+ continue
3128+ else:
3129+ self.fatal(e)
3130+
3131+ def err_handler(self, fun, error):
3132+ pass
3133+
3134+def conf(f):
3135+ "decorator: attach new configuration functions"
3136+ setattr(ConfigurationContext, f.__name__, f)
3137+ return f
3138+
3139+def conftest(f):
3140+ "decorator: attach new configuration tests (registered as strings)"
3141+ ConfigurationContext.tests[f.__name__] = f
3142+ return conf(f)
3143+
3144+
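
The configuration context above is driven from a project wscript: the conf decorator attaches new methods to ConfigurationContext, find_program records its result under env[var], and check_tool loads tool modules and runs their detect functions. A hedged sketch follows; the 'gcc' tool and the 'sed' check are assumptions for illustration, not something this patch sets up:

# illustrative only, a hypothetical wscript configure()
from Configure import conf

@conf
def find_sed(conf):
	# stored in conf.env['SED']; mandatory=True aborts configuration when missing
	return conf.find_program('sed', var='SED', mandatory=True)

def configure(conf):
	conf.check_tool('gcc')                   # assumed to ship with waf 1.5 under Tools/
	conf.find_sed()                          # attached to the context by @conf above
	conf.env.append_value('CCFLAGS', '-O2')
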
3145diff --git a/buildtools/wafadmin/Constants.py b/buildtools/wafadmin/Constants.py
3146new file mode 100644
3147index 0000000..e67dda6
3148--- /dev/null
3149+++ b/buildtools/wafadmin/Constants.py
3150@@ -0,0 +1,76 @@
3151+#!/usr/bin/env python
3152+# encoding: utf-8
3153+# Yinon dot me gmail 2008
3154+
3155+"""
3156+these constants are somewhat public, try not to mess with them
3157+
3158+maintainer: the version number is updated from the top-level wscript file
3159+"""
3160+
3161+# do not touch these three lines, they are updated automatically
3162+HEXVERSION=0x105019
3163+WAFVERSION="1.5.19"
3164+WAFREVISION = "9709M"
3165+ABI = 7
3166+
3167+# permissions
3168+O644 = 420
3169+O755 = 493
3170+
3171+MAXJOBS = 99999999
3172+
3173+CACHE_DIR = 'c4che'
3174+CACHE_SUFFIX = '.cache.py'
3175+DBFILE = '.wafpickle-%d' % ABI
3176+WSCRIPT_FILE = 'wscript'
3177+WSCRIPT_BUILD_FILE = 'wscript_build'
3178+WAF_CONFIG_LOG = 'config.log'
3179+WAF_CONFIG_H = 'config.h'
3180+
3181+SIG_NIL = 'iluvcuteoverload'
3182+
3183+VARIANT = '_VARIANT_'
3184+DEFAULT = 'default'
3185+
3186+SRCDIR = 'srcdir'
3187+BLDDIR = 'blddir'
3188+APPNAME = 'APPNAME'
3189+VERSION = 'VERSION'
3190+
3191+DEFINES = 'defines'
3192+UNDEFINED = ()
3193+
3194+BREAK = "break"
3195+CONTINUE = "continue"
3196+
3197+# task scheduler options
3198+JOBCONTROL = "JOBCONTROL"
3199+MAXPARALLEL = "MAXPARALLEL"
3200+NORMAL = "NORMAL"
3201+
3202+# task state
3203+NOT_RUN = 0
3204+MISSING = 1
3205+CRASHED = 2
3206+EXCEPTION = 3
3207+SKIPPED = 8
3208+SUCCESS = 9
3209+
3210+ASK_LATER = -1
3211+SKIP_ME = -2
3212+RUN_ME = -3
3213+
3214+
3215+LOG_FORMAT = "%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
3216+HOUR_FORMAT = "%H:%M:%S"
3217+
3218+TEST_OK = True
3219+
3220+CFG_FILES = 'cfg_files'
3221+
3222+# positive '->' install
3223+# negative '<-' uninstall
3224+INSTALL = 1337
3225+UNINSTALL = -1337
3226+
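
Two values above are easy to misread: O644 and O755 are plain decimal integers holding the octal permissions 0644 and 0755, and DBFILE embeds the ABI number so that pickle caches written by an incompatible waf are ignored rather than reused. A quick, purely illustrative check, assuming the wafadmin directory is on sys.path:

# illustrative only
import Constants
assert Constants.O644 == int('644', 8)                        # 420
assert Constants.O755 == int('755', 8)                        # 493
assert Constants.DBFILE == '.wafpickle-%d' % Constants.ABI    # '.wafpickle-7'
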
3227diff --git a/buildtools/wafadmin/Environment.py b/buildtools/wafadmin/Environment.py
3228new file mode 100644
3229index 0000000..52c83b4
3230--- /dev/null
3231+++ b/buildtools/wafadmin/Environment.py
3232@@ -0,0 +1,210 @@
3233+#!/usr/bin/env python
3234+# encoding: utf-8
3235+# Thomas Nagy, 2005 (ita)
3236+
3237+"""Environment representation
3238+
3239+There is one gotcha: getitem returns [] if the contents evals to False
3240+This means env['foo'] = {}; print env['foo'] will print [] not {}
3241+"""
3242+
3243+import os, copy, re
3244+import Logs, Options, Utils
3245+from Constants import *
3246+re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
3247+
3248+class Environment(object):
3249+ """A safe-to-use dictionary, but do not attach functions to it please (break cPickle)
3250+ An environment instance can be stored into a file and loaded easily
3251+ """
3252+ __slots__ = ("table", "parent")
3253+ def __init__(self, filename=None):
3254+ self.table = {}
3255+ #self.parent = None
3256+
3257+ if filename:
3258+ self.load(filename)
3259+
3260+ def __contains__(self, key):
3261+ if key in self.table: return True
3262+ try: return self.parent.__contains__(key)
3263+ except AttributeError: return False # parent may not exist
3264+
3265+ def __str__(self):
3266+ keys = set()
3267+ cur = self
3268+ while cur:
3269+ keys.update(cur.table.keys())
3270+ cur = getattr(cur, 'parent', None)
3271+ keys = list(keys)
3272+ keys.sort()
3273+ return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in keys])
3274+
3275+ def __getitem__(self, key):
3276+ try:
3277+ while 1:
3278+ x = self.table.get(key, None)
3279+ if not x is None:
3280+ return x
3281+ self = self.parent
3282+ except AttributeError:
3283+ return []
3284+
3285+ def __setitem__(self, key, value):
3286+ self.table[key] = value
3287+
3288+ def __delitem__(self, key):
3289+ del self.table[key]
3290+
3291+ def pop(self, key, *args):
3292+ if len(args):
3293+ return self.table.pop(key, *args)
3294+ return self.table.pop(key)
3295+
3296+ def set_variant(self, name):
3297+ self.table[VARIANT] = name
3298+
3299+ def variant(self):
3300+ try:
3301+ while 1:
3302+ x = self.table.get(VARIANT, None)
3303+ if not x is None:
3304+ return x
3305+ self = self.parent
3306+ except AttributeError:
3307+ return DEFAULT
3308+
3309+ def copy(self):
3310+ # TODO waf 1.6 rename this method derive, #368
3311+ newenv = Environment()
3312+ newenv.parent = self
3313+ return newenv
3314+
3315+ def detach(self):
3316+ """TODO try it
3317+ modifying the original env will not change the copy"""
3318+ tbl = self.get_merged_dict()
3319+ try:
3320+ delattr(self, 'parent')
3321+ except AttributeError:
3322+ pass
3323+ else:
3324+ keys = tbl.keys()
3325+ for x in keys:
3326+ tbl[x] = copy.deepcopy(tbl[x])
3327+ self.table = tbl
3328+
3329+ def get_flat(self, key):
3330+ s = self[key]
3331+ if isinstance(s, str): return s
3332+ return ' '.join(s)
3333+
3334+ def _get_list_value_for_modification(self, key):
3335+ """Gets a value that must be a list for further modification. The
3336+ list may be modified inplace and there is no need to
3337+ "self.table[var] = value" afterwards.
3338+ """
3339+ try:
3340+ value = self.table[key]
3341+ except KeyError:
3342+ try: value = self.parent[key]
3343+ except AttributeError: value = []
3344+ if isinstance(value, list):
3345+ value = value[:]
3346+ else:
3347+ value = [value]
3348+ else:
3349+ if not isinstance(value, list):
3350+ value = [value]
3351+ self.table[key] = value
3352+ return value
3353+
3354+ def append_value(self, var, value):
3355+ current_value = self._get_list_value_for_modification(var)
3356+
3357+ if isinstance(value, list):
3358+ current_value.extend(value)
3359+ else:
3360+ current_value.append(value)
3361+
3362+ def prepend_value(self, var, value):
3363+ current_value = self._get_list_value_for_modification(var)
3364+
3365+ if isinstance(value, list):
3366+ current_value = value + current_value
3367+ # a new list: update the dictionary entry
3368+ self.table[var] = current_value
3369+ else:
3370+ current_value.insert(0, value)
3371+
3372+ # prepend unique would be ambiguous
3373+ def append_unique(self, var, value):
3374+ current_value = self._get_list_value_for_modification(var)
3375+
3376+ if isinstance(value, list):
3377+ for value_item in value:
3378+ if value_item not in current_value:
3379+ current_value.append(value_item)
3380+ else:
3381+ if value not in current_value:
3382+ current_value.append(value)
3383+
3384+ def get_merged_dict(self):
3385+ """compute a merged table"""
3386+ table_list = []
3387+ env = self
3388+ while 1:
3389+ table_list.insert(0, env.table)
3390+ try: env = env.parent
3391+ except AttributeError: break
3392+ merged_table = {}
3393+ for table in table_list:
3394+ merged_table.update(table)
3395+ return merged_table
3396+
3397+ def store(self, filename):
3398+ "Write the variables into a file"
3399+ file = open(filename, 'w')
3400+ merged_table = self.get_merged_dict()
3401+ keys = list(merged_table.keys())
3402+ keys.sort()
3403+ for k in keys: file.write('%s = %r\n' % (k, merged_table[k]))
3404+ file.close()
3405+
3406+ def load(self, filename):
3407+ "Retrieve the variables from a file"
3408+ tbl = self.table
3409+ code = Utils.readf(filename)
3410+ for m in re_imp.finditer(code):
3411+ g = m.group
3412+ tbl[g(2)] = eval(g(3))
3413+ Logs.debug('env: %s', self.table)
3414+
3415+ def get_destdir(self):
3416+ "return the destdir, useful for installing"
3417+ if self.__getitem__('NOINSTALL'): return ''
3418+ return Options.options.destdir
3419+
3420+ def update(self, d):
3421+ for k, v in d.iteritems():
3422+ self[k] = v
3423+
3424+
3425+ def __getattr__(self, name):
3426+ if name in self.__slots__:
3427+ return object.__getattr__(self, name)
3428+ else:
3429+ return self[name]
3430+
3431+ def __setattr__(self, name, value):
3432+ if name in self.__slots__:
3433+ object.__setattr__(self, name, value)
3434+ else:
3435+ self[name] = value
3436+
3437+ def __delattr__(self, name):
3438+ if name in self.__slots__:
3439+ object.__delattr__(self, name)
3440+ else:
3441+ del self[name]
3442+
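
A short sketch of the copy-on-write behaviour implemented above: copy() only records a parent link, _get_list_value_for_modification() duplicates the inherited list on first write, and unset keys read back as [] (the gotcha noted in the module docstring). This is not part of the patch and assumes the wafadmin directory is on sys.path so the import resolves:

# illustrative only
import Environment

base = Environment.Environment()
base['CCFLAGS'] = ['-O2']

child = base.copy()                  # derived env: child.parent is base
child.append_value('CCFLAGS', '-g')  # copies the parent list before appending

print(child['CCFLAGS'])              # ['-O2', '-g']
print(base['CCFLAGS'])               # ['-O2'], the parent is untouched
print(base['DOES_NOT_EXIST'])        # [], unset keys come back as an empty list
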
3443diff --git a/buildtools/wafadmin/Logs.py b/buildtools/wafadmin/Logs.py
3444new file mode 100644
3445index 0000000..c160b37
3446--- /dev/null
3447+++ b/buildtools/wafadmin/Logs.py
3448@@ -0,0 +1,134 @@
3449+#!/usr/bin/env python
3450+# encoding: utf-8
3451+# Thomas Nagy, 2005 (ita)
3452+
3453+import ansiterm
3454+import os, re, logging, traceback, sys
3455+from Constants import *
3456+
3457+zones = ''
3458+verbose = 0
3459+
3460+colors_lst = {
3461+'USE' : True,
3462+'BOLD' :'\x1b[01;1m',
3463+'RED' :'\x1b[01;31m',
3464+'GREEN' :'\x1b[32m',
3465+'YELLOW':'\x1b[33m',
3466+'PINK' :'\x1b[35m',
3467+'BLUE' :'\x1b[01;34m',
3468+'CYAN' :'\x1b[36m',
3469+'NORMAL':'\x1b[0m',
3470+'cursor_on' :'\x1b[?25h',
3471+'cursor_off' :'\x1b[?25l',
3472+}
3473+
3474+got_tty = False
3475+term = os.environ.get('TERM', 'dumb')
3476+if not term in ['dumb', 'emacs']:
3477+ try:
3478+ got_tty = sys.stderr.isatty() or (sys.platform == 'win32' and term in ['xterm', 'msys'])
3479+ except AttributeError:
3480+ pass
3481+
3482+import Utils
3483+
3484+if not got_tty or 'NOCOLOR' in os.environ:
3485+ colors_lst['USE'] = False
3486+
3487+# test
3488+#if sys.platform == 'win32':
3489+# colors_lst['USE'] = True
3490+
3491+def get_color(cl):
3492+ if not colors_lst['USE']: return ''
3493+ return colors_lst.get(cl, '')
3494+
3495+class foo(object):
3496+ def __getattr__(self, a):
3497+ return get_color(a)
3498+ def __call__(self, a):
3499+ return get_color(a)
3500+
3501+colors = foo()
3502+
3503+re_log = re.compile(r'(\w+): (.*)', re.M)
3504+class log_filter(logging.Filter):
3505+ def __init__(self, name=None):
3506+ pass
3507+
3508+ def filter(self, rec):
3509+ rec.c1 = colors.PINK
3510+ rec.c2 = colors.NORMAL
3511+ rec.zone = rec.module
3512+ if rec.levelno >= logging.INFO:
3513+ if rec.levelno >= logging.ERROR:
3514+ rec.c1 = colors.RED
3515+ elif rec.levelno >= logging.WARNING:
3516+ rec.c1 = colors.YELLOW
3517+ else:
3518+ rec.c1 = colors.GREEN
3519+ return True
3520+
3521+ zone = ''
3522+ m = re_log.match(rec.msg)
3523+ if m:
3524+ zone = rec.zone = m.group(1)
3525+ rec.msg = m.group(2)
3526+
3527+ if zones:
3528+ return getattr(rec, 'zone', '') in zones or '*' in zones
3529+ elif not verbose > 2:
3530+ return False
3531+ return True
3532+
3533+class formatter(logging.Formatter):
3534+ def __init__(self):
3535+ logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)
3536+
3537+ def format(self, rec):
3538+ if rec.levelno >= logging.WARNING or rec.levelno == logging.INFO:
3539+ try:
3540+ return '%s%s%s' % (rec.c1, rec.msg.decode('utf-8'), rec.c2)
3541+ except:
3542+ return rec.c1+rec.msg+rec.c2
3543+ return logging.Formatter.format(self, rec)
3544+
3545+def debug(*k, **kw):
3546+ if verbose:
3547+ k = list(k)
3548+ k[0] = k[0].replace('\n', ' ')
3549+ logging.debug(*k, **kw)
3550+
3551+def error(*k, **kw):
3552+ logging.error(*k, **kw)
3553+ if verbose > 1:
3554+ if isinstance(k[0], Utils.WafError):
3555+ st = k[0].stack
3556+ else:
3557+ st = traceback.extract_stack()
3558+ if st:
3559+ st = st[:-1]
3560+ buf = []
3561+ for filename, lineno, name, line in st:
3562+ buf.append(' File "%s", line %d, in %s' % (filename, lineno, name))
3563+ if line:
3564+ buf.append(' %s' % line.strip())
3565+ if buf: logging.error("\n".join(buf))
3566+
3567+warn = logging.warn
3568+info = logging.info
3569+
3570+def init_log():
3571+ log = logging.getLogger()
3572+ log.handlers = []
3573+ log.filters = []
3574+ hdlr = logging.StreamHandler()
3575+ hdlr.setFormatter(formatter())
3576+ log.addHandler(hdlr)
3577+ log.addFilter(log_filter())
3578+ log.setLevel(logging.DEBUG)
3579+
3580+# may be initialized more than once
3581+init_log()
3582+
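
The 'zone: message' convention used throughout the patch (debug('build: ...'), debug('runner: ...') in Build.py above) is what re_log and log_filter pick apart: the word before the colon becomes the zone and drives filtering, while warnings and errors always pass. A hypothetical sketch, again assuming the wafadmin tree is importable; in a real run the verbose level and zone list come from the command-line options:

# illustrative only
import Logs

Logs.verbose = 1                  # debug() is a no-op unless verbose is set
Logs.zones = ['build']            # normally filled from the --zones option

Logs.debug('build: scanning %s', 'source/dir')    # zone 'build', passes the filter
Logs.debug('runner: system command -> gcc ...')   # zone 'runner', filtered out
Logs.warn('warnings bypass the zone filter')      # levelno >= WARNING always shows
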
3583diff --git a/buildtools/wafadmin/Node.py b/buildtools/wafadmin/Node.py
3584new file mode 100644
3585index 0000000..236dd0d
3586--- /dev/null
3587+++ b/buildtools/wafadmin/Node.py
3588@@ -0,0 +1,695 @@
3589+#!/usr/bin/env python
3590+# encoding: utf-8
3591+# Thomas Nagy, 2005 (ita)
3592+
3593+"""
3594+Node: filesystem structure, contains lists of nodes
3595+
3596+IMPORTANT:
3597+1. Each file/folder is represented by exactly one node.
3598+
3599+2. Most would-be class properties are stored in Build: nodes to depend on, signature, flags, ..
3600+unused class members increase the .wafpickle file size noticeably with lots of objects.
3601+
3602+3. The build is launched from the top of the build dir (for example, in _build_/).
3603+
3604+4. Node should not be instantiated directly.
3605+Each instance of Build.BuildContext has a Node subclass.
3606+(aka: 'Nodu', see BuildContext initializer)
3607+The BuildContext is referenced here as self.__class__.bld
3608+Its Node class is referenced here as self.__class__
3609+
3610+The public and advertised apis are the following:
3611+${TGT} -> dir/to/file.ext
3612+${TGT[0].base()} -> dir/to/file
3613+${TGT[0].dir(env)} -> dir/to
3614+${TGT[0].file()} -> file.ext
3615+${TGT[0].file_base()} -> file
3616+${TGT[0].suffix()} -> .ext
3617+${TGT[0].abspath(env)} -> /path/to/dir/to/file.ext
3618+
3619+"""
3620+
3621+import os, sys, fnmatch, re, stat
3622+import Utils, Constants
3623+
3624+UNDEFINED = 0
3625+DIR = 1
3626+FILE = 2
3627+BUILD = 3
3628+
3629+type_to_string = {UNDEFINED: "unk", DIR: "dir", FILE: "src", BUILD: "bld"}
3630+
3631+# These fnmatch expressions are used by default to prune the directory tree
3632+# while doing the recursive traversal in the find_iter method of the Node class.
3633+prune_pats = '.git .bzr .hg .svn _MTN _darcs CVS SCCS'.split()
3634+
3635+# These fnmatch expressions are used by default to exclude files and dirs
3636+# while doing the recursive traversal in the find_iter method of the Node class.
3637+exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
3638+
3639+# These Utils.jar_regexp expressions are used by default to exclude files and dirs and also prune the directory tree
3640+# while doing the recursive traversal in the ant_glob method of the Node class.
3641+exclude_regs = '''
3642+**/*~
3643+**/#*#
3644+**/.#*
3645+**/%*%
3646+**/._*
3647+**/CVS
3648+**/CVS/**
3649+**/.cvsignore
3650+**/SCCS
3651+**/SCCS/**
3652+**/vssver.scc
3653+**/.svn
3654+**/.svn/**
3655+**/.git
3656+**/.git/**
3657+**/.gitignore
3658+**/.bzr
3659+**/.bzr/**
3660+**/.hg
3661+**/.hg/**
3662+**/_MTN
3663+**/_MTN/**
3664+**/_darcs
3665+**/_darcs/**
3666+**/.DS_Store'''
3667+
3668+class Node(object):
3669+ __slots__ = ("name", "parent", "id", "childs")
3670+ def __init__(self, name, parent, node_type = UNDEFINED):
3671+ self.name = name
3672+ self.parent = parent
3673+
3674+ # assumption: one build object at a time
3675+ self.__class__.bld.id_nodes += 4
3676+ self.id = self.__class__.bld.id_nodes + node_type
3677+
3678+ if node_type == DIR: self.childs = {}
3679+
3680+ # We do not want to add another type attribute (memory)
3681+ # use the id to find out: type = id & 3
3682+		# for setting: new type = type + x - (type & 3)
3683+
3684+ if parent and name in parent.childs:
3685+ raise Utils.WafError('node %s exists in the parent files %r already' % (name, parent))
3686+
3687+ if parent: parent.childs[name] = self
3688+
3689+ def __setstate__(self, data):
3690+ if len(data) == 4:
3691+ (self.parent, self.name, self.id, self.childs) = data
3692+ else:
3693+ (self.parent, self.name, self.id) = data
3694+
3695+ def __getstate__(self):
3696+ if getattr(self, 'childs', None) is None:
3697+ return (self.parent, self.name, self.id)
3698+ else:
3699+ return (self.parent, self.name, self.id, self.childs)
3700+
3701+ def __str__(self):
3702+ if not self.parent: return ''
3703+ return "%s://%s" % (type_to_string[self.id & 3], self.abspath())
3704+
3705+ def __repr__(self):
3706+ return self.__str__()
3707+
3708+ def __hash__(self):
3709+ "expensive, make certain it is not used"
3710+ raise Utils.WafError('nodes, you are doing it wrong')
3711+
3712+ def __copy__(self):
3713+ "nodes are not supposed to be copied"
3714+ raise Utils.WafError('nodes are not supposed to be cloned')
3715+
3716+ def get_type(self):
3717+ return self.id & 3
3718+
3719+ def set_type(self, t):
3720+ "dangerous, you are not supposed to use this"
3721+		self.id = self.id + t - (self.id & 3)
3722+
3723+ def dirs(self):
3724+ return [x for x in self.childs.values() if x.id & 3 == DIR]
3725+
3726+ def files(self):
3727+ return [x for x in self.childs.values() if x.id & 3 == FILE]
3728+
3729+ def get_dir(self, name, default=None):
3730+ node = self.childs.get(name, None)
3731+ if not node or node.id & 3 != DIR: return default
3732+ return node
3733+
3734+ def get_file(self, name, default=None):
3735+ node = self.childs.get(name, None)
3736+ if not node or node.id & 3 != FILE: return default
3737+ return node
3738+
3739+ def get_build(self, name, default=None):
3740+ node = self.childs.get(name, None)
3741+ if not node or node.id & 3 != BUILD: return default
3742+ return node
3743+
3744+ def find_resource(self, lst):
3745+ "Find an existing input file: either a build node declared previously or a source node"
3746+ if isinstance(lst, str):
3747+ lst = Utils.split_path(lst)
3748+
3749+ if len(lst) == 1:
3750+ parent = self
3751+ else:
3752+ parent = self.find_dir(lst[:-1])
3753+ if not parent: return None
3754+ self.__class__.bld.rescan(parent)
3755+
3756+ name = lst[-1]
3757+ node = parent.childs.get(name, None)
3758+ if node:
3759+ tp = node.id & 3
3760+ if tp == FILE or tp == BUILD:
3761+ return node
3762+ else:
3763+ return None
3764+
3765+ tree = self.__class__.bld
3766+ if not name in tree.cache_dir_contents[parent.id]:
3767+ return None
3768+
3769+ path = parent.abspath() + os.sep + name
3770+ try:
3771+ st = Utils.h_file(path)
3772+ except IOError:
3773+ return None
3774+
3775+ child = self.__class__(name, parent, FILE)
3776+ tree.node_sigs[0][child.id] = st
3777+ return child
3778+
3779+ def find_or_declare(self, lst):
3780+ "Used for declaring a build node representing a file being built"
3781+ if isinstance(lst, str):
3782+ lst = Utils.split_path(lst)
3783+
3784+ if len(lst) == 1:
3785+ parent = self
3786+ else:
3787+ parent = self.find_dir(lst[:-1])
3788+ if not parent: return None
3789+ self.__class__.bld.rescan(parent)
3790+
3791+ name = lst[-1]
3792+ node = parent.childs.get(name, None)
3793+ if node:
3794+ tp = node.id & 3
3795+ if tp != BUILD:
3796+ raise Utils.WafError('find_or_declare found a source file where a build file was expected %r' % '/'.join(lst))
3797+ return node
3798+ node = self.__class__(name, parent, BUILD)
3799+ return node
3800+
3801+ def find_dir(self, lst):
3802+ "search a folder in the filesystem"
3803+
3804+ if isinstance(lst, str):
3805+ lst = Utils.split_path(lst)
3806+
3807+ current = self
3808+ for name in lst:
3809+ self.__class__.bld.rescan(current)
3810+ prev = current
3811+
3812+ if not current.parent and name == current.name:
3813+ continue
3814+ elif not name:
3815+ continue
3816+ elif name == '.':
3817+ continue
3818+ elif name == '..':
3819+ current = current.parent or current
3820+ else:
3821+ current = prev.childs.get(name, None)
3822+ if current is None:
3823+ dir_cont = self.__class__.bld.cache_dir_contents
3824+ if prev.id in dir_cont and name in dir_cont[prev.id]:
3825+ if not prev.name:
3826+ if os.sep == '/':
3827+ # cygwin //machine/share
3828+ dirname = os.sep + name
3829+ else:
3830+ # windows c:
3831+ dirname = name
3832+ else:
3833+ # regular path
3834+ dirname = prev.abspath() + os.sep + name
3835+ if not os.path.isdir(dirname):
3836+ return None
3837+ current = self.__class__(name, prev, DIR)
3838+ elif (not prev.name and len(name) == 2 and name[1] == ':') or name.startswith('\\\\'):
3839+ # drive letter or \\ path for windows
3840+ current = self.__class__(name, prev, DIR)
3841+ else:
3842+ return None
3843+ else:
3844+ if current.id & 3 != DIR:
3845+ return None
3846+ return current
3847+
3848+ def ensure_dir_node_from_path(self, lst):
3849+		"used very rarely, forces the construction of a branch of node instances representing folders"
3850+
3851+ if isinstance(lst, str):
3852+ lst = Utils.split_path(lst)
3853+
3854+ current = self
3855+ for name in lst:
3856+ if not name:
3857+ continue
3858+ elif name == '.':
3859+ continue
3860+ elif name == '..':
3861+ current = current.parent or current
3862+ else:
3863+ prev = current
3864+ current = prev.childs.get(name, None)
3865+ if current is None:
3866+ current = self.__class__(name, prev, DIR)
3867+ return current
3868+
3869+ def exclusive_build_node(self, path):
3870+ """
3871+ create a hierarchy in the build dir (no source folders) for ill-behaving compilers
3872+ the node is not hashed, so you must do it manually
3873+
3874+ after declaring such a node, find_dir and find_resource should work as expected
3875+ """
3876+ lst = Utils.split_path(path)
3877+ name = lst[-1]
3878+ if len(lst) > 1:
3879+ parent = None
3880+ try:
3881+ parent = self.find_dir(lst[:-1])
3882+ except OSError:
3883+ pass
3884+ if not parent:
3885+ parent = self.ensure_dir_node_from_path(lst[:-1])
3886+ self.__class__.bld.rescan(parent)
3887+ else:
3888+ try:
3889+ self.__class__.bld.rescan(parent)
3890+ except OSError:
3891+ pass
3892+ else:
3893+ parent = self
3894+
3895+ node = parent.childs.get(name, None)
3896+ if not node:
3897+ node = self.__class__(name, parent, BUILD)
3898+
3899+ return node
3900+
3901+ def path_to_parent(self, parent):
3902+ "path relative to a direct ancestor, as string"
3903+ lst = []
3904+ p = self
3905+ h1 = parent.height()
3906+ h2 = p.height()
3907+ while h2 > h1:
3908+ h2 -= 1
3909+ lst.append(p.name)
3910+ p = p.parent
3911+ if lst:
3912+ lst.reverse()
3913+ ret = os.path.join(*lst)
3914+ else:
3915+ ret = ''
3916+ return ret
3917+
3918+ def find_ancestor(self, node):
3919+ "find a common ancestor for two nodes - for the shortest path in hierarchy"
3920+ dist = self.height() - node.height()
3921+ if dist < 0: return node.find_ancestor(self)
3922+ # now the real code
3923+ cand = self
3924+ while dist > 0:
3925+ cand = cand.parent
3926+ dist -= 1
3927+ if cand == node: return cand
3928+ cursor = node
3929+ while cand.parent:
3930+ cand = cand.parent
3931+ cursor = cursor.parent
3932+ if cand == cursor: return cand
3933+
3934+ def relpath_gen(self, from_node):
3935+		"string representing the relative path from self to another node"
3936+
3937+ if self == from_node: return '.'
3938+ if from_node.parent == self: return '..'
3939+
3940+ # up_path is '../../../' and down_path is 'dir/subdir/subdir/file'
3941+ ancestor = self.find_ancestor(from_node)
3942+ lst = []
3943+ cand = self
3944+ while not cand.id == ancestor.id:
3945+ lst.append(cand.name)
3946+ cand = cand.parent
3947+ cand = from_node
3948+ while not cand.id == ancestor.id:
3949+ lst.append('..')
3950+ cand = cand.parent
3951+ lst.reverse()
3952+ return os.sep.join(lst)
3953+
3954+		"path as printed in the console, so files can be opened easily from the launch directory"
3955+ "printed in the console, open files easily from the launch directory"
3956+ tree = self.__class__.bld
3957+ ln = tree.launch_node()
3958+
3959+ if self.id & 3 == FILE: return self.relpath_gen(ln)
3960+ else: return os.path.join(tree.bldnode.relpath_gen(ln), env.variant(), self.relpath_gen(tree.srcnode))
3961+
3962+ def is_child_of(self, node):
3963+		"does this node belong to the subtree rooted at the given node"
3964+ p = self
3965+ diff = self.height() - node.height()
3966+ while diff > 0:
3967+ diff -= 1
3968+ p = p.parent
3969+ return p.id == node.id
3970+
3971+ def variant(self, env):
3972+		"variant, or output directory for this node; a source node has variant 0"
3973+ if not env: return 0
3974+ elif self.id & 3 == FILE: return 0
3975+ else: return env.variant()
3976+
3977+ def height(self):
3978+		"number of parents"
3979+ # README a cache can be added here if necessary
3980+ d = self
3981+ val = -1
3982+ while d:
3983+ d = d.parent
3984+ val += 1
3985+ return val
3986+
3987+ # helpers for building things
3988+
3989+ def abspath(self, env=None):
3990+ """
3991+ absolute path
3992+ @param env [Environment]:
3993+ * obligatory for build nodes: build/variant/src/dir/bar.o
3994+ * optional for dirs: get either src/dir or build/variant/src/dir
3995+ * excluded for source nodes: src/dir/bar.c
3996+
3997+ Instead of computing the absolute path each time again,
3998+ store the already-computed absolute paths in one of (variants+1) dictionaries:
3999+ bld.cache_node_abspath[0] holds absolute paths for source nodes.
4000+ bld.cache_node_abspath[variant] holds the absolute path for the build nodes
4001+ which reside in the variant given by env.
4002+ """
4003+ ## absolute path - hot zone, so do not touch
4004+
4005+ # less expensive
4006+ variant = (env and (self.id & 3 != FILE) and env.variant()) or 0
4007+
4008+ ret = self.__class__.bld.cache_node_abspath[variant].get(self.id, None)
4009+ if ret: return ret
4010+
4011+ if not variant:
4012+ # source directory
4013+ if not self.parent:
4014+ val = os.sep == '/' and os.sep or ''
4015+ elif not self.parent.name: # root
4016+ val = (os.sep == '/' and os.sep or '') + self.name
4017+ else:
4018+ val = self.parent.abspath() + os.sep + self.name
4019+ else:
4020+ # build directory
4021+ val = os.sep.join((self.__class__.bld.bldnode.abspath(), variant, self.path_to_parent(self.__class__.bld.srcnode)))
4022+ self.__class__.bld.cache_node_abspath[variant][self.id] = val
4023+ return val
4024+
4025+ def change_ext(self, ext):
4026+ "node of the same path, but with a different extension - hot zone so do not touch"
4027+ name = self.name
4028+ k = name.rfind('.')
4029+ if k >= 0:
4030+ name = name[:k] + ext
4031+ else:
4032+ name = name + ext
4033+
4034+ return self.parent.find_or_declare([name])
4035+
4036+ def src_dir(self, env):
4037+ "src path without the file name"
4038+ return self.parent.srcpath(env)
4039+
4040+ def bld_dir(self, env):
4041+ "build path without the file name"
4042+ return self.parent.bldpath(env)
4043+
4044+ def bld_base(self, env):
4045+ "build path without the extension: src/dir/foo(.cpp)"
4046+ s = os.path.splitext(self.name)[0]
4047+ return os.path.join(self.bld_dir(env), s)
4048+
4049+ def bldpath(self, env=None):
4050+ "path seen from the build dir default/src/foo.cpp"
4051+ if self.id & 3 == FILE:
4052+ return self.relpath_gen(self.__class__.bld.bldnode)
4053+ p = self.path_to_parent(self.__class__.bld.srcnode)
4054+		if p != '':
4055+ return env.variant() + os.sep + p
4056+ return env.variant()
4057+
4058+ def srcpath(self, env=None):
4059+ "path in the srcdir from the build dir ../src/foo.cpp"
4060+ if self.id & 3 == BUILD:
4061+ return self.bldpath(env)
4062+ return self.relpath_gen(self.__class__.bld.bldnode)
4063+
4064+ def read(self, env):
4065+ "get the contents of a file, it is not used anywhere for the moment"
4066+ return Utils.readf(self.abspath(env))
4067+
4068+ def dir(self, env):
4069+ "scons-like"
4070+ return self.parent.abspath(env)
4071+
4072+ def file(self):
4073+ "scons-like"
4074+ return self.name
4075+
4076+ def file_base(self):
4077+ "scons-like"
4078+ return os.path.splitext(self.name)[0]
4079+
4080+ def suffix(self):
4081+ "scons-like - hot zone so do not touch"
4082+ k = max(0, self.name.rfind('.'))
4083+ return self.name[k:]
4084+
4085+ def find_iter_impl(self, src=True, bld=True, dir=True, accept_name=None, is_prune=None, maxdepth=25):
4086+		"""find nodes in the filesystem hierarchy, try to instantiate the nodes passively; same gotcha as ant_glob"""
4087+ bld_ctx = self.__class__.bld
4088+ bld_ctx.rescan(self)
4089+ for name in bld_ctx.cache_dir_contents[self.id]:
4090+ if accept_name(self, name):
4091+ node = self.find_resource(name)
4092+ if node:
4093+ if src and node.id & 3 == FILE:
4094+ yield node
4095+ else:
4096+ node = self.find_dir(name)
4097+ if node and node.id != bld_ctx.bldnode.id:
4098+ if dir:
4099+ yield node
4100+ if not is_prune(self, name):
4101+ if maxdepth:
4102+ for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
4103+ yield k
4104+ else:
4105+ if not is_prune(self, name):
4106+ node = self.find_resource(name)
4107+ if not node:
4108+ # not a file, it is a dir
4109+ node = self.find_dir(name)
4110+ if node and node.id != bld_ctx.bldnode.id:
4111+ if maxdepth:
4112+ for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
4113+ yield k
4114+
4115+ if bld:
4116+ for node in self.childs.values():
4117+ if node.id == bld_ctx.bldnode.id:
4118+ continue
4119+ if node.id & 3 == BUILD:
4120+ if accept_name(self, node.name):
4121+ yield node
4122+ raise StopIteration
4123+
4124+ def find_iter(self, in_pat=['*'], ex_pat=exclude_pats, prune_pat=prune_pats, src=True, bld=True, dir=False, maxdepth=25, flat=False):
4125+ """find nodes recursively, this returns everything but folders by default; same gotcha as ant_glob"""
4126+
4127+ if not (src or bld or dir):
4128+ raise StopIteration
4129+
4130+ if self.id & 3 != DIR:
4131+ raise StopIteration
4132+
4133+ in_pat = Utils.to_list(in_pat)
4134+ ex_pat = Utils.to_list(ex_pat)
4135+ prune_pat = Utils.to_list(prune_pat)
4136+
4137+ def accept_name(node, name):
4138+ for pat in ex_pat:
4139+ if fnmatch.fnmatchcase(name, pat):
4140+ return False
4141+ for pat in in_pat:
4142+ if fnmatch.fnmatchcase(name, pat):
4143+ return True
4144+ return False
4145+
4146+ def is_prune(node, name):
4147+ for pat in prune_pat:
4148+ if fnmatch.fnmatchcase(name, pat):
4149+ return True
4150+ return False
4151+
4152+ ret = self.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth)
4153+ if flat:
4154+ return " ".join([x.relpath_gen(self) for x in ret])
4155+
4156+ return ret
4157+
4158+ def ant_glob(self, *k, **kw):
4159+ """
4160+ known gotcha: will enumerate the files, but only if the folder exists in the source directory
4161+ """
4162+
4163+ src=kw.get('src', 1)
4164+ bld=kw.get('bld', 0)
4165+ dir=kw.get('dir', 0)
4166+ excl = kw.get('excl', exclude_regs)
4167+ incl = k and k[0] or kw.get('incl', '**')
4168+
4169+ def to_pat(s):
4170+ lst = Utils.to_list(s)
4171+ ret = []
4172+ for x in lst:
4173+ x = x.replace('//', '/')
4174+ if x.endswith('/'):
4175+ x += '**'
4176+ lst2 = x.split('/')
4177+ accu = []
4178+ for k in lst2:
4179+ if k == '**':
4180+ accu.append(k)
4181+ else:
4182+ k = k.replace('.', '[.]').replace('*', '.*').replace('?', '.')
4183+ k = '^%s$' % k
4184+ #print "pattern", k
4185+ accu.append(re.compile(k))
4186+ ret.append(accu)
4187+ return ret
4188+
4189+ def filtre(name, nn):
4190+ ret = []
4191+ for lst in nn:
4192+ if not lst:
4193+ pass
4194+ elif lst[0] == '**':
4195+ ret.append(lst)
4196+ if len(lst) > 1:
4197+ if lst[1].match(name):
4198+ ret.append(lst[2:])
4199+ else:
4200+ ret.append([])
4201+ elif lst[0].match(name):
4202+ ret.append(lst[1:])
4203+ return ret
4204+
4205+ def accept(name, pats):
4206+ nacc = filtre(name, pats[0])
4207+ nrej = filtre(name, pats[1])
4208+ if [] in nrej:
4209+ nacc = []
4210+ return [nacc, nrej]
4211+
4212+ def ant_iter(nodi, maxdepth=25, pats=[]):
4213+ nodi.__class__.bld.rescan(nodi)
4214+ tmp = list(nodi.__class__.bld.cache_dir_contents[nodi.id])
4215+ tmp.sort()
4216+ for name in tmp:
4217+ npats = accept(name, pats)
4218+ if npats and npats[0]:
4219+ accepted = [] in npats[0]
4220+ #print accepted, nodi, name
4221+
4222+ node = nodi.find_resource(name)
4223+ if node and accepted:
4224+ if src and node.id & 3 == FILE:
4225+ yield node
4226+ else:
4227+ node = nodi.find_dir(name)
4228+ if node and node.id != nodi.__class__.bld.bldnode.id:
4229+ if accepted and dir:
4230+ yield node
4231+ if maxdepth:
4232+ for k in ant_iter(node, maxdepth=maxdepth - 1, pats=npats):
4233+ yield k
4234+ if bld:
4235+ for node in nodi.childs.values():
4236+ if node.id == nodi.__class__.bld.bldnode.id:
4237+ continue
4238+ if node.id & 3 == BUILD:
4239+ npats = accept(node.name, pats)
4240+ if npats and npats[0] and [] in npats[0]:
4241+ yield node
4242+ raise StopIteration
4243+
4244+ ret = [x for x in ant_iter(self, pats=[to_pat(incl), to_pat(excl)])]
4245+
4246+ if kw.get('flat', True):
4247+ return " ".join([x.relpath_gen(self) for x in ret])
4248+
4249+ return ret
4250+
4251+ def update_build_dir(self, env=None):
4252+
4253+ if not env:
4254+			for env in self.__class__.bld.all_envs.values():
4255+ self.update_build_dir(env)
4256+ return
4257+
4258+ path = self.abspath(env)
4259+
4260+ lst = Utils.listdir(path)
4261+ try:
4262+ self.__class__.bld.cache_dir_contents[self.id].update(lst)
4263+ except KeyError:
4264+ self.__class__.bld.cache_dir_contents[self.id] = set(lst)
4265+ self.__class__.bld.cache_scanned_folders[self.id] = True
4266+
4267+ for k in lst:
4268+ npath = path + os.sep + k
4269+ st = os.stat(npath)
4270+ if stat.S_ISREG(st[stat.ST_MODE]):
4271+ ick = self.find_or_declare(k)
4272+ if not (ick.id in self.__class__.bld.node_sigs[env.variant()]):
4273+ self.__class__.bld.node_sigs[env.variant()][ick.id] = Constants.SIG_NIL
4274+ elif stat.S_ISDIR(st[stat.ST_MODE]):
4275+ child = self.find_dir(k)
4276+ if not child:
4277+ child = self.ensure_dir_node_from_path(k)
4278+ child.update_build_dir(env)
4279+
4280+
4281+class Nodu(Node):
4282+ pass
4283+
4284diff --git a/buildtools/wafadmin/Options.py b/buildtools/wafadmin/Options.py
4285new file mode 100644
4286index 0000000..c9ddcfe
4287--- /dev/null
4288+++ b/buildtools/wafadmin/Options.py
4289@@ -0,0 +1,288 @@
4290+#!/usr/bin/env python
4291+# encoding: utf-8
4292+# Scott Newton, 2005 (scottn)
4293+# Thomas Nagy, 2006 (ita)
4294+
4295+"Custom command-line options"
4296+
4297+import os, sys, imp, types, tempfile, optparse
4298+import Logs, Utils
4299+from Constants import *
4300+
4301+cmds = 'distclean configure build install clean uninstall check dist distcheck'.split()
4302+
4303+# TODO remove in waf 1.6 the following two
4304+commands = {}
4305+is_install = False
4306+
4307+options = {}
4308+arg_line = []
4309+launch_dir = ''
4310+tooldir = ''
4311+lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
4312+try: cache_global = os.path.abspath(os.environ['WAFCACHE'])
4313+except KeyError: cache_global = ''
4314+platform = Utils.unversioned_sys_platform()
4315+conf_file = 'conf-runs-%s-%d.pickle' % (platform, ABI)
4316+
4317+remote_repo = ['http://waf.googlecode.com/svn/']
4318+"""remote directory for the plugins"""
4319+
4320+
4321+# Such a command-line should work: JOBS=4 PREFIX=/opt/ DESTDIR=/tmp/ahoj/ waf configure
4322+default_prefix = os.environ.get('PREFIX')
4323+if not default_prefix:
4324+ if platform == 'win32':
4325+ d = tempfile.gettempdir()
4326+ default_prefix = d[0].upper() + d[1:]
4327+ # win32 preserves the case, but gettempdir does not
4328+ else: default_prefix = '/usr/local/'
4329+
4330+default_jobs = os.environ.get('JOBS', -1)
4331+if default_jobs < 1:
4332+ try:
4333+ if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
4334+ default_jobs = os.sysconf('SC_NPROCESSORS_ONLN')
4335+ else:
4336+ default_jobs = int(Utils.cmd_output(['sysctl', '-n', 'hw.ncpu']))
4337+ except:
4338+ if os.name == 'java': # platform.system() == 'Java'
4339+ from java.lang import Runtime
4340+ default_jobs = Runtime.getRuntime().availableProcessors()
4341+ else:
4342+ # environment var defined on win32
4343+ default_jobs = int(os.environ.get('NUMBER_OF_PROCESSORS', 1))
4344+
4345+default_destdir = os.environ.get('DESTDIR', '')
4346+
4347+def get_usage(self):
4348+ cmds_str = []
4349+ module = Utils.g_module
4350+ if module:
4351+ # create the help messages for commands
4352+ tbl = module.__dict__
4353+ keys = list(tbl.keys())
4354+ keys.sort()
4355+
4356+ if 'build' in tbl:
4357+ if not module.build.__doc__:
4358+ module.build.__doc__ = 'builds the project'
4359+ if 'configure' in tbl:
4360+ if not module.configure.__doc__:
4361+ module.configure.__doc__ = 'configures the project'
4362+
4363+ ban = ['set_options', 'init', 'shutdown']
4364+
4365+ optlst = [x for x in keys if not x in ban
4366+ and type(tbl[x]) is type(parse_args_impl)
4367+ and tbl[x].__doc__
4368+ and not x.startswith('_')]
4369+
4370+ just = max([len(x) for x in optlst])
4371+
4372+ for x in optlst:
4373+ cmds_str.append(' %s: %s' % (x.ljust(just), tbl[x].__doc__))
4374+ ret = '\n'.join(cmds_str)
4375+ else:
4376+ ret = ' '.join(cmds)
4377+ return '''waf [command] [options]
4378+
4379+Main commands (example: ./waf build -j4)
4380+%s
4381+''' % ret
4382+
4383+
4384+setattr(optparse.OptionParser, 'get_usage', get_usage)
4385+
4386+def create_parser(module=None):
4387+ Logs.debug('options: create_parser is called')
4388+ parser = optparse.OptionParser(conflict_handler="resolve", version = 'waf %s (%s)' % (WAFVERSION, WAFREVISION))
4389+
4390+ parser.formatter.width = Utils.get_term_cols()
4391+ p = parser.add_option
4392+
4393+ p('-j', '--jobs',
4394+ type = 'int',
4395+ default = default_jobs,
4396+ help = 'amount of parallel jobs (%r)' % default_jobs,
4397+ dest = 'jobs')
4398+
4399+ p('-k', '--keep',
4400+ action = 'store_true',
4401+ default = False,
4402+ help = 'keep running happily on independent task groups',
4403+ dest = 'keep')
4404+
4405+ p('-v', '--verbose',
4406+ action = 'count',
4407+ default = 0,
4408+ help = 'verbosity level -v -vv or -vvv [default: 0]',
4409+ dest = 'verbose')
4410+
4411+ p('--nocache',
4412+ action = 'store_true',
4413+ default = False,
4414+ help = 'ignore the WAFCACHE (if set)',
4415+ dest = 'nocache')
4416+
4417+ p('--zones',
4418+ action = 'store',
4419+ default = '',
4420+ help = 'debugging zones (task_gen, deps, tasks, etc)',
4421+ dest = 'zones')
4422+
4423+ p('-p', '--progress',
4424+ action = 'count',
4425+ default = 0,
4426+ help = '-p: progress bar; -pp: ide output',
4427+ dest = 'progress_bar')
4428+
4429+ p('--targets',
4430+ action = 'store',
4431+ default = '',
4432+ help = 'build given task generators, e.g. "target1,target2"',
4433+ dest = 'compile_targets')
4434+
4435+ gr = optparse.OptionGroup(parser, 'configuration options')
4436+ parser.add_option_group(gr)
4437+ gr.add_option('-b', '--blddir',
4438+ action = 'store',
4439+ default = '',
4440+ help = 'out dir for the project (configuration)',
4441+ dest = 'blddir')
4442+ gr.add_option('-s', '--srcdir',
4443+ action = 'store',
4444+ default = '',
4445+ help = 'top dir for the project (configuration)',
4446+ dest = 'srcdir')
4447+ gr.add_option('--prefix',
4448+ help = 'installation prefix (configuration) [default: %r]' % default_prefix,
4449+ default = default_prefix,
4450+ dest = 'prefix')
4451+
4452+ gr.add_option('--download',
4453+ action = 'store_true',
4454+ default = False,
4455+ help = 'try to download the tools if missing',
4456+ dest = 'download')
4457+
4458+ gr = optparse.OptionGroup(parser, 'installation options')
4459+ parser.add_option_group(gr)
4460+ gr.add_option('--destdir',
4461+ help = 'installation root [default: %r]' % default_destdir,
4462+ default = default_destdir,
4463+ dest = 'destdir')
4464+ gr.add_option('-f', '--force',
4465+ action = 'store_true',
4466+ default = False,
4467+ help = 'force file installation',
4468+ dest = 'force')
4469+
4470+ return parser
4471+
4472+def parse_args_impl(parser, _args=None):
4473+ global options, commands, arg_line
4474+ (options, args) = parser.parse_args(args=_args)
4475+
4476+ arg_line = args
4477+ #arg_line = args[:] # copy
4478+
4479+ # By default, 'waf' is equivalent to 'waf build'
4480+ commands = {}
4481+ for var in cmds: commands[var] = 0
4482+ if not args:
4483+ commands['build'] = 1
4484+ args.append('build')
4485+
4486+ # Parse the command arguments
4487+ for arg in args:
4488+ commands[arg] = True
4489+
4490+	# the 'check' command depends on 'build' being run first
4491+ if 'check' in args:
4492+ idx = args.index('check')
4493+ try:
4494+ bidx = args.index('build')
4495+ if bidx > idx:
4496+ raise ValueError('build before check')
4497+ except ValueError, e:
4498+ args.insert(idx, 'build')
4499+
4500+ if args[0] != 'init':
4501+ args.insert(0, 'init')
4502+
4503+ # TODO -k => -j0
4504+ if options.keep: options.jobs = 1
4505+ if options.jobs < 1: options.jobs = 1
4506+
4507+ if 'install' in sys.argv or 'uninstall' in sys.argv:
4508+ # absolute path only if set
4509+ options.destdir = options.destdir and os.path.abspath(os.path.expanduser(options.destdir))
4510+
4511+ Logs.verbose = options.verbose
4512+ Logs.init_log()
4513+
4514+ if options.zones:
4515+ Logs.zones = options.zones.split(',')
4516+ if not Logs.verbose: Logs.verbose = 1
4517+ elif Logs.verbose > 0:
4518+ Logs.zones = ['runner']
4519+ if Logs.verbose > 2:
4520+ Logs.zones = ['*']
4521+
4522+# TODO waf 1.6
4523+# 1. rename the class to OptionsContext
4524+# 2. instead of a class attribute, use a module (static 'parser')
4525+# 3. parse_args_impl was made in times when we did not know about binding new methods to classes
4526+
4527+class Handler(Utils.Context):
4528+ """loads wscript modules in folders for adding options
4529+ This class should be named 'OptionsContext'
4530+ A method named 'recurse' is bound when used by the module Scripting"""
4531+
4532+ parser = None
4533+ # make it possible to access the reference, like Build.bld
4534+
4535+ def __init__(self, module=None):
4536+ self.parser = create_parser(module)
4537+ self.cwd = os.getcwd()
4538+ Handler.parser = self
4539+
4540+ def add_option(self, *k, **kw):
4541+ self.parser.add_option(*k, **kw)
4542+
4543+ def add_option_group(self, *k, **kw):
4544+ return self.parser.add_option_group(*k, **kw)
4545+
4546+ def get_option_group(self, opt_str):
4547+ return self.parser.get_option_group(opt_str)
4548+
4549+ def sub_options(self, *k, **kw):
4550+ if not k: raise Utils.WscriptError('folder expected')
4551+ self.recurse(k[0], name='set_options')
4552+
4553+ def tool_options(self, *k, **kw):
4554+ Utils.python_24_guard()
4555+
4556+ if not k[0]:
4557+ raise Utils.WscriptError('invalid tool_options call %r %r' % (k, kw))
4558+ tools = Utils.to_list(k[0])
4559+
4560+ # TODO waf 1.6 remove the global variable tooldir
4561+ path = Utils.to_list(kw.get('tdir', kw.get('tooldir', tooldir)))
4562+
4563+ for tool in tools:
4564+ tool = tool.replace('++', 'xx')
4565+ if tool == 'java': tool = 'javaw'
4566+ if tool.lower() == 'unittest': tool = 'unittestw'
4567+ module = Utils.load_tool(tool, path)
4568+ try:
4569+ fun = module.set_options
4570+ except AttributeError:
4571+ pass
4572+ else:
4573+ fun(kw.get('option_group', self))
4574+
4575+ def parse_args(self, args=None):
4576+ parse_args_impl(self.parser, args)
4577+
4578diff --git a/buildtools/wafadmin/Runner.py b/buildtools/wafadmin/Runner.py
4579new file mode 100644
4580index 0000000..94db0fb
4581--- /dev/null
4582+++ b/buildtools/wafadmin/Runner.py
4583@@ -0,0 +1,236 @@
4584+#!/usr/bin/env python
4585+# encoding: utf-8
4586+# Thomas Nagy, 2005-2008 (ita)
4587+
4588+"Execute the tasks"
4589+
4590+import os, sys, random, time, threading, traceback
4591+try: from Queue import Queue
4592+except ImportError: from queue import Queue
4593+import Build, Utils, Logs, Options
4594+from Logs import debug, error
4595+from Constants import *
4596+
4597+GAP = 15
4598+
4599+run_old = threading.Thread.run
4600+def run(*args, **kwargs):
4601+ try:
4602+ run_old(*args, **kwargs)
4603+ except (KeyboardInterrupt, SystemExit):
4604+ raise
4605+ except:
4606+ sys.excepthook(*sys.exc_info())
4607+threading.Thread.run = run
4608+
4609+def process_task(tsk):
4610+
4611+ m = tsk.master
4612+ if m.stop:
4613+ m.out.put(tsk)
4614+ return
4615+
4616+ try:
4617+ tsk.generator.bld.printout(tsk.display())
4618+ if tsk.__class__.stat: ret = tsk.__class__.stat(tsk)
4619+ # actual call to task's run() function
4620+ else: ret = tsk.call_run()
4621+ except Exception, e:
4622+ tsk.err_msg = Utils.ex_stack()
4623+ tsk.hasrun = EXCEPTION
4624+
4625+ # TODO cleanup
4626+ m.error_handler(tsk)
4627+ m.out.put(tsk)
4628+ return
4629+
4630+ if ret:
4631+ tsk.err_code = ret
4632+ tsk.hasrun = CRASHED
4633+ else:
4634+ try:
4635+ tsk.post_run()
4636+ except Utils.WafError:
4637+ pass
4638+ except Exception:
4639+ tsk.err_msg = Utils.ex_stack()
4640+ tsk.hasrun = EXCEPTION
4641+ else:
4642+ tsk.hasrun = SUCCESS
4643+ if tsk.hasrun != SUCCESS:
4644+ m.error_handler(tsk)
4645+
4646+ m.out.put(tsk)
4647+
4648+class TaskConsumer(threading.Thread):
4649+ ready = Queue(0)
4650+ consumers = []
4651+
4652+ def __init__(self):
4653+ threading.Thread.__init__(self)
4654+ self.setDaemon(1)
4655+ self.start()
4656+
4657+ def run(self):
4658+ try:
4659+ self.loop()
4660+ except:
4661+ pass
4662+
4663+ def loop(self):
4664+ while 1:
4665+ tsk = TaskConsumer.ready.get()
4666+ process_task(tsk)
4667+
4668+class Parallel(object):
4669+ """
4670+ keep the consumer threads busy, and avoid consuming cpu cycles
4671+ when no more tasks can be added (end of the build, etc)
4672+ """
4673+ def __init__(self, bld, j=2):
4674+
4675+ # number of consumers
4676+ self.numjobs = j
4677+
4678+ self.manager = bld.task_manager
4679+ self.manager.current_group = 0
4680+
4681+ self.total = self.manager.total()
4682+
4683+ # tasks waiting to be processed - IMPORTANT
4684+ self.outstanding = []
4685+ self.maxjobs = MAXJOBS
4686+
4687+		# tasks that are waiting for another task to complete
4688+ self.frozen = []
4689+
4690+ # tasks returned by the consumers
4691+ self.out = Queue(0)
4692+
4693+ self.count = 0 # tasks not in the producer area
4694+
4695+ self.processed = 1 # progress indicator
4696+
4697+ self.stop = False # error condition to stop the build
4698+ self.error = False # error flag
4699+
4700+ def get_next(self):
4701+ "override this method to schedule the tasks in a particular order"
4702+ if not self.outstanding:
4703+ return None
4704+ return self.outstanding.pop(0)
4705+
4706+ def postpone(self, tsk):
4707+ "override this method to schedule the tasks in a particular order"
4708+ # TODO consider using a deque instead
4709+ if random.randint(0, 1):
4710+ self.frozen.insert(0, tsk)
4711+ else:
4712+ self.frozen.append(tsk)
4713+
4714+ def refill_task_list(self):
4715+ "called to set the next group of tasks"
4716+
4717+ while self.count > self.numjobs + GAP or self.count >= self.maxjobs:
4718+ self.get_out()
4719+
4720+ while not self.outstanding:
4721+ if self.count:
4722+ self.get_out()
4723+
4724+ if self.frozen:
4725+ self.outstanding += self.frozen
4726+ self.frozen = []
4727+ elif not self.count:
4728+ (jobs, tmp) = self.manager.get_next_set()
4729+ if jobs != None: self.maxjobs = jobs
4730+ if tmp: self.outstanding += tmp
4731+ break
4732+
4733+ def get_out(self):
4734+		"the tasks that were submitted for execution are all collected using get_out"
4735+ ret = self.out.get()
4736+ self.manager.add_finished(ret)
4737+ if not self.stop and getattr(ret, 'more_tasks', None):
4738+ self.outstanding += ret.more_tasks
4739+ self.total += len(ret.more_tasks)
4740+ self.count -= 1
4741+
4742+ def error_handler(self, tsk):
4743+ "by default, errors make the build stop (not thread safe so be careful)"
4744+ if not Options.options.keep:
4745+ self.stop = True
4746+ self.error = True
4747+
4748+ def start(self):
4749+ "execute the tasks"
4750+
4751+ if TaskConsumer.consumers:
4752+ # the worker pool is usually loaded lazily (see below)
4753+ # in case it is re-used with a different value of numjobs:
4754+ while len(TaskConsumer.consumers) < self.numjobs:
4755+ TaskConsumer.consumers.append(TaskConsumer())
4756+
4757+ while not self.stop:
4758+
4759+ self.refill_task_list()
4760+
4761+ # consider the next task
4762+ tsk = self.get_next()
4763+ if not tsk:
4764+ if self.count:
4765+ # tasks may add new ones after they are run
4766+ continue
4767+ else:
4768+ # no tasks to run, no tasks running, time to exit
4769+ break
4770+
4771+ if tsk.hasrun:
4772+ # if the task is marked as "run", just skip it
4773+ self.processed += 1
4774+ self.manager.add_finished(tsk)
4775+ continue
4776+
4777+ try:
4778+ st = tsk.runnable_status()
4779+ except Exception, e:
4780+ self.processed += 1
4781+ if self.stop and not Options.options.keep:
4782+ tsk.hasrun = SKIPPED
4783+ self.manager.add_finished(tsk)
4784+ continue
4785+ self.error_handler(tsk)
4786+ self.manager.add_finished(tsk)
4787+ tsk.hasrun = EXCEPTION
4788+ tsk.err_msg = Utils.ex_stack()
4789+ continue
4790+
4791+ if st == ASK_LATER:
4792+ self.postpone(tsk)
4793+ elif st == SKIP_ME:
4794+ self.processed += 1
4795+ tsk.hasrun = SKIPPED
4796+ self.manager.add_finished(tsk)
4797+ else:
4798+ # run me: put the task in ready queue
4799+ tsk.position = (self.processed, self.total)
4800+ self.count += 1
4801+ tsk.master = self
4802+ self.processed += 1
4803+
4804+ if self.numjobs == 1:
4805+ process_task(tsk)
4806+ else:
4807+ TaskConsumer.ready.put(tsk)
4808+ # create the consumer threads only if there is something to consume
4809+ if not TaskConsumer.consumers:
4810+ TaskConsumer.consumers = [TaskConsumer() for i in xrange(self.numjobs)]
4811+
4812+ # self.count represents the tasks that have been made available to the consumer threads
4813+ # collect all the tasks after an error else the message may be incomplete
4814+ while self.error and self.count:
4815+ self.get_out()
4816+
4817+ #print loop
4818+ assert (self.count == 0 or self.stop)
4819+
4820diff --git a/buildtools/wafadmin/Scripting.py b/buildtools/wafadmin/Scripting.py
4821new file mode 100644
4822index 0000000..d975bd9
4823--- /dev/null
4824+++ b/buildtools/wafadmin/Scripting.py
4825@@ -0,0 +1,586 @@
4826+#!/usr/bin/env python
4827+# encoding: utf-8
4828+# Thomas Nagy, 2005 (ita)
4829+
4830+"Module called for configuring, compiling and installing targets"
4831+
4832+import os, sys, shutil, traceback, datetime, inspect, errno
4833+
4834+import Utils, Configure, Build, Logs, Options, Environment, Task
4835+from Logs import error, warn, info
4836+from Constants import *
4837+
4838+g_gz = 'bz2'
4839+commands = []
4840+
4841+def prepare_impl(t, cwd, ver, wafdir):
4842+ Options.tooldir = [t]
4843+ Options.launch_dir = cwd
4844+
4845+ # some command-line options can be processed immediately
4846+ if '--version' in sys.argv:
4847+ opt_obj = Options.Handler()
4848+ opt_obj.curdir = cwd
4849+ opt_obj.parse_args()
4850+ sys.exit(0)
4851+
4852+ # now find the wscript file
4853+ msg1 = 'Waf: Please run waf from a directory containing a file named "%s" or run distclean' % WSCRIPT_FILE
4854+
4855+ # in theory projects can be configured in an autotool-like manner:
4856+ # mkdir build && cd build && ../waf configure && ../waf
4857+ build_dir_override = None
4858+ candidate = None
4859+
4860+ lst = os.listdir(cwd)
4861+
4862+ search_for_candidate = True
4863+ if WSCRIPT_FILE in lst:
4864+ candidate = cwd
4865+
4866+ elif 'configure' in sys.argv and not WSCRIPT_BUILD_FILE in lst:
4867+ # autotool-like configuration
4868+ calldir = os.path.abspath(os.path.dirname(sys.argv[0]))
4869+ if WSCRIPT_FILE in os.listdir(calldir):
4870+ candidate = calldir
4871+ search_for_candidate = False
4872+ else:
4873+ error('arg[0] directory does not contain a wscript file')
4874+ sys.exit(1)
4875+ build_dir_override = cwd
4876+
4877+ # climb up to find a script if it is not found
4878+ while search_for_candidate:
4879+ if len(cwd) <= 3:
4880+ break # stop at / or c:
4881+ dirlst = os.listdir(cwd)
4882+ if WSCRIPT_FILE in dirlst:
4883+ candidate = cwd
4884+ if 'configure' in sys.argv and candidate:
4885+ break
4886+ if Options.lockfile in dirlst:
4887+ env = Environment.Environment()
4888+ try:
4889+ env.load(os.path.join(cwd, Options.lockfile))
4890+ except:
4891+ error('could not load %r' % Options.lockfile)
4892+ try:
4893+ os.stat(env['cwd'])
4894+ except:
4895+ candidate = cwd
4896+ else:
4897+ candidate = env['cwd']
4898+ break
4899+ cwd = os.path.dirname(cwd) # climb up
4900+
4901+ if not candidate:
4902+ # check if the user only wanted to display the help
4903+ if '-h' in sys.argv or '--help' in sys.argv:
4904+ warn('No wscript file found: the help message may be incomplete')
4905+ opt_obj = Options.Handler()
4906+ opt_obj.curdir = cwd
4907+ opt_obj.parse_args()
4908+ else:
4909+ error(msg1)
4910+ sys.exit(0)
4911+
4912+ # We have found wscript, but there is no guarantee that it is valid
4913+ try:
4914+ os.chdir(candidate)
4915+ except OSError:
4916+ raise Utils.WafError("the folder %r is unreadable" % candidate)
4917+
4918+ # define the main module containing the functions init, shutdown, ..
4919+ Utils.set_main_module(os.path.join(candidate, WSCRIPT_FILE))
4920+
4921+ if build_dir_override:
4922+ d = getattr(Utils.g_module, BLDDIR, None)
4923+ if d:
4924+ # test if user has set the blddir in wscript.
4925+ msg = ' Overriding build directory %s with %s' % (d, build_dir_override)
4926+ warn(msg)
4927+ Utils.g_module.blddir = build_dir_override
4928+
4929+ # bind a few methods and classes by default
4930+
4931+ def set_def(obj, name=''):
4932+ n = name or obj.__name__
4933+ if not n in Utils.g_module.__dict__:
4934+ setattr(Utils.g_module, n, obj)
4935+
4936+ for k in [dist, distclean, distcheck, clean, install, uninstall]:
4937+ set_def(k)
4938+
4939+ set_def(Configure.ConfigurationContext, 'configure_context')
4940+
4941+ for k in ['build', 'clean', 'install', 'uninstall']:
4942+ set_def(Build.BuildContext, k + '_context')
4943+
4944+ # now parse the options from the user wscript file
4945+ opt_obj = Options.Handler(Utils.g_module)
4946+ opt_obj.curdir = candidate
4947+ try:
4948+ f = Utils.g_module.set_options
4949+ except AttributeError:
4950+ pass
4951+ else:
4952+ opt_obj.sub_options([''])
4953+ opt_obj.parse_args()
4954+
4955+ if not 'init' in Utils.g_module.__dict__:
4956+ Utils.g_module.init = Utils.nada
4957+ if not 'shutdown' in Utils.g_module.__dict__:
4958+ Utils.g_module.shutdown = Utils.nada
4959+
4960+ main()
4961+
4962+def prepare(t, cwd, ver, wafdir):
4963+ if WAFVERSION != ver:
4964+ msg = 'Version mismatch: waf %s <> wafadmin %s (wafdir %s)' % (ver, WAFVERSION, wafdir)
4965+ print('\033[91mError: %s\033[0m' % msg)
4966+ sys.exit(1)
4967+
4968+ #"""
4969+ try:
4970+ prepare_impl(t, cwd, ver, wafdir)
4971+ except Utils.WafError, e:
4972+ error(str(e))
4973+ sys.exit(1)
4974+ except KeyboardInterrupt:
4975+ Utils.pprint('RED', 'Interrupted')
4976+ sys.exit(68)
4977+ """
4978+ import cProfile, pstats
4979+ cProfile.runctx("import Scripting; Scripting.prepare_impl(t, cwd, ver, wafdir)", {},
4980+ {'t': t, 'cwd':cwd, 'ver':ver, 'wafdir':wafdir},
4981+ 'profi.txt')
4982+ p = pstats.Stats('profi.txt')
4983+ p.sort_stats('time').print_stats(45)
4984+ #"""
4985+
4986+def main():
4987+ global commands
4988+ commands = Options.arg_line[:]
4989+
4990+ while commands:
4991+ x = commands.pop(0)
4992+
4993+ ini = datetime.datetime.now()
4994+ if x == 'configure':
4995+ fun = configure
4996+ elif x == 'build':
4997+ fun = build
4998+ else:
4999+ fun = getattr(Utils.g_module, x, None)
5000+
5001+ if not fun:
5002+ raise Utils.WscriptError('No such command %r' % x)
5003+
5004+ ctx = getattr(Utils.g_module, x + '_context', Utils.Context)()
5005+
5006+ if x in ['init', 'shutdown', 'dist', 'distclean', 'distcheck']:
5007+ # compatibility TODO remove in waf 1.6
5008+ try:
5009+ fun(ctx)
5010+ except TypeError:
5011+ fun()
5012+ else:
5013+ fun(ctx)
5014+
5015+ ela = ''
5016+ if not Options.options.progress_bar:
5017+ ela = ' (%s)' % Utils.get_elapsed_time(ini)
5018+
5019+ if x != 'init' and x != 'shutdown':
5020+ info('%r finished successfully%s' % (x, ela))
5021+
5022+ if not commands and x != 'shutdown':
5023+ commands.append('shutdown')
5024+
5025+def configure(conf):
5026+
5027+ src = getattr(Options.options, SRCDIR, None)
5028+ if not src: src = getattr(Utils.g_module, SRCDIR, None)
5029+ if not src: src = getattr(Utils.g_module, 'top', None)
5030+ if not src:
5031+ src = '.'
5032+ incomplete_src = 1
5033+ src = os.path.abspath(src)
5034+
5035+ bld = getattr(Options.options, BLDDIR, None)
5036+ if not bld: bld = getattr(Utils.g_module, BLDDIR, None)
5037+ if not bld: bld = getattr(Utils.g_module, 'out', None)
5038+ if not bld:
5039+ bld = 'build'
5040+ incomplete_bld = 1
5041+ if bld == '.':
5042+ raise Utils.WafError('Setting blddir="." may cause distclean problems')
5043+ bld = os.path.abspath(bld)
5044+
5045+ try: os.makedirs(bld)
5046+ except OSError: pass
5047+
5048+ # It is not possible to compile specific targets in the configuration
5049+ # this may cause configuration errors if autoconfig is set
5050+ targets = Options.options.compile_targets
5051+ Options.options.compile_targets = None
5052+ Options.is_install = False
5053+
5054+ conf.srcdir = src
5055+ conf.blddir = bld
5056+ conf.post_init()
5057+
5058+ if 'incomplete_src' in vars():
5059+ conf.check_message_1('Setting srcdir to')
5060+ conf.check_message_2(src)
5061+ if 'incomplete_bld' in vars():
5062+ conf.check_message_1('Setting blddir to')
5063+ conf.check_message_2(bld)
5064+
5065+ # calling to main wscript's configure()
5066+ conf.sub_config([''])
5067+
5068+ conf.store()
5069+
5070+ # this will write a configure lock so that subsequent builds will
5071+ # consider the current path as the root directory (see prepare_impl).
5072+ # to remove: use 'waf distclean'
5073+ env = Environment.Environment()
5074+ env[BLDDIR] = bld
5075+ env[SRCDIR] = src
5076+ env['argv'] = sys.argv
5077+ env['commands'] = Options.commands
5078+ env['options'] = Options.options.__dict__
5079+
5080+ # conf.hash & conf.files hold wscript files paths and hash
5081+ # (used only by Configure.autoconfig)
5082+ env['hash'] = conf.hash
5083+ env['files'] = conf.files
5084+ env['environ'] = dict(conf.environ)
5085+ env['cwd'] = os.path.split(Utils.g_module.root_path)[0]
5086+
5087+ if Utils.g_module.root_path != src:
5088+ # in case the source dir is somewhere else
5089+ env.store(os.path.join(src, Options.lockfile))
5090+
5091+ env.store(Options.lockfile)
5092+
5093+ Options.options.compile_targets = targets
5094+
5095+def clean(bld):
5096+ '''removes the build files'''
5097+ try:
5098+ proj = Environment.Environment(Options.lockfile)
5099+ except IOError:
5100+ raise Utils.WafError('Nothing to clean (project not configured)')
5101+
5102+ bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
5103+ bld.load_envs()
5104+
5105+ bld.is_install = 0 # False
5106+
5107+ # read the scripts - and set the path to the wscript path (useful for srcdir='/foo/bar')
5108+ bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
5109+
5110+ try:
5111+ bld.clean()
5112+ finally:
5113+ bld.save()
5114+
5115+def check_configured(bld):
5116+ if not Configure.autoconfig:
5117+ return bld
5118+
5119+ conf_cls = getattr(Utils.g_module, 'configure_context', Utils.Context)
5120+ bld_cls = getattr(Utils.g_module, 'build_context', Utils.Context)
5121+
5122+ def reconf(proj):
5123+ back = (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose)
5124+
5125+ Options.commands = proj['commands']
5126+ Options.options.__dict__ = proj['options']
5127+ conf = conf_cls()
5128+ conf.environ = proj['environ']
5129+ configure(conf)
5130+
5131+ (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose) = back
5132+
5133+ try:
5134+ proj = Environment.Environment(Options.lockfile)
5135+ except IOError:
5136+ conf = conf_cls()
5137+ configure(conf)
5138+ else:
5139+ try:
5140+ bld = bld_cls()
5141+ bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
5142+ bld.load_envs()
5143+ except Utils.WafError:
5144+ reconf(proj)
5145+ return bld_cls()
5146+
5147+ try:
5148+ proj = Environment.Environment(Options.lockfile)
5149+ except IOError:
5150+ raise Utils.WafError('Auto-config: project does not configure (bug)')
5151+
5152+ h = 0
5153+ try:
5154+ for file in proj['files']:
5155+ if file.endswith('configure'):
5156+ h = hash((h, Utils.readf(file)))
5157+ else:
5158+ mod = Utils.load_module(file)
5159+ h = hash((h, mod.waf_hash_val))
5160+ except (OSError, IOError):
5161+ warn('Reconfiguring the project: a file is unavailable')
5162+ reconf(proj)
5163+ else:
5164+ if (h != proj['hash']):
5165+ warn('Reconfiguring the project: the configuration has changed')
5166+ reconf(proj)
5167+
5168+ return bld_cls()
5169+
5170+def install(bld):
5171+ '''installs the build files'''
5172+ bld = check_configured(bld)
5173+
5174+ Options.commands['install'] = True
5175+ Options.commands['uninstall'] = False
5176+ Options.is_install = True
5177+
5178+ bld.is_install = INSTALL
5179+
5180+ build_impl(bld)
5181+ bld.install()
5182+
5183+def uninstall(bld):
5184+ '''removes the installed files'''
5185+ Options.commands['install'] = False
5186+ Options.commands['uninstall'] = True
5187+ Options.is_install = True
5188+
5189+ bld.is_install = UNINSTALL
5190+
5191+ try:
5192+ def runnable_status(self):
5193+ return SKIP_ME
5194+ setattr(Task.Task, 'runnable_status_back', Task.Task.runnable_status)
5195+ setattr(Task.Task, 'runnable_status', runnable_status)
5196+
5197+ build_impl(bld)
5198+ bld.install()
5199+ finally:
5200+ setattr(Task.Task, 'runnable_status', Task.Task.runnable_status_back)
5201+
5202+def build(bld):
5203+ bld = check_configured(bld)
5204+
5205+ Options.commands['install'] = False
5206+ Options.commands['uninstall'] = False
5207+ Options.is_install = False
5208+
5209+ bld.is_install = 0 # False
5210+
5211+ return build_impl(bld)
5212+
5213+def build_impl(bld):
5214+ # compile the project and/or install the files
5215+ try:
5216+ proj = Environment.Environment(Options.lockfile)
5217+ except IOError:
5218+ raise Utils.WafError("Project not configured (run 'waf configure' first)")
5219+
5220+ bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
5221+ bld.load_envs()
5222+
5223+ info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
5224+ bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
5225+
5226+ # execute something immediately before the build starts
5227+ bld.pre_build()
5228+
5229+ try:
5230+ bld.compile()
5231+ finally:
5232+ if Options.options.progress_bar: print('')
5233+ info("Waf: Leaving directory `%s'" % bld.bldnode.abspath())
5234+
5235+ # execute something immediately after a successful build
5236+ bld.post_build()
5237+
5238+ bld.install()
5239+
5240+excludes = '.bzr .bzrignore .git .gitignore .svn CVS .cvsignore .arch-ids {arch} SCCS BitKeeper .hg _MTN _darcs Makefile Makefile.in config.log .gitattributes .hgignore .hgtags'.split()
5241+dist_exts = '~ .rej .orig .pyc .pyo .bak .tar.bz2 tar.gz .zip .swp'.split()
5242+def dont_dist(name, src, build_dir):
5243+ global excludes, dist_exts
5244+
5245+ if (name.startswith(',,')
5246+ or name.startswith('++')
5247+ or name.startswith('.waf')
5248+ or (src == '.' and name == Options.lockfile)
5249+ or name in excludes
5250+ or name == build_dir
5251+ ):
5252+ return True
5253+
5254+ for ext in dist_exts:
5255+ if name.endswith(ext):
5256+ return True
5257+
5258+ return False
5259+
5260+# like shutil.copytree
5261+# but excludes some files and raises exceptions immediately
5262+def copytree(src, dst, build_dir):
5263+ names = os.listdir(src)
5264+ os.makedirs(dst)
5265+ for name in names:
5266+ srcname = os.path.join(src, name)
5267+ dstname = os.path.join(dst, name)
5268+
5269+ if dont_dist(name, src, build_dir):
5270+ continue
5271+
5272+ if os.path.isdir(srcname):
5273+ copytree(srcname, dstname, build_dir)
5274+ else:
5275+ shutil.copy2(srcname, dstname)
5276+
5277+# TODO in waf 1.6, change this method if "srcdir == blddir" is allowed
5278+def distclean(ctx=None):
5279+ '''removes the build directory'''
5280+ global commands
5281+ lst = os.listdir('.')
5282+ for f in lst:
5283+ if f == Options.lockfile:
5284+ try:
5285+ proj = Environment.Environment(f)
5286+ except:
5287+ Logs.warn('could not read %r' % f)
5288+ continue
5289+
5290+ try:
5291+ shutil.rmtree(proj[BLDDIR])
5292+ except IOError:
5293+ pass
5294+ except OSError, e:
5295+ if e.errno != errno.ENOENT:
5296+ Logs.warn('project %r cannot be removed' % proj[BLDDIR])
5297+
5298+ try:
5299+ os.remove(f)
5300+ except OSError, e:
5301+ if e.errno != errno.ENOENT:
5302+ Logs.warn('file %r cannot be removed' % f)
5303+
5304+ # remove the local waf cache
5305+ if not commands and f.startswith('.waf'):
5306+ shutil.rmtree(f, ignore_errors=True)
5307+
5308+# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
5309+def dist(appname='', version=''):
5310+ '''makes a tarball for redistributing the sources'''
5311+	# returns the name of the created archive
5312+ import tarfile
5313+
5314+ if not appname: appname = Utils.g_module.APPNAME
5315+ if not version: version = Utils.g_module.VERSION
5316+
5317+ tmp_folder = appname + '-' + version
5318+ if g_gz in ['gz', 'bz2']:
5319+ arch_name = tmp_folder + '.tar.' + g_gz
5320+ else:
5321+ arch_name = tmp_folder + '.' + 'zip'
5322+
5323+ # remove the previous dir
5324+ try:
5325+ shutil.rmtree(tmp_folder)
5326+ except (OSError, IOError):
5327+ pass
5328+
5329+ # remove the previous archive
5330+ try:
5331+ os.remove(arch_name)
5332+ except (OSError, IOError):
5333+ pass
5334+
5335+ # copy the files into the temporary folder
5336+ blddir = getattr(Utils.g_module, BLDDIR, None)
5337+ if not blddir:
5338+ blddir = getattr(Utils.g_module, 'out', None)
5339+ copytree('.', tmp_folder, blddir)
5340+
5341+ # undocumented hook for additional cleanup
5342+ dist_hook = getattr(Utils.g_module, 'dist_hook', None)
5343+ if dist_hook:
5344+ back = os.getcwd()
5345+ os.chdir(tmp_folder)
5346+ try:
5347+ dist_hook()
5348+ finally:
5349+ # go back to the root directory
5350+ os.chdir(back)
5351+
5352+ if g_gz in ['gz', 'bz2']:
5353+ tar = tarfile.open(arch_name, 'w:' + g_gz)
5354+ tar.add(tmp_folder)
5355+ tar.close()
5356+ else:
5357+ Utils.zip_folder(tmp_folder, arch_name, tmp_folder)
5358+
5359+ try: from hashlib import sha1 as sha
5360+ except ImportError: from sha import sha
5361+ try:
5362+ digest = " (sha=%r)" % sha(Utils.readf(arch_name)).hexdigest()
5363+ except:
5364+ digest = ''
5365+
5366+ info('New archive created: %s%s' % (arch_name, digest))
5367+
5368+ if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
5369+ return arch_name
5370+
5371+# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
5372+def distcheck(appname='', version='', subdir=''):
5373+ '''checks if the sources compile (tarball from 'dist')'''
5374+ import tempfile, tarfile
5375+
5376+ if not appname: appname = Utils.g_module.APPNAME
5377+ if not version: version = Utils.g_module.VERSION
5378+
5379+ waf = os.path.abspath(sys.argv[0])
5380+ tarball = dist(appname, version)
5381+
5382+ path = appname + '-' + version
5383+
5384+ # remove any previous instance
5385+ if os.path.exists(path):
5386+ shutil.rmtree(path)
5387+
5388+ t = tarfile.open(tarball)
5389+ for x in t: t.extract(x)
5390+ t.close()
5391+
5392+ # build_path is the directory for the waf invocation
5393+ if subdir:
5394+ build_path = os.path.join(path, subdir)
5395+ else:
5396+ build_path = path
5397+
5398+ instdir = tempfile.mkdtemp('.inst', '%s-%s' % (appname, version))
5399+ ret = Utils.pproc.Popen([waf, 'configure', 'build', 'install', 'uninstall', '--destdir=' + instdir], cwd=build_path).wait()
5400+ if ret:
5401+ raise Utils.WafError('distcheck failed with code %i' % ret)
5402+
5403+ if os.path.exists(instdir):
5404+ raise Utils.WafError('distcheck succeeded, but files were left in %s' % instdir)
5405+
5406+ shutil.rmtree(path)
5407+
5408+# FIXME remove in Waf 1.6 (kept for compatibility)
5409+def add_subdir(dir, bld):
5410+ bld.recurse(dir, 'build')
5411+
5412diff --git a/buildtools/wafadmin/Task.py b/buildtools/wafadmin/Task.py
5413new file mode 100644
5414index 0000000..5cda2ec
5415--- /dev/null
5416+++ b/buildtools/wafadmin/Task.py
5417@@ -0,0 +1,1200 @@
5418+#!/usr/bin/env python
5419+# encoding: utf-8
5420+# Thomas Nagy, 2005-2008 (ita)
5421+
5422+"""
5423+Running tasks in parallel is a simple problem, but in practice it is more complicated:
5424+* dependencies discovered during the build (dynamic task creation)
5425+* dependencies discovered after files are compiled
5426+* the amount of tasks and dependencies (graph size) can be huge
5427+
5428+This is why the dependency management is split into three different levels:
5429+1. groups of tasks that all run after another group of tasks
5430+2. groups of tasks that can be run in parallel
5431+3. tasks that can run in parallel, but with possible unknown ad-hoc dependencies
5432+
5433+The point #1 represents a strict sequential order between groups of tasks, for example a compiler is produced
5434+and used to compile the rest, whereas #2 and #3 represent partial order constraints where #2 applies to the kind of task
5435+and #3 applies to the task instances.
5436+
5437+#1 is held by the task manager: ordered list of TaskGroups (see bld.add_group)
5438+#2 is held by the task groups and the task types: precedence after/before (topological sort),
5439+ and the constraints extracted from file extensions
5440+#3 is held by the tasks individually (attribute run_after),
5441+ and the scheduler (Runner.py) use Task::runnable_status to reorder the tasks
5442+
5443+--
5444+
5445+To try, use something like this in your code:
5446+import Constants, Task
5447+Task.algotype = Constants.MAXPARALLEL
5448+
5449+--
5450+
5451+There are two concepts with the tasks (individual units of change):
5452+* dependency (if 1 is recompiled, recompile 2)
5453+* order (run 2 after 1)
5454+
5455+example 1: if t1 depends on t2 and t2 depends on t3 it is not necessary to make t1 depend on t3 (dependency is transitive)
5456+example 2: if t1 depends on a node produced by t2, it is not immediately obvious that t1 must run after t2 (order is not obvious)
5457+
5458+The role of the Task Manager is to provide the tasks in order (groups of tasks that may be run in parallel, one group after the other)
5459+
5460+"""
5461+
5462+import os, shutil, sys, re, random, datetime, tempfile, shlex
5463+from Utils import md5
5464+import Build, Runner, Utils, Node, Logs, Options
5465+from Logs import debug, warn, error
5466+from Constants import *
5467+
5468+algotype = NORMAL
5469+#algotype = JOBCONTROL
5470+#algotype = MAXPARALLEL
5471+
5472+COMPILE_TEMPLATE_SHELL = '''
5473+def f(task):
5474+ env = task.env
5475+ wd = getattr(task, 'cwd', None)
5476+ p = env.get_flat
5477+ cmd = \'\'\' %s \'\'\' % s
5478+ return task.exec_command(cmd, cwd=wd)
5479+'''
5480+
5481+COMPILE_TEMPLATE_NOSHELL = '''
5482+def f(task):
5483+ env = task.env
5484+ wd = getattr(task, 'cwd', None)
5485+ def to_list(xx):
5486+ if isinstance(xx, str): return [xx]
5487+ return xx
5488+ lst = []
5489+ %s
5490+ lst = [x for x in lst if x]
5491+ return task.exec_command(lst, cwd=wd)
5492+'''
5493+
5494+
5495+"""
5496+Enable different kinds of dependency algorithms:
5497+1 make groups: first compile all cpps and then compile all links (NORMAL)
5498+2 parallelize all (each link task run after its dependencies) (MAXPARALLEL)
5499+3 like 1 but provide additional constraints for the parallelization (MAXJOBS)
5500+
5501+In theory 1 will be faster than 2 for waf, but it might be slower for some builds
5502+Scheme 2 does not allow running tasks one by one, so it can cause disk thrashing on huge builds
5503+"""
5504+
5505+file_deps = Utils.nada
5506+"""
5507+An additional dependency pre-check may be added by replacing the function file_deps,
5508+e.g. with extract_outputs or extract_deps below.
5509+"""
5510+
5511+class TaskManager(object):
5512+ """The manager is attached to the build object, it holds a list of TaskGroup"""
5513+ def __init__(self):
5514+ self.groups = []
5515+ self.tasks_done = []
5516+ self.current_group = 0
5517+ self.groups_names = {}
5518+
5519+ def group_name(self, g):
5520+ """name for the group g (utility)"""
5521+ if not isinstance(g, TaskGroup):
5522+ g = self.groups[g]
5523+ for x in self.groups_names:
5524+ if id(self.groups_names[x]) == id(g):
5525+ return x
5526+ return ''
5527+
5528+ def group_idx(self, tg):
5529+ """group the task generator tg is in"""
5530+ se = id(tg)
5531+ for i in range(len(self.groups)):
5532+ g = self.groups[i]
5533+ for t in g.tasks_gen:
5534+ if id(t) == se:
5535+ return i
5536+ return None
5537+
5538+ def get_next_set(self):
5539+ """return the next set of tasks to execute
5540+ the first parameter is the maximum amount of parallelization that may occur"""
5541+ ret = None
5542+ while not ret and self.current_group < len(self.groups):
5543+ ret = self.groups[self.current_group].get_next_set()
5544+ if ret: return ret
5545+ else:
5546+ self.groups[self.current_group].process_install()
5547+ self.current_group += 1
5548+ return (None, None)
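	# Rough consumption sketch (an assumption about how the scheduler in
	# Runner.py drives this method, not the actual Runner code): the manager
	# hands out batches and signals exhaustion with (None, None):
	#
	#	while 1:
	#		(maxjobs, batch) = bld.task_manager.get_next_set()
	#		if not batch:
	#			break
	#		# run up to maxjobs tasks from 'batch' concurrently, using
	#		# runnable_status() / call_run() / post_run() on each task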
5549+
5550+ def add_group(self, name=None, set=True):
5551+ #if self.groups and not self.groups[0].tasks:
5552+ # error('add_group: an empty group is already present')
5553+ g = TaskGroup()
5554+
5555+ if name and name in self.groups_names:
5556+ error('add_group: name %s already present' % name)
5557+ self.groups_names[name] = g
5558+ self.groups.append(g)
5559+ if set:
5560+ self.current_group = len(self.groups) - 1
5561+
5562+ def set_group(self, idx):
5563+ if isinstance(idx, str):
5564+ g = self.groups_names[idx]
5565+ for x in xrange(len(self.groups)):
5566+ if id(g) == id(self.groups[x]):
5567+ self.current_group = x
5568+ else:
5569+ self.current_group = idx
5570+
5571+ def add_task_gen(self, tgen):
5572+ if not self.groups: self.add_group()
5573+ self.groups[self.current_group].tasks_gen.append(tgen)
5574+
5575+ def add_task(self, task):
5576+ if not self.groups: self.add_group()
5577+ self.groups[self.current_group].tasks.append(task)
5578+
5579+ def total(self):
5580+ total = 0
5581+ if not self.groups: return 0
5582+ for group in self.groups:
5583+ total += len(group.tasks)
5584+ return total
5585+
5586+ def add_finished(self, tsk):
5587+ self.tasks_done.append(tsk)
5588+ bld = tsk.generator.bld
5589+ if bld.is_install:
5590+ f = None
5591+ if 'install' in tsk.__dict__:
5592+ f = tsk.__dict__['install']
5593+ # install=0 to prevent installation
5594+ if f: f(tsk)
5595+ else:
5596+ tsk.install()
5597+
5598+class TaskGroup(object):
5599+ "the compilation of one group does not begin until the previous group has finished (in the manager)"
5600+ def __init__(self):
5601+ self.tasks = [] # this list will be consumed
5602+ self.tasks_gen = []
5603+
5604+ self.cstr_groups = Utils.DefaultDict(list) # tasks having equivalent constraints
5605+ self.cstr_order = Utils.DefaultDict(set) # partial order between the cstr groups
5606+ self.temp_tasks = [] # tasks put on hold
5607+ self.ready = 0
5608+ self.post_funs = []
5609+
5610+ def reset(self):
5611+ "clears the state of the object (put back the tasks into self.tasks)"
5612+ for x in self.cstr_groups:
5613+ self.tasks += self.cstr_groups[x]
5614+ self.tasks = self.temp_tasks + self.tasks
5615+ self.temp_tasks = []
5616+ self.cstr_groups = Utils.DefaultDict(list)
5617+ self.cstr_order = Utils.DefaultDict(set)
5618+ self.ready = 0
5619+
5620+ def process_install(self):
5621+ for (f, k, kw) in self.post_funs:
5622+ f(*k, **kw)
5623+
5624+ def prepare(self):
5625+ "prepare the scheduling"
5626+ self.ready = 1
5627+ file_deps(self.tasks)
5628+ self.make_cstr_groups()
5629+ self.extract_constraints()
5630+
5631+ def get_next_set(self):
5632+ "next list of tasks to execute using max job settings, returns (maxjobs, task_list)"
5633+ global algotype
5634+ if algotype == NORMAL:
5635+ tasks = self.tasks_in_parallel()
5636+ maxj = MAXJOBS
5637+ elif algotype == JOBCONTROL:
5638+ (maxj, tasks) = self.tasks_by_max_jobs()
5639+ elif algotype == MAXPARALLEL:
5640+ tasks = self.tasks_with_inner_constraints()
5641+ maxj = MAXJOBS
5642+ else:
5643+ raise Utils.WafError("unknown algorithm type %s" % (algotype))
5644+
5645+ if not tasks: return ()
5646+ return (maxj, tasks)
5647+
5648+ def make_cstr_groups(self):
5649+ "unite the tasks that have similar constraints"
5650+ self.cstr_groups = Utils.DefaultDict(list)
5651+ for x in self.tasks:
5652+ h = x.hash_constraints()
5653+ self.cstr_groups[h].append(x)
5654+
5655+ def set_order(self, a, b):
5656+ self.cstr_order[a].add(b)
5657+
5658+ def compare_exts(self, t1, t2):
5659+ "extension production"
5660+ x = "ext_in"
5661+ y = "ext_out"
5662+ in_ = t1.attr(x, ())
5663+ out_ = t2.attr(y, ())
5664+ for k in in_:
5665+ if k in out_:
5666+ return -1
5667+ in_ = t2.attr(x, ())
5668+ out_ = t1.attr(y, ())
5669+ for k in in_:
5670+ if k in out_:
5671+ return 1
5672+ return 0
5673+
5674+ def compare_partial(self, t1, t2):
5675+ "partial relations after/before"
5676+ m = "after"
5677+ n = "before"
5678+ name = t2.__class__.__name__
5679+ if name in Utils.to_list(t1.attr(m, ())): return -1
5680+ elif name in Utils.to_list(t1.attr(n, ())): return 1
5681+ name = t1.__class__.__name__
5682+ if name in Utils.to_list(t2.attr(m, ())): return 1
5683+ elif name in Utils.to_list(t2.attr(n, ())): return -1
5684+ return 0
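	# Hedged illustration of the attributes consumed by the two comparators
	# above, written as a waf tool would (task type names and commands are
	# invented, not real waf tools):
	#
	#	Task.simple_task_type('fake_cc', '${CC} ${SRC} -o ${TGT}',
	#		ext_in='.c', ext_out='.o')
	#	Task.simple_task_type('fake_link', '${LINK} ${SRC} -o ${TGT}',
	#		ext_in='.o', after='fake_cc')
	#
	# compare_exts() puts 'fake_cc' before 'fake_link' because '.o' appears in
	# the ext_out of one and the ext_in of the other; compare_partial() reaches
	# the same conclusion from the explicit after='fake_cc'.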
5685+
5686+ def extract_constraints(self):
5687+ "extract the parallelization constraints from the tasks with different constraints"
5688+ keys = self.cstr_groups.keys()
5689+ max = len(keys)
5690+ # hopefully the length of this list is short
5691+ for i in xrange(max):
5692+ t1 = self.cstr_groups[keys[i]][0]
5693+ for j in xrange(i + 1, max):
5694+ t2 = self.cstr_groups[keys[j]][0]
5695+
5696+ # add the constraints based on the comparisons
5697+ val = (self.compare_exts(t1, t2)
5698+ or self.compare_partial(t1, t2)
5699+ )
5700+ if val > 0:
5701+ self.set_order(keys[i], keys[j])
5702+ elif val < 0:
5703+ self.set_order(keys[j], keys[i])
5704+
5705+ def tasks_in_parallel(self):
5706+ "(NORMAL) next list of tasks that may be executed in parallel"
5707+
5708+ if not self.ready: self.prepare()
5709+
5710+ keys = self.cstr_groups.keys()
5711+
5712+ unconnected = []
5713+ remainder = []
5714+
5715+ for u in keys:
5716+ for k in self.cstr_order.values():
5717+ if u in k:
5718+ remainder.append(u)
5719+ break
5720+ else:
5721+ unconnected.append(u)
5722+
5723+ toreturn = []
5724+ for y in unconnected:
5725+ toreturn.extend(self.cstr_groups[y])
5726+
5727+ # remove the consumed groups, but only after collecting their tasks above
5728+ for y in unconnected:
5729+ try: self.cstr_order.__delitem__(y)
5730+ except KeyError: pass
5731+ self.cstr_groups.__delitem__(y)
5732+
5733+ if not toreturn and remainder:
5734+ raise Utils.WafError("circular order constraint detected %r" % remainder)
5735+
5736+ return toreturn
5737+
5738+ def tasks_by_max_jobs(self):
5739+ "(JOBCONTROL) returns the tasks that can run in parallel with the max amount of jobs"
5740+ if not self.ready: self.prepare()
5741+ if not self.temp_tasks: self.temp_tasks = self.tasks_in_parallel()
5742+ if not self.temp_tasks: return (None, None)
5743+
5744+ maxjobs = MAXJOBS
5745+ ret = []
5746+ remaining = []
5747+ for t in self.temp_tasks:
5748+ m = getattr(t, "maxjobs", getattr(self.__class__, "maxjobs", MAXJOBS))
5749+ if m > maxjobs:
5750+ remaining.append(t)
5751+ elif m < maxjobs:
5752+ remaining += ret
5753+ ret = [t]
5754+ maxjobs = m
5755+ else:
5756+ ret.append(t)
5757+ self.temp_tasks = remaining
5758+ return (maxjobs, ret)
5759+
5760+ def tasks_with_inner_constraints(self):
5761+ """(MAXPARALLEL) returns all tasks in this group, but add the constraints on each task instance
5762+ as an optimization, it might be desirable to discard the tasks which do not have to run"""
5763+ if not self.ready: self.prepare()
5764+
5765+ if getattr(self, "done", None): return None
5766+
5767+ for p in self.cstr_order:
5768+ for v in self.cstr_order[p]:
5769+ for m in self.cstr_groups[p]:
5770+ for n in self.cstr_groups[v]:
5771+ n.set_run_after(m)
5772+ self.cstr_order = Utils.DefaultDict(set)
5773+ self.cstr_groups = Utils.DefaultDict(list)
5774+ self.done = 1
5775+ return self.tasks[:] # make a copy
5776+
5777+class store_task_type(type):
5778+ "store the task types that have a name ending in _task into a map (remember the existing task types)"
5779+ def __init__(cls, name, bases, dict):
5780+ super(store_task_type, cls).__init__(name, bases, dict)
5781+ name = cls.__name__
5782+
5783+ if name.endswith('_task'):
5784+ name = name.replace('_task', '')
5785+ if name != 'TaskBase':
5786+ TaskBase.classes[name] = cls
5787+
5788+class TaskBase(object):
5789+ """Base class for all Waf tasks
5790+
5791+ The most important methods are (by usual order of call):
5792+ 1 runnable_status: ask the task if it should be run, skipped, or if we have to ask later
5793+ 2 __str__: string to display to the user
5794+ 3 run: execute the task
5795+ 4 post_run: after the task is run, update the cache about the task
5796+
5797+ This class should be seen as an interface, it provides the very minimum necessary for the scheduler
5798+ so it does not do much.
5799+
5800+ For illustration purposes, TaskBase instances try to execute self.fun (if provided)
5801+ """
5802+
5803+ __metaclass__ = store_task_type
5804+
5805+ color = "GREEN"
5806+ maxjobs = MAXJOBS
5807+ classes = {}
5808+ stat = None
5809+
5810+ def __init__(self, *k, **kw):
5811+ self.hasrun = NOT_RUN
5812+
5813+ try:
5814+ self.generator = kw['generator']
5815+ except KeyError:
5816+ self.generator = self
5817+ self.bld = Build.bld
5818+
5819+ if kw.get('normal', 1):
5820+ self.generator.bld.task_manager.add_task(self)
5821+
5822+ def __repr__(self):
5823+ "used for debugging"
5824+ return '\n\t{task: %s %s}' % (self.__class__.__name__, str(getattr(self, "fun", "")))
5825+
5826+ def __str__(self):
5827+ "string to display to the user"
5828+ if hasattr(self, 'fun'):
5829+ return 'executing: %s\n' % self.fun.__name__
5830+ return self.__class__.__name__ + '\n'
5831+
5832+ def exec_command(self, *k, **kw):
5833+ "use this for executing commands from tasks"
5834+ # TODO in waf 1.6, eliminate bld.exec_command, and move the cwd processing to here
5835+ if self.env['env']:
5836+ kw['env'] = self.env['env']
5837+ return self.generator.bld.exec_command(*k, **kw)
5838+
5839+ def runnable_status(self):
5840+ "RUN_ME SKIP_ME or ASK_LATER"
5841+ return RUN_ME
5842+
5843+ def can_retrieve_cache(self):
5844+ return False
5845+
5846+ def call_run(self):
5847+ if self.can_retrieve_cache():
5848+ return 0
5849+ return self.run()
5850+
5851+ def run(self):
5852+ "called if the task must run"
5853+ if hasattr(self, 'fun'):
5854+ return self.fun(self)
5855+ return 0
5856+
5857+ def post_run(self):
5858+ "update the dependency tree (node stats)"
5859+ pass
5860+
5861+ def display(self):
5862+ "print either the description (using __str__) or the progress bar or the ide output"
5863+ col1 = Logs.colors(self.color)
5864+ col2 = Logs.colors.NORMAL
5865+
5866+ if Options.options.progress_bar == 1:
5867+ return self.generator.bld.progress_line(self.position[0], self.position[1], col1, col2)
5868+
5869+ if Options.options.progress_bar == 2:
5870+ ela = Utils.get_elapsed_time(self.generator.bld.ini)
5871+ try:
5872+ ins = ','.join([n.name for n in self.inputs])
5873+ except AttributeError:
5874+ ins = ''
5875+ try:
5876+ outs = ','.join([n.name for n in self.outputs])
5877+ except AttributeError:
5878+ outs = ''
5879+ return '|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n' % (self.position[1], self.position[0], ins, outs, ela)
5880+
5881+ total = self.position[1]
5882+ n = len(str(total))
5883+ fs = '[%%%dd/%%%dd] %%s%%s%%s' % (n, n)
5884+ return fs % (self.position[0], self.position[1], col1, str(self), col2)
5885+
5886+ def attr(self, att, default=None):
5887+ "retrieve an attribute from the instance or from the class (microoptimization here)"
5888+ ret = getattr(self, att, self)
5889+ if ret is self: return getattr(self.__class__, att, default)
5890+ return ret
5891+
5892+ def hash_constraints(self):
5893+ "identify a task type for all the constraints relevant for the scheduler: precedence, file production"
5894+ a = self.attr
5895+ sum = hash((self.__class__.__name__,
5896+ str(a('before', '')),
5897+ str(a('after', '')),
5898+ str(a('ext_in', '')),
5899+ str(a('ext_out', '')),
5900+ self.__class__.maxjobs))
5901+ return sum
5902+
5903+ def format_error(self):
5904+ "error message to display to the user (when a build fails)"
5905+ if getattr(self, "err_msg", None):
5906+ return self.err_msg
5907+ elif self.hasrun == CRASHED:
5908+ try:
5909+ return " -> task failed (err #%d): %r" % (self.err_code, self)
5910+ except AttributeError:
5911+ return " -> task failed: %r" % self
5912+ elif self.hasrun == MISSING:
5913+ return " -> missing files: %r" % self
5914+ else:
5915+ return ''
5916+
5917+ def install(self):
5918+ """
5919+ installation is performed by looking at the task attributes:
5920+ * install_path: installation path like "${PREFIX}/bin"
5921+ * filename: install the first node in the outputs as a file with a particular name, be certain to give os.sep
5922+ * chmod: permissions
5923+ """
5924+ bld = self.generator.bld
5925+ d = self.attr('install')
5926+
5927+ if self.attr('install_path'):
5928+ lst = [a.relpath_gen(bld.srcnode) for a in self.outputs]
5929+ perm = self.attr('chmod', O644)
5930+ if self.attr('src'):
5931+ # if src is given, install the sources too
5932+ lst += [a.relpath_gen(bld.srcnode) for a in self.inputs]
5933+ if self.attr('filename'):
5934+ dir = self.install_path.rstrip(os.sep) + os.sep + self.attr('filename')
5935+ bld.install_as(dir, lst[0], self.env, perm)
5936+ else:
5937+ bld.install_files(self.install_path, lst, self.env, perm)
5938+
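# Hedged example of the attributes read by TaskBase.install() above (the
# attribute names are the real ones, the task and nodes are invented):
#
#	tsk = some_task_generator.create_task('fake_bin', src_node, bin_node)
#	tsk.install_path = '${PREFIX}/bin'
#	tsk.chmod = O755 # from Constants (imported above with 'from Constants import *')
#
# With these set, a build run in install mode copies the task outputs to
# ${PREFIX}/bin with the requested permissions.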
5939+class Task(TaskBase):
5940+ """The parent class is quite limited, in this version:
5941+ * file system interaction: input and output nodes
5942+ * persistence: do not re-execute tasks that have already run
5943+ * caching: same files can be saved and retrieved from a cache directory
5944+ * dependencies:
5945+ implicit, like .c files depending on .h files
5946+ explicit, like the input nodes or the dep_nodes
5947+ environment variables, like the CXXFLAGS in self.env
5948+ """
5949+ vars = []
5950+ def __init__(self, env, **kw):
5951+ TaskBase.__init__(self, **kw)
5952+ self.env = env
5953+
5954+ # inputs and outputs are nodes
5955+ # use setters when possible
5956+ self.inputs = []
5957+ self.outputs = []
5958+
5959+ self.dep_nodes = []
5960+ self.run_after = []
5961+
5962+ # Additionally, you may define the following
5963+ #self.dep_vars = 'PREFIX DATADIR'
5964+
5965+ def __str__(self):
5966+ "string to display to the user"
5967+ env = self.env
5968+ src_str = ' '.join([a.nice_path(env) for a in self.inputs])
5969+ tgt_str = ' '.join([a.nice_path(env) for a in self.outputs])
5970+ if self.outputs: sep = ' -> '
5971+ else: sep = ''
5972+ return '%s: %s%s%s\n' % (self.__class__.__name__.replace('_task', ''), src_str, sep, tgt_str)
5973+
5974+ def __repr__(self):
5975+ return "".join(['\n\t{task: ', self.__class__.__name__, " ", ",".join([x.name for x in self.inputs]), " -> ", ",".join([x.name for x in self.outputs]), '}'])
5976+
5977+ def unique_id(self):
5978+ "get a unique id: hash the node paths, the variant, the class, the function"
5979+ try:
5980+ return self.uid
5981+ except AttributeError:
5982+ "this is not a real hot zone, but we want to avoid surprizes here"
5983+ m = md5()
5984+ up = m.update
5985+ up(self.__class__.__name__)
5986+ up(self.env.variant())
5987+ p = None
5988+ for x in self.inputs + self.outputs:
5989+ if p != x.parent.id:
5990+ p = x.parent.id
5991+ up(x.parent.abspath())
5992+ up(x.name)
5993+ self.uid = m.digest()
5994+ return self.uid
5995+
5996+ def set_inputs(self, inp):
5997+ if isinstance(inp, list): self.inputs += inp
5998+ else: self.inputs.append(inp)
5999+
6000+ def set_outputs(self, out):
6001+ if isinstance(out, list): self.outputs += out
6002+ else: self.outputs.append(out)
6003+
6004+ def set_run_after(self, task):
6005+ "set (scheduler) order on another task"
6006+ # TODO: handle list or object
6007+ assert isinstance(task, TaskBase)
6008+ self.run_after.append(task)
6009+
6010+ def add_file_dependency(self, filename):
6011+ "TODO user-provided file dependencies"
6012+ node = self.generator.bld.path.find_resource(filename)
6013+ self.dep_nodes.append(node)
6014+
6015+ def signature(self):
6016+ # compute the result only once, and assume that the scan signature will give the right result
6017+ try: return self.cache_sig[0]
6018+ except AttributeError: pass
6019+
6020+ self.m = md5()
6021+
6022+ # explicit deps
6023+ exp_sig = self.sig_explicit_deps()
6024+
6025+ # env vars
6026+ var_sig = self.sig_vars()
6027+
6028+ # implicit deps
6029+
6030+ imp_sig = SIG_NIL
6031+ if self.scan:
6032+ try:
6033+ imp_sig = self.sig_implicit_deps()
6034+ except ValueError:
6035+ return self.signature()
6036+
6037+ # we now have the signature (first element) and the details (for debugging)
6038+ ret = self.m.digest()
6039+ self.cache_sig = (ret, exp_sig, imp_sig, var_sig)
6040+ return ret
6041+
6042+ def runnable_status(self):
6043+ "SKIP_ME RUN_ME or ASK_LATER"
6044+ #return 0 # benchmarking
6045+
6046+ if self.inputs and (not self.outputs):
6047+ if not getattr(self.__class__, 'quiet', None):
6048+ warn("invalid task (no inputs OR outputs): override in a Task subclass or set the attribute 'quiet' %r" % self)
6049+
6050+ for t in self.run_after:
6051+ if not t.hasrun:
6052+ return ASK_LATER
6053+
6054+ env = self.env
6055+ bld = self.generator.bld
6056+
6057+ # first compute the signature
6058+ new_sig = self.signature()
6059+
6060+ # compare the signature to a signature computed previously
6061+ key = self.unique_id()
6062+ try:
6063+ prev_sig = bld.task_sigs[key][0]
6064+ except KeyError:
6065+ debug("task: task %r must run as it was never run before or the task code changed", self)
6066+ return RUN_ME
6067+
6068+ # compare the signatures of the outputs
6069+ for node in self.outputs:
6070+ variant = node.variant(env)
6071+ try:
6072+ if bld.node_sigs[variant][node.id] != new_sig:
6073+ return RUN_ME
6074+ except KeyError:
6075+ debug("task: task %r must run as the output nodes do not exist", self)
6076+ return RUN_ME
6077+
6078+ # debug if asked to
6079+ if Logs.verbose: self.debug_why(bld.task_sigs[key])
6080+
6081+ if new_sig != prev_sig:
6082+ return RUN_ME
6083+ return SKIP_ME
6084+
6085+ def post_run(self):
6086+ "called after a successful task run"
6087+ bld = self.generator.bld
6088+ env = self.env
6089+ sig = self.signature()
6090+ ssig = sig.encode('hex')
6091+
6092+ variant = env.variant()
6093+ for node in self.outputs:
6094+ # check if the node exists ..
6095+ try:
6096+ os.stat(node.abspath(env))
6097+ except OSError:
6098+ self.hasrun = MISSING
6099+ self.err_msg = '-> missing file: %r' % node.abspath(env)
6100+ raise Utils.WafError
6101+
6102+ # important, store the signature for the next run
6103+ bld.node_sigs[variant][node.id] = sig
6104+ bld.task_sigs[self.unique_id()] = self.cache_sig
6105+
6106+ # file caching, if possible
6107+ # try to avoid data corruption as much as possible
6108+ if not Options.cache_global or Options.options.nocache or not self.outputs:
6109+ return None
6110+
6111+ if getattr(self, 'cached', None):
6112+ return None
6113+
6114+ dname = os.path.join(Options.cache_global, ssig)
6115+ tmpdir = tempfile.mkdtemp(prefix=Options.cache_global + os.sep + 'waf')
6116+
6117+ try:
6118+ shutil.rmtree(dname)
6119+ except:
6120+ pass
6121+
6122+ try:
6123+ i = 0
6124+ for node in self.outputs:
6125+ variant = node.variant(env)
6126+ dest = os.path.join(tmpdir, str(i) + node.name)
6127+ shutil.copy2(node.abspath(env), dest)
6128+ i += 1
6129+ except (OSError, IOError):
6130+ try:
6131+ shutil.rmtree(tmpdir)
6132+ except:
6133+ pass
6134+ else:
6135+ try:
6136+ os.rename(tmpdir, dname)
6137+ except OSError:
6138+ try:
6139+ shutil.rmtree(tmpdir)
6140+ except:
6141+ pass
6142+ else:
6143+ try:
6144+ os.chmod(dname, O755)
6145+ except:
6146+ pass
6147+
6148+ def can_retrieve_cache(self):
6149+ """
6150+ Retrieve build nodes from the cache
6151+ update the file timestamps to help cleaning the least used entries from the cache
6152+ additionally, set an attribute 'cached' to avoid re-creating the same cache files
6153+
6154+ suppose there are files in cache/dir1/file1 and cache/dir2/file2
6155+ first, read the timestamp of dir1
6156+ then try to copy the files
6157+ then look at the timestamp again; if it has changed, the data may have been corrupted (cache updated by another process)
6158+ should an exception occur, ignore the data
6159+ """
6160+ if not Options.cache_global or Options.options.nocache or not self.outputs:
6161+ return None
6162+
6163+ env = self.env
6164+ sig = self.signature()
6165+ ssig = sig.encode('hex')
6166+
6167+ # first try to access the cache folder for the task
6168+ dname = os.path.join(Options.cache_global, ssig)
6169+ try:
6170+ t1 = os.stat(dname).st_mtime
6171+ except OSError:
6172+ return None
6173+
6174+ i = 0
6175+ for node in self.outputs:
6176+ variant = node.variant(env)
6177+
6178+ orig = os.path.join(dname, str(i) + node.name)
6179+ try:
6180+ shutil.copy2(orig, node.abspath(env))
6181+ # mark the cache file as used recently (modified)
6182+ os.utime(orig, None)
6183+ except (OSError, IOError):
6184+ debug('task: failed retrieving file')
6185+ return None
6186+ i += 1
6187+
6188+ # is it the same folder?
6189+ try:
6190+ t2 = os.stat(dname).st_mtime
6191+ except OSError:
6192+ return None
6193+
6194+ if t1 != t2:
6195+ return None
6196+
6197+ for node in self.outputs:
6198+ self.generator.bld.node_sigs[variant][node.id] = sig
6199+ if Options.options.progress_bar < 1:
6200+ self.generator.bld.printout('restoring from cache %r\n' % node.bldpath(env))
6201+
6202+ self.cached = True
6203+ return 1
6204+
6205+ def debug_why(self, old_sigs):
6206+ "explains why a task is run"
6207+
6208+ new_sigs = self.cache_sig
6209+ def v(x):
6210+ return x.encode('hex')
6211+
6212+ debug("Task %r", self)
6213+ msgs = ['Task must run', '* Source file or manual dependency', '* Implicit dependency', '* Environment variable']
6214+ tmp = 'task: -> %s: %s %s'
6215+ for x in xrange(len(msgs)):
6216+ if (new_sigs[x] != old_sigs[x]):
6217+ debug(tmp, msgs[x], v(old_sigs[x]), v(new_sigs[x]))
6218+
6219+ def sig_explicit_deps(self):
6220+ bld = self.generator.bld
6221+ up = self.m.update
6222+
6223+ # the inputs
6224+ for x in self.inputs + getattr(self, 'dep_nodes', []):
6225+ if not x.parent.id in bld.cache_scanned_folders:
6226+ bld.rescan(x.parent)
6227+
6228+ variant = x.variant(self.env)
6229+ try:
6230+ up(bld.node_sigs[variant][x.id])
6231+ except KeyError:
6232+ raise Utils.WafError('Missing node signature for %r (required by %r)' % (x, self))
6233+
6234+ # manual dependencies, they can slow down the builds
6235+ if bld.deps_man:
6236+ additional_deps = bld.deps_man
6237+ for x in self.inputs + self.outputs:
6238+ try:
6239+ d = additional_deps[x.id]
6240+ except KeyError:
6241+ continue
6242+
6243+ for v in d:
6244+ if isinstance(v, Node.Node):
6245+ bld.rescan(v.parent)
6246+ variant = v.variant(self.env)
6247+ try:
6248+ v = bld.node_sigs[variant][v.id]
6249+ except KeyError:
6250+ raise Utils.WafError('Missing node signature for %r (required by %r)' % (v, self))
6251+ elif hasattr(v, '__call__'):
6252+ v = v() # dependency is a function, call it
6253+ up(v)
6254+
6255+ for x in self.dep_nodes:
6256+ v = bld.node_sigs[x.variant(self.env)][x.id]
6257+ up(v)
6258+
6259+ return self.m.digest()
6260+
6261+ def sig_vars(self):
6262+ bld = self.generator.bld
6263+ env = self.env
6264+
6265+ # dependencies on the environment vars
6266+ act_sig = bld.hash_env_vars(env, self.__class__.vars)
6267+ self.m.update(act_sig)
6268+
6269+ # additional variable dependencies, if provided
6270+ dep_vars = getattr(self, 'dep_vars', None)
6271+ if dep_vars:
6272+ self.m.update(bld.hash_env_vars(env, dep_vars))
6273+
6274+ return self.m.digest()
6275+
6276+ #def scan(self, node):
6277+ # """this method returns a tuple containing:
6278+ # * a list of nodes corresponding to real files
6279+ # * a list of names for files not found in path_lst
6280+ # the input parameters may have more parameters than the ones used below
6281+ # """
6282+ # return ((), ())
6283+ scan = None
6284+
6285+ # compute the signature, recompute it if there is no match in the cache
6286+ def sig_implicit_deps(self):
6287+ "the signature obtained may not be the one if the files have changed, we do it in two steps"
6288+
6289+ bld = self.generator.bld
6290+
6291+ # get the task signatures from previous runs
6292+ key = self.unique_id()
6293+ prev_sigs = bld.task_sigs.get(key, ())
6294+ if prev_sigs:
6295+ try:
6296+ # for issue #379
6297+ if prev_sigs[2] == self.compute_sig_implicit_deps():
6298+ return prev_sigs[2]
6299+ except (KeyError, OSError):
6300+ pass
6301+ del bld.task_sigs[key]
6302+ raise ValueError('rescan')
6303+
6304+ # no previous run or the signature of the dependencies has changed, rescan the dependencies
6305+ (nodes, names) = self.scan()
6306+ if Logs.verbose:
6307+ debug('deps: scanner for %s returned %s %s', str(self), str(nodes), str(names))
6308+
6309+ # store the dependencies in the cache
6310+ bld.node_deps[key] = nodes
6311+ bld.raw_deps[key] = names
6312+
6313+ # recompute the signature and return it
6314+ try:
6315+ sig = self.compute_sig_implicit_deps()
6316+ except KeyError:
6317+ try:
6318+ nodes = []
6319+ for k in bld.node_deps.get(self.unique_id(), []):
6320+ if k.id & 3 == 2: # Node.FILE:
6321+ if not k.id in bld.node_sigs[0]:
6322+ nodes.append(k)
6323+ else:
6324+ if not k.id in bld.node_sigs[self.env.variant()]:
6325+ nodes.append(k)
6326+ except:
6327+ nodes = '?'
6328+ raise Utils.WafError('Missing node signature for %r (for implicit dependencies %r)' % (nodes, self))
6329+
6330+ return sig
6331+
6332+ def compute_sig_implicit_deps(self):
6333+ """it is intended for .cpp and inferred .h files
6334+ there is a single list (no tree traversal)
6335+ this is the hot spot so ... do not touch"""
6336+ upd = self.m.update
6337+
6338+ bld = self.generator.bld
6339+ tstamp = bld.node_sigs
6340+ env = self.env
6341+
6342+ for k in bld.node_deps.get(self.unique_id(), []):
6343+ # unlikely but necessary if it happens
6344+ if not k.parent.id in bld.cache_scanned_folders:
6345+ # if the parent folder is removed, an OSError may be thrown
6346+ bld.rescan(k.parent)
6347+
6348+ # if the parent folder is removed, a KeyError will be thrown
6349+ if k.id & 3 == 2: # Node.FILE:
6350+ upd(tstamp[0][k.id])
6351+ else:
6352+ upd(tstamp[env.variant()][k.id])
6353+
6354+ return self.m.digest()
6355+
6356+def funex(c):
6357+ dc = {}
6358+ exec(c, dc)
6359+ return dc['f']
6360+
6361+reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})", re.M)
6362+def compile_fun_shell(name, line):
6363+ """Compiles a string (once) into a function, eg:
6364+ simple_task_type('c++', '${CXX} -o ${TGT[0]} ${SRC} -I ${SRC[0].parent.bldpath()}')
6365+
6366+ The env variables (CXX, ..) on the task must not hold dicts (their ordering would be undefined)
6367+ The reserved keywords TGT and SRC represent the task input and output nodes
6368+
6369+ quick test:
6370+ bld(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"')
6371+ """
6372+
6373+ extr = []
6374+ def repl(match):
6375+ g = match.group
6376+ if g('dollar'): return "$"
6377+ elif g('backslash'): return '\\\\'
6378+ elif g('subst'): extr.append((g('var'), g('code'))); return "%s"
6379+ return None
6380+
6381+ line = reg_act.sub(repl, line) or line
6382+
6383+ parm = []
6384+ dvars = []
6385+ app = parm.append
6386+ for (var, meth) in extr:
6387+ if var == 'SRC':
6388+ if meth: app('task.inputs%s' % meth)
6389+ else: app('" ".join([a.srcpath(env) for a in task.inputs])')
6390+ elif var == 'TGT':
6391+ if meth: app('task.outputs%s' % meth)
6392+ else: app('" ".join([a.bldpath(env) for a in task.outputs])')
6393+ else:
6394+ if not var in dvars: dvars.append(var)
6395+ app("p('%s')" % var)
6396+ if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm))
6397+ else: parm = ''
6398+
6399+ c = COMPILE_TEMPLATE_SHELL % (line, parm)
6400+
6401+ debug('action: %s', c)
6402+ return (funex(c), dvars)
6403+
6404+def compile_fun_noshell(name, line):
6405+
6406+ extr = []
6407+ def repl(match):
6408+ g = match.group
6409+ if g('dollar'): return "$"
6410+ elif g('subst'): extr.append((g('var'), g('code'))); return "<<|@|>>"
6411+ return None
6412+
6413+ line2 = reg_act.sub(repl, line)
6414+ params = line2.split('<<|@|>>')
6415+
6416+ buf = []
6417+ dvars = []
6418+ app = buf.append
6419+ for x in xrange(len(extr)):
6420+ params[x] = params[x].strip()
6421+ if params[x]:
6422+ app("lst.extend(%r)" % params[x].split())
6423+ (var, meth) = extr[x]
6424+ if var == 'SRC':
6425+ if meth: app('lst.append(task.inputs%s)' % meth)
6426+ else: app("lst.extend([a.srcpath(env) for a in task.inputs])")
6427+ elif var == 'TGT':
6428+ if meth: app('lst.append(task.outputs%s)' % meth)
6429+ else: app("lst.extend([a.bldpath(env) for a in task.outputs])")
6430+ else:
6431+ app('lst.extend(to_list(env[%r]))' % var)
6432+ if not var in dvars: dvars.append(var)
6433+
6434+ if params[-1]:
6435+ app("lst.extend(%r)" % shlex.split(params[-1]))
6436+
6437+ fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
6438+ debug('action: %s', fun)
6439+ return (funex(fun), dvars)
6440+
6441+def compile_fun(name, line, shell=None):
6442+ "commands can be launched by the shell or not"
6443+ if line.find('<') > 0 or line.find('>') > 0 or line.find('&&') > 0:
6444+ shell = True
6445+ #else:
6446+ # shell = False
6447+
6448+ if shell is None:
6449+ if sys.platform == 'win32':
6450+ shell = False
6451+ else:
6452+ shell = True
6453+
6454+ if shell:
6455+ return compile_fun_shell(name, line)
6456+ else:
6457+ return compile_fun_noshell(name, line)
6458+
6459+def simple_task_type(name, line, color='GREEN', vars=[], ext_in=[], ext_out=[], before=[], after=[], shell=None):
6460+ """return a new Task subclass with the function run compiled from the line given"""
6461+ (fun, dvars) = compile_fun(name, line, shell)
6462+ fun.code = line
6463+ return task_type_from_func(name, fun, vars or dvars, color, ext_in, ext_out, before, after)
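# Hedged usage sketch (a made-up 'fake_copy' rule, not a real waf tool); the
# ${SRC}/${TGT} keywords and env variables are substituted by compile_fun():
#
#	copy_cls = simple_task_type('fake_copy', 'cp ${SRC} ${TGT}',
#		color='BLUE', before='cc', shell=False)
#
# Real uses of this pattern appear in Tools/ar.py and Tools/bison.py further
# down in this patch.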
6464+
6465+def task_type_from_func(name, func, vars=[], color='GREEN', ext_in=[], ext_out=[], before=[], after=[]):
6466+ """return a new Task subclass with the function run compiled from the line given"""
6467+ params = {
6468+ 'run': func,
6469+ 'vars': vars,
6470+ 'color': color,
6471+ 'name': name,
6472+ 'ext_in': Utils.to_list(ext_in),
6473+ 'ext_out': Utils.to_list(ext_out),
6474+ 'before': Utils.to_list(before),
6475+ 'after': Utils.to_list(after),
6476+ }
6477+
6478+ cls = type(Task)(name, (Task,), params)
6479+ TaskBase.classes[name] = cls
6480+ return cls
6481+
6482+def always_run(cls):
6483+ """Set all task instances of this class to be executed whenever a build is started
6484+ The task signature is calculated, but the result of the comparison between
6485+ task signatures is bypassed
6486+ """
6487+ old = cls.runnable_status
6488+ def always(self):
6489+ ret = old(self)
6490+ if ret == SKIP_ME:
6491+ return RUN_ME
6492+ return ret
6493+ cls.runnable_status = always
6494+
6495+def update_outputs(cls):
6496+ """When a command is always run, it is possible that the output only change
6497+ sometimes. By default the build node have as a hash the signature of the task
6498+ which may not change. With this, the output nodes (produced) are hashed,
6499+ and the hashes are set to the build nodes
6500+
6501+ This may avoid unnecessary recompilations, but it uses more resources
6502+ (hashing the output files) so it is not used by default
6503+ """
6504+ old_post_run = cls.post_run
6505+ def post_run(self):
6506+ old_post_run(self)
6507+ bld = self.generator.bld
6508+ for output in self.outputs:
6509+ bld.node_sigs[self.env.variant()][output.id] = Utils.h_file(output.abspath(self.env))
6510+ bld.task_sigs[output.id] = self.unique_id()
6511+ cls.post_run = post_run
6512+
6513+ old_runnable_status = cls.runnable_status
6514+ def runnable_status(self):
6515+ status = old_runnable_status(self)
6516+ if status != RUN_ME:
6517+ return status
6518+
6519+ uid = self.unique_id()
6520+ try:
6521+ bld = self.outputs[0].__class__.bld
6522+ new_sig = self.signature()
6523+ prev_sig = bld.task_sigs[uid][0]
6524+ if prev_sig == new_sig:
6525+ for x in self.outputs:
6526+ if not x.id in bld.node_sigs[self.env.variant()]:
6527+ return RUN_ME
6528+ if bld.task_sigs[x.id] != uid: # ensure the outputs are associated with *this* task
6529+ return RUN_ME
6530+ return SKIP_ME
6531+ except KeyError:
6532+ pass
6533+ except IndexError:
6534+ pass
6535+ return RUN_ME
6536+ cls.runnable_status = runnable_status
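# Hedged usage sketch: both helpers above take a Task subclass and patch it in
# place; exec_rule() in TaskGen.py applies them when a task generator sets
# 'always' or 'on_results'. For a hand-written (invented) task type:
#
#	cls = simple_task_type('fake_gen', 'touch ${TGT}')
#	always_run(cls) # never skip the task, even if its signature matches
#	update_outputs(cls) # hash the produced files instead of reusing the task signature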
6537+
6538+def extract_outputs(tasks):
6539+ """file_deps: Infer additional dependencies from task input and output nodes
6540+ """
6541+ v = {}
6542+ for x in tasks:
6543+ try:
6544+ (ins, outs) = v[x.env.variant()]
6545+ except KeyError:
6546+ ins = {}
6547+ outs = {}
6548+ v[x.env.variant()] = (ins, outs)
6549+
6550+ for a in getattr(x, 'inputs', []):
6551+ try: ins[a.id].append(x)
6552+ except KeyError: ins[a.id] = [x]
6553+ for a in getattr(x, 'outputs', []):
6554+ try: outs[a.id].append(x)
6555+ except KeyError: outs[a.id] = [x]
6556+
6557+ for (ins, outs) in v.values():
6558+ links = set(ins.iterkeys()).intersection(outs.iterkeys())
6559+ for k in links:
6560+ for a in ins[k]:
6561+ for b in outs[k]:
6562+ a.set_run_after(b)
6563+
6564+def extract_deps(tasks):
6565+ """file_deps: Infer additional dependencies from task input and output nodes and from implicit dependencies
6566+ returned by the scanners - that will only work if all tasks are created
6567+
6568+ this is aimed at people who have pathological builds and who do not care enough
6569+ to implement the build dependencies properly
6570+
6571+ with two loops over the list of tasks, do not expect this to be really fast
6572+ """
6573+
6574+ # first reuse the function above
6575+ extract_outputs(tasks)
6576+
6577+ # map the output nodes to the tasks producing them
6578+ out_to_task = {}
6579+ for x in tasks:
6580+ v = x.env.variant()
6581+ try:
6582+ lst = x.outputs
6583+ except AttributeError:
6584+ pass
6585+ else:
6586+ for node in lst:
6587+ out_to_task[(v, node.id)] = x
6588+
6589+ # map the dependencies found to the tasks compiled
6590+ dep_to_task = {}
6591+ for x in tasks:
6592+ try:
6593+ x.signature()
6594+ except: # this is on purpose
6595+ pass
6596+
6597+ v = x.env.variant()
6598+ key = x.unique_id()
6599+ for k in x.generator.bld.node_deps.get(x.unique_id(), []):
6600+ try: dep_to_task[(v, k.id)].append(x)
6601+ except KeyError: dep_to_task[(v, k.id)] = [x]
6602+
6603+ # now get the intersection
6604+ deps = set(dep_to_task.keys()).intersection(set(out_to_task.keys()))
6605+
6606+ # and add the dependencies from task to task
6607+ for idx in deps:
6608+ for k in dep_to_task[idx]:
6609+ k.set_run_after(out_to_task[idx])
6610+
6611+ # cleanup, remove the signatures
6612+ for x in tasks:
6613+ try:
6614+ delattr(x, 'cache_sig')
6615+ except AttributeError:
6616+ pass
6617+
6618diff --git a/buildtools/wafadmin/TaskGen.py b/buildtools/wafadmin/TaskGen.py
6619new file mode 100644
6620index 0000000..ae1834a
6621--- /dev/null
6622+++ b/buildtools/wafadmin/TaskGen.py
6623@@ -0,0 +1,612 @@
6624+#!/usr/bin/env python
6625+# encoding: utf-8
6626+# Thomas Nagy, 2005-2008 (ita)
6627+
6628+"""
6629+The class task_gen encapsulates the creation of task objects (low-level code)
6630+The instances can have various parameters, but the creation of task nodes (Task.py)
6631+is delayed. To achieve this, various methods are called from the method "apply"
6632+
6633+The class task_gen contains lots of methods, and a configuration table:
6634+* the methods to call (self.meths) can be specified dynamically (removing, adding, ..)
6635+* the order of the methods (self.prec or by default task_gen.prec) is configurable
6636+* new methods can be inserted dynamically without pasting old code
6637+
6638+Additionally, task_gen provides the method apply_core
6639+* file extensions are mapped to methods: def meth(self, name_or_node)
6640+* if a mapping is not found in self.mappings, it is searched in task_gen.mappings
6641+* when called, the functions may modify self.allnodes to re-add source to process
6642+* the mappings can map an extension or a filename (see the code below)
6643+
6644+WARNING: subclasses must reimplement the clone method
6645+"""
6646+
6647+import os, traceback, copy
6648+import Build, Task, Utils, Logs, Options
6649+from Logs import debug, error, warn
6650+from Constants import *
6651+
6652+typos = {
6653+'sources':'source',
6654+'targets':'target',
6655+'include':'includes',
6656+'define':'defines',
6657+'importpath':'importpaths',
6658+'install_var':'install_path',
6659+'install_subdir':'install_path',
6660+'inst_var':'install_path',
6661+'inst_dir':'install_path',
6662+'feature':'features',
6663+}
6664+
6665+class register_obj(type):
6666+ """no decorators for classes, so we use a metaclass
6667+ we store into task_gen.classes the classes that inherit task_gen
6668+ and whose names end in '_taskgen'
6669+ """
6670+ def __init__(cls, name, bases, dict):
6671+ super(register_obj, cls).__init__(name, bases, dict)
6672+ name = cls.__name__
6673+ suffix = '_taskgen'
6674+ if name.endswith(suffix):
6675+ task_gen.classes[name.replace(suffix, '')] = cls
6676+
6677+class task_gen(object):
6678+ """
6679+ Most methods are of the form 'def meth(self):' without any parameters
6680+ there are many of them, and they do many different things:
6681+ * task creation
6682+ * task results installation
6683+ * environment modification
6684+ * attribute addition/removal
6685+
6686+ The inheritance approach is complicated
6687+ * mixing several languages at once
6688+ * subclassing is needed even for small changes
6689+ * inserting new methods is complicated
6690+
6691+ This new class uses a configuration table:
6692+ * adding new methods easily
6693+ * obtaining the order in which to call the methods
6694+ * postponing the method calls (post() -> apply)
6695+
6696+ Additionally, a 'traits' static attribute is provided:
6697+ * this list contains methods
6698+ * the methods can remove or add methods from self.meths
6699+ Example1: the attribute 'staticlib' is set on an instance
6700+ a method set in the list of traits is executed when the
6701+ instance is posted, it finds that flag and adds another method for execution
6702+ Example2: a method set in the list of traits finds the msvc
6703+ compiler (from self.env['MSVC']==1); more methods are added to self.meths
6704+ """
6705+
6706+ __metaclass__ = register_obj
6707+ mappings = {}
6708+ mapped = {}
6709+ prec = Utils.DefaultDict(list)
6710+ traits = Utils.DefaultDict(set)
6711+ classes = {}
6712+
6713+ def __init__(self, *kw, **kwargs):
6714+ self.prec = Utils.DefaultDict(list)
6715+ "map precedence of function names to call"
6716+ # so we will have to play with directed acyclic graphs
6717+ # detect cycles, etc
6718+
6719+ self.source = ''
6720+ self.target = ''
6721+
6722+ # list of methods to execute - do not touch it by hand unless you know what you are doing
6723+ self.meths = []
6724+
6725+ # list of mappings extension -> function
6726+ self.mappings = {}
6727+
6728+ # list of features (see the documentation on traits)
6729+ self.features = list(kw)
6730+
6731+ # not always a good idea
6732+ self.tasks = []
6733+
6734+ self.default_chmod = O644
6735+ self.default_install_path = None
6736+
6737+ # kind of private, beware of what you put in it, also, the contents are consumed
6738+ self.allnodes = []
6739+
6740+ self.bld = kwargs.get('bld', Build.bld)
6741+ self.env = self.bld.env.copy()
6742+
6743+ self.path = self.bld.path # emulate chdir when reading scripts
6744+ self.name = '' # give a name to the target (avoids the ambiguity of a static lib and a shlib using the same target name)
6745+
6746+ # provide a unique id
6747+ self.idx = self.bld.idx[self.path.id] = self.bld.idx.get(self.path.id, 0) + 1
6748+
6749+ for key, val in kwargs.iteritems():
6750+ setattr(self, key, val)
6751+
6752+ self.bld.task_manager.add_task_gen(self)
6753+ self.bld.all_task_gen.append(self)
6754+
6755+ def __str__(self):
6756+ return ("<task_gen '%s' of type %s defined in %s>"
6757+ % (self.name or self.target, self.__class__.__name__, str(self.path)))
6758+
6759+ def __setattr__(self, name, attr):
6760+ real = typos.get(name, name)
6761+ if real != name:
6762+ warn('typo %s -> %s' % (name, real))
6763+ if Logs.verbose > 0:
6764+ traceback.print_stack()
6765+ object.__setattr__(self, real, attr)
6766+
6767+ def to_list(self, value):
6768+ "helper: returns a list"
6769+ if isinstance(value, str): return value.split()
6770+ else: return value
6771+
6772+ def apply(self):
6773+ "order the methods to execute using self.prec or task_gen.prec"
6774+ keys = set(self.meths)
6775+
6776+ # add the methods listed in the features
6777+ self.features = Utils.to_list(self.features)
6778+ for x in self.features + ['*']:
6779+ st = task_gen.traits[x]
6780+ if not st:
6781+ warn('feature %r does not exist - bind at least one method to it' % x)
6782+ keys.update(st)
6783+
6784+ # copy the precedence table
6785+ prec = {}
6786+ prec_tbl = self.prec or task_gen.prec
6787+ for x in prec_tbl:
6788+ if x in keys:
6789+ prec[x] = prec_tbl[x]
6790+
6791+ # seed with the elements that no other method requires as a predecessor (disconnected)
6792+ tmp = []
6793+ for a in keys:
6794+ for x in prec.values():
6795+ if a in x: break
6796+ else:
6797+ tmp.append(a)
6798+
6799+ # topological sort
6800+ out = []
6801+ while tmp:
6802+ e = tmp.pop()
6803+ if e in keys: out.append(e)
6804+ try:
6805+ nlst = prec[e]
6806+ except KeyError:
6807+ pass
6808+ else:
6809+ del prec[e]
6810+ for x in nlst:
6811+ for y in prec:
6812+ if x in prec[y]:
6813+ break
6814+ else:
6815+ tmp.append(x)
6816+
6817+ if prec: raise Utils.WafError("graph has a cycle %s" % str(prec))
6818+ out.reverse()
6819+ self.meths = out
6820+
6821+ # then we run the methods in order
6822+ debug('task_gen: posting %s %d', self, id(self))
6823+ for x in out:
6824+ try:
6825+ v = getattr(self, x)
6826+ except AttributeError:
6827+ raise Utils.WafError("tried to retrieve %s which is not a valid method" % x)
6828+ debug('task_gen: -> %s (%d)', x, id(self))
6829+ v()
6830+
6831+ def post(self):
6832+ "runs the code to create the tasks, do not subclass"
6833+ if not self.name:
6834+ if isinstance(self.target, list):
6835+ self.name = ' '.join(self.target)
6836+ else:
6837+ self.name = self.target
6838+
6839+ if getattr(self, 'posted', None):
6840+ #error("OBJECT ALREADY POSTED" + str( self))
6841+ return
6842+
6843+ self.apply()
6844+ self.posted = True
6845+ debug('task_gen: posted %s', self.name)
6846+
6847+ def get_hook(self, ext):
6848+ try: return self.mappings[ext]
6849+ except KeyError:
6850+ try: return task_gen.mappings[ext]
6851+ except KeyError: return None
6852+
6853+ # TODO waf 1.6: always set the environment
6854+ # TODO waf 1.6: create_task(self, name, inputs, outputs)
6855+ def create_task(self, name, src=None, tgt=None, env=None):
6856+ env = env or self.env
6857+ task = Task.TaskBase.classes[name](env.copy(), generator=self)
6858+ if src:
6859+ task.set_inputs(src)
6860+ if tgt:
6861+ task.set_outputs(tgt)
6862+ self.tasks.append(task)
6863+ return task
6864+
6865+ def name_to_obj(self, name):
6866+ return self.bld.name_to_obj(name, self.env)
6867+
6868+ def find_sources_in_dirs(self, dirnames, excludes=[], exts=[]):
6869+ """
6870+ The attributes "excludes" and "exts" must be lists, to avoid the confusion between
6871+ find_sources_in_dirs('a', 'b', 'c') and find_sources_in_dirs('a b c')
6872+
6873+ do not use absolute paths
6874+ do not use paths outside of the source tree
6875+ the files or folders beginning with '.' are not returned
6876+
6877+ # TODO: remove in Waf 1.6
6878+ """
6879+
6880+ err_msg = "'%s' attribute must be a list"
6881+ if not isinstance(excludes, list):
6882+ raise Utils.WscriptError(err_msg % 'excludes')
6883+ if not isinstance(exts, list):
6884+ raise Utils.WscriptError(err_msg % 'exts')
6885+
6886+ lst = []
6887+
6888+ # make sure dirnames is a list; this helps with dirnames containing spaces
6889+ dirnames = self.to_list(dirnames)
6890+
6891+ ext_lst = exts or list(self.mappings.keys()) + list(task_gen.mappings.keys())
6892+
6893+ for name in dirnames:
6894+ anode = self.path.find_dir(name)
6895+
6896+ if not anode or not anode.is_child_of(self.bld.srcnode):
6897+ raise Utils.WscriptError("Unable to use '%s' - either because it's not a relative path" \
6898+ ", or it's not child of '%s'." % (name, self.bld.srcnode))
6899+
6900+ self.bld.rescan(anode)
6901+ for name in self.bld.cache_dir_contents[anode.id]:
6902+
6903+ # ignore hidden files
6904+ if name.startswith('.'):
6905+ continue
6906+
6907+ (base, ext) = os.path.splitext(name)
6908+ if ext in ext_lst and not name in lst and not name in excludes:
6909+ lst.append((anode.relpath_gen(self.path) or '.') + os.path.sep + name)
6910+
6911+ lst.sort()
6912+ self.source = self.to_list(self.source)
6913+ if not self.source: self.source = lst
6914+ else: self.source += lst
6915+
6916+ def clone(self, env):
6917+ """when creating a clone in a task generator method,
6918+ make sure to set posted=False on the clone
6919+ else the other task generator will not create its tasks"""
6920+ newobj = task_gen(bld=self.bld)
6921+ for x in self.__dict__:
6922+ if x in ['env', 'bld']:
6923+ continue
6924+ elif x in ["path", "features"]:
6925+ setattr(newobj, x, getattr(self, x))
6926+ else:
6927+ setattr(newobj, x, copy.copy(getattr(self, x)))
6928+
6929+ newobj.__class__ = self.__class__
6930+ if isinstance(env, str):
6931+ newobj.env = self.bld.all_envs[env].copy()
6932+ else:
6933+ newobj.env = env.copy()
6934+
6935+ return newobj
6936+
6937+ def get_inst_path(self):
6938+ return getattr(self, '_install_path', getattr(self, 'default_install_path', ''))
6939+
6940+ def set_inst_path(self, val):
6941+ self._install_path = val
6942+
6943+ install_path = property(get_inst_path, set_inst_path)
6944+
6945+
6946+ def get_chmod(self):
6947+ return getattr(self, '_chmod', getattr(self, 'default_chmod', O644))
6948+
6949+ def set_chmod(self, val):
6950+ self._chmod = val
6951+
6952+ chmod = property(get_chmod, set_chmod)
6953+
6954+def declare_extension(var, func):
6955+ try:
6956+ for x in Utils.to_list(var):
6957+ task_gen.mappings[x] = func
6958+ except:
6959+ raise Utils.WscriptError('declare_extension takes either a list or a string %r' % var)
6960+ task_gen.mapped[func.__name__] = func
6961+
6962+def declare_order(*k):
6963+ assert(len(k) > 1)
6964+ n = len(k) - 1
6965+ for i in xrange(n):
6966+ f1 = k[i]
6967+ f2 = k[i+1]
6968+ if not f1 in task_gen.prec[f2]:
6969+ task_gen.prec[f2].append(f1)
6970+
6971+def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=True, color='BLUE',
6972+ install=0, before=[], after=[], decider=None, rule=None, scan=None):
6973+ """
6974+ see Tools/flex.py for an example
6975+ while I do not like such wrappers, some people really do
6976+ """
6977+
6978+ action = action or rule
6979+ if isinstance(action, str):
6980+ act = Task.simple_task_type(name, action, color=color)
6981+ else:
6982+ act = Task.task_type_from_func(name, action, color=color)
6983+ act.ext_in = tuple(Utils.to_list(ext_in))
6984+ act.ext_out = tuple(Utils.to_list(ext_out))
6985+ act.before = Utils.to_list(before)
6986+ act.after = Utils.to_list(after)
6987+ act.scan = scan
6988+
6989+ def x_file(self, node):
6990+ if decider:
6991+ ext = decider(self, node)
6992+ else:
6993+ ext = ext_out
6994+
6995+ if isinstance(ext, str):
6996+ out_source = node.change_ext(ext)
6997+ if reentrant:
6998+ self.allnodes.append(out_source)
6999+ elif isinstance(ext, list):
7000+ out_source = [node.change_ext(x) for x in ext]
7001+ if reentrant:
7002+ for i in xrange((reentrant is True) and len(out_source) or reentrant):
7003+ self.allnodes.append(out_source[i])
7004+ else:
7005+ # XXX: useless: it will fail on Utils.to_list above...
7006+ raise Utils.WafError("do not know how to process %s" % str(ext))
7007+
7008+ tsk = self.create_task(name, node, out_source)
7009+
7010+ if node.__class__.bld.is_install:
7011+ tsk.install = install
7012+
7013+ declare_extension(act.ext_in, x_file)
7014+ return x_file
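# Hedged usage sketch for declare_chain() (extensions and command invented);
# this is the wrapper equivalent of the flex/bison style tools:
#
#	declare_chain(name='fake_lower', rule='tr A-Z a-z < ${SRC} > ${TGT}',
#		ext_in='.TXT', ext_out='.txt', reentrant=False)
#
# Every '.TXT' source file handled by a task generator then produces a '.txt'
# file through a 'fake_lower' task.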
7015+
7016+def bind_feature(name, methods):
7017+ lst = Utils.to_list(methods)
7018+ task_gen.traits[name].update(lst)
7019+
7020+"""
7021+All the following decorators are registration decorators, i.e. they add an attribute to the current class
7022+ (task_gen and its derivatives), with the same name as func, which points to func itself.
7023+For example:
7024+ @taskgen
7025+ def sayHi(self):
7026+ print("hi")
7027+Now sayHi() may be called on any task_gen instance
7028+
7029+If python were really smart, it could infer the order of the methods by itself, by looking at the
7030+attributes. A prerequisite for execution is to have the attribute set beforehand.
7031+Intelligent compilers binding aspect-oriented programming and parallelization, what a nice topic for studies.
7032+"""
7033+def taskgen(func):
7034+ """
7035+ register a method as a task generator method
7036+ """
7037+ setattr(task_gen, func.__name__, func)
7038+ return func
7039+
7040+def feature(*k):
7041+ """
7042+ declare a task generator method that will be executed when the
7043+ object attribute 'feature' contains the corresponding key(s)
7044+ """
7045+ def deco(func):
7046+ setattr(task_gen, func.__name__, func)
7047+ for name in k:
7048+ task_gen.traits[name].update([func.__name__])
7049+ return func
7050+ return deco
7051+
7052+def before(*k):
7053+ """
7054+ declare a task generator method which will be executed
7055+ before the functions of given name(s)
7056+ """
7057+ def deco(func):
7058+ setattr(task_gen, func.__name__, func)
7059+ for fun_name in k:
7060+ if not func.__name__ in task_gen.prec[fun_name]:
7061+ task_gen.prec[fun_name].append(func.__name__)
7062+ return func
7063+ return deco
7064+
7065+def after(*k):
7066+ """
7067+ declare a task generator method which will be executed
7068+ after the functions of given name(s)
7069+ """
7070+ def deco(func):
7071+ setattr(task_gen, func.__name__, func)
7072+ for fun_name in k:
7073+ if not fun_name in task_gen.prec[func.__name__]:
7074+ task_gen.prec[func.__name__].append(fun_name)
7075+ return func
7076+ return deco
7077+
7078+def extension(var):
7079+ """
7080+ declare a task generator method which will be invoked during
7081+ the processing of source files for the extension given
7082+ """
7083+ def deco(func):
7084+ setattr(task_gen, func.__name__, func)
7085+ try:
7086+ for x in Utils.to_list(var):
7087+ task_gen.mappings[x] = func
7088+ except:
7089+ raise Utils.WafError('extension takes either a list or a string %r' % var)
7090+ task_gen.mapped[func.__name__] = func
7091+ return func
7092+ return deco
7093+
7094+# TODO make certain the decorators may be used here
7095+
7096+def apply_core(self):
7097+ """Process the attribute source
7098+ transform the names into file nodes
7099+ try to process the files by name first, later by extension"""
7100+ # get the list of folders to use by the scanners
7101+ # all our objects share the same include paths anyway
7102+ find_resource = self.path.find_resource
7103+
7104+ for filename in self.to_list(self.source):
7105+ # if self.mappings or task_gen.mappings contains a file of the same name
7106+ x = self.get_hook(filename)
7107+ if x:
7108+ x(self, filename)
7109+ else:
7110+ node = find_resource(filename)
7111+ if not node: raise Utils.WafError("source not found: '%s' in '%s'" % (filename, str(self.path)))
7112+ self.allnodes.append(node)
7113+
7114+ for node in self.allnodes:
7115+ # self.mappings or task_gen.mappings map the file extension to a function
7116+ x = self.get_hook(node.suffix())
7117+
7118+ if not x:
7119+ raise Utils.WafError("Cannot guess how to process %s (got mappings %r in %r) -> try conf.check_tool(..)?" % \
7120+ (str(node), self.__class__.mappings.keys(), self.__class__))
7121+ x(self, node)
7122+feature('*')(apply_core)
7123+
7124+def exec_rule(self):
7125+ """Process the attribute rule, when provided the method apply_core will be disabled
7126+ """
7127+ if not getattr(self, 'rule', None):
7128+ return
7129+
7130+ # someone may have removed it already
7131+ try:
7132+ self.meths.remove('apply_core')
7133+ except ValueError:
7134+ pass
7135+
7136+ # get the function and the variables
7137+ func = self.rule
7138+
7139+ vars2 = []
7140+ if isinstance(func, str):
7141+ # use the shell by default for user-defined commands
7142+ (func, vars2) = Task.compile_fun('', self.rule, shell=getattr(self, 'shell', True))
7143+ func.code = self.rule
7144+
7145+ # create the task class
7146+ name = getattr(self, 'name', None) or self.target or self.rule
7147+ if not isinstance(name, str):
7148+ name = str(self.idx)
7149+ cls = Task.task_type_from_func(name, func, getattr(self, 'vars', vars2))
7150+ cls.color = getattr(self, 'color', 'BLUE')
7151+
7152+ # now create one instance
7153+ tsk = self.create_task(name)
7154+
7155+ dep_vars = getattr(self, 'dep_vars', ['ruledeps'])
7156+ if dep_vars:
7157+ tsk.dep_vars = dep_vars
7158+ if isinstance(self.rule, str):
7159+ tsk.env.ruledeps = self.rule
7160+ else:
7161+ # only works if the function is in a global module such as a waf tool
7162+ tsk.env.ruledeps = Utils.h_fun(self.rule)
7163+
7164+ # we assume that the user knows what they are doing when there are no inputs or outputs
7165+ #if not getattr(self, 'target', None) and not getattr(self, 'source', None):
7166+ # cls.quiet = True
7167+
7168+ if getattr(self, 'target', None):
7169+ cls.quiet = True
7170+ tsk.outputs = [self.path.find_or_declare(x) for x in self.to_list(self.target)]
7171+
7172+ if getattr(self, 'source', None):
7173+ cls.quiet = True
7174+ tsk.inputs = []
7175+ for x in self.to_list(self.source):
7176+ y = self.path.find_resource(x)
7177+ if not y:
7178+ raise Utils.WafError('input file %r could not be found (%r)' % (x, self.path.abspath()))
7179+ tsk.inputs.append(y)
7180+
7181+ if self.allnodes:
7182+ tsk.inputs.extend(self.allnodes)
7183+
7184+ if getattr(self, 'scan', None):
7185+ cls.scan = self.scan
7186+
7187+ if getattr(self, 'install_path', None):
7188+ tsk.install_path = self.install_path
7189+
7190+ if getattr(self, 'cwd', None):
7191+ tsk.cwd = self.cwd
7192+
7193+ if getattr(self, 'on_results', None):
7194+ Task.update_outputs(cls)
7195+
7196+ if getattr(self, 'always', None):
7197+ Task.always_run(cls)
7198+
7199+ for x in ['after', 'before', 'ext_in', 'ext_out']:
7200+ setattr(cls, x, getattr(self, x, []))
7201+feature('*')(exec_rule)
7202+before('apply_core')(exec_rule)
7203+
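# Hedged wscript-level sketch of what exec_rule() handles (file names invented):
#
#	bld(rule='cp ${SRC} ${TGT}', source='version.h.in', target='version.h',
#		always=True, on_results=True)
#
# The string rule is compiled with Task.compile_fun(); 'always' maps to
# Task.always_run() and 'on_results' to Task.update_outputs(), as implemented above.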
7204+def sequence_order(self):
7205+ """
7206+ add a strict sequential constraint between the tasks generated by task generators
7207+ it uses the fact that task generators are posted in order
7208+ it will not post objects which belong to other folders
7209+ there is also an awesome trick for executing the method in last position
7210+
7211+ to use:
7212+ bld(features='javac seq')
7213+ bld(features='jar seq')
7214+
7215+ to start a new sequence, set the attribute seq_start, for example:
7216+ obj.seq_start = True
7217+ """
7218+ if self.meths and self.meths[-1] != 'sequence_order':
7219+ self.meths.append('sequence_order')
7220+ return
7221+
7222+ if getattr(self, 'seq_start', None):
7223+ return
7224+
7225+ # all the tasks previously declared must be run before these
7226+ if getattr(self.bld, 'prev', None):
7227+ self.bld.prev.post()
7228+ for x in self.bld.prev.tasks:
7229+ for y in self.tasks:
7230+ y.set_run_after(x)
7231+
7232+ self.bld.prev = self
7233+
7234+feature('seq')(sequence_order)
7235+
7236diff --git a/buildtools/wafadmin/Tools/__init__.py b/buildtools/wafadmin/Tools/__init__.py
7237new file mode 100644
7238index 0000000..bc6ca23
7239--- /dev/null
7240+++ b/buildtools/wafadmin/Tools/__init__.py
7241@@ -0,0 +1,4 @@
7242+#!/usr/bin/env python
7243+# encoding: utf-8
7244+# Thomas Nagy, 2006 (ita)
7245+
7246diff --git a/buildtools/wafadmin/Tools/ar.py b/buildtools/wafadmin/Tools/ar.py
7247new file mode 100644
7248index 0000000..af9b17f
7249--- /dev/null
7250+++ b/buildtools/wafadmin/Tools/ar.py
7251@@ -0,0 +1,36 @@
7252+#!/usr/bin/env python
7253+# encoding: utf-8
7254+# Thomas Nagy, 2006-2008 (ita)
7255+# Ralf Habacker, 2006 (rh)
7256+
7257+"ar and ranlib"
7258+
7259+import os, sys
7260+import Task, Utils
7261+from Configure import conftest
7262+
7263+ar_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
7264+cls = Task.simple_task_type('static_link', ar_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
7265+cls.maxjobs = 1
7266+cls.install = Utils.nada
7267+
7268+# remove the output in case it already exists
7269+old = cls.run
7270+def wrap(self):
7271+ try: os.remove(self.outputs[0].abspath(self.env))
7272+ except OSError: pass
7273+ return old(self)
7274+setattr(cls, 'run', wrap)
7275+
7276+def detect(conf):
7277+ conf.find_program('ar', var='AR')
7278+ conf.find_program('ranlib', var='RANLIB')
7279+ conf.env.ARFLAGS = 'rcs'
7280+
7281+@conftest
7282+def find_ar(conf):
7283+ v = conf.env
7284+ conf.check_tool('ar')
7285+ if not v['AR']: conf.fatal('ar is required for static libraries - not found')
7286+
7287+
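For illustration only: a hedged sketch of how the ar tool above is normally reached when building a static library; the selected C compiler tool usually triggers find_ar during configure, and the file names here are hypothetical.

def configure(conf):
	conf.check_tool('compiler_cc')  # normally pulls in 'ar' for static link tasks
def build(bld):
	bld(features='cc cstaticlib', source='foo.c', target='foo')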
7288diff --git a/buildtools/wafadmin/Tools/bison.py b/buildtools/wafadmin/Tools/bison.py
7289new file mode 100644
7290index 0000000..49c6051
7291--- /dev/null
7292+++ b/buildtools/wafadmin/Tools/bison.py
7293@@ -0,0 +1,38 @@
7294+#!/usr/bin/env python
7295+# encoding: utf-8
7296+# John O'Meara, 2006
7297+# Thomas Nagy 2009
7298+
7299+"Bison processing"
7300+
7301+import Task
7302+from TaskGen import extension
7303+
7304+bison = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
7305+cls = Task.simple_task_type('bison', bison, 'GREEN', ext_in='.yc .y .yy', ext_out='.c .cxx .h .l', shell=False)
7306+
7307+@extension(['.y', '.yc', '.yy'])
7308+def big_bison(self, node):
7309+ """when it becomes complicated (unlike flex), the old recipes work better (cwd)"""
7310+ has_h = '-d' in self.env['BISONFLAGS']
7311+
7312+ outs = []
7313+ if node.name.endswith('.yc'):
7314+ outs.append(node.change_ext('.tab.cc'))
7315+ if has_h:
7316+ outs.append(node.change_ext('.tab.hh'))
7317+ else:
7318+ outs.append(node.change_ext('.tab.c'))
7319+ if has_h:
7320+ outs.append(node.change_ext('.tab.h'))
7321+
7322+ tsk = self.create_task('bison', node, outs)
7323+ tsk.cwd = node.bld_dir(tsk.env)
7324+
7325+ # and the c/cxx file must be compiled too
7326+ self.allnodes.append(outs[0])
7327+
7328+def detect(conf):
7329+ bison = conf.find_program('bison', var='BISON', mandatory=True)
7330+ conf.env['BISONFLAGS'] = '-d'
7331+
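For illustration only: a minimal sketch of the bison tool above in a wscript; since BISONFLAGS defaults to '-d', a header is produced next to the C file, and the generated .tab.c is compiled automatically through self.allnodes. File names are hypothetical.

def configure(conf):
	conf.check_tool('compiler_cc bison')
def build(bld):
	bld(features='cc cprogram', source='parser.y main.c', target='calc')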
7332diff --git a/buildtools/wafadmin/Tools/cc.py b/buildtools/wafadmin/Tools/cc.py
7333new file mode 100644
7334index 0000000..903a1c5
7335--- /dev/null
7336+++ b/buildtools/wafadmin/Tools/cc.py
7337@@ -0,0 +1,100 @@
7338+#!/usr/bin/env python
7339+# encoding: utf-8
7340+# Thomas Nagy, 2006 (ita)
7341+
7342+"Base for c programs/libraries"
7343+
7344+import os
7345+import TaskGen, Build, Utils, Task
7346+from Logs import debug
7347+import ccroot
7348+from TaskGen import feature, before, extension, after
7349+
7350+g_cc_flag_vars = [
7351+'CCDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
7352+'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
7353+'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CCDEFINES']
7354+
7355+EXT_CC = ['.c']
7356+
7357+g_cc_type_vars = ['CCFLAGS', 'LINKFLAGS']
7358+
7359+# TODO remove in waf 1.6
7360+class cc_taskgen(ccroot.ccroot_abstract):
7361+ pass
7362+
7363+@feature('cc')
7364+@before('apply_type_vars')
7365+@after('default_cc')
7366+def init_cc(self):
7367+ self.p_flag_vars = set(self.p_flag_vars).union(g_cc_flag_vars)
7368+ self.p_type_vars = set(self.p_type_vars).union(g_cc_type_vars)
7369+
7370+ if not self.env['CC_NAME']:
7371+ raise Utils.WafError("At least one compiler (gcc, ..) must be selected")
7372+
7373+@feature('cc')
7374+@after('apply_incpaths')
7375+def apply_obj_vars_cc(self):
7376+ """after apply_incpaths for INC_PATHS"""
7377+ env = self.env
7378+ app = env.append_unique
7379+ cpppath_st = env['CPPPATH_ST']
7380+
7381+ # local flags come first
7382+ # set the user-defined includes paths
7383+ for i in env['INC_PATHS']:
7384+ app('_CCINCFLAGS', cpppath_st % i.bldpath(env))
7385+ app('_CCINCFLAGS', cpppath_st % i.srcpath(env))
7386+
7387+ # set the library include paths
7388+ for i in env['CPPPATH']:
7389+ app('_CCINCFLAGS', cpppath_st % i)
7390+
7391+@feature('cc')
7392+@after('apply_lib_vars')
7393+def apply_defines_cc(self):
7394+ """after uselib is set for CCDEFINES"""
7395+ self.defines = getattr(self, 'defines', [])
7396+ lst = self.to_list(self.defines) + self.to_list(self.env['CCDEFINES'])
7397+ milst = []
7398+
7399+ # now process the local defines
7400+ for defi in lst:
7401+ if not defi in milst:
7402+ milst.append(defi)
7403+
7404+ # CCDEFINES_
7405+ libs = self.to_list(self.uselib)
7406+ for l in libs:
7407+ val = self.env['CCDEFINES_'+l]
7408+ if val: milst += val
7409+ self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
7410+ y = self.env['CCDEFINES_ST']
7411+ self.env.append_unique('_CCDEFFLAGS', [y%x for x in milst])
7412+
7413+@extension(EXT_CC)
7414+def c_hook(self, node):
7415+ # create the compilation task: cpp or cc
7416+ if getattr(self, 'obj_ext', None):
7417+ obj_ext = self.obj_ext
7418+ else:
7419+ obj_ext = '_%d.o' % self.idx
7420+
7421+ task = self.create_task('cc', node, node.change_ext(obj_ext))
7422+ try:
7423+ self.compiled_tasks.append(task)
7424+ except AttributeError:
7425+ raise Utils.WafError('Have you forgotten to set the feature "cc" on %s?' % str(self))
7426+ return task
7427+
7428+cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
7429+cls = Task.simple_task_type('cc', cc_str, 'GREEN', ext_out='.o', ext_in='.c', shell=False)
7430+cls.scan = ccroot.scan
7431+cls.vars.append('CCDEPS')
7432+
7433+link_str = '${LINK_CC} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
7434+cls = Task.simple_task_type('cc_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
7435+cls.maxjobs = 1
7436+cls.install = Utils.nada
7437+
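For illustration only: a minimal sketch of the attributes consumed by the cc feature above (includes, defines, target); names and paths are hypothetical.

def configure(conf):
	conf.check_tool('compiler_cc')
def build(bld):
	bld(features='cc cprogram',
		source='main.c util.c',
		includes='. include',
		defines='FOO=1',
		target='app')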
7438diff --git a/buildtools/wafadmin/Tools/ccroot.py b/buildtools/wafadmin/Tools/ccroot.py
7439new file mode 100644
7440index 0000000..f54c82f
7441--- /dev/null
7442+++ b/buildtools/wafadmin/Tools/ccroot.py
7443@@ -0,0 +1,629 @@
7444+#!/usr/bin/env python
7445+# encoding: utf-8
7446+# Thomas Nagy, 2005-2008 (ita)
7447+
7448+"base for all c/c++ programs and libraries"
7449+
7450+import os, sys, re
7451+import TaskGen, Task, Utils, preproc, Logs, Build, Options
7452+from Logs import error, debug, warn
7453+from Utils import md5
7454+from TaskGen import taskgen, after, before, feature
7455+from Constants import *
7456+from Configure import conftest
7457+try:
7458+ from cStringIO import StringIO
7459+except ImportError:
7460+ from io import StringIO
7461+
7462+import config_c # <- necessary for the configuration, do not touch
7463+
7464+USE_TOP_LEVEL = False
7465+
7466+def get_cc_version(conf, cc, gcc=False, icc=False):
7467+
7468+ cmd = cc + ['-dM', '-E', '-']
7469+ try:
7470+ p = Utils.pproc.Popen(cmd, stdin=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
7471+ p.stdin.write('\n')
7472+ out = p.communicate()[0]
7473+ except:
7474+ conf.fatal('could not determine the compiler version %r' % cmd)
7475+
7476+ # PY3K: do not touch
7477+ out = str(out)
7478+
7479+ if gcc:
7480+ if out.find('__INTEL_COMPILER') >= 0:
7481+ conf.fatal('The intel compiler pretends to be gcc')
7482+ if out.find('__GNUC__') < 0:
7483+ conf.fatal('Could not determine the compiler type')
7484+
7485+ if icc and out.find('__INTEL_COMPILER') < 0:
7486+ conf.fatal('Not icc/icpc')
7487+
7488+ k = {}
7489+ if icc or gcc:
7490+ out = out.split('\n')
7491+ import shlex
7492+
7493+ for line in out:
7494+ lst = shlex.split(line)
7495+ if len(lst)>2:
7496+ key = lst[1]
7497+ val = lst[2]
7498+ k[key] = val
7499+
7500+ def isD(var):
7501+ return var in k
7502+
7503+ def isT(var):
7504+ return var in k and k[var] != '0'
7505+
7506+ # Some documentation is available at http://predef.sourceforge.net
7507+ # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
7508+ mp1 = {
7509+ '__linux__' : 'linux',
7510+ '__GNU__' : 'gnu',
7511+ '__FreeBSD__' : 'freebsd',
7512+ '__NetBSD__' : 'netbsd',
7513+ '__OpenBSD__' : 'openbsd',
7514+ '__sun' : 'sunos',
7515+ '__hpux' : 'hpux',
7516+ '__sgi' : 'irix',
7517+ '_AIX' : 'aix',
7518+ '__CYGWIN__' : 'cygwin',
7519+ '__MSYS__' : 'msys',
7520+ '_UWIN' : 'uwin',
7521+ '_WIN64' : 'win32',
7522+ '_WIN32' : 'win32',
7523+ '__POWERPC__' : 'powerpc',
7524+ }
7525+
7526+ for i in mp1:
7527+ if isD(i):
7528+ conf.env.DEST_OS = mp1[i]
7529+ break
7530+ else:
7531+ if isD('__APPLE__') and isD('__MACH__'):
7532+ conf.env.DEST_OS = 'darwin'
7533+ elif isD('__unix__'): # unix must be tested last as it's a generic fallback
7534+ conf.env.DEST_OS = 'generic'
7535+
7536+ if isD('__ELF__'):
7537+ conf.env.DEST_BINFMT = 'elf'
7538+ elif isD('__WINNT__') or isD('__CYGWIN__'):
7539+ conf.env.DEST_BINFMT = 'pe'
7540+ elif isD('__APPLE__'):
7541+ conf.env.DEST_BINFMT = 'mac-o'
7542+
7543+ mp2 = {
7544+ '__x86_64__' : 'x86_64',
7545+ '__i386__' : 'x86',
7546+ '__ia64__' : 'ia',
7547+ '__mips__' : 'mips',
7548+ '__sparc__' : 'sparc',
7549+ '__alpha__' : 'alpha',
7550+ '__arm__' : 'arm',
7551+ '__hppa__' : 'hppa',
7552+ '__powerpc__' : 'powerpc',
7553+ }
7554+ for i in mp2:
7555+ if isD(i):
7556+ conf.env.DEST_CPU = mp2[i]
7557+ break
7558+
7559+ debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
7560+ conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
7561+ return k
7562+
7563+class DEBUG_LEVELS:
7564+ """Will disappear in waf 1.6"""
7565+ ULTRADEBUG = "ultradebug"
7566+ DEBUG = "debug"
7567+ RELEASE = "release"
7568+ OPTIMIZED = "optimized"
7569+ CUSTOM = "custom"
7570+
7571+ ALL = [ULTRADEBUG, DEBUG, RELEASE, OPTIMIZED, CUSTOM]
7572+
7573+def scan(self):
7574+ "look for .h the .cpp need"
7575+ debug('ccroot: _scan_preprocessor(self, node, env, path_lst)')
7576+
7577+ # TODO waf 1.6 - assume the default input has exactly one file
7578+
7579+ if len(self.inputs) == 1:
7580+ node = self.inputs[0]
7581+ (nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
7582+ if Logs.verbose:
7583+ debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
7584+ return (nodes, names)
7585+
7586+ all_nodes = []
7587+ all_names = []
7588+ seen = set()
7589+ for node in self.inputs:
7590+ (nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
7591+ if Logs.verbose:
7592+ debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
7593+ for x in nodes:
7594+ if id(x) in seen: continue
7595+ seen.add(id(x))
7596+ all_nodes.append(x)
7597+ for x in names:
7598+ if not x in all_names:
7599+ all_names.append(x)
7600+ return (all_nodes, all_names)
7601+
7602+class ccroot_abstract(TaskGen.task_gen):
7603+ "Parent class for programs and libraries in languages c, c++ and moc (Qt)"
7604+ def __init__(self, *k, **kw):
7605+ # COMPAT remove in waf 1.6 TODO
7606+ if len(k) > 1:
7607+ k = list(k)
7608+ if k[1][0] != 'c':
7609+ k[1] = 'c' + k[1]
7610+ TaskGen.task_gen.__init__(self, *k, **kw)
7611+
7612+def get_target_name(self):
7613+ tp = 'program'
7614+ for x in self.features:
7615+ if x in ['cshlib', 'cstaticlib']:
7616+ tp = x.lstrip('c')
7617+
7618+ pattern = self.env[tp + '_PATTERN']
7619+ if not pattern: pattern = '%s'
7620+
7621+ dir, name = os.path.split(self.target)
7622+
7623+ if self.env.DEST_BINFMT == 'pe' and getattr(self, 'vnum', None) and 'cshlib' in self.features:
7624+ # include the version in the dll file name,
7625+	# the import lib file name stays unversioned.
7626+ name = name + '-' + self.vnum.split('.')[0]
7627+
7628+ return os.path.join(dir, pattern % name)
7629+
7630+@feature('cc', 'cxx')
7631+@before('apply_core')
7632+def default_cc(self):
7633+ """compiled_tasks attribute must be set before the '.c->.o' tasks can be created"""
7634+ Utils.def_attrs(self,
7635+ includes = '',
7636+ defines= '',
7637+ rpaths = '',
7638+ uselib = '',
7639+ uselib_local = '',
7640+ add_objects = '',
7641+ p_flag_vars = [],
7642+ p_type_vars = [],
7643+ compiled_tasks = [],
7644+ link_task = None)
7645+
7646+ # The only thing we need for cross-compilation is DEST_BINFMT.
7647+ # At some point, we may reach a case where DEST_BINFMT is not enough, but for now it's sufficient.
7648+ # Currently, cross-compilation is auto-detected only for the gnu and intel compilers.
7649+ if not self.env.DEST_BINFMT:
7650+ # Infer the binary format from the os name.
7651+ self.env.DEST_BINFMT = Utils.unversioned_sys_platform_to_binary_format(
7652+ self.env.DEST_OS or Utils.unversioned_sys_platform())
7653+
7654+ if not self.env.BINDIR: self.env.BINDIR = Utils.subst_vars('${PREFIX}/bin', self.env)
7655+ if not self.env.LIBDIR: self.env.LIBDIR = Utils.subst_vars('${PREFIX}/lib${LIB_EXT}', self.env)
7656+
7657+@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
7658+def apply_verif(self):
7659+	"""no particular order, used for diagnostics"""
7660+ if not (self.source or getattr(self, 'add_objects', None) or getattr(self, 'uselib_local', None) or getattr(self, 'obj_files', None)):
7661+ raise Utils.WafError('no source files specified for %s' % self)
7662+ if not self.target:
7663+ raise Utils.WafError('no target for %s' % self)
7664+
7665+# TODO reference the d programs, shlibs in d.py, not here
7666+
7667+@feature('cprogram', 'dprogram')
7668+@after('default_cc')
7669+@before('apply_core')
7670+def vars_target_cprogram(self):
7671+ self.default_install_path = self.env.BINDIR
7672+ self.default_chmod = O755
7673+
7674+@after('default_cc')
7675+@feature('cshlib', 'dshlib')
7676+@before('apply_core')
7677+def vars_target_cshlib(self):
7678+ if self.env.DEST_BINFMT == 'pe':
7679+ # set execute bit on libs to avoid 'permission denied' (issue 283)
7680+ self.default_chmod = O755
7681+ self.default_install_path = self.env.BINDIR
7682+ else:
7683+ self.default_install_path = self.env.LIBDIR
7684+
7685+@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
7686+@after('apply_link', 'vars_target_cprogram', 'vars_target_cshlib')
7687+def default_link_install(self):
7688+ """you may kill this method to inject your own installation for the first element
7689+ any other install should only process its own nodes and not those from the others"""
7690+ if self.install_path:
7691+ self.bld.install_files(self.install_path, self.link_task.outputs[0], env=self.env, chmod=self.chmod)
7692+
7693+@feature('cc', 'cxx')
7694+@after('apply_type_vars', 'apply_lib_vars', 'apply_core')
7695+def apply_incpaths(self):
7696+ """used by the scanner
7697+ after processing the uselib for CPPPATH
7698+ after apply_core because some processing may add include paths
7699+ """
7700+ lst = []
7701+ # TODO move the uselib processing out of here
7702+ for lib in self.to_list(self.uselib):
7703+ for path in self.env['CPPPATH_' + lib]:
7704+ if not path in lst:
7705+ lst.append(path)
7706+ if preproc.go_absolute:
7707+ for path in preproc.standard_includes:
7708+ if not path in lst:
7709+ lst.append(path)
7710+
7711+ for path in self.to_list(self.includes):
7712+ if not path in lst:
7713+ if preproc.go_absolute or not os.path.isabs(path):
7714+ lst.append(path)
7715+ else:
7716+ self.env.prepend_value('CPPPATH', path)
7717+
7718+ for path in lst:
7719+ node = None
7720+ if os.path.isabs(path):
7721+ if preproc.go_absolute:
7722+ node = self.bld.root.find_dir(path)
7723+ elif path[0] == '#':
7724+ node = self.bld.srcnode
7725+ if len(path) > 1:
7726+ node = node.find_dir(path[1:])
7727+ else:
7728+ node = self.path.find_dir(path)
7729+
7730+ if node:
7731+ self.env.append_value('INC_PATHS', node)
7732+
7733+ # TODO WAF 1.6
7734+ if USE_TOP_LEVEL:
7735+ self.env.append_value('INC_PATHS', self.bld.srcnode)
7736+
7737+@feature('cc', 'cxx')
7738+@after('init_cc', 'init_cxx')
7739+@before('apply_lib_vars')
7740+def apply_type_vars(self):
7741+ """before apply_lib_vars because we modify uselib
7742+	after init_cc and init_cxx because we need p_type_vars
7743+ """
7744+ for x in self.features:
7745+ if not x in ['cprogram', 'cstaticlib', 'cshlib']:
7746+ continue
7747+ x = x.lstrip('c')
7748+
7749+ # if the type defines uselib to add, add them
7750+ st = self.env[x + '_USELIB']
7751+ if st: self.uselib = self.uselib + ' ' + st
7752+
7753+ # each compiler defines variables like 'shlib_CXXFLAGS', 'shlib_LINKFLAGS', etc
7754+ # so when we make a task generator of the type shlib, CXXFLAGS are modified accordingly
7755+ for var in self.p_type_vars:
7756+ compvar = '%s_%s' % (x, var)
7757+ #print compvar
7758+ value = self.env[compvar]
7759+ if value: self.env.append_value(var, value)
7760+
7761+@feature('cprogram', 'cshlib', 'cstaticlib')
7762+@after('apply_core')
7763+def apply_link(self):
7764+ """executes after apply_core for collecting 'compiled_tasks'
7765+ use a custom linker if specified (self.link='name-of-custom-link-task')"""
7766+ link = getattr(self, 'link', None)
7767+ if not link:
7768+ if 'cstaticlib' in self.features: link = 'static_link'
7769+ elif 'cxx' in self.features: link = 'cxx_link'
7770+ else: link = 'cc_link'
7771+
7772+ tsk = self.create_task(link)
7773+ outputs = [t.outputs[0] for t in self.compiled_tasks]
7774+ tsk.set_inputs(outputs)
7775+ tsk.set_outputs(self.path.find_or_declare(get_target_name(self)))
7776+
7777+ self.link_task = tsk
7778+
7779+@feature('cc', 'cxx')
7780+@after('apply_link', 'init_cc', 'init_cxx', 'apply_core')
7781+def apply_lib_vars(self):
7782+ """after apply_link because of 'link_task'
7783+ after default_cc because of the attribute 'uselib'"""
7784+
7785+	# after 'apply_core' in case of 'cc' when there is no link task
7786+
7787+ env = self.env
7788+
7789+ # 1. the case of the libs defined in the project (visit ancestors first)
7790+ # the ancestors external libraries (uselib) will be prepended
7791+ self.uselib = self.to_list(self.uselib)
7792+ names = self.to_list(self.uselib_local)
7793+
7794+ seen = set([])
7795+ tmp = Utils.deque(names) # consume a copy of the list of names
7796+ while tmp:
7797+ lib_name = tmp.popleft()
7798+ # visit dependencies only once
7799+ if lib_name in seen:
7800+ continue
7801+
7802+ y = self.name_to_obj(lib_name)
7803+ if not y:
7804+ raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
7805+ y.post()
7806+ seen.add(lib_name)
7807+
7808+ # object has ancestors to process (shared libraries): add them to the end of the list
7809+ if getattr(y, 'uselib_local', None):
7810+ lst = y.to_list(y.uselib_local)
7811+ if 'cshlib' in y.features or 'cprogram' in y.features:
7812+ lst = [x for x in lst if not 'cstaticlib' in self.name_to_obj(x).features]
7813+ tmp.extend(lst)
7814+
7815+ # link task and flags
7816+ if getattr(y, 'link_task', None):
7817+
7818+ link_name = y.target[y.target.rfind(os.sep) + 1:]
7819+ if 'cstaticlib' in y.features:
7820+ env.append_value('STATICLIB', link_name)
7821+ elif 'cshlib' in y.features or 'cprogram' in y.features:
7822+ # WARNING some linkers can link against programs
7823+ env.append_value('LIB', link_name)
7824+
7825+ # the order
7826+ self.link_task.set_run_after(y.link_task)
7827+
7828+ # for the recompilation
7829+ dep_nodes = getattr(self.link_task, 'dep_nodes', [])
7830+ self.link_task.dep_nodes = dep_nodes + y.link_task.outputs
7831+
7832+ # add the link path too
7833+ tmp_path = y.link_task.outputs[0].parent.bldpath(self.env)
7834+ if not tmp_path in env['LIBPATH']: env.prepend_value('LIBPATH', tmp_path)
7835+
7836+ # add ancestors uselib too - but only propagate those that have no staticlib
7837+ for v in self.to_list(y.uselib):
7838+ if not env['STATICLIB_' + v]:
7839+ if not v in self.uselib:
7840+ self.uselib.insert(0, v)
7841+
7842+ # if the library task generator provides 'export_incdirs', add to the include path
7843+ # the export_incdirs must be a list of paths relative to the other library
7844+ if getattr(y, 'export_incdirs', None):
7845+ for x in self.to_list(y.export_incdirs):
7846+ node = y.path.find_dir(x)
7847+ if not node:
7848+ raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x))
7849+ self.env.append_unique('INC_PATHS', node)
7850+
7851+ # 2. the case of the libs defined outside
7852+ for x in self.uselib:
7853+ for v in self.p_flag_vars:
7854+ val = self.env[v + '_' + x]
7855+ if val: self.env.append_value(v, val)
7856+
7857+@feature('cprogram', 'cstaticlib', 'cshlib')
7858+@after('init_cc', 'init_cxx', 'apply_link')
7859+def apply_objdeps(self):
7860+ "add the .o files produced by some other object files in the same manner as uselib_local"
7861+ if not getattr(self, 'add_objects', None): return
7862+
7863+ seen = []
7864+ names = self.to_list(self.add_objects)
7865+ while names:
7866+ x = names[0]
7867+
7868+ # visit dependencies only once
7869+ if x in seen:
7870+ names = names[1:]
7871+ continue
7872+
7873+ # object does not exist ?
7874+ y = self.name_to_obj(x)
7875+ if not y:
7876+ raise Utils.WafError('object %r was not found in uselib_local (required by add_objects %r)' % (x, self.name))
7877+
7878+ # object has ancestors to process first ? update the list of names
7879+ if getattr(y, 'add_objects', None):
7880+ added = 0
7881+ lst = y.to_list(y.add_objects)
7882+ lst.reverse()
7883+ for u in lst:
7884+ if u in seen: continue
7885+ added = 1
7886+ names = [u]+names
7887+ if added: continue # list of names modified, loop
7888+
7889+ # safe to process the current object
7890+ y.post()
7891+ seen.append(x)
7892+
7893+ for t in y.compiled_tasks:
7894+ self.link_task.inputs.extend(t.outputs)
7895+
7896+@feature('cprogram', 'cshlib', 'cstaticlib')
7897+@after('apply_lib_vars')
7898+def apply_obj_vars(self):
7899+ """after apply_lib_vars for uselib"""
7900+ v = self.env
7901+ lib_st = v['LIB_ST']
7902+ staticlib_st = v['STATICLIB_ST']
7903+ libpath_st = v['LIBPATH_ST']
7904+ staticlibpath_st = v['STATICLIBPATH_ST']
7905+ rpath_st = v['RPATH_ST']
7906+
7907+ app = v.append_unique
7908+
7909+ if v['FULLSTATIC']:
7910+ v.append_value('LINKFLAGS', v['FULLSTATIC_MARKER'])
7911+
7912+ for i in v['RPATH']:
7913+ if i and rpath_st:
7914+ app('LINKFLAGS', rpath_st % i)
7915+
7916+ for i in v['LIBPATH']:
7917+ app('LINKFLAGS', libpath_st % i)
7918+ app('LINKFLAGS', staticlibpath_st % i)
7919+
7920+ if v['STATICLIB']:
7921+ v.append_value('LINKFLAGS', v['STATICLIB_MARKER'])
7922+ k = [(staticlib_st % i) for i in v['STATICLIB']]
7923+ app('LINKFLAGS', k)
7924+
7925+ # fully static binaries ?
7926+ if not v['FULLSTATIC']:
7927+ if v['STATICLIB'] or v['LIB']:
7928+ v.append_value('LINKFLAGS', v['SHLIB_MARKER'])
7929+
7930+ app('LINKFLAGS', [lib_st % i for i in v['LIB']])
7931+
7932+@after('apply_link')
7933+def process_obj_files(self):
7934+ if not hasattr(self, 'obj_files'): return
7935+ for x in self.obj_files:
7936+ node = self.path.find_resource(x)
7937+ self.link_task.inputs.append(node)
7938+
7939+@taskgen
7940+def add_obj_file(self, file):
7941+ """Small example on how to link object files as if they were source
7942+ obj = bld.create_obj('cc')
7943+ obj.add_obj_file('foo.o')"""
7944+ if not hasattr(self, 'obj_files'): self.obj_files = []
7945+ if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files')
7946+ self.obj_files.append(file)
7947+
7948+c_attrs = {
7949+'cxxflag' : 'CXXFLAGS',
7950+'cflag' : 'CCFLAGS',
7951+'ccflag' : 'CCFLAGS',
7952+'linkflag' : 'LINKFLAGS',
7953+'ldflag' : 'LINKFLAGS',
7954+'lib' : 'LIB',
7955+'libpath' : 'LIBPATH',
7956+'staticlib': 'STATICLIB',
7957+'staticlibpath': 'STATICLIBPATH',
7958+'rpath' : 'RPATH',
7959+'framework' : 'FRAMEWORK',
7960+'frameworkpath' : 'FRAMEWORKPATH'
7961+}
7962+
7963+@feature('cc', 'cxx')
7964+@before('init_cxx', 'init_cc')
7965+@before('apply_lib_vars', 'apply_obj_vars', 'apply_incpaths', 'init_cc')
7966+def add_extra_flags(self):
7967+ """case and plural insensitive
7968+ before apply_obj_vars for processing the library attributes
7969+ """
7970+ for x in self.__dict__.keys():
7971+ y = x.lower()
7972+ if y[-1] == 's':
7973+ y = y[:-1]
7974+ if c_attrs.get(y, None):
7975+ self.env.append_unique(c_attrs[y], getattr(self, x))
7976+
7977+# ============ the code above must not know anything about import libs ==========
7978+
7979+@feature('cshlib')
7980+@after('apply_link', 'default_cc')
7981+@before('apply_lib_vars', 'apply_objdeps', 'default_link_install')
7982+def apply_implib(self):
7983+ """On mswindows, handle dlls and their import libs
7984+ the .dll.a is the import lib and it is required for linking so it is installed too
7985+ """
7986+ if not self.env.DEST_BINFMT == 'pe':
7987+ return
7988+
7989+ self.meths.remove('default_link_install')
7990+
7991+ bindir = self.install_path
7992+ if not bindir: return
7993+
7994+ # install the dll in the bin dir
7995+ dll = self.link_task.outputs[0]
7996+ self.bld.install_files(bindir, dll, self.env, self.chmod)
7997+
7998+ # add linker flags to generate the import lib
7999+ implib = self.env['implib_PATTERN'] % os.path.split(self.target)[1]
8000+
8001+ implib = dll.parent.find_or_declare(implib)
8002+ self.link_task.outputs.append(implib)
8003+ self.bld.install_as('${LIBDIR}/%s' % implib.name, implib, self.env)
8004+
8005+ self.env.append_value('LINKFLAGS', (self.env['IMPLIB_ST'] % implib.bldpath(self.env)).split())
8006+
8007+# ============ the code above must not know anything about vnum processing on unix platforms =========
8008+
8009+@feature('cshlib')
8010+@after('apply_link')
8011+@before('apply_lib_vars', 'default_link_install')
8012+def apply_vnum(self):
8013+ """
8014+ libfoo.so is installed as libfoo.so.1.2.3
8015+ """
8016+ if not getattr(self, 'vnum', '') or not 'cshlib' in self.features or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
8017+ return
8018+
8019+ self.meths.remove('default_link_install')
8020+
8021+ link = self.link_task
8022+ nums = self.vnum.split('.')
8023+ node = link.outputs[0]
8024+
8025+ libname = node.name
8026+ if libname.endswith('.dylib'):
8027+ name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
8028+ name2 = libname.replace('.dylib', '.%s.dylib' % nums[0])
8029+ else:
8030+ name3 = libname + '.' + self.vnum
8031+ name2 = libname + '.' + nums[0]
8032+
8033+ if self.env.SONAME_ST:
8034+ v = self.env.SONAME_ST % name2
8035+ self.env.append_value('LINKFLAGS', v.split())
8036+
8037+ bld = self.bld
8038+ nums = self.vnum.split('.')
8039+
8040+ path = self.install_path
8041+ if not path: return
8042+
8043+ bld.install_as(path + os.sep + name3, node, env=self.env)
8044+ bld.symlink_as(path + os.sep + name2, name3)
8045+ bld.symlink_as(path + os.sep + libname, name3)
8046+
8047+ # the following task is just to enable execution from the build dir :-/
8048+ self.create_task('vnum', node, [node.parent.find_or_declare(name2), node.parent.find_or_declare(name3)])
8049+
8050+def exec_vnum_link(self):
8051+ for x in self.outputs:
8052+ path = x.abspath(self.env)
8053+ try:
8054+ os.remove(path)
8055+ except OSError:
8056+ pass
8057+
8058+ try:
8059+ os.symlink(self.inputs[0].name, path)
8060+ except OSError:
8061+ return 1
8062+
8063+cls = Task.task_type_from_func('vnum', func=exec_vnum_link, ext_in='.bin', color='CYAN')
8064+cls.quiet = 1
8065+
8066+# ============ the --as-needed flag should be added during the configuration, not at runtime =========
8067+
8068+@conftest
8069+def add_as_needed(conf):
8070+ if conf.env.DEST_BINFMT == 'elf' and 'gcc' in (conf.env.CXX_NAME, conf.env.CC_NAME):
8071+ conf.env.append_unique('LINKFLAGS', '--as-needed')
8072+
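For illustration only: a hedged sketch of the ccroot-level attributes handled above, namely uselib_local for in-tree linking, export_incdirs for propagated include paths, and vnum for versioned shared libraries. Target and file names are hypothetical.

def build(bld):
	bld(features='cc cshlib', source='libhello.c', target='hello',
		vnum='1.2.3', export_incdirs='include')
	bld(features='cc cprogram', source='main.c', target='app',
		uselib_local='hello')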
8073diff --git a/buildtools/wafadmin/Tools/compiler_cc.py b/buildtools/wafadmin/Tools/compiler_cc.py
8074new file mode 100644
8075index 0000000..0421503
8076--- /dev/null
8077+++ b/buildtools/wafadmin/Tools/compiler_cc.py
8078@@ -0,0 +1,67 @@
8079+#!/usr/bin/env python
8080+# encoding: utf-8
8081+# Matthias Jahn jahn dôt matthias ât freenet dôt de, 2007 (pmarat)
8082+
8083+import os, sys, imp, types, ccroot
8084+import optparse
8085+import Utils, Configure, Options
8086+from Logs import debug
8087+
8088+c_compiler = {
8089+ 'win32': ['msvc', 'gcc'],
8090+ 'cygwin': ['gcc'],
8091+ 'darwin': ['gcc'],
8092+ 'aix': ['xlc', 'gcc'],
8093+ 'linux': ['gcc', 'icc', 'suncc'],
8094+ 'sunos': ['gcc', 'suncc'],
8095+ 'irix': ['gcc'],
8096+ 'hpux': ['gcc'],
8097+ 'gnu': ['gcc'],
8098+ 'default': ['gcc']
8099+}
8100+
8101+def __list_possible_compiler(platform):
8102+ try:
8103+ return c_compiler[platform]
8104+ except KeyError:
8105+ return c_compiler["default"]
8106+
8107+def detect(conf):
8108+ """
8109+ for each compiler for the platform, try to configure the compiler
8110+ in theory the tools should raise a configuration error if the compiler
8111+ pretends to be something it is not (setting CC=icc and trying to configure gcc)
8112+ """
8113+ try: test_for_compiler = Options.options.check_c_compiler
8114+ except AttributeError: conf.fatal("Add set_options(opt): opt.tool_options('compiler_cc')")
8115+ orig = conf.env
8116+ for compiler in test_for_compiler.split():
8117+ conf.env = orig.copy()
8118+ try:
8119+ conf.check_tool(compiler)
8120+ except Configure.ConfigurationError, e:
8121+ debug('compiler_cc: %r' % e)
8122+ else:
8123+ if conf.env['CC']:
8124+ orig.table = conf.env.get_merged_dict()
8125+ conf.env = orig
8126+ conf.check_message(compiler, '', True)
8127+ conf.env['COMPILER_CC'] = compiler
8128+ break
8129+ conf.check_message(compiler, '', False)
8130+ break
8131+ else:
8132+ conf.fatal('could not configure a c compiler!')
8133+
8134+def set_options(opt):
8135+ build_platform = Utils.unversioned_sys_platform()
8136+ possible_compiler_list = __list_possible_compiler(build_platform)
8137+ test_for_compiler = ' '.join(possible_compiler_list)
8138+ cc_compiler_opts = opt.add_option_group("C Compiler Options")
8139+ cc_compiler_opts.add_option('--check-c-compiler', default="%s" % test_for_compiler,
8140+ help='On this platform (%s) the following C-Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
8141+ dest="check_c_compiler")
8142+
8143+ for c_compiler in test_for_compiler.split():
8144+ opt.tool_options('%s' % c_compiler, option_group=cc_compiler_opts)
8145+
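For illustration only: the compiler_cc meta-tool above is driven from set_options and configure as sketched below; the compiler list can then be narrowed on the command line, e.g. ./waf configure --check-c-compiler=gcc. The compiler_cxx tool that follows works the same way via --check-cxx-compiler.

def set_options(opt):
	opt.tool_options('compiler_cc')
def configure(conf):
	conf.check_tool('compiler_cc')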
8146diff --git a/buildtools/wafadmin/Tools/compiler_cxx.py b/buildtools/wafadmin/Tools/compiler_cxx.py
8147new file mode 100644
8148index 0000000..5308ea9
8149--- /dev/null
8150+++ b/buildtools/wafadmin/Tools/compiler_cxx.py
8151@@ -0,0 +1,62 @@
8152+#!/usr/bin/env python
8153+# encoding: utf-8
8154+# Matthias Jahn jahn dôt matthias ât freenet dôt de 2007 (pmarat)
8155+
8156+import os, sys, imp, types, ccroot
8157+import optparse
8158+import Utils, Configure, Options
8159+from Logs import debug
8160+
8161+cxx_compiler = {
8162+'win32': ['msvc', 'g++'],
8163+'cygwin': ['g++'],
8164+'darwin': ['g++'],
8165+'aix': ['xlc++', 'g++'],
8166+'linux': ['g++', 'icpc', 'sunc++'],
8167+'sunos': ['g++', 'sunc++'],
8168+'irix': ['g++'],
8169+'hpux': ['g++'],
8170+'gnu': ['g++'],
8171+'default': ['g++']
8172+}
8173+
8174+def __list_possible_compiler(platform):
8175+ try:
8176+ return cxx_compiler[platform]
8177+ except KeyError:
8178+ return cxx_compiler["default"]
8179+
8180+def detect(conf):
8181+ try: test_for_compiler = Options.options.check_cxx_compiler
8182+ except AttributeError: raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cxx')")
8183+ orig = conf.env
8184+ for compiler in test_for_compiler.split():
8185+ try:
8186+ conf.env = orig.copy()
8187+ conf.check_tool(compiler)
8188+ except Configure.ConfigurationError, e:
8189+ debug('compiler_cxx: %r' % e)
8190+ else:
8191+ if conf.env['CXX']:
8192+ orig.table = conf.env.get_merged_dict()
8193+ conf.env = orig
8194+ conf.check_message(compiler, '', True)
8195+ conf.env['COMPILER_CXX'] = compiler
8196+ break
8197+ conf.check_message(compiler, '', False)
8198+ break
8199+ else:
8200+ conf.fatal('could not configure a cxx compiler!')
8201+
8202+def set_options(opt):
8203+ build_platform = Utils.unversioned_sys_platform()
8204+ possible_compiler_list = __list_possible_compiler(build_platform)
8205+ test_for_compiler = ' '.join(possible_compiler_list)
8206+ cxx_compiler_opts = opt.add_option_group('C++ Compiler Options')
8207+ cxx_compiler_opts.add_option('--check-cxx-compiler', default="%s" % test_for_compiler,
8208+ help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
8209+ dest="check_cxx_compiler")
8210+
8211+ for cxx_compiler in test_for_compiler.split():
8212+ opt.tool_options('%s' % cxx_compiler, option_group=cxx_compiler_opts)
8213+
8214diff --git a/buildtools/wafadmin/Tools/compiler_d.py b/buildtools/wafadmin/Tools/compiler_d.py
8215new file mode 100644
8216index 0000000..1ea5efa
8217--- /dev/null
8218+++ b/buildtools/wafadmin/Tools/compiler_d.py
8219@@ -0,0 +1,33 @@
8220+#!/usr/bin/env python
8221+# encoding: utf-8
8222+# Carlos Rafael Giani, 2007 (dv)
8223+
8224+import os, sys, imp, types
8225+import Utils, Configure, Options
8226+
8227+def detect(conf):
8228+ if getattr(Options.options, 'check_dmd_first', None):
8229+ test_for_compiler = ['dmd', 'gdc']
8230+ else:
8231+ test_for_compiler = ['gdc', 'dmd']
8232+
8233+ for d_compiler in test_for_compiler:
8234+ try:
8235+ conf.check_tool(d_compiler)
8236+ except:
8237+ pass
8238+ else:
8239+ break
8240+ else:
8241+ conf.fatal('no suitable d compiler was found')
8242+
8243+def set_options(opt):
8244+ d_compiler_opts = opt.add_option_group('D Compiler Options')
8245+ d_compiler_opts.add_option('--check-dmd-first', action='store_true',
8246+ help='checks for the gdc compiler before dmd (default is the other way round)',
8247+ dest='check_dmd_first',
8248+ default=False)
8249+
8250+ for d_compiler in ['gdc', 'dmd']:
8251+ opt.tool_options('%s' % d_compiler, option_group=d_compiler_opts)
8252+
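For illustration only: the D meta-tool above follows the same pattern; passing --check-dmd-first at configure time makes dmd be tried before gdc.

def set_options(opt):
	opt.tool_options('compiler_d')
def configure(conf):
	conf.check_tool('compiler_d')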
8253diff --git a/buildtools/wafadmin/Tools/config_c.py b/buildtools/wafadmin/Tools/config_c.py
8254new file mode 100644
8255index 0000000..a32d8aa
8256--- /dev/null
8257+++ b/buildtools/wafadmin/Tools/config_c.py
8258@@ -0,0 +1,736 @@
8259+#!/usr/bin/env python
8260+# encoding: utf-8
8261+# Thomas Nagy, 2005-2008 (ita)
8262+
8263+"""
8264+c/c++ configuration routines
8265+"""
8266+
8267+import os, imp, sys, shlex, shutil
8268+from Utils import md5
8269+import Build, Utils, Configure, Task, Options, Logs, TaskGen
8270+from Constants import *
8271+from Configure import conf, conftest
8272+
8273+cfg_ver = {
8274+ 'atleast-version': '>=',
8275+ 'exact-version': '==',
8276+ 'max-version': '<=',
8277+}
8278+
8279+SNIP1 = '''
8280+ int main() {
8281+ void *p;
8282+ p=(void*)(%s);
8283+ return 0;
8284+}
8285+'''
8286+
8287+SNIP2 = '''
8288+int main() {
8289+ if ((%(type_name)s *) 0) return 0;
8290+ if (sizeof (%(type_name)s)) return 0;
8291+}
8292+'''
8293+
8294+SNIP3 = '''
8295+int main() {
8296+ return 0;
8297+}
8298+'''
8299+
8300+def parse_flags(line, uselib, env):
8301+ """pkg-config still has bugs on some platforms, and there are many -config programs, parsing flags is necessary :-/"""
8302+
8303+ lst = shlex.split(line)
8304+ while lst:
8305+ x = lst.pop(0)
8306+ st = x[:2]
8307+ ot = x[2:]
8308+ app = env.append_value
8309+ if st == '-I' or st == '/I':
8310+ if not ot: ot = lst.pop(0)
8311+ app('CPPPATH_' + uselib, ot)
8312+ elif st == '-D':
8313+ if not ot: ot = lst.pop(0)
8314+ app('CXXDEFINES_' + uselib, ot)
8315+ app('CCDEFINES_' + uselib, ot)
8316+ elif st == '-l':
8317+ if not ot: ot = lst.pop(0)
8318+ app('LIB_' + uselib, ot)
8319+ elif st == '-L':
8320+ if not ot: ot = lst.pop(0)
8321+ app('LIBPATH_' + uselib, ot)
8322+ elif x == '-pthread' or x.startswith('+'):
8323+ app('CCFLAGS_' + uselib, x)
8324+ app('CXXFLAGS_' + uselib, x)
8325+ app('LINKFLAGS_' + uselib, x)
8326+ elif x == '-framework':
8327+ app('FRAMEWORK_' + uselib, lst.pop(0))
8328+ elif x.startswith('-F'):
8329+ app('FRAMEWORKPATH_' + uselib, x[2:])
8330+ elif x.startswith('-std'):
8331+ app('CCFLAGS_' + uselib, x)
8332+ app('CXXFLAGS_' + uselib, x)
8333+ app('LINKFLAGS_' + uselib, x)
8334+ elif x.startswith('-Wl'):
8335+ app('LINKFLAGS_' + uselib, x)
8336+ elif x.startswith('-m') or x.startswith('-f'):
8337+ app('CCFLAGS_' + uselib, x)
8338+ app('CXXFLAGS_' + uselib, x)
8339+
8340+@conf
8341+def ret_msg(self, f, kw):
8342+ """execute a function, when provided"""
8343+ if isinstance(f, str):
8344+ return f
8345+ return f(kw)
8346+
8347+@conf
8348+def validate_cfg(self, kw):
8349+ if not 'path' in kw:
8350+ kw['path'] = 'pkg-config --errors-to-stdout --print-errors'
8351+
8352+ # pkg-config version
8353+ if 'atleast_pkgconfig_version' in kw:
8354+ if not 'msg' in kw:
8355+ kw['msg'] = 'Checking for pkg-config version >= %s' % kw['atleast_pkgconfig_version']
8356+ return
8357+
8358+ # pkg-config --modversion
8359+ if 'modversion' in kw:
8360+ return
8361+
8362+ if 'variables' in kw:
8363+ if not 'msg' in kw:
8364+ kw['msg'] = 'Checking for %s variables' % kw['package']
8365+ return
8366+
8367+ # checking for the version of a module, for the moment, one thing at a time
8368+ for x in cfg_ver.keys():
8369+ y = x.replace('-', '_')
8370+ if y in kw:
8371+ if not 'package' in kw:
8372+ raise ValueError('%s requires a package' % x)
8373+
8374+ if not 'msg' in kw:
8375+ kw['msg'] = 'Checking for %s %s %s' % (kw['package'], cfg_ver[x], kw[y])
8376+ return
8377+
8378+ if not 'msg' in kw:
8379+ kw['msg'] = 'Checking for %s' % (kw['package'] or kw['path'])
8380+ if not 'okmsg' in kw:
8381+ kw['okmsg'] = 'yes'
8382+ if not 'errmsg' in kw:
8383+ kw['errmsg'] = 'not found'
8384+
8385+@conf
8386+def cmd_and_log(self, cmd, kw):
8387+ Logs.debug('runner: %s\n' % cmd)
8388+ if self.log:
8389+ self.log.write('%s\n' % cmd)
8390+
8391+ try:
8392+ p = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE, shell=True)
8393+ (out, err) = p.communicate()
8394+ except OSError, e:
8395+ self.log.write('error %r' % e)
8396+ self.fatal(str(e))
8397+
8398+ # placeholder, don't touch
8399+ out = str(out)
8400+ err = str(err)
8401+
8402+ if self.log:
8403+ self.log.write(out)
8404+ self.log.write(err)
8405+
8406+ if p.returncode:
8407+ if not kw.get('errmsg', ''):
8408+ if kw.get('mandatory', False):
8409+ kw['errmsg'] = out.strip()
8410+ else:
8411+ kw['errmsg'] = 'no'
8412+ self.fatal('fail')
8413+ return out
8414+
8415+@conf
8416+def exec_cfg(self, kw):
8417+
8418+ # pkg-config version
8419+ if 'atleast_pkgconfig_version' in kw:
8420+ cmd = '%s --atleast-pkgconfig-version=%s' % (kw['path'], kw['atleast_pkgconfig_version'])
8421+ self.cmd_and_log(cmd, kw)
8422+ if not 'okmsg' in kw:
8423+ kw['okmsg'] = 'yes'
8424+ return
8425+
8426+ # checking for the version of a module
8427+ for x in cfg_ver:
8428+ y = x.replace('-', '_')
8429+ if y in kw:
8430+ self.cmd_and_log('%s --%s=%s %s' % (kw['path'], x, kw[y], kw['package']), kw)
8431+ if not 'okmsg' in kw:
8432+ kw['okmsg'] = 'yes'
8433+ self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
8434+ break
8435+
8436+ # retrieving the version of a module
8437+ if 'modversion' in kw:
8438+ version = self.cmd_and_log('%s --modversion %s' % (kw['path'], kw['modversion']), kw).strip()
8439+ self.define('%s_VERSION' % Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])), version)
8440+ return version
8441+
8442+ # retrieving variables of a module
8443+ if 'variables' in kw:
8444+ env = kw.get('env', self.env)
8445+ uselib = kw.get('uselib_store', kw['package'].upper())
8446+ vars = Utils.to_list(kw['variables'])
8447+ for v in vars:
8448+ val = self.cmd_and_log('%s --variable=%s %s' % (kw['path'], v, kw['package']), kw).strip()
8449+ var = '%s_%s' % (uselib, v)
8450+ env[var] = val
8451+ if not 'okmsg' in kw:
8452+ kw['okmsg'] = 'yes'
8453+ return
8454+
8455+ lst = [kw['path']]
8456+
8457+
8458+ defi = kw.get('define_variable', None)
8459+ if not defi:
8460+ defi = self.env.PKG_CONFIG_DEFINES or {}
8461+ for key, val in defi.iteritems():
8462+ lst.append('--define-variable=%s=%s' % (key, val))
8463+
8464+ lst.append(kw.get('args', ''))
8465+ lst.append(kw['package'])
8466+
8467+ # so we assume the command-line will output flags to be parsed afterwards
8468+ cmd = ' '.join(lst)
8469+ ret = self.cmd_and_log(cmd, kw)
8470+ if not 'okmsg' in kw:
8471+ kw['okmsg'] = 'yes'
8472+
8473+ self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
8474+ parse_flags(ret, kw.get('uselib_store', kw['package'].upper()), kw.get('env', self.env))
8475+ return ret
8476+
8477+@conf
8478+def check_cfg(self, *k, **kw):
8479+ """
8480+ for pkg-config mostly, but also all the -config tools
8481+ conf.check_cfg(path='mpicc', args='--showme:compile --showme:link', package='', uselib_store='OPEN_MPI')
8482+ conf.check_cfg(package='dbus-1', variables='system_bus_default_address session_bus_services_dir')
8483+ """
8484+
8485+ self.validate_cfg(kw)
8486+ if 'msg' in kw:
8487+ self.check_message_1(kw['msg'])
8488+ ret = None
8489+ try:
8490+ ret = self.exec_cfg(kw)
8491+ except Configure.ConfigurationError, e:
8492+ if 'errmsg' in kw:
8493+ self.check_message_2(kw['errmsg'], 'YELLOW')
8494+ if 'mandatory' in kw and kw['mandatory']:
8495+ if Logs.verbose > 1:
8496+ raise
8497+ else:
8498+ self.fatal('the configuration failed (see %r)' % self.log.name)
8499+ else:
8500+ kw['success'] = ret
8501+ if 'okmsg' in kw:
8502+ self.check_message_2(self.ret_msg(kw['okmsg'], kw))
8503+
8504+ return ret
8505+
8506+# the idea is the following: now that we are certain
8507+# that all the code here is only for c or c++, it is
8508+# easy to put all the logic in one function
8509+#
8510+# this should prevent code duplication (ita)
8511+
8512+# env: an optional environment (modified -> provide a copy)
8513+# compiler: cc or cxx - it tries to guess what is best
8514+# type: cprogram, cshlib, cstaticlib
8515+# code: a c code to execute
8516+# uselib_store: where to add the variables
8517+# uselib: parameters to use for building
8518+# define: define to set, like FOO in #define FOO, if not set, add /* #undef FOO */
8519+# execute: True or False - will return the result of the execution
8520+
8521+@conf
8522+def validate_c(self, kw):
8523+ """validate the parameters for the test method"""
8524+
8525+ if not 'env' in kw:
8526+ kw['env'] = self.env.copy()
8527+
8528+ env = kw['env']
8529+ if not 'compiler' in kw:
8530+ kw['compiler'] = 'cc'
8531+ if env['CXX_NAME'] and Task.TaskBase.classes.get('cxx', None):
8532+ kw['compiler'] = 'cxx'
8533+ if not self.env['CXX']:
8534+ self.fatal('a c++ compiler is required')
8535+ else:
8536+ if not self.env['CC']:
8537+ self.fatal('a c compiler is required')
8538+
8539+ if not 'type' in kw:
8540+ kw['type'] = 'cprogram'
8541+
8542+ assert not(kw['type'] != 'cprogram' and kw.get('execute', 0)), 'can only execute programs'
8543+
8544+
8545+ #if kw['type'] != 'program' and kw.get('execute', 0):
8546+ # raise ValueError, 'can only execute programs'
8547+
8548+ def to_header(dct):
8549+ if 'header_name' in dct:
8550+ dct = Utils.to_list(dct['header_name'])
8551+ return ''.join(['#include <%s>\n' % x for x in dct])
8552+ return ''
8553+
8554+ # set the file name
8555+ if not 'compile_mode' in kw:
8556+ kw['compile_mode'] = (kw['compiler'] == 'cxx') and 'cxx' or 'cc'
8557+
8558+ if not 'compile_filename' in kw:
8559+ kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '')
8560+
8561+ #OSX
8562+ if 'framework_name' in kw:
8563+ try: TaskGen.task_gen.create_task_macapp
8564+ except AttributeError: self.fatal('frameworks require the osx tool')
8565+
8566+ fwkname = kw['framework_name']
8567+ if not 'uselib_store' in kw:
8568+ kw['uselib_store'] = fwkname.upper()
8569+
8570+ if not kw.get('no_header', False):
8571+ if not 'header_name' in kw:
8572+ kw['header_name'] = []
8573+ fwk = '%s/%s.h' % (fwkname, fwkname)
8574+ if kw.get('remove_dot_h', None):
8575+ fwk = fwk[:-2]
8576+ kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk]
8577+
8578+ kw['msg'] = 'Checking for framework %s' % fwkname
8579+ kw['framework'] = fwkname
8580+ #kw['frameworkpath'] = set it yourself
8581+
8582+ if 'function_name' in kw:
8583+ fu = kw['function_name']
8584+ if not 'msg' in kw:
8585+ kw['msg'] = 'Checking for function %s' % fu
8586+ kw['code'] = to_header(kw) + SNIP1 % fu
8587+ if not 'uselib_store' in kw:
8588+ kw['uselib_store'] = fu.upper()
8589+ if not 'define_name' in kw:
8590+ kw['define_name'] = self.have_define(fu)
8591+
8592+ elif 'type_name' in kw:
8593+ tu = kw['type_name']
8594+ if not 'msg' in kw:
8595+ kw['msg'] = 'Checking for type %s' % tu
8596+ if not 'header_name' in kw:
8597+ kw['header_name'] = 'stdint.h'
8598+ kw['code'] = to_header(kw) + SNIP2 % {'type_name' : tu}
8599+ if not 'define_name' in kw:
8600+ kw['define_name'] = self.have_define(tu.upper())
8601+
8602+ elif 'header_name' in kw:
8603+ if not 'msg' in kw:
8604+ kw['msg'] = 'Checking for header %s' % kw['header_name']
8605+
8606+ l = Utils.to_list(kw['header_name'])
8607+ assert len(l)>0, 'list of headers in header_name is empty'
8608+
8609+ kw['code'] = to_header(kw) + SNIP3
8610+
8611+ if not 'uselib_store' in kw:
8612+ kw['uselib_store'] = l[0].upper()
8613+
8614+ if not 'define_name' in kw:
8615+ kw['define_name'] = self.have_define(l[0])
8616+
8617+ if 'lib' in kw:
8618+ if not 'msg' in kw:
8619+ kw['msg'] = 'Checking for library %s' % kw['lib']
8620+ if not 'uselib_store' in kw:
8621+ kw['uselib_store'] = kw['lib'].upper()
8622+
8623+ if 'staticlib' in kw:
8624+ if not 'msg' in kw:
8625+ kw['msg'] = 'Checking for static library %s' % kw['staticlib']
8626+ if not 'uselib_store' in kw:
8627+ kw['uselib_store'] = kw['staticlib'].upper()
8628+
8629+ if 'fragment' in kw:
8630+ # an additional code fragment may be provided to replace the predefined code
8631+ # in custom headers
8632+ kw['code'] = kw['fragment']
8633+ if not 'msg' in kw:
8634+ kw['msg'] = 'Checking for custom code'
8635+ if not 'errmsg' in kw:
8636+ kw['errmsg'] = 'no'
8637+
8638+ for (flagsname,flagstype) in [('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')]:
8639+ if flagsname in kw:
8640+ if not 'msg' in kw:
8641+ kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname])
8642+ if not 'errmsg' in kw:
8643+ kw['errmsg'] = 'no'
8644+
8645+ if not 'execute' in kw:
8646+ kw['execute'] = False
8647+
8648+ if not 'errmsg' in kw:
8649+ kw['errmsg'] = 'not found'
8650+
8651+ if not 'okmsg' in kw:
8652+ kw['okmsg'] = 'yes'
8653+
8654+ if not 'code' in kw:
8655+ kw['code'] = SNIP3
8656+
8657+ if not kw.get('success'): kw['success'] = None
8658+
8659+ assert 'msg' in kw, 'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c'
8660+
8661+@conf
8662+def post_check(self, *k, **kw):
8663+ "set the variables after a test was run successfully"
8664+
8665+ is_success = False
8666+ if kw['execute']:
8667+ if kw['success'] is not None:
8668+ is_success = True
8669+ else:
8670+ is_success = (kw['success'] == 0)
8671+
8672+ if 'define_name' in kw:
8673+ if 'header_name' in kw or 'function_name' in kw or 'type_name' in kw or 'fragment' in kw:
8674+ if kw['execute']:
8675+ key = kw['success']
8676+ if isinstance(key, str):
8677+ if key:
8678+ self.define(kw['define_name'], key, quote=kw.get('quote', 1))
8679+ else:
8680+ self.define_cond(kw['define_name'], True)
8681+ else:
8682+ self.define_cond(kw['define_name'], False)
8683+ else:
8684+ self.define_cond(kw['define_name'], is_success)
8685+
8686+ if is_success and 'uselib_store' in kw:
8687+ import cc, cxx
8688+ for k in set(cc.g_cc_flag_vars).union(cxx.g_cxx_flag_vars):
8689+ lk = k.lower()
8690+ # inconsistency: includes -> CPPPATH
8691+ if k == 'CPPPATH': lk = 'includes'
8692+ if k == 'CXXDEFINES': lk = 'defines'
8693+ if k == 'CCDEFINES': lk = 'defines'
8694+ if lk in kw:
8695+ val = kw[lk]
8696+ # remove trailing slash
8697+ if isinstance(val, str):
8698+ val = val.rstrip(os.path.sep)
8699+ self.env.append_unique(k + '_' + kw['uselib_store'], val)
8700+
8701+@conf
8702+def check(self, *k, **kw):
8703+ # so this will be the generic function
8704+ # it will be safer to use check_cxx or check_cc
8705+ self.validate_c(kw)
8706+ self.check_message_1(kw['msg'])
8707+ ret = None
8708+ try:
8709+ ret = self.run_c_code(*k, **kw)
8710+ except Configure.ConfigurationError, e:
8711+ self.check_message_2(kw['errmsg'], 'YELLOW')
8712+ if 'mandatory' in kw and kw['mandatory']:
8713+ if Logs.verbose > 1:
8714+ raise
8715+ else:
8716+ self.fatal('the configuration failed (see %r)' % self.log.name)
8717+ else:
8718+ kw['success'] = ret
8719+ self.check_message_2(self.ret_msg(kw['okmsg'], kw))
8720+
8721+ self.post_check(*k, **kw)
8722+ if not kw.get('execute', False):
8723+ return ret == 0
8724+ return ret
8725+
8726+@conf
8727+def run_c_code(self, *k, **kw):
8728+ test_f_name = kw['compile_filename']
8729+
8730+ k = 0
8731+ while k < 10000:
8732+ # make certain to use a fresh folder - necessary for win32
8733+ dir = os.path.join(self.blddir, '.conf_check_%d' % k)
8734+
8735+ # if the folder already exists, remove it
8736+ try:
8737+ shutil.rmtree(dir)
8738+ except OSError:
8739+ pass
8740+
8741+ try:
8742+ os.stat(dir)
8743+ except OSError:
8744+ break
8745+
8746+ k += 1
8747+
8748+ try:
8749+ os.makedirs(dir)
8750+ except:
8751+ self.fatal('cannot create a configuration test folder %r' % dir)
8752+
8753+ try:
8754+ os.stat(dir)
8755+ except:
8756+ self.fatal('cannot use the configuration test folder %r' % dir)
8757+
8758+ bdir = os.path.join(dir, 'testbuild')
8759+
8760+ if not os.path.exists(bdir):
8761+ os.makedirs(bdir)
8762+
8763+ env = kw['env']
8764+
8765+ dest = open(os.path.join(dir, test_f_name), 'w')
8766+ dest.write(kw['code'])
8767+ dest.close()
8768+
8769+ back = os.path.abspath('.')
8770+
8771+ bld = Build.BuildContext()
8772+ bld.log = self.log
8773+ bld.all_envs.update(self.all_envs)
8774+ bld.all_envs['default'] = env
8775+ bld.lst_variants = bld.all_envs.keys()
8776+ bld.load_dirs(dir, bdir)
8777+
8778+ os.chdir(dir)
8779+
8780+ bld.rescan(bld.srcnode)
8781+
8782+ if not 'features' in kw:
8783+ # conf.check(features='cc cprogram pyext', ...)
8784+ kw['features'] = [kw['compile_mode'], kw['type']] # "cprogram cc"
8785+
8786+ o = bld(features=kw['features'], source=test_f_name, target='testprog')
8787+
8788+ for k, v in kw.iteritems():
8789+ setattr(o, k, v)
8790+
8791+ self.log.write("==>\n%s\n<==\n" % kw['code'])
8792+
8793+ # compile the program
8794+ try:
8795+ bld.compile()
8796+ except Utils.WafError:
8797+ ret = Utils.ex_stack()
8798+ else:
8799+ ret = 0
8800+
8801+ # chdir before returning
8802+ os.chdir(back)
8803+
8804+ if ret:
8805+ self.log.write('command returned %r' % ret)
8806+ self.fatal(str(ret))
8807+
8808+ # if we need to run the program, try to get its result
8809+ # keep the name of the program to execute
8810+ if kw['execute']:
8811+ lastprog = o.link_task.outputs[0].abspath(env)
8812+
8813+ args = Utils.to_list(kw.get('exec_args', []))
8814+ proc = Utils.pproc.Popen([lastprog] + args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
8815+ (out, err) = proc.communicate()
8816+ w = self.log.write
8817+ w(str(out))
8818+ w('\n')
8819+ w(str(err))
8820+ w('\n')
8821+ w('returncode %r' % proc.returncode)
8822+ w('\n')
8823+ if proc.returncode:
8824+ self.fatal(Utils.ex_stack())
8825+ ret = out
8826+
8827+ return ret
8828+
8829+@conf
8830+def check_cxx(self, *k, **kw):
8831+ kw['compiler'] = 'cxx'
8832+ return self.check(*k, **kw)
8833+
8834+@conf
8835+def check_cc(self, *k, **kw):
8836+ kw['compiler'] = 'cc'
8837+ return self.check(*k, **kw)
8838+
8839+@conf
8840+def define(self, define, value, quote=1):
8841+ """store a single define and its state into an internal list for later
8842+ writing to a config header file. Value can only be
8843+ a string or int; other types not supported. String
8844+ values will appear properly quoted in the generated
8845+ header file."""
8846+ assert define and isinstance(define, str)
8847+
8848+ # ordered_dict is for writing the configuration header in order
8849+ tbl = self.env[DEFINES] or Utils.ordered_dict()
8850+
8851+ # the user forgot to tell if the value is quoted or not
8852+ if isinstance(value, str):
8853+ if quote:
8854+ tbl[define] = '"%s"' % repr('"'+value)[2:-1].replace('"', '\\"')
8855+ else:
8856+ tbl[define] = value
8857+ elif isinstance(value, int):
8858+ tbl[define] = value
8859+ else:
8860+ raise TypeError('define %r -> %r must be a string or an int' % (define, value))
8861+
8862+ # add later to make reconfiguring faster
8863+ self.env[DEFINES] = tbl
8864+ self.env[define] = value # <- not certain this is necessary
8865+
8866+@conf
8867+def undefine(self, define):
8868+ """store a single define and its state into an internal list
8869+ for later writing to a config header file"""
8870+ assert define and isinstance(define, str)
8871+
8872+ tbl = self.env[DEFINES] or Utils.ordered_dict()
8873+
8874+ value = UNDEFINED
8875+ tbl[define] = value
8876+
8877+ # add later to make reconfiguring faster
8878+ self.env[DEFINES] = tbl
8879+ self.env[define] = value
8880+
8881+@conf
8882+def define_cond(self, name, value):
8883+ """Conditionally define a name.
8884+ Formally equivalent to: if value: define(name, 1) else: undefine(name)"""
8885+ if value:
8886+ self.define(name, 1)
8887+ else:
8888+ self.undefine(name)
8889+
8890+@conf
8891+def is_defined(self, key):
8892+ defines = self.env[DEFINES]
8893+ if not defines:
8894+ return False
8895+ try:
8896+ value = defines[key]
8897+ except KeyError:
8898+ return False
8899+ else:
8900+ return value != UNDEFINED
8901+
8902+@conf
8903+def get_define(self, define):
8904+ "get the value of a previously stored define"
8905+ try: return self.env[DEFINES][define]
8906+ except KeyError: return None
8907+
8908+@conf
8909+def have_define(self, name):
8910+ "prefix the define with 'HAVE_' and make sure it has valid characters."
8911+ return self.__dict__.get('HAVE_PAT', 'HAVE_%s') % Utils.quote_define_name(name)
8912+
8913+@conf
8914+def write_config_header(self, configfile='', env='', guard='', top=False):
8915+ "save the defines into a file"
8916+ if not configfile: configfile = WAF_CONFIG_H
8917+ waf_guard = guard or '_%s_WAF' % Utils.quote_define_name(configfile)
8918+
8919+ # configfile -> absolute path
8920+ # there is a good reason to concatenate first and to split afterwards
8921+ if not env: env = self.env
8922+ if top:
8923+ diff = ''
8924+ else:
8925+ diff = Utils.diff_path(self.srcdir, self.curdir)
8926+ full = os.sep.join([self.blddir, env.variant(), diff, configfile])
8927+ full = os.path.normpath(full)
8928+ (dir, base) = os.path.split(full)
8929+
8930+ try: os.makedirs(dir)
8931+ except: pass
8932+
8933+ dest = open(full, 'w')
8934+ dest.write('/* Configuration header created by Waf - do not edit */\n')
8935+ dest.write('#ifndef %s\n#define %s\n\n' % (waf_guard, waf_guard))
8936+
8937+ dest.write(self.get_config_header())
8938+
8939+ # config files are not removed on "waf clean"
8940+ env.append_unique(CFG_FILES, os.path.join(diff, configfile))
8941+
8942+ dest.write('\n#endif /* %s */\n' % waf_guard)
8943+ dest.close()
8944+
8945+@conf
8946+def get_config_header(self):
8947+ """Fill-in the contents of the config header. Override when you need to write your own config header."""
8948+ config_header = []
8949+
8950+ tbl = self.env[DEFINES] or Utils.ordered_dict()
8951+ for key in tbl.allkeys:
8952+ value = tbl[key]
8953+ if value is None:
8954+ config_header.append('#define %s' % key)
8955+ elif value is UNDEFINED:
8956+ config_header.append('/* #undef %s */' % key)
8957+ else:
8958+ config_header.append('#define %s %s' % (key, value))
8959+ return "\n".join(config_header)
8960+
8961+@conftest
8962+def find_cpp(conf):
8963+ v = conf.env
8964+ cpp = []
8965+ if v['CPP']: cpp = v['CPP']
8966+ elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
8967+ if not cpp: cpp = conf.find_program('cpp', var='CPP')
8968+ #if not cpp: cpp = v['CC']
8969+ #if not cpp: cpp = v['CXX']
8970+ v['CPP'] = cpp
8971+
8972+@conftest
8973+def cc_add_flags(conf):
8974+ conf.add_os_flags('CFLAGS', 'CCFLAGS')
8975+ conf.add_os_flags('CPPFLAGS')
8976+
8977+@conftest
8978+def cxx_add_flags(conf):
8979+ conf.add_os_flags('CXXFLAGS')
8980+ conf.add_os_flags('CPPFLAGS')
8981+
8982+@conftest
8983+def link_add_flags(conf):
8984+ conf.add_os_flags('LINKFLAGS')
8985+ conf.add_os_flags('LDFLAGS', 'LINKFLAGS')
8986+
8987+@conftest
8988+def cc_load_tools(conf):
8989+ conf.check_tool('cc')
8990+
8991+@conftest
8992+def cxx_load_tools(conf):
8993+ conf.check_tool('cxx')
8994+
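For illustration only: the configuration helpers above are called from a wscript configure function. A hedged sketch combining the example given in the check_cfg docstring with two common check_cc calls; the header and package names are illustrative.

def configure(conf):
	conf.check_tool('compiler_cc')
	conf.check_cc(header_name='stdio.h')
	conf.check_cc(function_name='malloc', header_name='stdlib.h', mandatory=True)
	conf.check_cfg(package='dbus-1', variables='system_bus_default_address session_bus_services_dir')
	conf.write_config_header('config.h')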
8995diff --git a/buildtools/wafadmin/Tools/cs.py b/buildtools/wafadmin/Tools/cs.py
8996new file mode 100644
8997index 0000000..4354485
8998--- /dev/null
8999+++ b/buildtools/wafadmin/Tools/cs.py
9000@@ -0,0 +1,68 @@
9001+#!/usr/bin/env python
9002+# encoding: utf-8
9003+# Thomas Nagy, 2006 (ita)
9004+
9005+"C# support"
9006+
9007+import TaskGen, Utils, Task, Options
9008+from Logs import error
9009+from TaskGen import before, after, taskgen, feature
9010+
9011+flag_vars= ['FLAGS', 'ASSEMBLIES']
9012+
9013+@feature('cs')
9014+def init_cs(self):
9015+ Utils.def_attrs(self,
9016+ flags = '',
9017+ assemblies = '',
9018+ resources = '',
9019+ uselib = '')
9020+
9021+@feature('cs')
9022+@after('init_cs')
9023+def apply_uselib_cs(self):
9024+ if not self.uselib:
9025+ return
9026+ global flag_vars
9027+ for var in self.to_list(self.uselib):
9028+ for v in self.flag_vars:
9029+ val = self.env[v+'_'+var]
9030+ if val: self.env.append_value(v, val)
9031+
9032+@feature('cs')
9033+@after('apply_uselib_cs')
9034+@before('apply_core')
9035+def apply_cs(self):
9036+ try: self.meths.remove('apply_core')
9037+ except ValueError: pass
9038+
9039+ # process the flags for the assemblies
9040+ for i in self.to_list(self.assemblies) + self.env['ASSEMBLIES']:
9041+ self.env.append_unique('_ASSEMBLIES', '/r:'+i)
9042+
9043+ # process the flags for the resources
9044+ for i in self.to_list(self.resources):
9045+ self.env.append_unique('_RESOURCES', '/resource:'+i)
9046+
9047+ # what kind of assembly are we generating?
9048+ self.env['_TYPE'] = getattr(self, 'type', 'exe')
9049+
9050+ # additional flags
9051+ self.env.append_unique('_FLAGS', self.to_list(self.flags))
9052+ self.env.append_unique('_FLAGS', self.env.FLAGS)
9053+
9054+ # process the sources
9055+ nodes = [self.path.find_resource(i) for i in self.to_list(self.source)]
9056+ self.create_task('mcs', nodes, self.path.find_or_declare(self.target))
9057+
9058+Task.simple_task_type('mcs', '${MCS} ${SRC} /target:${_TYPE} /out:${TGT} ${_FLAGS} ${_ASSEMBLIES} ${_RESOURCES}', color='YELLOW')
9059+
9060+def detect(conf):
9061+ csc = getattr(Options.options, 'cscbinary', None)
9062+ if csc:
9063+ conf.env.MCS = csc
9064+ conf.find_program(['gmcs', 'mcs'], var='MCS')
9065+
9066+def set_options(opt):
9067+ opt.add_option('--with-csc-binary', type='string', dest='cscbinary')
9068+
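For orientation, a hypothetical wscript fragment driving this C# tool (waf 1.5 API; the file and assembly names are made up):

def configure(conf):
	conf.check_tool('cs')

def build(bld):
	bld.new_task_gen(
		features   = 'cs',
		source     = 'hello.cs',
		target     = 'hello.exe',
		type       = 'exe',           # passed through as /target:exe
		assemblies = 'System.Xml')    # each entry becomes a /r: flag

The --with-csc-binary option registered in set_options() lets the user override the detected gmcs/mcs compiler.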
9069diff --git a/buildtools/wafadmin/Tools/cxx.py b/buildtools/wafadmin/Tools/cxx.py
9070new file mode 100644
9071index 0000000..719b821
9072--- /dev/null
9073+++ b/buildtools/wafadmin/Tools/cxx.py
9074@@ -0,0 +1,104 @@
9075+#!/usr/bin/env python
9076+# encoding: utf-8
9077+# Thomas Nagy, 2005 (ita)
9078+
9079+"Base for c++ programs and libraries"
9080+
9081+import TaskGen, Task, Utils
9082+from Logs import debug
9083+import ccroot # <- do not remove
9084+from TaskGen import feature, before, extension, after
9085+
9086+g_cxx_flag_vars = [
9087+'CXXDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
9088+'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
9089+'CXXFLAGS', 'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CXXDEFINES']
9090+"main cpp variables"
9091+
9092+EXT_CXX = ['.cpp', '.cc', '.cxx', '.C', '.c++']
9093+
9094+g_cxx_type_vars=['CXXFLAGS', 'LINKFLAGS']
9095+
9096+# TODO remove in waf 1.6
9097+class cxx_taskgen(ccroot.ccroot_abstract):
9098+ pass
9099+
9100+@feature('cxx')
9101+@before('apply_type_vars')
9102+@after('default_cc')
9103+def init_cxx(self):
9104+ if not 'cc' in self.features:
9105+ self.mappings['.c'] = TaskGen.task_gen.mappings['.cxx']
9106+
9107+ self.p_flag_vars = set(self.p_flag_vars).union(g_cxx_flag_vars)
9108+ self.p_type_vars = set(self.p_type_vars).union(g_cxx_type_vars)
9109+
9110+ if not self.env['CXX_NAME']:
9111+ raise Utils.WafError("At least one compiler (g++, ..) must be selected")
9112+
9113+@feature('cxx')
9114+@after('apply_incpaths')
9115+def apply_obj_vars_cxx(self):
9116+ """after apply_incpaths for INC_PATHS"""
9117+ env = self.env
9118+ app = env.append_unique
9119+ cxxpath_st = env['CPPPATH_ST']
9120+
9121+ # local flags come first
9122+ # set the user-defined includes paths
9123+ for i in env['INC_PATHS']:
9124+ app('_CXXINCFLAGS', cxxpath_st % i.bldpath(env))
9125+ app('_CXXINCFLAGS', cxxpath_st % i.srcpath(env))
9126+
9127+ # set the library include paths
9128+ for i in env['CPPPATH']:
9129+ app('_CXXINCFLAGS', cxxpath_st % i)
9130+
9131+@feature('cxx')
9132+@after('apply_lib_vars')
9133+def apply_defines_cxx(self):
9134+ """after uselib is set for CXXDEFINES"""
9135+ self.defines = getattr(self, 'defines', [])
9136+ lst = self.to_list(self.defines) + self.to_list(self.env['CXXDEFINES'])
9137+ milst = []
9138+
9139+ # now process the local defines
9140+ for defi in lst:
9141+ if not defi in milst:
9142+ milst.append(defi)
9143+
9144+ # CXXDEFINES_USELIB
9145+ libs = self.to_list(self.uselib)
9146+ for l in libs:
9147+ val = self.env['CXXDEFINES_'+l]
9148+ if val: milst += self.to_list(val)
9149+
9150+ self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
9151+ y = self.env['CXXDEFINES_ST']
9152+ self.env.append_unique('_CXXDEFFLAGS', [y%x for x in milst])
9153+
9154+@extension(EXT_CXX)
9155+def cxx_hook(self, node):
9156+ # create the compilation task: cpp or cc
9157+ if getattr(self, 'obj_ext', None):
9158+ obj_ext = self.obj_ext
9159+ else:
9160+ obj_ext = '_%d.o' % self.idx
9161+
9162+ task = self.create_task('cxx', node, node.change_ext(obj_ext))
9163+ try:
9164+ self.compiled_tasks.append(task)
9165+ except AttributeError:
9166+ raise Utils.WafError('Have you forgotten to set the feature "cxx" on %s?' % str(self))
9167+ return task
9168+
9169+cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}'
9170+cls = Task.simple_task_type('cxx', cxx_str, color='GREEN', ext_out='.o', ext_in='.cxx', shell=False)
9171+cls.scan = ccroot.scan
9172+cls.vars.append('CXXDEPS')
9173+
9174+link_str = '${LINK_CXX} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
9175+cls = Task.simple_task_type('cxx_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
9176+cls.maxjobs = 1
9177+cls.install = Utils.nada
9178+
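As a rough illustration, a wscript using this C++ tool could look like the sketch below (waf 1.5 API; 'cprogram', 'includes' and 'uselib' handling come from the ccroot module imported above, and the file names are invented):

def configure(conf):
	conf.check_tool('gxx')            # the g++ tool further down loads this cxx module

def build(bld):
	bld.new_task_gen(
		features = 'cxx cprogram',
		source   = 'main.cpp util.cpp',
		target   = 'app',
		includes = '.',
		defines  = 'APP_VERSION=1')   # ends up in _CXXDEFFLAGS via apply_defines_cxx()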
9179diff --git a/buildtools/wafadmin/Tools/d.py b/buildtools/wafadmin/Tools/d.py
9180new file mode 100644
9181index 0000000..1a22821
9182--- /dev/null
9183+++ b/buildtools/wafadmin/Tools/d.py
9184@@ -0,0 +1,535 @@
9185+#!/usr/bin/env python
9186+# encoding: utf-8
9187+# Carlos Rafael Giani, 2007 (dv)
9188+# Thomas Nagy, 2007-2008 (ita)
9189+
9190+import os, sys, re, optparse
9191+import ccroot # <- leave this
9192+import TaskGen, Utils, Task, Configure, Logs, Build
9193+from Logs import debug, error
9194+from TaskGen import taskgen, feature, after, before, extension
9195+from Configure import conftest
9196+
9197+EXT_D = ['.d', '.di', '.D']
9198+D_METHS = ['apply_core', 'apply_vnum', 'apply_objdeps'] # additional d methods
9199+
9200+DLIB = """
9201+version(D_Version2) {
9202+ import std.stdio;
9203+ int main() {
9204+ writefln("phobos2");
9205+ return 0;
9206+ }
9207+} else {
9208+ version(Tango) {
9209+ import tango.stdc.stdio;
9210+ int main() {
9211+ printf("tango");
9212+ return 0;
9213+ }
9214+ } else {
9215+ import std.stdio;
9216+ int main() {
9217+ writefln("phobos1");
9218+ return 0;
9219+ }
9220+ }
9221+}
9222+"""
9223+
9224+def filter_comments(filename):
9225+ txt = Utils.readf(filename)
9226+ i = 0
9227+ buf = []
9228+ max = len(txt)
9229+ begin = 0
9230+ while i < max:
9231+ c = txt[i]
9232+ if c == '"' or c == "'": # skip a string or character literal
9233+ buf.append(txt[begin:i])
9234+ delim = c
9235+ i += 1
9236+ while i < max:
9237+ c = txt[i]
9238+ if c == delim: break
9239+ elif c == '\\': # skip the character following backslash
9240+ i += 1
9241+ i += 1
9242+ i += 1
9243+ begin = i
9244+ elif c == '/': # try to replace a comment with whitespace
9245+ buf.append(txt[begin:i])
9246+ i += 1
9247+ if i == max: break
9248+ c = txt[i]
9249+ if c == '+': # eat nesting /+ +/ comment
9250+ i += 1
9251+ nesting = 1
9252+ c = None
9253+ while i < max:
9254+ prev = c
9255+ c = txt[i]
9256+ if prev == '/' and c == '+':
9257+ nesting += 1
9258+ c = None
9259+ elif prev == '+' and c == '/':
9260+ nesting -= 1
9261+ if nesting == 0: break
9262+ c = None
9263+ i += 1
9264+ elif c == '*': # eat /* */ comment
9265+ i += 1
9266+ c = None
9267+ while i < max:
9268+ prev = c
9269+ c = txt[i]
9270+ if prev == '*' and c == '/': break
9271+ i += 1
9272+ elif c == '/': # eat // comment
9273+ i += 1
9274+ while i < max and txt[i] != '\n':
9275+ i += 1
9276+ else: # no comment
9277+ begin = i - 1
9278+ continue
9279+ i += 1
9280+ begin = i
9281+ buf.append(' ')
9282+ else:
9283+ i += 1
9284+ buf.append(txt[begin:])
9285+ return buf
9286+
9287+class d_parser(object):
9288+ def __init__(self, env, incpaths):
9289+ #self.code = ''
9290+ #self.module = ''
9291+ #self.imports = []
9292+
9293+ self.allnames = []
9294+
9295+ self.re_module = re.compile("module\s+([^;]+)")
9296+ self.re_import = re.compile("import\s+([^;]+)")
9297+ self.re_import_bindings = re.compile("([^:]+):(.*)")
9298+ self.re_import_alias = re.compile("[^=]+=(.+)")
9299+
9300+ self.env = env
9301+
9302+ self.nodes = []
9303+ self.names = []
9304+
9305+ self.incpaths = incpaths
9306+
9307+ def tryfind(self, filename):
9308+ found = 0
9309+ for n in self.incpaths:
9310+ found = n.find_resource(filename.replace('.', '/') + '.d')
9311+ if found:
9312+ self.nodes.append(found)
9313+ self.waiting.append(found)
9314+ break
9315+ if not found:
9316+ if not filename in self.names:
9317+ self.names.append(filename)
9318+
9319+ def get_strings(self, code):
9320+ #self.imports = []
9321+ self.module = ''
9322+ lst = []
9323+
9324+ # get the module name (if present)
9325+
9326+ mod_name = self.re_module.search(code)
9327+ if mod_name:
9328+ self.module = re.sub('\s+', '', mod_name.group(1)) # strip all whitespaces
9329+
9330+ # go through the code, have a look at all import occurrences
9331+
9332+ # first, lets look at anything beginning with "import" and ending with ";"
9333+ import_iterator = self.re_import.finditer(code)
9334+ if import_iterator:
9335+ for import_match in import_iterator:
9336+ import_match_str = re.sub('\s+', '', import_match.group(1)) # strip all whitespaces
9337+
9338+ # does this end with an import bindings declaration?
9339+ # (import bindings always terminate the list of imports)
9340+ bindings_match = self.re_import_bindings.match(import_match_str)
9341+ if bindings_match:
9342+ import_match_str = bindings_match.group(1)
9343+ # if so, extract the part before the ":" (since the module declaration(s) is/are located there)
9344+
9345+ # split the matching string into a bunch of strings, separated by a comma
9346+ matches = import_match_str.split(',')
9347+
9348+ for match in matches:
9349+ alias_match = self.re_import_alias.match(match)
9350+ if alias_match:
9351+ # is this an alias declaration? (alias = module name) if so, extract the module name
9352+ match = alias_match.group(1)
9353+
9354+ lst.append(match)
9355+ return lst
9356+
9357+ def start(self, node):
9358+ self.waiting = [node]
9359+ # while the stack is not empty, add the dependencies
9360+ while self.waiting:
9361+ nd = self.waiting.pop(0)
9362+ self.iter(nd)
9363+
9364+ def iter(self, node):
9365+ path = node.abspath(self.env) # obtain the absolute path
9366+ code = "".join(filter_comments(path)) # read the file and filter the comments
9367+ names = self.get_strings(code) # obtain the import strings
9368+ for x in names:
9369+ # optimization
9370+ if x in self.allnames: continue
9371+ self.allnames.append(x)
9372+
9373+ # for each name, see if it is like a node or not
9374+ self.tryfind(x)
9375+
9376+def scan(self):
9377+	"look for the .d/.di files that the .d source needs"
9378+ env = self.env
9379+ gruik = d_parser(env, env['INC_PATHS'])
9380+ gruik.start(self.inputs[0])
9381+
9382+ if Logs.verbose:
9383+ debug('deps: nodes found for %s: %s %s' % (str(self.inputs[0]), str(gruik.nodes), str(gruik.names)))
9384+ #debug("deps found for %s: %s" % (str(node), str(gruik.deps)), 'deps')
9385+ return (gruik.nodes, gruik.names)
9386+
9387+def get_target_name(self):
9388+ "for d programs and libs"
9389+ v = self.env
9390+ tp = 'program'
9391+ for x in self.features:
9392+ if x in ['dshlib', 'dstaticlib']:
9393+ tp = x.lstrip('d')
9394+ return v['D_%s_PATTERN' % tp] % self.target
9395+
9396+d_params = {
9397+'dflags': '',
9398+'importpaths':'',
9399+'libs':'',
9400+'libpaths':'',
9401+'generate_headers':False,
9402+}
9403+
9404+@feature('d')
9405+@before('apply_type_vars')
9406+def init_d(self):
9407+ for x in d_params:
9408+ setattr(self, x, getattr(self, x, d_params[x]))
9409+
9410+class d_taskgen(TaskGen.task_gen):
9411+ def __init__(self, *k, **kw):
9412+ TaskGen.task_gen.__init__(self, *k, **kw)
9413+
9414+ # COMPAT
9415+ if len(k) > 1:
9416+ self.features.append('d' + k[1])
9417+
9418+# okay, we borrow a few methods from ccroot
9419+TaskGen.bind_feature('d', D_METHS)
9420+
9421+@feature('d')
9422+@before('apply_d_libs')
9423+def init_d(self):
9424+ Utils.def_attrs(self,
9425+ dflags='',
9426+ importpaths='',
9427+ libs='',
9428+ libpaths='',
9429+ uselib='',
9430+ uselib_local='',
9431+ generate_headers=False, # set to true if you want .di files as well as .o
9432+ compiled_tasks=[],
9433+ add_objects=[],
9434+ link_task=None)
9435+
9436+@feature('d')
9437+@after('apply_d_link', 'init_d')
9438+@before('apply_vnum', 'apply_d_vars')
9439+def apply_d_libs(self):
9440+ """after apply_link because of 'link_task'
9441+ after default_cc because of the attribute 'uselib'"""
9442+ env = self.env
9443+
9444+ # 1. the case of the libs defined in the project (visit ancestors first)
9445+ # the ancestors external libraries (uselib) will be prepended
9446+ self.uselib = self.to_list(self.uselib)
9447+ names = self.to_list(self.uselib_local)
9448+
9449+ seen = set([])
9450+ tmp = Utils.deque(names) # consume a copy of the list of names
9451+ while tmp:
9452+ lib_name = tmp.popleft()
9453+ # visit dependencies only once
9454+ if lib_name in seen:
9455+ continue
9456+
9457+ y = self.name_to_obj(lib_name)
9458+ if not y:
9459+ raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
9460+ y.post()
9461+ seen.add(lib_name)
9462+
9463+ # object has ancestors to process (shared libraries): add them to the end of the list
9464+ if getattr(y, 'uselib_local', None):
9465+ lst = y.to_list(y.uselib_local)
9466+ if 'dshlib' in y.features or 'dprogram' in y.features:
9467+ lst = [x for x in lst if not 'dstaticlib' in self.name_to_obj(x).features]
9468+ tmp.extend(lst)
9469+
9470+ # link task and flags
9471+ if getattr(y, 'link_task', None):
9472+
9473+ link_name = y.target[y.target.rfind(os.sep) + 1:]
9474+ if 'dstaticlib' in y.features or 'dshlib' in y.features:
9475+ env.append_unique('DLINKFLAGS', env.DLIB_ST % link_name)
9476+ env.append_unique('DLINKFLAGS', env.DLIBPATH_ST % y.link_task.outputs[0].parent.bldpath(env))
9477+
9478+ # the order
9479+ self.link_task.set_run_after(y.link_task)
9480+
9481+ # for the recompilation
9482+ dep_nodes = getattr(self.link_task, 'dep_nodes', [])
9483+ self.link_task.dep_nodes = dep_nodes + y.link_task.outputs
9484+
9485+ # add ancestors uselib too - but only propagate those that have no staticlib
9486+ for v in self.to_list(y.uselib):
9487+ if not v in self.uselib:
9488+ self.uselib.insert(0, v)
9489+
9490+ # if the library task generator provides 'export_incdirs', add to the include path
9491+ # the export_incdirs must be a list of paths relative to the other library
9492+ if getattr(y, 'export_incdirs', None):
9493+ for x in self.to_list(y.export_incdirs):
9494+ node = y.path.find_dir(x)
9495+ if not node:
9496+ raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x))
9497+ self.env.append_unique('INC_PATHS', node)
9498+
9499+@feature('dprogram', 'dshlib', 'dstaticlib')
9500+@after('apply_core')
9501+def apply_d_link(self):
9502+ link = getattr(self, 'link', None)
9503+ if not link:
9504+ if 'dstaticlib' in self.features: link = 'static_link'
9505+ else: link = 'd_link'
9506+
9507+ outputs = [t.outputs[0] for t in self.compiled_tasks]
9508+ self.link_task = self.create_task(link, outputs, self.path.find_or_declare(get_target_name(self)))
9509+
9510+@feature('d')
9511+@after('apply_core')
9512+def apply_d_vars(self):
9513+ env = self.env
9514+ dpath_st = env['DPATH_ST']
9515+ lib_st = env['DLIB_ST']
9516+ libpath_st = env['DLIBPATH_ST']
9517+
9518+ importpaths = self.to_list(self.importpaths)
9519+ libpaths = []
9520+ libs = []
9521+ uselib = self.to_list(self.uselib)
9522+
9523+ for i in uselib:
9524+ if env['DFLAGS_' + i]:
9525+ env.append_unique('DFLAGS', env['DFLAGS_' + i])
9526+
9527+ for x in self.features:
9528+ if not x in ['dprogram', 'dstaticlib', 'dshlib']:
9529+ continue
9530+ x.lstrip('d')
9531+ d_shlib_dflags = env['D_' + x + '_DFLAGS']
9532+ if d_shlib_dflags:
9533+ env.append_unique('DFLAGS', d_shlib_dflags)
9534+
9535+ # add import paths
9536+ for i in uselib:
9537+ if env['DPATH_' + i]:
9538+ for entry in self.to_list(env['DPATH_' + i]):
9539+ if not entry in importpaths:
9540+ importpaths.append(entry)
9541+
9542+ # now process the import paths
9543+ for path in importpaths:
9544+ if os.path.isabs(path):
9545+ env.append_unique('_DIMPORTFLAGS', dpath_st % path)
9546+ else:
9547+ node = self.path.find_dir(path)
9548+ self.env.append_unique('INC_PATHS', node)
9549+ env.append_unique('_DIMPORTFLAGS', dpath_st % node.srcpath(env))
9550+ env.append_unique('_DIMPORTFLAGS', dpath_st % node.bldpath(env))
9551+
9552+ # add library paths
9553+ for i in uselib:
9554+ if env['LIBPATH_' + i]:
9555+ for entry in self.to_list(env['LIBPATH_' + i]):
9556+ if not entry in libpaths:
9557+ libpaths.append(entry)
9558+ libpaths = self.to_list(self.libpaths) + libpaths
9559+
9560+ # now process the library paths
9561+ # apply same path manipulation as used with import paths
9562+ for path in libpaths:
9563+ if not os.path.isabs(path):
9564+ node = self.path.find_resource(path)
9565+ if not node:
9566+ raise Utils.WafError('could not find libpath %r from %r' % (path, self))
9567+ path = node.abspath(self.env)
9568+
9569+ env.append_unique('DLINKFLAGS', libpath_st % path)
9570+
9571+ # add libraries
9572+ for i in uselib:
9573+ if env['LIB_' + i]:
9574+ for entry in self.to_list(env['LIB_' + i]):
9575+ if not entry in libs:
9576+ libs.append(entry)
9577+ libs.extend(self.to_list(self.libs))
9578+
9579+ # process user flags
9580+ for flag in self.to_list(self.dflags):
9581+ env.append_unique('DFLAGS', flag)
9582+
9583+ # now process the libraries
9584+ for lib in libs:
9585+ env.append_unique('DLINKFLAGS', lib_st % lib)
9586+
9587+ # add linker flags
9588+ for i in uselib:
9589+ dlinkflags = env['DLINKFLAGS_' + i]
9590+ if dlinkflags:
9591+ for linkflag in dlinkflags:
9592+ env.append_unique('DLINKFLAGS', linkflag)
9593+
9594+@feature('dshlib')
9595+@after('apply_d_vars')
9596+def add_shlib_d_flags(self):
9597+ for linkflag in self.env['D_shlib_LINKFLAGS']:
9598+ self.env.append_unique('DLINKFLAGS', linkflag)
9599+
9600+@extension(EXT_D)
9601+def d_hook(self, node):
9602+ # create the compilation task: cpp or cc
9603+ task = self.create_task(self.generate_headers and 'd_with_header' or 'd')
9604+ try: obj_ext = self.obj_ext
9605+ except AttributeError: obj_ext = '_%d.o' % self.idx
9606+
9607+ task.inputs = [node]
9608+ task.outputs = [node.change_ext(obj_ext)]
9609+ self.compiled_tasks.append(task)
9610+
9611+ if self.generate_headers:
9612+ header_node = node.change_ext(self.env['DHEADER_ext'])
9613+ task.outputs += [header_node]
9614+
9615+d_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} ${D_SRC_F}${SRC} ${D_TGT_F}${TGT}'
9616+d_with_header_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} \
9617+${D_HDR_F}${TGT[1].bldpath(env)} \
9618+${D_SRC_F}${SRC} \
9619+${D_TGT_F}${TGT[0].bldpath(env)}'
9620+link_str = '${D_LINKER} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F}${TGT} ${DLINKFLAGS}'
9621+
9622+def override_exec(cls):
9623+ """stupid dmd wants -of stuck to the file name"""
9624+ old_exec = cls.exec_command
9625+ def exec_command(self, *k, **kw):
9626+ if isinstance(k[0], list):
9627+ lst = k[0]
9628+ for i in xrange(len(lst)):
9629+ if lst[i] == '-of':
9630+ del lst[i]
9631+ lst[i] = '-of' + lst[i]
9632+ break
9633+ return old_exec(self, *k, **kw)
9634+ cls.exec_command = exec_command
9635+
9636+cls = Task.simple_task_type('d', d_str, 'GREEN', before='static_link d_link', shell=False)
9637+cls.scan = scan
9638+override_exec(cls)
9639+
9640+cls = Task.simple_task_type('d_with_header', d_with_header_str, 'GREEN', before='static_link d_link', shell=False)
9641+override_exec(cls)
9642+
9643+cls = Task.simple_task_type('d_link', link_str, color='YELLOW', shell=False)
9644+override_exec(cls)
9645+
9646+# for feature request #104
9647+@taskgen
9648+def generate_header(self, filename, install_path):
9649+ if not hasattr(self, 'header_lst'): self.header_lst = []
9650+ self.meths.append('process_header')
9651+ self.header_lst.append([filename, install_path])
9652+
9653+@before('apply_core')
9654+def process_header(self):
9655+ env = self.env
9656+ for i in getattr(self, 'header_lst', []):
9657+ node = self.path.find_resource(i[0])
9658+
9659+ if not node:
9660+ raise Utils.WafError('file not found on d obj '+i[0])
9661+
9662+ task = self.create_task('d_header')
9663+ task.set_inputs(node)
9664+ task.set_outputs(node.change_ext('.di'))
9665+
9666+d_header_str = '${D_COMPILER} ${D_HEADER} ${SRC}'
9667+Task.simple_task_type('d_header', d_header_str, color='BLUE', shell=False)
9668+
9669+@conftest
9670+def d_platform_flags(conf):
9671+ v = conf.env
9672+ binfmt = v.DEST_BINFMT or Utils.unversioned_sys_platform_to_binary_format(
9673+ v.DEST_OS or Utils.unversioned_sys_platform())
9674+ if binfmt == 'pe':
9675+ v['D_program_PATTERN'] = '%s.exe'
9676+ v['D_shlib_PATTERN'] = 'lib%s.dll'
9677+ v['D_staticlib_PATTERN'] = 'lib%s.a'
9678+ else:
9679+ v['D_program_PATTERN'] = '%s'
9680+ v['D_shlib_PATTERN'] = 'lib%s.so'
9681+ v['D_staticlib_PATTERN'] = 'lib%s.a'
9682+
9683+@conftest
9684+def check_dlibrary(conf):
9685+ ret = conf.check_cc(features='d dprogram', fragment=DLIB, mandatory=True, compile_filename='test.d', execute=True)
9686+ conf.env.DLIBRARY = ret.strip()
9687+
9688+# quick test #
9689+if __name__ == "__main__":
9690+ #Logs.verbose = 2
9691+
9692+ try: arg = sys.argv[1]
9693+ except IndexError: arg = "file.d"
9694+
9695+ print("".join(filter_comments(arg)))
9696+ # TODO
9697+ paths = ['.']
9698+
9699+ #gruik = filter()
9700+ #gruik.start(arg)
9701+
9702+ #code = "".join(gruik.buf)
9703+
9704+ #print "we have found the following code"
9705+ #print code
9706+
9707+ #print "now parsing"
9708+ #print "-------------------------------------------"
9709+ """
9710+ parser_ = d_parser()
9711+ parser_.start(arg)
9712+
9713+ print "module: %s" % parser_.module
9714+ print "imports: ",
9715+ for imp in parser_.imports:
9716+ print imp + " ",
9717+ print
9718+"""
9719+
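To show how the pieces above fit together, a hypothetical wscript for a small D program (waf 1.5 API; the file names are invented and the compiler comes from the dmd/gdc tools that follow):

def configure(conf):
	conf.check_tool('dmd')            # or 'gdc'

def build(bld):
	bld.new_task_gen(
		features    = 'd dprogram',
		source      = 'main.d',
		target      = 'app',
		importpaths = '.',            # searched by d_parser when scanning imports
		dflags      = '-w')

Because scan() re-parses the import statements of each source, changes in imported modules are picked up without listing them in the wscript.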
9720diff --git a/buildtools/wafadmin/Tools/dbus.py b/buildtools/wafadmin/Tools/dbus.py
9721new file mode 100644
9722index 0000000..3179999
9723--- /dev/null
9724+++ b/buildtools/wafadmin/Tools/dbus.py
9725@@ -0,0 +1,34 @@
9726+#!/usr/bin/env python
9727+# encoding: utf-8
9728+# Ali Sabil, 2007
9729+
9730+import Task, Utils
9731+from TaskGen import taskgen, before, after, feature
9732+
9733+@taskgen
9734+def add_dbus_file(self, filename, prefix, mode):
9735+ if not hasattr(self, 'dbus_lst'):
9736+ self.dbus_lst = []
9737+ self.meths.append('process_dbus')
9738+ self.dbus_lst.append([filename, prefix, mode])
9739+
9740+@before('apply_core')
9741+def process_dbus(self):
9742+ for filename, prefix, mode in getattr(self, 'dbus_lst', []):
9743+ node = self.path.find_resource(filename)
9744+
9745+ if not node:
9746+ raise Utils.WafError('file not found ' + filename)
9747+
9748+ tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
9749+
9750+ tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix
9751+ tsk.env.DBUS_BINDING_TOOL_MODE = mode
9752+
9753+Task.simple_task_type('dbus_binding_tool',
9754+ '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}',
9755+ color='BLUE', before='cc')
9756+
9757+def detect(conf):
9758+ dbus_binding_tool = conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
9759+
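For context, add_dbus_file() above is meant to be called on a task generator from a wscript, roughly as follows (hypothetical file names; 'glib-server' is one of the modes understood by dbus-binding-tool):

def configure(conf):
	conf.check_tool('gcc dbus')

def build(bld):
	obj = bld.new_task_gen(features='cc cprogram', source='player.c', target='player')
	obj.add_dbus_file('media-player.xml', 'media_player', 'glib-server')   # generates media-player.h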
9760diff --git a/buildtools/wafadmin/Tools/dmd.py b/buildtools/wafadmin/Tools/dmd.py
9761new file mode 100644
9762index 0000000..9c74908
9763--- /dev/null
9764+++ b/buildtools/wafadmin/Tools/dmd.py
9765@@ -0,0 +1,64 @@
9766+#!/usr/bin/env python
9767+# encoding: utf-8
9768+# Carlos Rafael Giani, 2007 (dv)
9769+# Thomas Nagy, 2008 (ita)
9770+
9771+import sys
9772+import Utils, ar
9773+from Configure import conftest
9774+
9775+@conftest
9776+def find_dmd(conf):
9777+ conf.find_program(['dmd', 'ldc'], var='D_COMPILER', mandatory=True)
9778+
9779+@conftest
9780+def common_flags_ldc(conf):
9781+ v = conf.env
9782+ v['DFLAGS'] = ['-d-version=Posix']
9783+ v['DLINKFLAGS'] = []
9784+ v['D_shlib_DFLAGS'] = ['-relocation-model=pic']
9785+
9786+@conftest
9787+def common_flags_dmd(conf):
9788+ v = conf.env
9789+
9790+ # _DFLAGS _DIMPORTFLAGS
9791+
9792+ # Compiler is dmd so 'gdc' part will be ignored, just
9793+ # ensure key is there, so wscript can append flags to it
9794+ v['DFLAGS'] = ['-version=Posix']
9795+
9796+ v['D_SRC_F'] = ''
9797+ v['D_TGT_F'] = ['-c', '-of']
9798+ v['DPATH_ST'] = '-I%s' # template for adding import paths
9799+
9800+ # linker
9801+ v['D_LINKER'] = v['D_COMPILER']
9802+ v['DLNK_SRC_F'] = ''
9803+ v['DLNK_TGT_F'] = '-of'
9804+
9805+ v['DLIB_ST'] = '-L-l%s' # template for adding libs
9806+ v['DLIBPATH_ST'] = '-L-L%s' # template for adding libpaths
9807+
9808+ # linker debug levels
9809+ v['DFLAGS_OPTIMIZED'] = ['-O']
9810+ v['DFLAGS_DEBUG'] = ['-g', '-debug']
9811+ v['DFLAGS_ULTRADEBUG'] = ['-g', '-debug']
9812+ v['DLINKFLAGS'] = ['-quiet']
9813+
9814+ v['D_shlib_DFLAGS'] = ['-fPIC']
9815+ v['D_shlib_LINKFLAGS'] = ['-L-shared']
9816+
9817+ v['DHEADER_ext'] = '.di'
9818+ v['D_HDR_F'] = ['-H', '-Hf']
9819+
9820+def detect(conf):
9821+ conf.find_dmd()
9822+ conf.check_tool('ar')
9823+ conf.check_tool('d')
9824+ conf.common_flags_dmd()
9825+ conf.d_platform_flags()
9826+
9827+ if conf.env.D_COMPILER.find('ldc') > -1:
9828+ conf.common_flags_ldc()
9829+
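As the DFLAGS comment above hints, a wscript is expected to append its own flags once this tool has run; a minimal sketch (waf 1.5 API, the flag values are only examples):

def configure(conf):
	conf.check_tool('dmd')
	conf.env.append_value('DFLAGS', ['-g', '-w'])   # extra flags on top of the defaults set above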
9830diff --git a/buildtools/wafadmin/Tools/flex.py b/buildtools/wafadmin/Tools/flex.py
9831new file mode 100644
9832index 0000000..5ce9f22
9833--- /dev/null
9834+++ b/buildtools/wafadmin/Tools/flex.py
9835@@ -0,0 +1,25 @@
9836+#!/usr/bin/env python
9837+# encoding: utf-8
9838+# John O'Meara, 2006
9839+# Thomas Nagy, 2006-2008
9840+
9841+"Flex processing"
9842+
9843+import TaskGen
9844+
9845+def decide_ext(self, node):
9846+ if 'cxx' in self.features: return '.lex.cc'
9847+ else: return '.lex.c'
9848+
9849+TaskGen.declare_chain(
9850+ name = 'flex',
9851+ rule = '${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}',
9852+ ext_in = '.l',
9853+ ext_out = '.c .cxx',
9854+ decider = decide_ext
9855+)
9856+
9857+def detect(conf):
9858+ conf.find_program('flex', var='FLEX', mandatory=True)
9859+ conf.env['FLEXFLAGS'] = ''
9860+
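Since declare_chain() registers the .l extension globally, using this tool is just a matter of listing the lexer next to the C sources; a hypothetical sketch (waf 1.5 API, file names invented):

def configure(conf):
	conf.check_tool('gcc flex')

def build(bld):
	bld.new_task_gen(features='cc cprogram', source='main.c scanner.l', target='calc')

The chain turns scanner.l into scanner.lex.c (or .lex.cc when the 'cxx' feature is present) and feeds the result back to the compiler.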
9861diff --git a/buildtools/wafadmin/Tools/gas.py b/buildtools/wafadmin/Tools/gas.py
9862new file mode 100644
9863index 0000000..c983b0a
9864--- /dev/null
9865+++ b/buildtools/wafadmin/Tools/gas.py
9866@@ -0,0 +1,38 @@
9867+#!/usr/bin/env python
9868+# encoding: utf-8
9869+# Thomas Nagy, 2008 (ita)
9870+
9871+"as and gas"
9872+
9873+import os, sys
9874+import Task
9875+from TaskGen import extension, taskgen, after, before
9876+
9877+EXT_ASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']
9878+
9879+as_str = '${AS} ${ASFLAGS} ${_ASINCFLAGS} ${SRC} -o ${TGT}'
9880+Task.simple_task_type('asm', as_str, 'PINK', ext_out='.o', shell=False)
9881+
9882+@extension(EXT_ASM)
9883+def asm_hook(self, node):
9884+ # create the compilation task: cpp or cc
9885+ try: obj_ext = self.obj_ext
9886+ except AttributeError: obj_ext = '_%d.o' % self.idx
9887+
9888+ task = self.create_task('asm', node, node.change_ext(obj_ext))
9889+ self.compiled_tasks.append(task)
9890+ self.meths.append('asm_incflags')
9891+
9892+@after('apply_obj_vars_cc')
9893+@after('apply_obj_vars_cxx')
9894+@before('apply_link')
9895+def asm_incflags(self):
9896+ self.env.append_value('_ASINCFLAGS', self.env.ASINCFLAGS)
9897+ var = ('cxx' in self.features) and 'CXX' or 'CC'
9898+ self.env.append_value('_ASINCFLAGS', self.env['_%sINCFLAGS' % var])
9899+
9900+def detect(conf):
9901+ conf.find_program(['gas', 'as'], var='AS')
9902+ if not conf.env.AS: conf.env.AS = conf.env.CC
9903+	#conf.env.ASFLAGS = ['-c'] <- may be necessary for .S files
9904+
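Assembly sources can simply be mixed into an ordinary C target, since asm_hook() appends its tasks to compiled_tasks; a hypothetical sketch (waf 1.5 API, file names invented):

def configure(conf):
	conf.check_tool('gcc gas')

def build(bld):
	bld.new_task_gen(features='cc cprogram', source='start.S main.c', target='firmware')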
9905diff --git a/buildtools/wafadmin/Tools/gcc.py b/buildtools/wafadmin/Tools/gcc.py
9906new file mode 100644
9907index 0000000..420b44f
9908--- /dev/null
9909+++ b/buildtools/wafadmin/Tools/gcc.py
9910@@ -0,0 +1,135 @@
9911+#!/usr/bin/env python
9912+# encoding: utf-8
9913+# Thomas Nagy, 2006-2008 (ita)
9914+# Ralf Habacker, 2006 (rh)
9915+# Yinon Ehrlich, 2009
9916+
9917+import os, sys
9918+import Configure, Options, Utils
9919+import ccroot, ar
9920+from Configure import conftest
9921+
9922+@conftest
9923+def find_gcc(conf):
9924+ cc = conf.find_program(['gcc', 'cc'], var='CC', mandatory=True)
9925+ cc = conf.cmd_to_list(cc)
9926+ ccroot.get_cc_version(conf, cc, gcc=True)
9927+ conf.env.CC_NAME = 'gcc'
9928+ conf.env.CC = cc
9929+
9930+@conftest
9931+def gcc_common_flags(conf):
9932+ v = conf.env
9933+
9934+ # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
9935+
9936+ v['CCFLAGS_DEBUG'] = ['-g']
9937+
9938+ v['CCFLAGS_RELEASE'] = ['-O2']
9939+
9940+ v['CC_SRC_F'] = ''
9941+ v['CC_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
9942+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
9943+
9944+ # linker
9945+ if not v['LINK_CC']: v['LINK_CC'] = v['CC']
9946+ v['CCLNK_SRC_F'] = ''
9947+ v['CCLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
9948+
9949+ v['LIB_ST'] = '-l%s' # template for adding libs
9950+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
9951+ v['STATICLIB_ST'] = '-l%s'
9952+ v['STATICLIBPATH_ST'] = '-L%s'
9953+ v['RPATH_ST'] = '-Wl,-rpath,%s'
9954+ v['CCDEFINES_ST'] = '-D%s'
9955+
9956+ v['SONAME_ST'] = '-Wl,-h,%s'
9957+ v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
9958+ v['STATICLIB_MARKER'] = '-Wl,-Bstatic'
9959+ v['FULLSTATIC_MARKER'] = '-static'
9960+
9961+ # program
9962+ v['program_PATTERN'] = '%s'
9963+
9964+ # shared library
9965+	v['shlib_CCFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
9966+ v['shlib_LINKFLAGS'] = ['-shared']
9967+ v['shlib_PATTERN'] = 'lib%s.so'
9968+
9969+ # static lib
9970+ v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
9971+ v['staticlib_PATTERN'] = 'lib%s.a'
9972+
9973+ # osx stuff
9974+ v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
9975+ v['CCFLAGS_MACBUNDLE'] = ['-fPIC']
9976+ v['macbundle_PATTERN'] = '%s.bundle'
9977+
9978+@conftest
9979+def gcc_modifier_win32(conf):
9980+ v = conf.env
9981+ v['program_PATTERN'] = '%s.exe'
9982+
9983+ v['shlib_PATTERN'] = '%s.dll'
9984+ v['implib_PATTERN'] = 'lib%s.dll.a'
9985+ v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
9986+
9987+ dest_arch = v['DEST_CPU']
9988+ v['shlib_CCFLAGS'] = ['-DPIC']
9989+
9990+ v.append_value('shlib_CCFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea
9991+
9992+ # Auto-import is enabled by default even without this option,
9993+ # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
9994+ # that the linker emits otherwise.
9995+ v.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
9996+
9997+@conftest
9998+def gcc_modifier_cygwin(conf):
9999+ gcc_modifier_win32(conf)
10000+ v = conf.env
10001+ v['shlib_PATTERN'] = 'cyg%s.dll'
10002+ v.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
10003+
10004+@conftest
10005+def gcc_modifier_darwin(conf):
10006+ v = conf.env
10007+ v['shlib_CCFLAGS'] = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
10008+ v['shlib_LINKFLAGS'] = ['-dynamiclib']
10009+ v['shlib_PATTERN'] = 'lib%s.dylib'
10010+
10011+ v['staticlib_LINKFLAGS'] = []
10012+
10013+ v['SHLIB_MARKER'] = ''
10014+ v['STATICLIB_MARKER'] = ''
10015+ v['SONAME_ST'] = ''
10016+
10017+@conftest
10018+def gcc_modifier_aix(conf):
10019+ v = conf.env
10020+ v['program_LINKFLAGS'] = ['-Wl,-brtl']
10021+
10022+ v['shlib_LINKFLAGS'] = ['-shared','-Wl,-brtl,-bexpfull']
10023+
10024+ v['SHLIB_MARKER'] = ''
10025+
10026+@conftest
10027+def gcc_modifier_platform(conf):
10028+ # * set configurations specific for a platform.
10029+ # * the destination platform is detected automatically by looking at the macros the compiler predefines,
10030+	# and if it's not recognised, it falls back to sys.platform.
10031+ dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
10032+ gcc_modifier_func = globals().get('gcc_modifier_' + dest_os)
10033+ if gcc_modifier_func:
10034+ gcc_modifier_func(conf)
10035+
10036+def detect(conf):
10037+ conf.find_gcc()
10038+ conf.find_cpp()
10039+ conf.find_ar()
10040+ conf.gcc_common_flags()
10041+ conf.gcc_modifier_platform()
10042+ conf.cc_load_tools()
10043+ conf.cc_add_flags()
10044+ conf.link_add_flags()
10045+
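The *_DEBUG/*_RELEASE variables set above follow the usual uselib naming, so a wscript can append them globally or pull them in per target; a sketch under that assumption (waf 1.5 API; ccroot's apply_lib_vars is what maps uselib='DEBUG' onto CCFLAGS_DEBUG):

def configure(conf):
	conf.check_tool('gcc')
	conf.env.append_value('CCFLAGS', conf.env.CCFLAGS_RELEASE)   # ['-O2'] from gcc_common_flags()

def build(bld):
	bld.new_task_gen(features='cc cprogram', source='main.c', target='app', uselib='DEBUG')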
10046diff --git a/buildtools/wafadmin/Tools/gdc.py b/buildtools/wafadmin/Tools/gdc.py
10047new file mode 100644
10048index 0000000..4d2a321
10049--- /dev/null
10050+++ b/buildtools/wafadmin/Tools/gdc.py
10051@@ -0,0 +1,52 @@
10052+#!/usr/bin/env python
10053+# encoding: utf-8
10054+# Carlos Rafael Giani, 2007 (dv)
10055+
10056+import sys
10057+import Utils, ar
10058+from Configure import conftest
10059+
10060+@conftest
10061+def find_gdc(conf):
10062+ conf.find_program('gdc', var='D_COMPILER', mandatory=True)
10063+
10064+@conftest
10065+def common_flags_gdc(conf):
10066+ v = conf.env
10067+
10068+ # _DFLAGS _DIMPORTFLAGS
10069+
10070+	# for more info about the meaning of this dict see dmd.py
10071+ v['DFLAGS'] = []
10072+
10073+ v['D_SRC_F'] = ''
10074+ v['D_TGT_F'] = ['-c', '-o', '']
10075+ v['DPATH_ST'] = '-I%s' # template for adding import paths
10076+
10077+ # linker
10078+ v['D_LINKER'] = v['D_COMPILER']
10079+ v['DLNK_SRC_F'] = ''
10080+ v['DLNK_TGT_F'] = ['-o', '']
10081+
10082+ v['DLIB_ST'] = '-l%s' # template for adding libs
10083+ v['DLIBPATH_ST'] = '-L%s' # template for adding libpaths
10084+
10085+ # debug levels
10086+ v['DLINKFLAGS'] = []
10087+ v['DFLAGS_OPTIMIZED'] = ['-O3']
10088+ v['DFLAGS_DEBUG'] = ['-O0']
10089+ v['DFLAGS_ULTRADEBUG'] = ['-O0']
10090+
10091+ v['D_shlib_DFLAGS'] = []
10092+ v['D_shlib_LINKFLAGS'] = ['-shared']
10093+
10094+ v['DHEADER_ext'] = '.di'
10095+ v['D_HDR_F'] = '-fintfc -fintfc-file='
10096+
10097+def detect(conf):
10098+ conf.find_gdc()
10099+ conf.check_tool('ar')
10100+ conf.check_tool('d')
10101+ conf.common_flags_gdc()
10102+ conf.d_platform_flags()
10103+
10104diff --git a/buildtools/wafadmin/Tools/glib2.py b/buildtools/wafadmin/Tools/glib2.py
10105new file mode 100644
10106index 0000000..042d612
10107--- /dev/null
10108+++ b/buildtools/wafadmin/Tools/glib2.py
10109@@ -0,0 +1,164 @@
10110+#! /usr/bin/env python
10111+# encoding: utf-8
10112+# Thomas Nagy, 2006-2008 (ita)
10113+
10114+"GLib2 support"
10115+
10116+import Task, Utils
10117+from TaskGen import taskgen, before, after, feature
10118+
10119+#
10120+# glib-genmarshal
10121+#
10122+
10123+@taskgen
10124+def add_marshal_file(self, filename, prefix):
10125+ if not hasattr(self, 'marshal_list'):
10126+ self.marshal_list = []
10127+ self.meths.append('process_marshal')
10128+ self.marshal_list.append((filename, prefix))
10129+
10130+@before('apply_core')
10131+def process_marshal(self):
10132+ for f, prefix in getattr(self, 'marshal_list', []):
10133+ node = self.path.find_resource(f)
10134+
10135+ if not node:
10136+ raise Utils.WafError('file not found %r' % f)
10137+
10138+ h_node = node.change_ext('.h')
10139+ c_node = node.change_ext('.c')
10140+
10141+ task = self.create_task('glib_genmarshal', node, [h_node, c_node])
10142+ task.env.GLIB_GENMARSHAL_PREFIX = prefix
10143+ self.allnodes.append(c_node)
10144+
10145+def genmarshal_func(self):
10146+
10147+ bld = self.inputs[0].__class__.bld
10148+
10149+ get = self.env.get_flat
10150+ cmd1 = "%s %s --prefix=%s --header > %s" % (
10151+ get('GLIB_GENMARSHAL'),
10152+ self.inputs[0].srcpath(self.env),
10153+ get('GLIB_GENMARSHAL_PREFIX'),
10154+ self.outputs[0].abspath(self.env)
10155+ )
10156+
10157+ ret = bld.exec_command(cmd1)
10158+ if ret: return ret
10159+
10160+ #print self.outputs[1].abspath(self.env)
10161+ f = open(self.outputs[1].abspath(self.env), 'wb')
10162+ c = '''#include "%s"\n''' % self.outputs[0].name
10163+ f.write(c)
10164+ f.close()
10165+
10166+ cmd2 = "%s %s --prefix=%s --body >> %s" % (
10167+ get('GLIB_GENMARSHAL'),
10168+ self.inputs[0].srcpath(self.env),
10169+ get('GLIB_GENMARSHAL_PREFIX'),
10170+ self.outputs[1].abspath(self.env)
10171+ )
10172+ ret = Utils.exec_command(cmd2)
10173+ if ret: return ret
10174+
10175+#
10176+# glib-mkenums
10177+#
10178+
10179+@taskgen
10180+def add_enums_from_template(self, source='', target='', template='', comments=''):
10181+ if not hasattr(self, 'enums_list'):
10182+ self.enums_list = []
10183+ self.meths.append('process_enums')
10184+ self.enums_list.append({'source': source,
10185+ 'target': target,
10186+ 'template': template,
10187+ 'file-head': '',
10188+ 'file-prod': '',
10189+ 'file-tail': '',
10190+ 'enum-prod': '',
10191+ 'value-head': '',
10192+ 'value-prod': '',
10193+ 'value-tail': '',
10194+ 'comments': comments})
10195+
10196+@taskgen
10197+def add_enums(self, source='', target='',
10198+ file_head='', file_prod='', file_tail='', enum_prod='',
10199+ value_head='', value_prod='', value_tail='', comments=''):
10200+ if not hasattr(self, 'enums_list'):
10201+ self.enums_list = []
10202+ self.meths.append('process_enums')
10203+ self.enums_list.append({'source': source,
10204+ 'template': '',
10205+ 'target': target,
10206+ 'file-head': file_head,
10207+ 'file-prod': file_prod,
10208+ 'file-tail': file_tail,
10209+ 'enum-prod': enum_prod,
10210+ 'value-head': value_head,
10211+ 'value-prod': value_prod,
10212+ 'value-tail': value_tail,
10213+ 'comments': comments})
10214+
10215+@before('apply_core')
10216+def process_enums(self):
10217+ for enum in getattr(self, 'enums_list', []):
10218+ task = self.create_task('glib_mkenums')
10219+ env = task.env
10220+
10221+ inputs = []
10222+
10223+ # process the source
10224+ source_list = self.to_list(enum['source'])
10225+ if not source_list:
10226+ raise Utils.WafError('missing source ' + str(enum))
10227+ source_list = [self.path.find_resource(k) for k in source_list]
10228+ inputs += source_list
10229+ env['GLIB_MKENUMS_SOURCE'] = [k.srcpath(env) for k in source_list]
10230+
10231+ # find the target
10232+ if not enum['target']:
10233+ raise Utils.WafError('missing target ' + str(enum))
10234+ tgt_node = self.path.find_or_declare(enum['target'])
10235+ if tgt_node.name.endswith('.c'):
10236+ self.allnodes.append(tgt_node)
10237+ env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath(env)
10238+
10239+
10240+ options = []
10241+
10242+ if enum['template']: # template, if provided
10243+ template_node = self.path.find_resource(enum['template'])
10244+ options.append('--template %s' % (template_node.abspath(env)))
10245+ inputs.append(template_node)
10246+ params = {'file-head' : '--fhead',
10247+ 'file-prod' : '--fprod',
10248+ 'file-tail' : '--ftail',
10249+ 'enum-prod' : '--eprod',
10250+ 'value-head' : '--vhead',
10251+ 'value-prod' : '--vprod',
10252+ 'value-tail' : '--vtail',
10253+ 'comments': '--comments'}
10254+ for param, option in params.iteritems():
10255+ if enum[param]:
10256+ options.append('%s %r' % (option, enum[param]))
10257+
10258+ env['GLIB_MKENUMS_OPTIONS'] = ' '.join(options)
10259+
10260+ # update the task instance
10261+ task.set_inputs(inputs)
10262+ task.set_outputs(tgt_node)
10263+
10264+Task.task_type_from_func('glib_genmarshal', func=genmarshal_func, vars=['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL'],
10265+ color='BLUE', before='cc cxx')
10266+Task.simple_task_type('glib_mkenums',
10267+ '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}',
10268+ color='PINK', before='cc cxx')
10269+
10270+def detect(conf):
10271+ glib_genmarshal = conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
10272+ mk_enums_tool = conf.find_program('glib-mkenums', var='GLIB_MKENUMS')
10273+
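For orientation, the two taskgen methods above are meant to be called on a C task generator from a wscript, roughly as follows (hypothetical names; the 'GLIB GOBJECT' uselib variables would come from pkg-config checks made elsewhere):

def configure(conf):
	conf.check_tool('gcc glib2')

def build(bld):
	obj = bld.new_task_gen(features='cc cshlib', source='widget.c', target='widget', uselib='GLIB GOBJECT')
	obj.add_marshal_file('marshal.list', 'widget_marshal')   # -> marshal.h + marshal.c
	obj.add_enums(source='widget.h', target='widget-enums.c', file_head='#include "widget.h"')

The generated .c files are appended to self.allnodes, so they are compiled and linked into the target automatically.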
10274diff --git a/buildtools/wafadmin/Tools/gnome.py b/buildtools/wafadmin/Tools/gnome.py
10275new file mode 100644
10276index 0000000..c098a41
10277--- /dev/null
10278+++ b/buildtools/wafadmin/Tools/gnome.py
10279@@ -0,0 +1,223 @@
10280+#!/usr/bin/env python
10281+# encoding: utf-8
10282+# Thomas Nagy, 2006-2008 (ita)
10283+
10284+"Gnome support"
10285+
10286+import os, re
10287+import TaskGen, Utils, Runner, Task, Build, Options, Logs
10288+import cc
10289+from Logs import error
10290+from TaskGen import taskgen, before, after, feature
10291+
10292+n1_regexp = re.compile('<refentrytitle>(.*)</refentrytitle>', re.M)
10293+n2_regexp = re.compile('<manvolnum>(.*)</manvolnum>', re.M)
10294+
10295+def postinstall_schemas(prog_name):
10296+ if Build.bld.is_install:
10297+ dir = Build.bld.get_install_path('${PREFIX}/etc/gconf/schemas/%s.schemas' % prog_name)
10298+ if not Options.options.destdir:
10299+ # add the gconf schema
10300+ Utils.pprint('YELLOW', 'Installing GConf schema')
10301+ command = 'gconftool-2 --install-schema-file=%s 1> /dev/null' % dir
10302+ ret = Utils.exec_command(command)
10303+ else:
10304+ Utils.pprint('YELLOW', 'GConf schema not installed. After install, run this:')
10305+ Utils.pprint('YELLOW', 'gconftool-2 --install-schema-file=%s' % dir)
10306+
10307+def postinstall_icons():
10308+ dir = Build.bld.get_install_path('${DATADIR}/icons/hicolor')
10309+ if Build.bld.is_install:
10310+ if not Options.options.destdir:
10311+ # update the pixmap cache directory
10312+ Utils.pprint('YELLOW', "Updating Gtk icon cache.")
10313+ command = 'gtk-update-icon-cache -q -f -t %s' % dir
10314+ ret = Utils.exec_command(command)
10315+ else:
10316+ Utils.pprint('YELLOW', 'Icon cache not updated. After install, run this:')
10317+ Utils.pprint('YELLOW', 'gtk-update-icon-cache -q -f -t %s' % dir)
10318+
10319+def postinstall_scrollkeeper(prog_name):
10320+ if Build.bld.is_install:
10321+ # now the scrollkeeper update if we can write to the log file
10322+ if os.access('/var/log/scrollkeeper.log', os.W_OK):
10323+ dir1 = Build.bld.get_install_path('${PREFIX}/var/scrollkeeper')
10324+ dir2 = Build.bld.get_install_path('${DATADIR}/omf/%s' % prog_name)
10325+ command = 'scrollkeeper-update -q -p %s -o %s' % (dir1, dir2)
10326+ ret = Utils.exec_command(command)
10327+
10328+def postinstall(prog_name='myapp', schemas=1, icons=1, scrollkeeper=1):
10329+ if schemas: postinstall_schemas(prog_name)
10330+ if icons: postinstall_icons()
10331+ if scrollkeeper: postinstall_scrollkeeper(prog_name)
10332+
10333+# OBSOLETE
10334+class gnome_doc_taskgen(TaskGen.task_gen):
10335+ def __init__(self, *k, **kw):
10336+ TaskGen.task_gen.__init__(self, *k, **kw)
10337+
10338+@feature('gnome_doc')
10339+def init_gnome_doc(self):
10340+ self.default_install_path = '${PREFIX}/share'
10341+
10342+@feature('gnome_doc')
10343+@after('init_gnome_doc')
10344+def apply_gnome_doc(self):
10345+ self.env['APPNAME'] = self.doc_module
10346+ lst = self.to_list(self.doc_linguas)
10347+ bld = self.bld
10348+ lst.append('C')
10349+
10350+ for x in lst:
10351+ if not x == 'C':
10352+ tsk = self.create_task('xml2po')
10353+ node = self.path.find_resource(x+'/'+x+'.po')
10354+ src = self.path.find_resource('C/%s.xml' % self.doc_module)
10355+ out = self.path.find_or_declare('%s/%s.xml' % (x, self.doc_module))
10356+ tsk.set_inputs([node, src])
10357+ tsk.set_outputs(out)
10358+ else:
10359+ out = self.path.find_resource('%s/%s.xml' % (x, self.doc_module))
10360+
10361+ tsk2 = self.create_task('xsltproc2po')
10362+ out2 = self.path.find_or_declare('%s/%s-%s.omf' % (x, self.doc_module, x))
10363+ tsk2.set_outputs(out2)
10364+ node = self.path.find_resource(self.doc_module+".omf.in")
10365+ tsk2.inputs = [node, out]
10366+
10367+ tsk2.run_after.append(tsk)
10368+
10369+ if bld.is_install:
10370+ path = self.install_path + '/gnome/help/%s/%s' % (self.doc_module, x)
10371+ bld.install_files(self.install_path + '/omf', out2, env=self.env)
10372+ for y in self.to_list(self.doc_figures):
10373+ try:
10374+ os.stat(self.path.abspath() + '/' + x + '/' + y)
10375+ bld.install_as(path + '/' + y, self.path.abspath() + '/' + x + '/' + y)
10376+ except:
10377+ bld.install_as(path + '/' + y, self.path.abspath() + '/C/' + y)
10378+ bld.install_as(path + '/%s.xml' % self.doc_module, out.abspath(self.env))
10379+ if x == 'C':
10380+ xmls = self.to_list(self.doc_includes)
10381+ xmls.append(self.doc_entities)
10382+ for z in xmls:
10383+ out = self.path.find_resource('%s/%s' % (x, z))
10384+ bld.install_as(path + '/%s' % z, out.abspath(self.env))
10385+
10386+# OBSOLETE
10387+class xml_to_taskgen(TaskGen.task_gen):
10388+ def __init__(self, *k, **kw):
10389+ TaskGen.task_gen.__init__(self, *k, **kw)
10390+
10391+@feature('xml_to')
10392+def init_xml_to(self):
10393+ Utils.def_attrs(self,
10394+ source = 'xmlfile',
10395+ xslt = 'xlsltfile',
10396+ target = 'hey',
10397+ default_install_path = '${PREFIX}',
10398+ task_created = None)
10399+
10400+@feature('xml_to')
10401+@after('init_xml_to')
10402+def apply_xml_to(self):
10403+ xmlfile = self.path.find_resource(self.source)
10404+ xsltfile = self.path.find_resource(self.xslt)
10405+ tsk = self.create_task('xmlto', [xmlfile, xsltfile], xmlfile.change_ext('html'))
10406+ tsk.install_path = self.install_path
10407+
10408+def sgml_scan(self):
10409+ node = self.inputs[0]
10410+
10411+ env = self.env
10412+ variant = node.variant(env)
10413+
10414+ fi = open(node.abspath(env), 'r')
10415+ content = fi.read()
10416+ fi.close()
10417+
10418+ # we should use a sgml parser :-/
10419+ name = n1_regexp.findall(content)[0]
10420+ num = n2_regexp.findall(content)[0]
10421+
10422+ doc_name = name+'.'+num
10423+
10424+ if not self.outputs:
10425+ self.outputs = [self.generator.path.find_or_declare(doc_name)]
10426+
10427+ return ([], [doc_name])
10428+
10429+class gnome_sgml2man_taskgen(TaskGen.task_gen):
10430+ def __init__(self, *k, **kw):
10431+ TaskGen.task_gen.__init__(self, *k, **kw)
10432+
10433+@feature('gnome_sgml2man')
10434+def apply_gnome_sgml2man(self):
10435+ """
10436+ we could make it more complicated, but for now we just scan the document each time
10437+ """
10438+ assert(getattr(self, 'appname', None))
10439+
10440+ def install_result(task):
10441+ out = task.outputs[0]
10442+ name = out.name
10443+ ext = name[-1]
10444+ env = task.env
10445+ self.bld.install_files('${DATADIR}/man/man%s/' % ext, out, env)
10446+
10447+ self.bld.rescan(self.path)
10448+ for name in self.bld.cache_dir_contents[self.path.id]:
10449+ base, ext = os.path.splitext(name)
10450+ if ext != '.sgml': continue
10451+
10452+ task = self.create_task('sgml2man')
10453+ task.set_inputs(self.path.find_resource(name))
10454+ task.task_generator = self
10455+ if self.bld.is_install: task.install = install_result
10456+ # no outputs, the scanner does it
10457+ # no caching for now, this is not a time-critical feature
10458+ # in the future the scanner can be used to do more things (find dependencies, etc)
10459+ task.scan()
10460+
10461+cls = Task.simple_task_type('sgml2man', '${SGML2MAN} -o ${TGT[0].bld_dir(env)} ${SRC} > /dev/null', color='BLUE')
10462+cls.scan = sgml_scan
10463+cls.quiet = 1
10464+
10465+Task.simple_task_type('xmlto', '${XMLTO} html -m ${SRC[1].abspath(env)} ${SRC[0].abspath(env)}')
10466+
10467+Task.simple_task_type('xml2po', '${XML2PO} ${XML2POFLAGS} ${SRC} > ${TGT}', color='BLUE')
10468+
10469+# how do you expect someone to understand this?!
10470+xslt_magic = """${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \
10471+--stringparam db2omf.basename ${APPNAME} \
10472+--stringparam db2omf.format docbook \
10473+--stringparam db2omf.lang ${TGT[0].abspath(env)[:-4].split('-')[-1]} \
10474+--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
10475+--stringparam db2omf.omf_dir ${PREFIX}/share/omf \
10476+--stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \
10477+--stringparam db2omf.omf_in ${SRC[0].abspath(env)} \
10478+--stringparam db2omf.scrollkeeper_cl ${SCROLLKEEPER_DATADIR}/Templates/C/scrollkeeper_cl.xml \
10479+${DB2OMF} ${SRC[1].abspath(env)}"""
10480+
10481+#--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
10482+Task.simple_task_type('xsltproc2po', xslt_magic, color='BLUE')
10483+
10484+def detect(conf):
10485+ conf.check_tool('gnu_dirs glib2 dbus')
10486+ sgml2man = conf.find_program('docbook2man', var='SGML2MAN')
10487+
10488+ def getstr(varname):
10489+ return getattr(Options.options, varname, '')
10490+
10491+ # addefine also sets the variable to the env
10492+ conf.define('GNOMELOCALEDIR', os.path.join(conf.env['DATADIR'], 'locale'))
10493+
10494+ xml2po = conf.find_program('xml2po', var='XML2PO')
10495+ xsltproc2po = conf.find_program('xsltproc', var='XSLTPROC2PO')
10496+ conf.env['XML2POFLAGS'] = '-e -p'
10497+ conf.env['SCROLLKEEPER_DATADIR'] = Utils.cmd_output("scrollkeeper-config --pkgdatadir", silent=1).strip()
10498+ conf.env['DB2OMF'] = Utils.cmd_output("/usr/bin/pkg-config --variable db2omf gnome-doc-utils", silent=1).strip()
10499+
10500+def set_options(opt):
10501+ opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
10502+
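The gnome_sgml2man feature above only needs an application name; the .sgml refentries are discovered by scanning the directory of the wscript. A hypothetical fragment (waf 1.5 API):

def configure(conf):
	conf.check_tool('gnome')

def build(bld):
	bld.new_task_gen(features='gnome_sgml2man', appname='myapp')

Each page is converted with docbook2man and, on 'waf install', placed under ${DATADIR}/man/man<N>/ as computed by install_result().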
10503diff --git a/buildtools/wafadmin/Tools/gnu_dirs.py b/buildtools/wafadmin/Tools/gnu_dirs.py
10504new file mode 100644
10505index 0000000..856e4a7
10506--- /dev/null
10507+++ b/buildtools/wafadmin/Tools/gnu_dirs.py
10508@@ -0,0 +1,111 @@
10509+#!/usr/bin/env python
10510+# encoding: utf-8
10511+# Ali Sabil, 2007
10512+
10513+"""
10514+To use this module do not forget to call
10515+opt.tool_options('gnu_dirs')
10516+AND
10517+conf.check_tool('gnu_dirs')
10518+
10519+Add options for the standard GNU directories. This tool adds the options
10520+found in autotools and updates the environment with the following
10521+installation variables:
10522+
10523+ * PREFIX : architecture-independent files [/usr/local]
10524+ * EXEC_PREFIX : architecture-dependent files [PREFIX]
10525+ * BINDIR : user executables [EXEC_PREFIX/bin]
10526+ * SBINDIR : user executables [EXEC_PREFIX/sbin]
10527+ * LIBEXECDIR : program executables [EXEC_PREFIX/libexec]
10528+ * SYSCONFDIR : read-only single-machine data [PREFIX/etc]
10529+ * SHAREDSTATEDIR : modifiable architecture-independent data [PREFIX/com]
10530+ * LOCALSTATEDIR : modifiable single-machine data [PREFIX/var]
10531+ * LIBDIR : object code libraries [EXEC_PREFIX/lib]
10532+ * INCLUDEDIR : C header files [PREFIX/include]
10533+ * OLDINCLUDEDIR : C header files for non-gcc [/usr/include]
10534+ * DATAROOTDIR : read-only arch.-independent data root [PREFIX/share]
10535+ * DATADIR : read-only architecture-independent data [DATAROOTDIR]
10536+ * INFODIR : info documentation [DATAROOTDIR/info]
10537+ * LOCALEDIR : locale-dependent data [DATAROOTDIR/locale]
10538+ * MANDIR : man documentation [DATAROOTDIR/man]
10539+ * DOCDIR : documentation root [DATAROOTDIR/doc/telepathy-glib]
10540+ * HTMLDIR : html documentation [DOCDIR]
10541+ * DVIDIR : dvi documentation [DOCDIR]
10542+ * PDFDIR : pdf documentation [DOCDIR]
10543+ * PSDIR : ps documentation [DOCDIR]
10544+"""
10545+
10546+import Utils, Options
10547+
10548+_options = [x.split(', ') for x in '''
10549+bindir, user executables, ${EXEC_PREFIX}/bin
10550+sbindir, system admin executables, ${EXEC_PREFIX}/sbin
10551+libexecdir, program executables, ${EXEC_PREFIX}/libexec
10552+sysconfdir, read-only single-machine data, ${PREFIX}/etc
10553+sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
10554+localstatedir, modifiable single-machine data, ${PREFIX}/var
10555+libdir, object code libraries, ${EXEC_PREFIX}/lib
10556+includedir, C header files, ${PREFIX}/include
10557+oldincludedir, C header files for non-gcc, /usr/include
10558+datarootdir, read-only arch.-independent data root, ${PREFIX}/share
10559+datadir, read-only architecture-independent data, ${DATAROOTDIR}
10560+infodir, info documentation, ${DATAROOTDIR}/info
10561+localedir, locale-dependent data, ${DATAROOTDIR}/locale
10562+mandir, man documentation, ${DATAROOTDIR}/man
10563+docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
10564+htmldir, html documentation, ${DOCDIR}
10565+dvidir, dvi documentation, ${DOCDIR}
10566+pdfdir, pdf documentation, ${DOCDIR}
10567+psdir, ps documentation, ${DOCDIR}
10568+'''.split('\n') if x]
10569+
10570+def detect(conf):
10571+ def get_param(varname, default):
10572+ return getattr(Options.options, varname, '') or default
10573+
10574+ env = conf.env
10575+ env['EXEC_PREFIX'] = get_param('EXEC_PREFIX', env['PREFIX'])
10576+ env['PACKAGE'] = Utils.g_module.APPNAME
10577+
10578+ complete = False
10579+ iter = 0
10580+ while not complete and iter < len(_options) + 1:
10581+ iter += 1
10582+ complete = True
10583+ for name, help, default in _options:
10584+ name = name.upper()
10585+ if not env[name]:
10586+ try:
10587+ env[name] = Utils.subst_vars(get_param(name, default), env)
10588+ except TypeError:
10589+ complete = False
10590+ if not complete:
10591+ lst = [name for name, _, _ in _options if not env[name.upper()]]
10592+ raise Utils.WafError('Variable substitution failure %r' % lst)
10593+
10594+def set_options(opt):
10595+
10596+ inst_dir = opt.add_option_group('Installation directories',
10597+'By default, "waf install" will put the files in\
10598+ "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
10599+ than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
10600+
10601+ for k in ('--prefix', '--destdir'):
10602+ option = opt.parser.get_option(k)
10603+ if option:
10604+ opt.parser.remove_option(k)
10605+ inst_dir.add_option(option)
10606+
10607+ inst_dir.add_option('--exec-prefix',
10608+ help = 'installation prefix [Default: ${PREFIX}]',
10609+ default = '',
10610+ dest = 'EXEC_PREFIX')
10611+
10612+ dirs_options = opt.add_option_group('Pre-defined installation directories', '')
10613+
10614+ for name, help, default in _options:
10615+ option_name = '--' + name
10616+ str_default = default
10617+ str_help = '%s [Default: %s]' % (help, str_default)
10618+ dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
10619+
10620diff --git a/buildtools/wafadmin/Tools/gob2.py b/buildtools/wafadmin/Tools/gob2.py
10621new file mode 100644
10622index 0000000..00aaa32
10623--- /dev/null
10624+++ b/buildtools/wafadmin/Tools/gob2.py
10625@@ -0,0 +1,18 @@
10626+#!/usr/bin/env python
10627+# encoding: utf-8
10628+# Ali Sabil, 2007
10629+
10630+import TaskGen
10631+
10632+TaskGen.declare_chain(
10633+ name = 'gob2',
10634+ rule = '${GOB2} -o ${TGT[0].bld_dir(env)} ${GOB2FLAGS} ${SRC}',
10635+ ext_in = '.gob',
10636+ ext_out = '.c'
10637+)
10638+
10639+def detect(conf):
10640+ gob2 = conf.find_program('gob2', var='GOB2', mandatory=True)
10641+ conf.env['GOB2'] = gob2
10642+ conf.env['GOB2FLAGS'] = ''
10643+
10644diff --git a/buildtools/wafadmin/Tools/gxx.py b/buildtools/wafadmin/Tools/gxx.py
10645new file mode 100644
10646index 0000000..8f4a0bf
10647--- /dev/null
10648+++ b/buildtools/wafadmin/Tools/gxx.py
10649@@ -0,0 +1,133 @@
10650+#!/usr/bin/env python
10651+# encoding: utf-8
10652+# Thomas Nagy, 2006 (ita)
10653+# Ralf Habacker, 2006 (rh)
10654+# Yinon Ehrlich, 2009
10655+
10656+import os, sys
10657+import Configure, Options, Utils
10658+import ccroot, ar
10659+from Configure import conftest
10660+
10661+@conftest
10662+def find_gxx(conf):
10663+ cxx = conf.find_program(['g++', 'c++'], var='CXX', mandatory=True)
10664+ cxx = conf.cmd_to_list(cxx)
10665+ ccroot.get_cc_version(conf, cxx, gcc=True)
10666+ conf.env.CXX_NAME = 'gcc'
10667+ conf.env.CXX = cxx
10668+
10669+@conftest
10670+def gxx_common_flags(conf):
10671+ v = conf.env
10672+
10673+ # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
10674+ v['CXXFLAGS_DEBUG'] = ['-g']
10675+ v['CXXFLAGS_RELEASE'] = ['-O2']
10676+
10677+ v['CXX_SRC_F'] = ''
10678+ v['CXX_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
10679+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
10680+
10681+ # linker
10682+ if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
10683+ v['CXXLNK_SRC_F'] = ''
10684+ v['CXXLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
10685+
10686+ v['LIB_ST'] = '-l%s' # template for adding libs
10687+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
10688+ v['STATICLIB_ST'] = '-l%s'
10689+ v['STATICLIBPATH_ST'] = '-L%s'
10690+ v['RPATH_ST'] = '-Wl,-rpath,%s'
10691+ v['CXXDEFINES_ST'] = '-D%s'
10692+
10693+ v['SONAME_ST'] = '-Wl,-h,%s'
10694+ v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
10695+ v['STATICLIB_MARKER'] = '-Wl,-Bstatic'
10696+ v['FULLSTATIC_MARKER'] = '-static'
10697+
10698+ # program
10699+ v['program_PATTERN'] = '%s'
10700+
10701+ # shared library
10702+	v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
10703+ v['shlib_LINKFLAGS'] = ['-shared']
10704+ v['shlib_PATTERN'] = 'lib%s.so'
10705+
10706+ # static lib
10707+ v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
10708+ v['staticlib_PATTERN'] = 'lib%s.a'
10709+
10710+ # osx stuff
10711+ v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
10712+ v['CCFLAGS_MACBUNDLE'] = ['-fPIC']
10713+ v['macbundle_PATTERN'] = '%s.bundle'
10714+
10715+@conftest
10716+def gxx_modifier_win32(conf):
10717+ v = conf.env
10718+ v['program_PATTERN'] = '%s.exe'
10719+
10720+ v['shlib_PATTERN'] = '%s.dll'
10721+ v['implib_PATTERN'] = 'lib%s.dll.a'
10722+ v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
10723+
10724+ dest_arch = v['DEST_CPU']
10725+ v['shlib_CXXFLAGS'] = []
10726+
10727+	v.append_value('shlib_CXXFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like DLL_EXPORT here is not a good idea
10728+
10729+ # Auto-import is enabled by default even without this option,
10730+ # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
10731+ # that the linker emits otherwise.
10732+ v.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
10733+
10734+@conftest
10735+def gxx_modifier_cygwin(conf):
10736+ gxx_modifier_win32(conf)
10737+ v = conf.env
10738+ v['shlib_PATTERN'] = 'cyg%s.dll'
10739+ v.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
10740+
10741+@conftest
10742+def gxx_modifier_darwin(conf):
10743+ v = conf.env
10744+ v['shlib_CXXFLAGS'] = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
10745+ v['shlib_LINKFLAGS'] = ['-dynamiclib']
10746+ v['shlib_PATTERN'] = 'lib%s.dylib'
10747+
10748+ v['staticlib_LINKFLAGS'] = []
10749+
10750+ v['SHLIB_MARKER'] = ''
10751+ v['STATICLIB_MARKER'] = ''
10752+ v['SONAME_ST'] = ''
10753+
10754+@conftest
10755+def gxx_modifier_aix(conf):
10756+ v = conf.env
10757+ v['program_LINKFLAGS'] = ['-Wl,-brtl']
10758+
10759+ v['shlib_LINKFLAGS'] = ['-shared', '-Wl,-brtl,-bexpfull']
10760+
10761+ v['SHLIB_MARKER'] = ''
10762+
10763+@conftest
10764+def gxx_modifier_platform(conf):
10765+	# * set configuration specific to a platform.
10766+	# * the destination platform is detected automatically by looking at the macros the compiler predefines,
10767+	#   and if it is not recognised, it falls back to sys.platform.
10768+ dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
10769+ gxx_modifier_func = globals().get('gxx_modifier_' + dest_os)
10770+ if gxx_modifier_func:
10771+ gxx_modifier_func(conf)
10772+
10773+def detect(conf):
10774+ conf.find_gxx()
10775+ conf.find_cpp()
10776+ conf.find_ar()
10777+ conf.gxx_common_flags()
10778+ conf.gxx_modifier_platform()
10779+ conf.cxx_load_tools()
10780+ conf.cxx_add_flags()
10781+ conf.link_add_flags()
10782+
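The detect() sequence above is what runs when a wscript loads this tool; a minimal sketch (the extra flag and names are illustrative):

  # hypothetical wscript fragment (sketch only)
  def configure(conf):
      conf.check_tool('gxx')   # runs find_gxx, gxx_common_flags, gxx_modifier_platform, ...
      conf.env.append_value('CXXFLAGS', '-O2')

  def build(bld):
      bld(features='cxx cprogram', source='main.cpp', target='app')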
10783diff --git a/buildtools/wafadmin/Tools/icc.py b/buildtools/wafadmin/Tools/icc.py
10784new file mode 100644
10785index 0000000..9c9a926
10786--- /dev/null
10787+++ b/buildtools/wafadmin/Tools/icc.py
10788@@ -0,0 +1,37 @@
10789+#!/usr/bin/env python
10790+# encoding: utf-8
10791+# Stian Selnes, 2008
10792+# Thomas Nagy 2009
10793+
10794+import os, sys
10795+import Configure, Options, Utils
10796+import ccroot, ar, gcc
10797+from Configure import conftest
10798+
10799+@conftest
10800+def find_icc(conf):
10801+ if sys.platform == 'cygwin':
10802+ conf.fatal('The Intel compiler does not work on Cygwin')
10803+
10804+ v = conf.env
10805+ cc = None
10806+ if v['CC']: cc = v['CC']
10807+ elif 'CC' in conf.environ: cc = conf.environ['CC']
10808+ if not cc: cc = conf.find_program('icc', var='CC')
10809+ if not cc: cc = conf.find_program('ICL', var='CC')
10810+ if not cc: conf.fatal('Intel C Compiler (icc) was not found')
10811+ cc = conf.cmd_to_list(cc)
10812+
10813+ ccroot.get_cc_version(conf, cc, icc=True)
10814+ v['CC'] = cc
10815+ v['CC_NAME'] = 'icc'
10816+
10817+detect = '''
10818+find_icc
10819+find_ar
10820+gcc_common_flags
10821+gcc_modifier_platform
10822+cc_load_tools
10823+cc_add_flags
10824+link_add_flags
10825+'''
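Here detect is a string rather than a function: check_tool() splits it and calls each named conftest on the configuration context in order, so it behaves roughly like this sketch:

  # rough procedural equivalent of the detect string above (sketch only)
  def detect(conf):
      conf.find_icc()
      conf.find_ar()
      conf.gcc_common_flags()
      conf.gcc_modifier_platform()
      conf.cc_load_tools()
      conf.cc_add_flags()
      conf.link_add_flags()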
10826diff --git a/buildtools/wafadmin/Tools/icpc.py b/buildtools/wafadmin/Tools/icpc.py
10827new file mode 100644
10828index 0000000..7d79c57
10829--- /dev/null
10830+++ b/buildtools/wafadmin/Tools/icpc.py
10831@@ -0,0 +1,35 @@
10832+#!/usr/bin/env python
10833+# encoding: utf-8
10834+# Thomas Nagy 2009
10835+
10836+import os, sys
10837+import Configure, Options, Utils
10838+import ccroot, ar, gxx
10839+from Configure import conftest
10840+
10841+@conftest
10842+def find_icpc(conf):
10843+ if sys.platform == 'cygwin':
10844+ conf.fatal('The Intel compiler does not work on Cygwin')
10845+
10846+ v = conf.env
10847+ cxx = None
10848+ if v['CXX']: cxx = v['CXX']
10849+ elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
10850+ if not cxx: cxx = conf.find_program('icpc', var='CXX')
10851+ if not cxx: conf.fatal('Intel C++ Compiler (icpc) was not found')
10852+ cxx = conf.cmd_to_list(cxx)
10853+
10854+ ccroot.get_cc_version(conf, cxx, icc=True)
10855+ v['CXX'] = cxx
10856+ v['CXX_NAME'] = 'icc'
10857+
10858+detect = '''
10859+find_icpc
10860+find_ar
10861+gxx_common_flags
10862+gxx_modifier_platform
10863+cxx_load_tools
10864+cxx_add_flags
10865+link_add_flags
10866+'''
10867diff --git a/buildtools/wafadmin/Tools/intltool.py b/buildtools/wafadmin/Tools/intltool.py
10868new file mode 100644
10869index 0000000..deb8f4a
10870--- /dev/null
10871+++ b/buildtools/wafadmin/Tools/intltool.py
10872@@ -0,0 +1,139 @@
10873+#!/usr/bin/env python
10874+# encoding: utf-8
10875+# Thomas Nagy, 2006 (ita)
10876+
10877+"intltool support"
10878+
10879+import os, re
10880+import Configure, TaskGen, Task, Utils, Runner, Options, Build, config_c
10881+from TaskGen import feature, before, taskgen
10882+from Logs import error
10883+
10884+"""
10885+Usage:
10886+
10887+bld(features='intltool_in', source='a.po b.po', podir='po', cache='.intlcache', flags='')
10888+
10889+"""
10890+
10891+class intltool_in_taskgen(TaskGen.task_gen):
10892+ """deprecated"""
10893+ def __init__(self, *k, **kw):
10894+ TaskGen.task_gen.__init__(self, *k, **kw)
10895+
10896+@before('apply_core')
10897+@feature('intltool_in')
10898+def iapply_intltool_in_f(self):
10899+ try: self.meths.remove('apply_core')
10900+ except ValueError: pass
10901+
10902+ for i in self.to_list(self.source):
10903+ node = self.path.find_resource(i)
10904+
10905+ podir = getattr(self, 'podir', 'po')
10906+ podirnode = self.path.find_dir(podir)
10907+ if not podirnode:
10908+ error("could not find the podir %r" % podir)
10909+ continue
10910+
10911+ cache = getattr(self, 'intlcache', '.intlcache')
10912+ self.env['INTLCACHE'] = os.path.join(self.path.bldpath(self.env), podir, cache)
10913+ self.env['INTLPODIR'] = podirnode.srcpath(self.env)
10914+ self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c'])
10915+
10916+ task = self.create_task('intltool', node, node.change_ext(''))
10917+ task.install_path = self.install_path
10918+
10919+class intltool_po_taskgen(TaskGen.task_gen):
10920+ """deprecated"""
10921+ def __init__(self, *k, **kw):
10922+ TaskGen.task_gen.__init__(self, *k, **kw)
10923+
10924+
10925+@feature('intltool_po')
10926+def apply_intltool_po(self):
10927+ try: self.meths.remove('apply_core')
10928+ except ValueError: pass
10929+
10930+ self.default_install_path = '${LOCALEDIR}'
10931+ appname = getattr(self, 'appname', 'set_your_app_name')
10932+ podir = getattr(self, 'podir', '')
10933+
10934+ def install_translation(task):
10935+ out = task.outputs[0]
10936+ filename = out.name
10937+ (langname, ext) = os.path.splitext(filename)
10938+ inst_file = langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
10939+ self.bld.install_as(os.path.join(self.install_path, inst_file), out, self.env, self.chmod)
10940+
10941+ linguas = self.path.find_resource(os.path.join(podir, 'LINGUAS'))
10942+ if linguas:
10943+ # scan LINGUAS file for locales to process
10944+ file = open(linguas.abspath())
10945+ langs = []
10946+ for line in file.readlines():
10947+ # ignore lines containing comments
10948+ if not line.startswith('#'):
10949+ langs += line.split()
10950+ file.close()
10951+ re_linguas = re.compile('[-a-zA-Z_@.]+')
10952+ for lang in langs:
10953+ # Make sure that we only process lines which contain locales
10954+ if re_linguas.match(lang):
10955+ node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po'))
10956+ task = self.create_task('po')
10957+ task.set_inputs(node)
10958+ task.set_outputs(node.change_ext('.mo'))
10959+ if self.bld.is_install: task.install = install_translation
10960+ else:
10961+		Utils.pprint('RED', "Error: no LINGUAS file found in the po directory")
10962+
10963+Task.simple_task_type('po', '${POCOM} -o ${TGT} ${SRC}', color='BLUE', shell=False)
10964+Task.simple_task_type('intltool',
10965+ '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}',
10966+ color='BLUE', after="cc_link cxx_link", shell=False)
10967+
10968+def detect(conf):
10969+ pocom = conf.find_program('msgfmt')
10970+ if not pocom:
10971+ # if msgfmt should not be mandatory, catch the thrown exception in your wscript
10972+ conf.fatal('The program msgfmt (gettext) is mandatory!')
10973+ conf.env['POCOM'] = pocom
10974+
10975+ # NOTE: it is possible to set INTLTOOL in the environment, but it must not have spaces in it
10976+
10977+ intltool = conf.find_program('intltool-merge', var='INTLTOOL')
10978+ if not intltool:
10979+ # if intltool-merge should not be mandatory, catch the thrown exception in your wscript
10980+ if Options.platform == 'win32':
10981+ perl = conf.find_program('perl', var='PERL')
10982+ if not perl:
10983+ conf.fatal('The program perl (required by intltool) could not be found')
10984+
10985+ intltooldir = Configure.find_file('intltool-merge', os.environ['PATH'].split(os.pathsep))
10986+ if not intltooldir:
10987+ conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
10988+
10989+ conf.env['INTLTOOL'] = Utils.to_list(conf.env['PERL']) + [intltooldir + os.sep + 'intltool-merge']
10990+ conf.check_message('intltool', '', True, ' '.join(conf.env['INTLTOOL']))
10991+ else:
10992+ conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
10993+
10994+ def getstr(varname):
10995+ return getattr(Options.options, varname, '')
10996+
10997+ prefix = conf.env['PREFIX']
10998+ datadir = getstr('datadir')
10999+ if not datadir: datadir = os.path.join(prefix,'share')
11000+
11001+ conf.define('LOCALEDIR', os.path.join(datadir, 'locale'))
11002+ conf.define('DATADIR', datadir)
11003+
11004+ if conf.env['CC'] or conf.env['CXX']:
11005+ # Define to 1 if <locale.h> is present
11006+ conf.check(header_name='locale.h')
11007+
11008+def set_options(opt):
11009+ opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
11010+ opt.add_option('--datadir', type='string', default='', dest='datadir', help='read-only application data')
11011+
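To complement the intltool_in example in the docstring above, a sketch of the intltool_po side; appname and podir are placeholders, and a po/LINGUAS file listing the locales is expected:

  # hypothetical wscript fragment (sketch only)
  def build(bld):
      # compiles po/<lang>.po into .mo files and installs them as
      # ${LOCALEDIR}/<lang>/LC_MESSAGES/<appname>.mo
      bld(features='intltool_po', appname='myapp', podir='po')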
11012diff --git a/buildtools/wafadmin/Tools/javaw.py b/buildtools/wafadmin/Tools/javaw.py
11013new file mode 100644
11014index 0000000..301ebc4
11015--- /dev/null
11016+++ b/buildtools/wafadmin/Tools/javaw.py
11017@@ -0,0 +1,255 @@
11018+#!/usr/bin/env python
11019+# encoding: utf-8
11020+# Thomas Nagy, 2006-2008 (ita)
11021+
11022+"""
11023+Java support
11024+
11025+Javac is one of the few compilers that behaves very badly:
11026+* it outputs files where it wants to (-d is only for the package root)
11027+* it recompiles files silently behind your back
11028+* it outputs an undefined amount of files (inner classes)
11029+
11030+Fortunately, the convention makes it possible to use the build dir without
11031+too many problems for the moment
11032+
11033+Inner classes must be located and cleaned when a problem arises;
11034+for the moment waf does not track the production of inner classes.
11035+
11036+Adding all the files to a task and executing it whenever any of the input
11037+files change would only hurt compilation times.
11038+
11039+Compilation can be run using Jython[1] rather than regular Python. Instead of
11040+running one of the following commands:
11041+ ./waf configure
11042+ python waf configure
11043+You would have to run:
11044+ java -jar /path/to/jython.jar waf configure
11045+
11046+[1] http://www.jython.org/
11047+"""
11048+
11049+import os, re
11050+from Configure import conf
11051+import TaskGen, Task, Utils, Options, Build
11052+from TaskGen import feature, before, taskgen
11053+
11054+class_check_source = '''
11055+public class Test {
11056+ public static void main(String[] argv) {
11057+ Class lib;
11058+ if (argv.length < 1) {
11059+ System.err.println("Missing argument");
11060+ System.exit(77);
11061+ }
11062+ try {
11063+ lib = Class.forName(argv[0]);
11064+ } catch (ClassNotFoundException e) {
11065+ System.err.println("ClassNotFoundException");
11066+ System.exit(1);
11067+ }
11068+ lib = null;
11069+ System.exit(0);
11070+ }
11071+}
11072+'''
11073+
11074+@feature('jar')
11075+@before('apply_core')
11076+def jar_files(self):
11077+ basedir = getattr(self, 'basedir', '.')
11078+ destfile = getattr(self, 'destfile', 'test.jar')
11079+ jaropts = getattr(self, 'jaropts', [])
11080+ jarcreate = getattr(self, 'jarcreate', 'cf')
11081+
11082+ dir = self.path.find_dir(basedir)
11083+	if not dir: raise Utils.WafError('could not find the basedir %r' % basedir)
11084+
11085+ jaropts.append('-C')
11086+ jaropts.append(dir.abspath(self.env))
11087+ jaropts.append('.')
11088+
11089+ out = self.path.find_or_declare(destfile)
11090+
11091+ tsk = self.create_task('jar_create')
11092+ tsk.set_outputs(out)
11093+ tsk.inputs = [x for x in dir.find_iter(src=0, bld=1) if x.id != out.id]
11094+ tsk.env['JAROPTS'] = jaropts
11095+ tsk.env['JARCREATE'] = jarcreate
11096+
11097+@feature('javac')
11098+@before('apply_core')
11099+def apply_java(self):
11100+ Utils.def_attrs(self, jarname='', jaropts='', classpath='',
11101+ sourcepath='.', srcdir='.', source_re='**/*.java',
11102+ jar_mf_attributes={}, jar_mf_classpath=[])
11103+
11104+ if getattr(self, 'source_root', None):
11105+ # old stuff
11106+ self.srcdir = self.source_root
11107+
11108+
11109+ nodes_lst = []
11110+
11111+ if not self.classpath:
11112+ if not self.env['CLASSPATH']:
11113+ self.env['CLASSPATH'] = '..' + os.pathsep + '.'
11114+ else:
11115+ self.env['CLASSPATH'] = self.classpath
11116+
11117+ srcdir_node = self.path.find_dir(self.srcdir)
11118+ if not srcdir_node:
11119+ raise Utils.WafError('could not find srcdir %r' % self.srcdir)
11120+
11121+ src_nodes = [x for x in srcdir_node.ant_glob(self.source_re, flat=False)]
11122+ bld_nodes = [x.change_ext('.class') for x in src_nodes]
11123+
11124+ self.env['OUTDIR'] = [srcdir_node.bldpath(self.env)]
11125+
11126+ tsk = self.create_task('javac')
11127+ tsk.set_inputs(src_nodes)
11128+ tsk.set_outputs(bld_nodes)
11129+
11130+ if getattr(self, 'compat', None):
11131+ tsk.env.append_value('JAVACFLAGS', ['-source', self.compat])
11132+
11133+ if hasattr(self, 'sourcepath'):
11134+ fold = [self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
11135+ names = os.pathsep.join([x.srcpath() for x in fold])
11136+ else:
11137+ names = srcdir_node.srcpath()
11138+
11139+ if names:
11140+ tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
11141+
11142+ if self.jarname:
11143+ jtsk = self.create_task('jar_create', bld_nodes, self.path.find_or_declare(self.jarname))
11144+ jtsk.set_run_after(tsk)
11145+
11146+ if not self.env.JAROPTS:
11147+ if self.jaropts:
11148+ self.env.JAROPTS = self.jaropts
11149+ else:
11150+ dirs = '.'
11151+ self.env.JAROPTS = ['-C', ''.join(self.env['OUTDIR']), dirs]
11152+
11153+Task.simple_task_type('jar_create', '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}', color='GREEN', shell=False)
11154+cls = Task.simple_task_type('javac', '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}', shell=False)
11155+cls.color = 'BLUE'
11156+def post_run_javac(self):
11157+	"""this is for cleaning the folder:
11158+	javac creates separate files for inner classes,
11159+	but it is not possible to know which inner classes will be produced in advance"""
11160+
11161+ par = {}
11162+ for x in self.inputs:
11163+ par[x.parent.id] = x.parent
11164+
11165+ inner = {}
11166+ for k in par.values():
11167+ path = k.abspath(self.env)
11168+ lst = os.listdir(path)
11169+
11170+ for u in lst:
11171+ if u.find('$') >= 0:
11172+ inner_class_node = k.find_or_declare(u)
11173+ inner[inner_class_node.id] = inner_class_node
11174+
11175+ to_add = set(inner.keys()) - set([x.id for x in self.outputs])
11176+ for x in to_add:
11177+ self.outputs.append(inner[x])
11178+
11179+ self.cached = True # disable the cache here - inner classes are a problem
11180+ return Task.Task.post_run(self)
11181+cls.post_run = post_run_javac
11182+
11183+def detect(conf):
11184+ # If JAVA_PATH is set, we prepend it to the path list
11185+ java_path = conf.environ['PATH'].split(os.pathsep)
11186+ v = conf.env
11187+
11188+ if 'JAVA_HOME' in conf.environ:
11189+ java_path = [os.path.join(conf.environ['JAVA_HOME'], 'bin')] + java_path
11190+ conf.env['JAVA_HOME'] = [conf.environ['JAVA_HOME']]
11191+
11192+ for x in 'javac java jar'.split():
11193+ conf.find_program(x, var=x.upper(), path_list=java_path)
11194+ conf.env[x.upper()] = conf.cmd_to_list(conf.env[x.upper()])
11195+ v['JAVA_EXT'] = ['.java']
11196+
11197+ if 'CLASSPATH' in conf.environ:
11198+ v['CLASSPATH'] = conf.environ['CLASSPATH']
11199+
11200+ if not v['JAR']: conf.fatal('jar is required for making java packages')
11201+ if not v['JAVAC']: conf.fatal('javac is required for compiling java classes')
11202+ v['JARCREATE'] = 'cf' # can use cvf
11203+
11204+@conf
11205+def check_java_class(self, classname, with_classpath=None):
11206+ """Check if the specified java class is installed"""
11207+
11208+ import shutil
11209+
11210+ javatestdir = '.waf-javatest'
11211+
11212+ classpath = javatestdir
11213+ if self.env['CLASSPATH']:
11214+ classpath += os.pathsep + self.env['CLASSPATH']
11215+ if isinstance(with_classpath, str):
11216+ classpath += os.pathsep + with_classpath
11217+
11218+ shutil.rmtree(javatestdir, True)
11219+ os.mkdir(javatestdir)
11220+
11221+ java_file = open(os.path.join(javatestdir, 'Test.java'), 'w')
11222+ java_file.write(class_check_source)
11223+ java_file.close()
11224+
11225+ # Compile the source
11226+ Utils.exec_command(self.env['JAVAC'] + [os.path.join(javatestdir, 'Test.java')], shell=False)
11227+
11228+ # Try to run the app
11229+ cmd = self.env['JAVA'] + ['-cp', classpath, 'Test', classname]
11230+ self.log.write("%s\n" % str(cmd))
11231+ found = Utils.exec_command(cmd, shell=False, log=self.log)
11232+
11233+ self.check_message('Java class %s' % classname, "", not found)
11234+
11235+ shutil.rmtree(javatestdir, True)
11236+
11237+ return found
11238+
11239+@conf
11240+def check_jni_headers(conf):
11241+ """
11242+ Check for jni headers and libraries
11243+
11244+ On success the environment variable xxx_JAVA is added for uselib
11245+ """
11246+
11247+ if not conf.env.CC_NAME and not conf.env.CXX_NAME:
11248+ conf.fatal('load a compiler first (gcc, g++, ..)')
11249+
11250+ if not conf.env.JAVA_HOME:
11251+ conf.fatal('set JAVA_HOME in the system environment')
11252+
11253+ # jni requires the jvm
11254+ javaHome = conf.env['JAVA_HOME'][0]
11255+
11256+ b = Build.BuildContext()
11257+ b.load_dirs(conf.srcdir, conf.blddir)
11258+ dir = b.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
11259+ f = dir.ant_glob('**/(jni|jni_md).h', flat=False)
11260+ incDirs = [x.parent.abspath() for x in f]
11261+
11262+ dir = b.root.find_dir(conf.env.JAVA_HOME[0])
11263+ f = dir.ant_glob('**/*jvm.(so|dll)', flat=False)
11264+ libDirs = [x.parent.abspath() for x in f] or [javaHome]
11265+
11266+ for i, d in enumerate(libDirs):
11267+ if conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
11268+ libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA'):
11269+ break
11270+ else:
11271+ conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
11272+
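A sketch of how the javac and jar features above are usually combined; the directory and jar names are illustrative:

  # hypothetical wscript fragment (sketch only)
  def configure(conf):
      conf.check_tool('javaw')
      conf.check_java_class('java.io.FileOutputStream')

  def build(bld):
      bld(features='javac', srcdir='src', compat='1.5')
      bld(features='jar', basedir='src', destfile='my.jar')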
11273diff --git a/buildtools/wafadmin/Tools/kde4.py b/buildtools/wafadmin/Tools/kde4.py
11274new file mode 100644
11275index 0000000..f480929
11276--- /dev/null
11277+++ b/buildtools/wafadmin/Tools/kde4.py
11278@@ -0,0 +1,74 @@
11279+#!/usr/bin/env python
11280+# encoding: utf-8
11281+# Thomas Nagy, 2006 (ita)
11282+
11283+import os, sys, re
11284+import Options, TaskGen, Task, Utils
11285+from TaskGen import taskgen, feature, after
11286+
11287+class msgfmt_taskgen(TaskGen.task_gen):
11288+ def __init__(self, *k, **kw):
11289+ TaskGen.task_gen.__init__(self, *k, **kw)
11290+
11291+@feature('msgfmt')
11292+def init_msgfmt(self):
11293+ #langs = '' # for example "foo/fr foo/br"
11294+ self.default_install_path = '${KDE4_LOCALE_INSTALL_DIR}'
11295+
11296+@feature('msgfmt')
11297+@after('init_msgfmt')
11298+def apply_msgfmt(self):
11299+ for lang in self.to_list(self.langs):
11300+ node = self.path.find_resource(lang+'.po')
11301+ task = self.create_task('msgfmt', node, node.change_ext('.mo'))
11302+
11303+ if not self.bld.is_install: continue
11304+ langname = lang.split('/')
11305+ langname = langname[-1]
11306+ task.install_path = self.install_path + os.sep + langname + os.sep + 'LC_MESSAGES'
11307+ task.filename = getattr(self, 'appname', 'set_your_appname') + '.mo'
11308+ task.chmod = self.chmod
11309+
11310+def detect(conf):
11311+ kdeconfig = conf.find_program('kde4-config')
11312+ if not kdeconfig:
11313+ conf.fatal('we need kde4-config')
11314+ prefix = Utils.cmd_output('%s --prefix' % kdeconfig, silent=True).strip()
11315+ file = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
11316+ try: os.stat(file)
11317+ except OSError:
11318+ file = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
11319+ try: os.stat(file)
11320+ except OSError: conf.fatal('could not open %s' % file)
11321+
11322+ try:
11323+ txt = Utils.readf(file)
11324+ except (OSError, IOError):
11325+ conf.fatal('could not read %s' % file)
11326+
11327+ txt = txt.replace('\\\n', '\n')
11328+ fu = re.compile('#(.*)\n')
11329+ txt = fu.sub('', txt)
11330+
11331+ setregexp = re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
11332+ found = setregexp.findall(txt)
11333+
11334+ for (_, key, val) in found:
11335+ #print key, val
11336+ conf.env[key] = val
11337+
11338+ # well well, i could just write an interpreter for cmake files
11339+ conf.env['LIB_KDECORE']='kdecore'
11340+ conf.env['LIB_KDEUI'] ='kdeui'
11341+ conf.env['LIB_KIO'] ='kio'
11342+ conf.env['LIB_KHTML'] ='khtml'
11343+ conf.env['LIB_KPARTS'] ='kparts'
11344+
11345+ conf.env['LIBPATH_KDECORE'] = conf.env['KDE4_LIB_INSTALL_DIR']
11346+ conf.env['CPPPATH_KDECORE'] = conf.env['KDE4_INCLUDE_INSTALL_DIR']
11347+ conf.env.append_value('CPPPATH_KDECORE', conf.env['KDE4_INCLUDE_INSTALL_DIR']+"/KDE")
11348+
11349+ conf.env['MSGFMT'] = conf.find_program('msgfmt')
11350+
11351+Task.simple_task_type('msgfmt', '${MSGFMT} ${SRC} -o ${TGT}', color='BLUE', shell=False)
11352+
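A sketch of the msgfmt feature defined above; the language list and appname are placeholders:

  # hypothetical wscript fragment (sketch only)
  def build(bld):
      # compiles po/fr.po and po/de.po to .mo and installs them under
      # ${KDE4_LOCALE_INSTALL_DIR}/<lang>/LC_MESSAGES/<appname>.mo
      bld(features='msgfmt', langs='po/fr po/de', appname='myapp')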
11353diff --git a/buildtools/wafadmin/Tools/libtool.py b/buildtools/wafadmin/Tools/libtool.py
11354new file mode 100644
11355index 0000000..47fa906
11356--- /dev/null
11357+++ b/buildtools/wafadmin/Tools/libtool.py
11358@@ -0,0 +1,330 @@
11359+#!/usr/bin/env python
11360+# encoding: utf-8
11361+# Matthias Jahn, 2008, jahn matthias ath freenet punto de
11362+# Thomas Nagy, 2008 (ita)
11363+
11364+import sys, re, os, optparse
11365+
11366+import TaskGen, Task, Utils, preproc
11367+from Logs import error, debug, warn
11368+from TaskGen import taskgen, after, before, feature
11369+
11370+REVISION="0.1.3"
11371+
11372+"""
11373+if you want to use the code here, you must use something like this:
11374+obj = obj.create(...)
11375+obj.features.append("libtool")
11376+obj.vnum = "1.2.3" # optional, but versioned libraries are common
11377+"""
11378+
11379+# fake libtool files
11380+fakelibtool_vardeps = ['CXX', 'PREFIX']
11381+def fakelibtool_build(task):
11382+ # Writes a .la file, used by libtool
11383+ env = task.env
11384+ dest = open(task.outputs[0].abspath(env), 'w')
11385+ sname = task.inputs[0].name
11386+ fu = dest.write
11387+ fu("# Generated by ltmain.sh - GNU libtool 1.5.18 - (pwn3d by BKsys II code name WAF)\n")
11388+ if env['vnum']:
11389+ nums = env['vnum'].split('.')
11390+ libname = task.inputs[0].name
11391+ name3 = libname+'.'+env['vnum']
11392+ name2 = libname+'.'+nums[0]
11393+ name1 = libname
11394+ fu("dlname='%s'\n" % name2)
11395+ strn = " ".join([name3, name2, name1])
11396+ fu("library_names='%s'\n" % (strn) )
11397+ else:
11398+ fu("dlname='%s'\n" % sname)
11399+ fu("library_names='%s %s %s'\n" % (sname, sname, sname) )
11400+ fu("old_library=''\n")
11401+ vars = ' '.join(env['libtoolvars']+env['LINKFLAGS'])
11402+ fu("dependency_libs='%s'\n" % vars)
11403+ fu("current=0\n")
11404+ fu("age=0\nrevision=0\ninstalled=yes\nshouldnotlink=no\n")
11405+ fu("dlopen=''\ndlpreopen=''\n")
11406+ fu("libdir='%s/lib'\n" % env['PREFIX'])
11407+ dest.close()
11408+ return 0
11409+
11410+def read_la_file(path):
11411+ sp = re.compile(r'^([^=]+)=\'(.*)\'$')
11412+ dc={}
11413+ file = open(path, "r")
11414+ for line in file.readlines():
11415+ try:
11416+ #print sp.split(line.strip())
11417+ _, left, right, _ = sp.split(line.strip())
11418+ dc[left]=right
11419+ except ValueError:
11420+ pass
11421+ file.close()
11422+ return dc
11423+
11424+@feature("libtool")
11425+@after('apply_link')
11426+def apply_link_libtool(self):
11427+ if self.type != 'program':
11428+ linktask = self.link_task
11429+ self.latask = self.create_task('fakelibtool', linktask.outputs, linktask.outputs[0].change_ext('.la'))
11430+
11431+ if self.bld.is_install:
11432+ self.bld.install_files('${PREFIX}/lib', linktask.outputs[0], self.env)
11433+
11434+@feature("libtool")
11435+@before('apply_core')
11436+def apply_libtool(self):
11437+ self.env['vnum']=self.vnum
11438+
11439+ paths=[]
11440+ libs=[]
11441+ libtool_files=[]
11442+ libtool_vars=[]
11443+
11444+ for l in self.env['LINKFLAGS']:
11445+ if l[:2]=='-L':
11446+ paths.append(l[2:])
11447+ elif l[:2]=='-l':
11448+ libs.append(l[2:])
11449+
11450+ for l in libs:
11451+ for p in paths:
11452+ dict = read_la_file(p+'/lib'+l+'.la')
11453+ linkflags2 = dict.get('dependency_libs', '')
11454+ for v in linkflags2.split():
11455+ if v.endswith('.la'):
11456+ libtool_files.append(v)
11457+ libtool_vars.append(v)
11458+ continue
11459+ self.env.append_unique('LINKFLAGS', v)
11460+ break
11461+
11462+ self.env['libtoolvars']=libtool_vars
11463+
11464+ while libtool_files:
11465+ file = libtool_files.pop()
11466+ dict = read_la_file(file)
11467+ for v in dict['dependency_libs'].split():
11468+ if v[-3:] == '.la':
11469+ libtool_files.append(v)
11470+ continue
11471+ self.env.append_unique('LINKFLAGS', v)
11472+
11473+Task.task_type_from_func('fakelibtool', vars=fakelibtool_vardeps, func=fakelibtool_build, color='BLUE', after="cc_link cxx_link static_link")
11474+
11475+class libtool_la_file:
11476+ def __init__ (self, la_filename):
11477+ self.__la_filename = la_filename
11478+ #remove path and .la suffix
11479+ self.linkname = str(os.path.split(la_filename)[-1])[:-3]
11480+ if self.linkname.startswith("lib"):
11481+ self.linkname = self.linkname[3:]
11482+ # The name that we can dlopen(3).
11483+ self.dlname = None
11484+ # Names of this library
11485+ self.library_names = None
11486+ # The name of the static archive.
11487+ self.old_library = None
11488+ # Libraries that this one depends upon.
11489+ self.dependency_libs = None
11490+ # Version information for libIlmImf.
11491+ self.current = None
11492+ self.age = None
11493+ self.revision = None
11494+ # Is this an already installed library?
11495+ self.installed = None
11496+ # Should we warn about portability when linking against -modules?
11497+ self.shouldnotlink = None
11498+ # Files to dlopen/dlpreopen
11499+ self.dlopen = None
11500+ self.dlpreopen = None
11501+ # Directory that this library needs to be installed in:
11502+ self.libdir = '/usr/lib'
11503+ if not self.__parse():
11504+ raise ValueError("file %s not found!!" %(la_filename))
11505+
11506+ def __parse(self):
11507+ "Retrieve the variables from a file"
11508+ if not os.path.isfile(self.__la_filename): return 0
11509+ la_file=open(self.__la_filename, 'r')
11510+ for line in la_file:
11511+ ln = line.strip()
11512+ if not ln: continue
11513+ if ln[0]=='#': continue
11514+ (key, value) = str(ln).split('=', 1)
11515+ key = key.strip()
11516+ value = value.strip()
11517+ if value == "no": value = False
11518+ elif value == "yes": value = True
11519+ else:
11520+ try: value = int(value)
11521+ except ValueError: value = value.strip("'")
11522+ setattr(self, key, value)
11523+ la_file.close()
11524+ return 1
11525+
11526+ def get_libs(self):
11527+ """return linkflags for this lib"""
11528+ libs = []
11529+ if self.dependency_libs:
11530+ libs = str(self.dependency_libs).strip().split()
11531+ if libs == None:
11532+ libs = []
11533+ # add la lib and libdir
11534+ libs.insert(0, "-l%s" % self.linkname.strip())
11535+ libs.insert(0, "-L%s" % self.libdir.strip())
11536+ return libs
11537+
11538+ def __str__(self):
11539+ return '''\
11540+dlname = "%(dlname)s"
11541+library_names = "%(library_names)s"
11542+old_library = "%(old_library)s"
11543+dependency_libs = "%(dependency_libs)s"
11544+version = %(current)s.%(age)s.%(revision)s
11545+installed = "%(installed)s"
11546+shouldnotlink = "%(shouldnotlink)s"
11547+dlopen = "%(dlopen)s"
11548+dlpreopen = "%(dlpreopen)s"
11549+libdir = "%(libdir)s"''' % self.__dict__
11550+
11551+class libtool_config:
11552+ def __init__ (self, la_filename):
11553+ self.__libtool_la_file = libtool_la_file(la_filename)
11554+ tmp = self.__libtool_la_file
11555+ self.__version = [int(tmp.current), int(tmp.age), int(tmp.revision)]
11556+ self.__sub_la_files = []
11557+ self.__sub_la_files.append(la_filename)
11558+ self.__libs = None
11559+
11560+ def __cmp__(self, other):
11561+		"""make it comparable with X.Y.Z versions (Y and Z are optional)"""
11562+ if not other:
11563+ return 1
11564+ othervers = [int(s) for s in str(other).split(".")]
11565+ selfvers = self.__version
11566+ return cmp(selfvers, othervers)
11567+
11568+ def __str__(self):
11569+ return "\n".join([
11570+ str(self.__libtool_la_file),
11571+ ' '.join(self.__libtool_la_file.get_libs()),
11572+ '* New getlibs:',
11573+ ' '.join(self.get_libs())
11574+ ])
11575+
11576+ def __get_la_libs(self, la_filename):
11577+ return libtool_la_file(la_filename).get_libs()
11578+
11579+ def get_libs(self):
11580+		"""return the complete unique linkflags that do not
11581+		contain .la files anymore"""
11582+ libs_list = list(self.__libtool_la_file.get_libs())
11583+ libs_map = {}
11584+ while len(libs_list) > 0:
11585+ entry = libs_list.pop(0)
11586+ if entry:
11587+ if str(entry).endswith(".la"):
11588+ ## prevents duplicate .la checks
11589+ if entry not in self.__sub_la_files:
11590+ self.__sub_la_files.append(entry)
11591+ libs_list.extend(self.__get_la_libs(entry))
11592+ else:
11593+ libs_map[entry]=1
11594+ self.__libs = libs_map.keys()
11595+ return self.__libs
11596+
11597+ def get_libs_only_L(self):
11598+ if not self.__libs: self.get_libs()
11599+ libs = self.__libs
11600+ libs = [s for s in libs if str(s).startswith('-L')]
11601+ return libs
11602+
11603+ def get_libs_only_l(self):
11604+ if not self.__libs: self.get_libs()
11605+ libs = self.__libs
11606+ libs = [s for s in libs if str(s).startswith('-l')]
11607+ return libs
11608+
11609+ def get_libs_only_other(self):
11610+ if not self.__libs: self.get_libs()
11611+ libs = self.__libs
11612+ libs = [s for s in libs if not(str(s).startswith('-L')or str(s).startswith('-l'))]
11613+ return libs
11614+
11615+def useCmdLine():
11616+ """parse cmdline args and control build"""
11617+ usage = '''Usage: %prog [options] PathToFile.la
11618+example: %prog --atleast-version=2.0.0 /usr/lib/libIlmImf.la
11619+or: %prog --libs /usr/lib/libamarok.la'''
11620+ parser = optparse.OptionParser(usage)
11621+ a = parser.add_option
11622+ a("--version", dest = "versionNumber",
11623+ action = "store_true", default = False,
11624+ help = "output version of libtool-config"
11625+ )
11626+ a("--debug", dest = "debug",
11627+ action = "store_true", default = False,
11628+ help = "enable debug"
11629+ )
11630+ a("--libs", dest = "libs",
11631+ action = "store_true", default = False,
11632+ help = "output all linker flags"
11633+ )
11634+ a("--libs-only-l", dest = "libs_only_l",
11635+ action = "store_true", default = False,
11636+ help = "output -l flags"
11637+ )
11638+ a("--libs-only-L", dest = "libs_only_L",
11639+ action = "store_true", default = False,
11640+ help = "output -L flags"
11641+ )
11642+ a("--libs-only-other", dest = "libs_only_other",
11643+ action = "store_true", default = False,
11644+ help = "output other libs (e.g. -pthread)"
11645+ )
11646+ a("--atleast-version", dest = "atleast_version",
11647+ default=None,
11648+ help = "return 0 if the module is at least version ATLEAST_VERSION"
11649+ )
11650+ a("--exact-version", dest = "exact_version",
11651+ default=None,
11652+ help = "return 0 if the module is exactly version EXACT_VERSION"
11653+ )
11654+ a("--max-version", dest = "max_version",
11655+ default=None,
11656+	  help = "return 0 if the module is no newer than version MAX_VERSION"
11657+ )
11658+
11659+ (options, args) = parser.parse_args()
11660+ if len(args) != 1 and not options.versionNumber:
11661+ parser.error("incorrect number of arguments")
11662+ if options.versionNumber:
11663+ print("libtool-config version %s" % REVISION)
11664+ return 0
11665+ ltf = libtool_config(args[0])
11666+ if options.debug:
11667+ print(ltf)
11668+ if options.atleast_version:
11669+ if ltf >= options.atleast_version: return 0
11670+ sys.exit(1)
11671+ if options.exact_version:
11672+ if ltf == options.exact_version: return 0
11673+ sys.exit(1)
11674+ if options.max_version:
11675+ if ltf <= options.max_version: return 0
11676+ sys.exit(1)
11677+
11678+ def p(x):
11679+ print(" ".join(x))
11680+ if options.libs: p(ltf.get_libs())
11681+ elif options.libs_only_l: p(ltf.get_libs_only_l())
11682+ elif options.libs_only_L: p(ltf.get_libs_only_L())
11683+ elif options.libs_only_other: p(ltf.get_libs_only_other())
11684+ return 0
11685+
11686+if __name__ == '__main__':
11687+ useCmdLine()
11688+
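Besides the command-line mode in useCmdLine, libtool_config can be used directly from Python; a small sketch with an illustrative .la path:

  # illustrative use of libtool_config (sketch only)
  ltc = libtool_config('/usr/lib/libfoo.la')
  if ltc >= '1.2':                     # __cmp__ compares against an X[.Y[.Z]] string
      flags = ltc.get_libs()           # .la references are expanded recursively
      print(' '.join(ltc.get_libs_only_L() + ltc.get_libs_only_l()))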
11689diff --git a/buildtools/wafadmin/Tools/lua.py b/buildtools/wafadmin/Tools/lua.py
11690new file mode 100644
11691index 0000000..5b181e1
11692--- /dev/null
11693+++ b/buildtools/wafadmin/Tools/lua.py
11694@@ -0,0 +1,25 @@
11695+#!/usr/bin/env python
11696+# encoding: utf-8
11697+# Sebastian Schlingmann, 2008
11698+# Thomas Nagy, 2008 (ita)
11699+
11700+import TaskGen
11701+from TaskGen import taskgen, feature
11702+from Constants import *
11703+
11704+TaskGen.declare_chain(
11705+ name = 'luac',
11706+ rule = '${LUAC} -s -o ${TGT} ${SRC}',
11707+ ext_in = '.lua',
11708+ ext_out = '.luac',
11709+ reentrant = False,
11710+ install = 'LUADIR', # env variable
11711+)
11712+
11713+@feature('lua')
11714+def init_lua(self):
11715+ self.default_chmod = O755
11716+
11717+def detect(conf):
11718+ conf.find_program('luac', var='LUAC', mandatory = True)
11719+
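Like gob2 above, this is a declare_chain tool; a sketch of typical use with made-up file names:

  # hypothetical wscript fragment (sketch only)
  def configure(conf):
      conf.check_tool('lua')

  def build(bld):
      # each .lua source is compiled to .luac; installation goes to ${LUADIR}
      bld(features='lua', source='init.lua util.lua')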
11720diff --git a/buildtools/wafadmin/Tools/misc.py b/buildtools/wafadmin/Tools/misc.py
11721new file mode 100644
11722index 0000000..9903ee4
11723--- /dev/null
11724+++ b/buildtools/wafadmin/Tools/misc.py
11725@@ -0,0 +1,430 @@
11726+#!/usr/bin/env python
11727+# encoding: utf-8
11728+# Thomas Nagy, 2006 (ita)
11729+
11730+"""
11731+Custom objects:
11732+ - execute a function every time
11733+ - copy a file somewhere else
11734+"""
11735+
11736+import shutil, re, os
11737+import TaskGen, Node, Task, Utils, Build, Constants
11738+from TaskGen import feature, taskgen, after, before
11739+from Logs import debug
11740+
11741+def copy_func(tsk):
11742+	"Make a file copy. This might be used to perform other kinds of file processing (even calling a compiler is possible)"
11743+ env = tsk.env
11744+ infile = tsk.inputs[0].abspath(env)
11745+ outfile = tsk.outputs[0].abspath(env)
11746+ try:
11747+ shutil.copy2(infile, outfile)
11748+ except (OSError, IOError):
11749+ return 1
11750+ else:
11751+ if tsk.chmod: os.chmod(outfile, tsk.chmod)
11752+ return 0
11753+
11754+def action_process_file_func(tsk):
11755+ "Ask the function attached to the task to process it"
11756+ if not tsk.fun: raise Utils.WafError('task must have a function attached to it for copy_func to work!')
11757+ return tsk.fun(tsk)
11758+
11759+class cmd_taskgen(TaskGen.task_gen):
11760+ def __init__(self, *k, **kw):
11761+ TaskGen.task_gen.__init__(self, *k, **kw)
11762+
11763+@feature('cmd')
11764+def apply_cmd(self):
11765+	"call a command every time"
11766+ if not self.fun: raise Utils.WafError('cmdobj needs a function!')
11767+ tsk = Task.TaskBase()
11768+ tsk.fun = self.fun
11769+ tsk.env = self.env
11770+ self.tasks.append(tsk)
11771+ tsk.install_path = self.install_path
11772+
11773+class copy_taskgen(TaskGen.task_gen):
11774+ "By default, make a file copy, if fun is provided, fun will make the copy (or call a compiler, etc)"
11775+ def __init__(self, *k, **kw):
11776+ TaskGen.task_gen.__init__(self, *k, **kw)
11777+
11778+@feature('copy')
11779+@before('apply_core')
11780+def apply_copy(self):
11781+ Utils.def_attrs(self, fun=copy_func)
11782+ self.default_install_path = 0
11783+
11784+ lst = self.to_list(self.source)
11785+ self.meths.remove('apply_core')
11786+
11787+ for filename in lst:
11788+ node = self.path.find_resource(filename)
11789+ if not node: raise Utils.WafError('cannot find input file %s for processing' % filename)
11790+
11791+ target = self.target
11792+ if not target or len(lst)>1: target = node.name
11793+
11794+ # TODO the file path may be incorrect
11795+ newnode = self.path.find_or_declare(target)
11796+
11797+ tsk = self.create_task('copy', node, newnode)
11798+ tsk.fun = self.fun
11799+ tsk.chmod = self.chmod
11800+ tsk.install_path = self.install_path
11801+
11802+ if not tsk.env:
11803+ tsk.debug()
11804+ raise Utils.WafError('task without an environment')
11805+
11806+def subst_func(tsk):
11807+ "Substitutes variables in a .in file"
11808+
11809+ m4_re = re.compile('@(\w+)@', re.M)
11810+
11811+ env = tsk.env
11812+ infile = tsk.inputs[0].abspath(env)
11813+ outfile = tsk.outputs[0].abspath(env)
11814+
11815+ code = Utils.readf(infile)
11816+
11817+ # replace all % by %% to prevent errors by % signs in the input file while string formatting
11818+ code = code.replace('%', '%%')
11819+
11820+ s = m4_re.sub(r'%(\1)s', code)
11821+
11822+ di = tsk.dict or {}
11823+ if not di:
11824+ names = m4_re.findall(code)
11825+ for i in names:
11826+ di[i] = env.get_flat(i) or env.get_flat(i.upper())
11827+
11828+ file = open(outfile, 'w')
11829+ file.write(s % di)
11830+ file.close()
11831+ if tsk.chmod: os.chmod(outfile, tsk.chmod)
11832+
11833+class subst_taskgen(TaskGen.task_gen):
11834+ def __init__(self, *k, **kw):
11835+ TaskGen.task_gen.__init__(self, *k, **kw)
11836+
11837+@feature('subst')
11838+@before('apply_core')
11839+def apply_subst(self):
11840+ Utils.def_attrs(self, fun=subst_func)
11841+ self.default_install_path = 0
11842+ lst = self.to_list(self.source)
11843+ self.meths.remove('apply_core')
11844+
11845+ self.dict = getattr(self, 'dict', {})
11846+
11847+ for filename in lst:
11848+ node = self.path.find_resource(filename)
11849+ if not node: raise Utils.WafError('cannot find input file %s for processing' % filename)
11850+
11851+ if self.target:
11852+ newnode = self.path.find_or_declare(self.target)
11853+ else:
11854+ newnode = node.change_ext('')
11855+
11856+ try:
11857+ self.dict = self.dict.get_merged_dict()
11858+ except AttributeError:
11859+ pass
11860+
11861+ if self.dict and not self.env['DICT_HASH']:
11862+ self.env = self.env.copy()
11863+ keys = list(self.dict.keys())
11864+ keys.sort()
11865+ lst = [self.dict[x] for x in keys]
11866+ self.env['DICT_HASH'] = str(Utils.h_list(lst))
11867+
11868+ tsk = self.create_task('copy', node, newnode)
11869+ tsk.fun = self.fun
11870+ tsk.dict = self.dict
11871+ tsk.dep_vars = ['DICT_HASH']
11872+ tsk.install_path = self.install_path
11873+ tsk.chmod = self.chmod
11874+
11875+ if not tsk.env:
11876+ tsk.debug()
11877+ raise Utils.WafError('task without an environment')
11878+
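The subst feature above performs autoconf-style @VAR@ substitution on source files; a sketch of its use (the file name and keys are illustrative):

  # hypothetical wscript fragment (sketch only)
  def build(bld):
      # '@VERSION@' in version.h.in is replaced from the dict; keys that are
      # missing from the dict fall back to the environment (upper-cased name)
      bld(features='subst', source='version.h.in', target='version.h',
          dict={'VERSION': '1.0'})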
11879+####################
11880+## command-output ####
11881+####################
11882+
11883+class cmd_arg(object):
11884+ """command-output arguments for representing files or folders"""
11885+ def __init__(self, name, template='%s'):
11886+ self.name = name
11887+ self.template = template
11888+ self.node = None
11889+
11890+class input_file(cmd_arg):
11891+ def find_node(self, base_path):
11892+ assert isinstance(base_path, Node.Node)
11893+ self.node = base_path.find_resource(self.name)
11894+ if self.node is None:
11895+			raise Utils.WafError("Input file %s not found in %s" % (self.name, base_path))
11896+
11897+ def get_path(self, env, absolute):
11898+ if absolute:
11899+ return self.template % self.node.abspath(env)
11900+ else:
11901+ return self.template % self.node.srcpath(env)
11902+
11903+class output_file(cmd_arg):
11904+ def find_node(self, base_path):
11905+ assert isinstance(base_path, Node.Node)
11906+ self.node = base_path.find_or_declare(self.name)
11907+ if self.node is None:
11908+			raise Utils.WafError("Output file %s not found in %s" % (self.name, base_path))
11909+
11910+ def get_path(self, env, absolute):
11911+ if absolute:
11912+ return self.template % self.node.abspath(env)
11913+ else:
11914+ return self.template % self.node.bldpath(env)
11915+
11916+class cmd_dir_arg(cmd_arg):
11917+ def find_node(self, base_path):
11918+ assert isinstance(base_path, Node.Node)
11919+ self.node = base_path.find_dir(self.name)
11920+ if self.node is None:
11921+			raise Utils.WafError("Directory %s not found in %s" % (self.name, base_path))
11922+
11923+class input_dir(cmd_dir_arg):
11924+ def get_path(self, dummy_env, dummy_absolute):
11925+ return self.template % self.node.abspath()
11926+
11927+class output_dir(cmd_dir_arg):
11928+ def get_path(self, env, dummy_absolute):
11929+ return self.template % self.node.abspath(env)
11930+
11931+
11932+class command_output(Task.Task):
11933+ color = "BLUE"
11934+ def __init__(self, env, command, command_node, command_args, stdin, stdout, cwd, os_env, stderr):
11935+ Task.Task.__init__(self, env, normal=1)
11936+ assert isinstance(command, (str, Node.Node))
11937+ self.command = command
11938+ self.command_args = command_args
11939+ self.stdin = stdin
11940+ self.stdout = stdout
11941+ self.cwd = cwd
11942+ self.os_env = os_env
11943+ self.stderr = stderr
11944+
11945+ if command_node is not None: self.dep_nodes = [command_node]
11946+		self.dep_vars = [] # additional environment variables to look at
11947+
11948+ def run(self):
11949+ task = self
11950+ #assert len(task.inputs) > 0
11951+
11952+ def input_path(node, template):
11953+ if task.cwd is None:
11954+ return template % node.bldpath(task.env)
11955+ else:
11956+ return template % node.abspath()
11957+ def output_path(node, template):
11958+ fun = node.abspath
11959+ if task.cwd is None: fun = node.bldpath
11960+ return template % fun(task.env)
11961+
11962+ if isinstance(task.command, Node.Node):
11963+ argv = [input_path(task.command, '%s')]
11964+ else:
11965+ argv = [task.command]
11966+
11967+ for arg in task.command_args:
11968+ if isinstance(arg, str):
11969+ argv.append(arg)
11970+ else:
11971+ assert isinstance(arg, cmd_arg)
11972+ argv.append(arg.get_path(task.env, (task.cwd is not None)))
11973+
11974+ if task.stdin:
11975+ stdin = open(input_path(task.stdin, '%s'))
11976+ else:
11977+ stdin = None
11978+
11979+ if task.stdout:
11980+ stdout = open(output_path(task.stdout, '%s'), "w")
11981+ else:
11982+ stdout = None
11983+
11984+ if task.stderr:
11985+ stderr = open(output_path(task.stderr, '%s'), "w")
11986+ else:
11987+ stderr = None
11988+
11989+ if task.cwd is None:
11990+ cwd = ('None (actually %r)' % os.getcwd())
11991+ else:
11992+ cwd = repr(task.cwd)
11993+ debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" %
11994+ (cwd, stdin, stdout, argv))
11995+
11996+ if task.os_env is None:
11997+ os_env = os.environ
11998+ else:
11999+ os_env = task.os_env
12000+ command = Utils.pproc.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env)
12001+ return command.wait()
12002+
12003+class cmd_output_taskgen(TaskGen.task_gen):
12004+ def __init__(self, *k, **kw):
12005+ TaskGen.task_gen.__init__(self, *k, **kw)
12006+
12007+@feature('command-output')
12008+def init_cmd_output(self):
12009+ Utils.def_attrs(self,
12010+ stdin = None,
12011+ stdout = None,
12012+ stderr = None,
12013+ # the command to execute
12014+ command = None,
12015+
12016+ # whether it is an external command; otherwise it is assumed
12017+ # to be an executable binary or script that lives in the
12018+ # source or build tree.
12019+ command_is_external = False,
12020+
12021+ # extra parameters (argv) to pass to the command (excluding
12022+ # the command itself)
12023+ argv = [],
12024+
12025+ # dependencies to other objects -> this is probably not what you want (ita)
12026+ # values must be 'task_gen' instances (not names!)
12027+ dependencies = [],
12028+
12029+ # dependencies on env variable contents
12030+ dep_vars = [],
12031+
12032+ # input files that are implicit, i.e. they are not
12033+ # stdin, nor are they mentioned explicitly in argv
12034+ hidden_inputs = [],
12035+
12036+ # output files that are implicit, i.e. they are not
12037+ # stdout, nor are they mentioned explicitly in argv
12038+ hidden_outputs = [],
12039+
12040+ # change the subprocess to this cwd (must use obj.input_dir() or output_dir() here)
12041+ cwd = None,
12042+
12043+ # OS environment variables to pass to the subprocess
12044+ # if None, use the default environment variables unchanged
12045+ os_env = None)
12046+
12047+@feature('command-output')
12048+@after('init_cmd_output')
12049+def apply_cmd_output(self):
12050+ if self.command is None:
12051+ raise Utils.WafError("command-output missing command")
12052+ if self.command_is_external:
12053+ cmd = self.command
12054+ cmd_node = None
12055+ else:
12056+ cmd_node = self.path.find_resource(self.command)
12057+ assert cmd_node is not None, ('''Could not find command '%s' in source tree.
12058+Hint: if this is an external command,
12059+use command_is_external=True''') % (self.command,)
12060+ cmd = cmd_node
12061+
12062+ if self.cwd is None:
12063+ cwd = None
12064+ else:
12065+ assert isinstance(cwd, CmdDirArg)
12066+ self.cwd.find_node(self.path)
12067+
12068+ args = []
12069+ inputs = []
12070+ outputs = []
12071+
12072+ for arg in self.argv:
12073+ if isinstance(arg, cmd_arg):
12074+ arg.find_node(self.path)
12075+ if isinstance(arg, input_file):
12076+ inputs.append(arg.node)
12077+ if isinstance(arg, output_file):
12078+ outputs.append(arg.node)
12079+
12080+ if self.stdout is None:
12081+ stdout = None
12082+ else:
12083+ assert isinstance(self.stdout, str)
12084+ stdout = self.path.find_or_declare(self.stdout)
12085+ if stdout is None:
12086+ raise Utils.WafError("File %s not found" % (self.stdout,))
12087+ outputs.append(stdout)
12088+
12089+ if self.stderr is None:
12090+ stderr = None
12091+ else:
12092+ assert isinstance(self.stderr, str)
12093+ stderr = self.path.find_or_declare(self.stderr)
12094+ if stderr is None:
12095+ raise Utils.WafError("File %s not found" % (self.stderr,))
12096+ outputs.append(stderr)
12097+
12098+ if self.stdin is None:
12099+ stdin = None
12100+ else:
12101+ assert isinstance(self.stdin, str)
12102+ stdin = self.path.find_resource(self.stdin)
12103+ if stdin is None:
12104+ raise Utils.WafError("File %s not found" % (self.stdin,))
12105+ inputs.append(stdin)
12106+
12107+ for hidden_input in self.to_list(self.hidden_inputs):
12108+ node = self.path.find_resource(hidden_input)
12109+ if node is None:
12110+ raise Utils.WafError("File %s not found in dir %s" % (hidden_input, self.path))
12111+ inputs.append(node)
12112+
12113+ for hidden_output in self.to_list(self.hidden_outputs):
12114+ node = self.path.find_or_declare(hidden_output)
12115+ if node is None:
12116+ raise Utils.WafError("File %s not found in dir %s" % (hidden_output, self.path))
12117+ outputs.append(node)
12118+
12119+ if not (inputs or getattr(self, 'no_inputs', None)):
12120+ raise Utils.WafError('command-output objects must have at least one input file or give self.no_inputs')
12121+ if not (outputs or getattr(self, 'no_outputs', None)):
12122+ raise Utils.WafError('command-output objects must have at least one output file or give self.no_outputs')
12123+
12124+ task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr)
12125+ Utils.copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
12126+ self.tasks.append(task)
12127+
12128+ task.inputs = inputs
12129+ task.outputs = outputs
12130+ task.dep_vars = self.to_list(self.dep_vars)
12131+
12132+ for dep in self.dependencies:
12133+ assert dep is not self
12134+ dep.post()
12135+ for dep_task in dep.tasks:
12136+ task.set_run_after(dep_task)
12137+
12138+ if not task.inputs:
12139+ # the case for svnversion, always run, and update the output nodes
12140+ task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run
12141+ task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__)
12142+
12143+ # TODO the case with no outputs?
12144+
12145+def post_run(self):
12146+ for x in self.outputs:
12147+ h = Utils.h_file(x.abspath(self.env))
12148+ self.generator.bld.node_sigs[self.env.variant()][x.id] = h
12149+
12150+def runnable_status(self):
12151+ return Constants.RUN_ME
12152+
12153+Task.task_type_from_func('copy', vars=[], func=action_process_file_func)
12154+TaskGen.task_gen.classes['command-output'] = cmd_output_taskgen
12155+
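A sketch tying together the command-output feature and the cmd_arg helpers above; the script and file names are illustrative:

  # hypothetical wscript fragment (sketch only)
  import misc   # wafadmin/Tools is on sys.path while waf runs

  def configure(conf):
      conf.check_tool('misc')

  def build(bld):
      bld(features='command-output',
          command='gen.sh',            # a script living in the source tree
          argv=[misc.input_file('input.txt'), misc.output_file('generated.c')],
          stdout='gen.log')
      # for a program taken from PATH, pass command_is_external=True instead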
12156diff --git a/buildtools/wafadmin/Tools/msvc.py b/buildtools/wafadmin/Tools/msvc.py
12157new file mode 100644
12158index 0000000..4fde8b1
12159--- /dev/null
12160+++ b/buildtools/wafadmin/Tools/msvc.py
12161@@ -0,0 +1,797 @@
12162+#!/usr/bin/env python
12163+# encoding: utf-8
12164+# Carlos Rafael Giani, 2006 (dv)
12165+# Tamas Pal, 2007 (folti)
12166+# Nicolas Mercier, 2009
12167+# Microsoft Visual C++/Intel C++ compiler support - beta, needs more testing
12168+
12169+# usage:
12170+#
12171+# conf.env['MSVC_VERSIONS'] = ['msvc 9.0', 'msvc 8.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0']
12172+# conf.env['MSVC_TARGETS'] = ['x64']
12173+# conf.check_tool('msvc')
12174+# OR conf.check_tool('msvc', funs='no_autodetect')
12175+# conf.check_lib_msvc('gdi32')
12176+# conf.check_libs_msvc('kernel32 user32', mandatory=True)
12177+# ...
12178+# obj.uselib = 'KERNEL32 USER32 GDI32'
12179+#
12180+# platforms and targets will be tested in the order they appear;
12181+# the first good configuration will be used
12182+# supported platforms :
12183+# ia64, x64, x86, x86_amd64, x86_ia64
12184+
12185+# compilers supported :
12186+# msvc => Visual Studio, versions 7.1 (2003), 8.0 (2005), 9.0 (2008)
12187+# wsdk => Windows SDK, versions 6.0, 6.1, 7.0
12188+# icl => Intel compiler, versions 9,10,11
12189+# Smartphone => Compiler/SDK for Smartphone devices (armv4/v4i)
12190+# PocketPC => Compiler/SDK for PocketPC devices (armv4/v4i)
12191+
12192+
12193+import os, sys, re, string, optparse
12194+import Utils, TaskGen, Runner, Configure, Task, Options
12195+from Logs import debug, info, warn, error
12196+from TaskGen import after, before, feature
12197+
12198+from Configure import conftest, conf
12199+import ccroot, cc, cxx, ar, winres
12200+from libtool import read_la_file
12201+
12202+try:
12203+ import _winreg
12204+except:
12205+ import winreg as _winreg
12206+
12207+pproc = Utils.pproc
12208+
12209+# importlibs provided by MSVC/Platform SDK. Do NOT search them....
12210+g_msvc_systemlibs = """
12211+aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
12212+cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
12213+credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
12214+ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
12215+faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid
12216+gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
12217+kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
12218+mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
12219+msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
12220+netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
12221+odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32
12222+osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
12223+ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
12224+rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
12225+shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
12226+traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
12227+version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm
12228+wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
12229+""".split()
12230+
12231+
12232+all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'), ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64') ]
12233+all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ]
12234+all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
12235+
12236+def setup_msvc(conf, versions):
12237+ platforms = Utils.to_list(conf.env['MSVC_TARGETS']) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
12238+ desired_versions = conf.env['MSVC_VERSIONS'] or [v for v,_ in versions][::-1]
12239+ versiondict = dict(versions)
12240+
12241+ for version in desired_versions:
12242+ try:
12243+ targets = dict(versiondict [version])
12244+ for target in platforms:
12245+ try:
12246+ arch,(p1,p2,p3) = targets[target]
12247+ compiler,revision = version.split()
12248+ return compiler,revision,p1,p2,p3
12249+ except KeyError: continue
12250+ except KeyError: continue
12251+ conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
12252+
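setup_msvc consumes a nested list built by the gather_*_versions helpers below; its shape is roughly the following (values are made up for illustration):

  # illustrative shape only (sketch):
  # versions = [
  #     ('msvc 9.0', [('x86', ('x86', (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR))),
  #                   ('x64', ('amd64', (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)))]),
  #     ('wsdk 7.0', [...]),
  # ]
  # setup_msvc walks MSVC_VERSIONS/MSVC_TARGETS in order and returns the first match
  # as (compiler, revision, bindirs, incdirs, libdirs).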
12253+@conf
12254+def get_msvc_version(conf, compiler, version, target, vcvars):
12255+ debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
12256+ batfile = os.path.join(conf.blddir, 'waf-print-msvc.bat')
12257+ f = open(batfile, 'w')
12258+ f.write("""@echo off
12259+set INCLUDE=
12260+set LIB=
12261+call "%s" %s
12262+echo PATH=%%PATH%%
12263+echo INCLUDE=%%INCLUDE%%
12264+echo LIB=%%LIB%%
12265+""" % (vcvars,target))
12266+ f.close()
12267+ sout = Utils.cmd_output(['cmd', '/E:on', '/V:on', '/C', batfile])
12268+ lines = sout.splitlines()
12269+
12270+ for x in ('Setting environment', 'Setting SDK environment', 'Intel(R) C++ Compiler'):
12271+ if lines[0].find(x) != -1:
12272+ break
12273+ else:
12274+ debug('msvc: get_msvc_version: %r %r %r -> not found', compiler, version, target)
12275+ conf.fatal('msvc: Impossible to find a valid architecture for building (in get_msvc_version)')
12276+
12277+ for line in lines[1:]:
12278+ if line.startswith('PATH='):
12279+ path = line[5:]
12280+ MSVC_PATH = path.split(';')
12281+ elif line.startswith('INCLUDE='):
12282+ MSVC_INCDIR = [i for i in line[8:].split(';') if i]
12283+ elif line.startswith('LIB='):
12284+ MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
12285+
12286+ # Check if the compiler is usable at all.
12287+ # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
12288+ env = {}
12289+ env.update(os.environ)
12290+ env.update(PATH = path)
12291+ compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
12292+ cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
12293+	# delete CL if it exists, because it could contain parameters which can change cl's behaviour rather catastrophically.
12294+ if env.has_key('CL'):
12295+ del(env['CL'])
12296+
12297+ try:
12298+ p = pproc.Popen([cxx, '/help'], env=env, stdout=pproc.PIPE, stderr=pproc.PIPE)
12299+ out, err = p.communicate()
12300+ if p.returncode != 0:
12301+ raise Exception('return code: %r: %r' % (p.returncode, err))
12302+ except Exception, e:
12303+ debug('msvc: get_msvc_version: %r %r %r -> failure', compiler, version, target)
12304+ debug(str(e))
12305+ conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
12306+ else:
12307+ debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
12308+
12309+ return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
12310+
12311+@conf
12312+def gather_wsdk_versions(conf, versions):
12313+ version_pattern = re.compile('^v..?.?\...?.?')
12314+ try:
12315+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
12316+ except WindowsError:
12317+ try:
12318+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
12319+ except WindowsError:
12320+ return
12321+ index = 0
12322+ while 1:
12323+ try:
12324+ version = _winreg.EnumKey(all_versions, index)
12325+ except WindowsError:
12326+ break
12327+ index = index + 1
12328+ if not version_pattern.match(version):
12329+ continue
12330+ try:
12331+ msvc_version = _winreg.OpenKey(all_versions, version)
12332+ path,type = _winreg.QueryValueEx(msvc_version,'InstallationFolder')
12333+ except WindowsError:
12334+ continue
12335+ if os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
12336+ targets = []
12337+ for target,arch in all_msvc_platforms:
12338+ try:
12339+ targets.append((target, (arch, conf.get_msvc_version('wsdk', version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd')))))
12340+ except Configure.ConfigurationError:
12341+ pass
12342+ versions.append(('wsdk ' + version[1:], targets))
12343+
12344+@conf
12345+def gather_msvc_versions(conf, versions):
12346+ # checks SmartPhones SDKs
12347+ try:
12348+ ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
12349+ except WindowsError:
12350+ try:
12351+ ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
12352+ except WindowsError:
12353+ ce_sdk = ''
12354+ if ce_sdk:
12355+ supported_wince_platforms = []
12356+ ce_index = 0
12357+ while 1:
12358+ try:
12359+ sdk_device = _winreg.EnumKey(ce_sdk, ce_index)
12360+ except WindowsError:
12361+ break
12362+ ce_index = ce_index + 1
12363+ sdk = _winreg.OpenKey(ce_sdk, sdk_device)
12364+ path,type = _winreg.QueryValueEx(sdk, 'SDKRootDir')
12365+ path=str(path)
12366+ path,device = os.path.split(path)
12367+ if not device:
12368+ path,device = os.path.split(path)
12369+ for arch,compiler in all_wince_platforms:
12370+ platforms = []
12371+ if os.path.isdir(os.path.join(path, device, 'Lib', arch)):
12372+ platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch)))
12373+ if platforms:
12374+ supported_wince_platforms.append((device, platforms))
12375+ # checks MSVC
12376+ version_pattern = re.compile('^..?\...?')
12377+ for vcver,vcvar in [('VCExpress','exp'), ('VisualStudio','')]:
12378+ try:
12379+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\'+vcver)
12380+ except WindowsError:
12381+ try:
12382+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\'+vcver)
12383+ except WindowsError:
12384+ continue
12385+ index = 0
12386+ while 1:
12387+ try:
12388+ version = _winreg.EnumKey(all_versions, index)
12389+ except WindowsError:
12390+ break
12391+ index = index + 1
12392+ if not version_pattern.match(version):
12393+ continue
12394+ try:
12395+ msvc_version = _winreg.OpenKey(all_versions, version + "\\Setup\\VS")
12396+ path,type = _winreg.QueryValueEx(msvc_version, 'ProductDir')
12397+ path=str(path)
12398+ targets = []
12399+ if ce_sdk:
12400+ for device,platforms in supported_wince_platforms:
12401+ cetargets = []
12402+ for platform,compiler,include,lib in platforms:
12403+ winCEpath = os.path.join(path, 'VC', 'ce')
12404+ if os.path.isdir(winCEpath):
12405+ common_bindirs,_1,_2 = conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat'))
12406+ if os.path.isdir(os.path.join(winCEpath, 'lib', platform)):
12407+ bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)] + common_bindirs
12408+ incdirs = [include, os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include')]
12409+ libdirs = [lib, os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform)]
12410+ cetargets.append((platform, (platform, (bindirs,incdirs,libdirs))))
12411+ versions.append((device+' '+version, cetargets))
12412+ if os.path.isfile(os.path.join(path, 'VC', 'vcvarsall.bat')):
12413+ for target,realtarget in all_msvc_platforms[::-1]:
12414+ try:
12415+ targets.append((target, (realtarget, conf.get_msvc_version('msvc', version, target, os.path.join(path, 'VC', 'vcvarsall.bat')))))
12416+ except:
12417+ pass
12418+ elif os.path.isfile(os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')):
12419+ try:
12420+ targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')))))
12421+ except Configure.ConfigurationError:
12422+ pass
12423+ versions.append(('msvc '+version, targets))
12424+
12425+ except WindowsError:
12426+ continue
12427+
12428+@conf
12429+def gather_icl_versions(conf, versions):
12430+ version_pattern = re.compile('^...?.?\....?.?')
12431+ try:
12432+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
12433+ except WindowsError:
12434+ try:
12435+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
12436+ except WindowsError:
12437+ return
12438+ index = 0
12439+ while 1:
12440+ try:
12441+ version = _winreg.EnumKey(all_versions, index)
12442+ except WindowsError:
12443+ break
12444+ index = index + 1
12445+ if not version_pattern.match(version):
12446+ continue
12447+ targets = []
12448+ for target,arch in all_icl_platforms:
12449+ try:
12450+ icl_version = _winreg.OpenKey(all_versions, version+'\\'+target)
12451+ path,type = _winreg.QueryValueEx(icl_version,'ProductDir')
12452+ if os.path.isfile(os.path.join(path, 'bin', 'iclvars.bat')):
12453+ try:
12454+ targets.append((target, (arch, conf.get_msvc_version('intel', version, target, os.path.join(path, 'bin', 'iclvars.bat')))))
12455+ except Configure.ConfigurationError:
12456+ pass
12457+ except WindowsError:
12458+ continue
12459+ major = version[0:2]
12460+ versions.append(('intel ' + major, targets))
12461+
12462+@conf
12463+def get_msvc_versions(conf):
12464+ if not conf.env.MSVC_INSTALLED_VERSIONS:
12465+ lst = []
12466+ conf.gather_msvc_versions(lst)
12467+ conf.gather_wsdk_versions(lst)
12468+ conf.gather_icl_versions(lst)
12469+ conf.env.MSVC_INSTALLED_VERSIONS = lst
12470+ return conf.env.MSVC_INSTALLED_VERSIONS
12471+
12472+@conf
12473+def print_all_msvc_detected(conf):
12474+ for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']:
12475+ info(version)
12476+ for target,l in targets:
12477+ info("\t"+target)
12478+
12479+def detect_msvc(conf):
12480+ versions = get_msvc_versions(conf)
12481+ return setup_msvc(conf, versions)
12482+
12483+@conf
12484+def find_lt_names_msvc(self, libname, is_static=False):
12485+ """
12486+ Win32/MSVC specific code to glean out information from libtool la files.
12487+ this function is not attached to the task_gen class
12488+ """
12489+ lt_names=[
12490+ 'lib%s.la' % libname,
12491+ '%s.la' % libname,
12492+ ]
12493+
12494+ for path in self.env['LIBPATH']:
12495+ for la in lt_names:
12496+ laf=os.path.join(path,la)
12497+ dll=None
12498+ if os.path.exists(laf):
12499+ ltdict=read_la_file(laf)
12500+ lt_libdir=None
12501+ if ltdict.get('libdir', ''):
12502+ lt_libdir = ltdict['libdir']
12503+ if not is_static and ltdict.get('library_names', ''):
12504+ dllnames=ltdict['library_names'].split()
12505+ dll=dllnames[0].lower()
12506+ dll=re.sub('\.dll$', '', dll)
12507+ return (lt_libdir, dll, False)
12508+ elif ltdict.get('old_library', ''):
12509+ olib=ltdict['old_library']
12510+ if os.path.exists(os.path.join(path,olib)):
12511+ return (path, olib, True)
12512+ elif lt_libdir != '' and os.path.exists(os.path.join(lt_libdir,olib)):
12513+ return (lt_libdir, olib, True)
12514+ else:
12515+ return (None, olib, True)
12516+ else:
12517+ raise Utils.WafError('invalid libtool object file: %s' % laf)
12518+ return (None, None, None)
12519+
12520+@conf
12521+def libname_msvc(self, libname, is_static=False, mandatory=False):
12522+ lib = libname.lower()
12523+ lib = re.sub('\.lib$','',lib)
12524+
12525+ if lib in g_msvc_systemlibs:
12526+ return lib
12527+
12528+ lib=re.sub('^lib','',lib)
12529+
12530+ if lib == 'm':
12531+ return None
12532+
12533+ (lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static)
12534+
12535+ if lt_path != None and lt_libname != None:
12536+ if lt_static == True:
12537+			# file existence check has been made by find_lt_names
12538+ return os.path.join(lt_path,lt_libname)
12539+
12540+ if lt_path != None:
12541+ _libpaths=[lt_path] + self.env['LIBPATH']
12542+ else:
12543+ _libpaths=self.env['LIBPATH']
12544+
12545+ static_libs=[
12546+ 'lib%ss.lib' % lib,
12547+ 'lib%s.lib' % lib,
12548+ '%ss.lib' % lib,
12549+ '%s.lib' %lib,
12550+ ]
12551+
12552+ dynamic_libs=[
12553+ 'lib%s.dll.lib' % lib,
12554+ 'lib%s.dll.a' % lib,
12555+ '%s.dll.lib' % lib,
12556+ '%s.dll.a' % lib,
12557+ 'lib%s_d.lib' % lib,
12558+ '%s_d.lib' % lib,
12559+ '%s.lib' %lib,
12560+ ]
12561+
12562+ libnames=static_libs
12563+ if not is_static:
12564+ libnames=dynamic_libs + static_libs
12565+
12566+ for path in _libpaths:
12567+ for libn in libnames:
12568+ if os.path.exists(os.path.join(path, libn)):
12569+ debug('msvc: lib found: %s', os.path.join(path,libn))
12570+ return re.sub('\.lib$', '',libn)
12571+
12572+ #if no lib can be found, just return the libname as msvc expects it
12573+ if mandatory:
12574+ self.fatal("The library %r could not be found" % libname)
12575+ return re.sub('\.lib$', '', libname)
12576+
12577+@conf
12578+def check_lib_msvc(self, libname, is_static=False, uselib_store=None, mandatory=False):
12579+ "This is the api to use"
12580+ libn = self.libname_msvc(libname, is_static, mandatory)
12581+
12582+ if not uselib_store:
12583+ uselib_store = libname.upper()
12584+
12585+ # Note: ideally we should be able to place the lib in the right env var, either STATICLIB or LIB,
12586+ # but we don't distinguish static libs from shared libs.
12587+ # This is ok since msvc doesn't have any special linker flag to select static libs (no env['STATICLIB_MARKER'])
12588+ if False and is_static: # disabled
12589+ self.env['STATICLIB_' + uselib_store] = [libn]
12590+ else:
12591+ self.env['LIB_' + uselib_store] = [libn]
12592+
12593+@conf
12594+def check_libs_msvc(self, libnames, is_static=False, mandatory=False):
12595+ for libname in Utils.to_list(libnames):
12596+ self.check_lib_msvc(libname, is_static, mandatory=mandatory)
12597+
12598+@conftest
12599+def no_autodetect(conf):
12600+ conf.eval_rules(detect.replace('autodetect', ''))
12601+
12602+
12603+detect = '''
12604+autodetect
12605+find_msvc
12606+msvc_common_flags
12607+cc_load_tools
12608+cxx_load_tools
12609+cc_add_flags
12610+cxx_add_flags
12611+link_add_flags
12612+'''
12613+
12614+@conftest
12615+def autodetect(conf):
12616+ v = conf.env
12617+ compiler, version, path, includes, libdirs = detect_msvc(conf)
12618+ v['PATH'] = path
12619+ v['CPPPATH'] = includes
12620+ v['LIBPATH'] = libdirs
12621+ v['MSVC_COMPILER'] = compiler
12622+
12623+def _get_prog_names(conf, compiler):
12624+ if compiler=='intel':
12625+ compiler_name = 'ICL'
12626+ linker_name = 'XILINK'
12627+ lib_name = 'XILIB'
12628+ else:
12629+ # assumes CL.exe
12630+ compiler_name = 'CL'
12631+ linker_name = 'LINK'
12632+ lib_name = 'LIB'
12633+ return compiler_name, linker_name, lib_name
12634+
12635+@conftest
12636+def find_msvc(conf):
12637+ # due to path format limitations, limit operation only to native Win32. Yeah it sucks.
12638+ if sys.platform != 'win32':
12639+ conf.fatal('MSVC module only works under native Win32 Python! cygwin is not supported yet')
12640+
12641+ v = conf.env
12642+
12643+ compiler, version, path, includes, libdirs = detect_msvc(conf)
12644+
12645+ compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
12646+ has_msvc_manifest = (compiler == 'msvc' and float(version) >= 8) or (compiler == 'wsdk' and float(version) >= 6) or (compiler == 'intel' and float(version) >= 11)
12647+
12648+ # compiler
12649+ cxx = None
12650+ if v.CXX: cxx = v.CXX
12651+ elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
12652+ if not cxx: cxx = conf.find_program(compiler_name, var='CXX', path_list=path, mandatory=True)
12653+ cxx = conf.cmd_to_list(cxx)
12654+
12655+ # before setting anything, check if the compiler is really msvc
12656+ env = dict(conf.environ)
12657+ env.update(PATH = ';'.join(path))
12658+ if not Utils.cmd_output([cxx, '/nologo', '/?'], silent=True, env=env):
12659+ conf.fatal('the msvc compiler could not be identified')
12660+
12661+ link = v.LINK_CXX
12662+ if not link:
12663+ link = conf.find_program(linker_name, path_list=path, mandatory=True)
12664+ ar = v.AR
12665+ if not ar:
12666+ ar = conf.find_program(lib_name, path_list=path, mandatory=True)
12667+
12668+ # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
12669+ mt = v.MT
12670+ if has_msvc_manifest:
12671+ mt = conf.find_program('MT', path_list=path, mandatory=True)
12672+
12673+ # no more possibility of failure means the data state will be consistent
12674+ # we may store the data safely now
12675+
12676+ v.MSVC_MANIFEST = has_msvc_manifest
12677+ v.PATH = path
12678+ v.CPPPATH = includes
12679+ v.LIBPATH = libdirs
12680+
12681+ # c/c++ compiler
12682+ v.CC = v.CXX = cxx
12683+ v.CC_NAME = v.CXX_NAME = 'msvc'
12684+
12685+ v.LINK = v.LINK_CXX = link
12686+ if not v.LINK_CC:
12687+ v.LINK_CC = v.LINK_CXX
12688+
12689+ v.AR = ar
12690+ v.MT = mt
12691+ v.MTFLAGS = v.ARFLAGS = ['/NOLOGO']
12692+
12693+
12694+ conf.check_tool('winres')
12695+
12696+ if not conf.env.WINRC:
12697+ warn('Resource compiler not found. Compiling resource file is disabled')
12698+
12699+ # environment flags
12700+ try: v.prepend_value('CPPPATH', conf.environ['INCLUDE'])
12701+ except KeyError: pass
12702+ try: v.prepend_value('LIBPATH', conf.environ['LIB'])
12703+ except KeyError: pass
12704+
12705+@conftest
12706+def msvc_common_flags(conf):
12707+ v = conf.env
12708+
12709+ v['CPPFLAGS'] = ['/W3', '/nologo']
12710+
12711+ v['CCDEFINES_ST'] = '/D%s'
12712+ v['CXXDEFINES_ST'] = '/D%s'
12713+
12714+	# TODO just use _WIN32, which is defined by the compiler itself!
12715+	v['CCDEFINES'] = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 macro anyway
12716+	v['CXXDEFINES'] = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 macro anyway
12717+
12718+ v['_CCINCFLAGS'] = []
12719+ v['_CCDEFFLAGS'] = []
12720+ v['_CXXINCFLAGS'] = []
12721+ v['_CXXDEFFLAGS'] = []
12722+
12723+ v['CC_SRC_F'] = ''
12724+ v['CC_TGT_F'] = ['/c', '/Fo']
12725+ v['CXX_SRC_F'] = ''
12726+ v['CXX_TGT_F'] = ['/c', '/Fo']
12727+
12728+ v['CPPPATH_ST'] = '/I%s' # template for adding include paths
12729+
12730+ v['AR_TGT_F'] = v['CCLNK_TGT_F'] = v['CXXLNK_TGT_F'] = '/OUT:'
12731+
12732+ # Subsystem specific flags
12733+ v['CPPFLAGS_CONSOLE'] = ['/SUBSYSTEM:CONSOLE']
12734+ v['CPPFLAGS_NATIVE'] = ['/SUBSYSTEM:NATIVE']
12735+ v['CPPFLAGS_POSIX'] = ['/SUBSYSTEM:POSIX']
12736+ v['CPPFLAGS_WINDOWS'] = ['/SUBSYSTEM:WINDOWS']
12737+ v['CPPFLAGS_WINDOWSCE'] = ['/SUBSYSTEM:WINDOWSCE']
12738+
12739+ # CRT specific flags
12740+ v['CPPFLAGS_CRT_MULTITHREADED'] = ['/MT']
12741+ v['CPPFLAGS_CRT_MULTITHREADED_DLL'] = ['/MD']
12742+
12743+ # TODO these are defined by the compiler itself!
12744+ v['CPPDEFINES_CRT_MULTITHREADED'] = ['_MT'] # this is defined by the compiler itself!
12745+ v['CPPDEFINES_CRT_MULTITHREADED_DLL'] = ['_MT', '_DLL'] # these are defined by the compiler itself!
12746+
12747+ v['CPPFLAGS_CRT_MULTITHREADED_DBG'] = ['/MTd']
12748+ v['CPPFLAGS_CRT_MULTITHREADED_DLL_DBG'] = ['/MDd']
12749+
12750+ # TODO these are defined by the compiler itself!
12751+ v['CPPDEFINES_CRT_MULTITHREADED_DBG'] = ['_DEBUG', '_MT'] # these are defined by the compiler itself!
12752+ v['CPPDEFINES_CRT_MULTITHREADED_DLL_DBG'] = ['_DEBUG', '_MT', '_DLL'] # these are defined by the compiler itself!
12753+
12754+ # compiler debug levels
12755+ v['CCFLAGS'] = ['/TC']
12756+ v['CCFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG']
12757+ v['CCFLAGS_RELEASE'] = ['/O2', '/DNDEBUG']
12758+ v['CCFLAGS_DEBUG'] = ['/Od', '/RTC1', '/ZI']
12759+ v['CCFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI']
12760+
12761+ v['CXXFLAGS'] = ['/TP', '/EHsc']
12762+ v['CXXFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG']
12763+ v['CXXFLAGS_RELEASE'] = ['/O2', '/DNDEBUG']
12764+
12765+ v['CXXFLAGS_DEBUG'] = ['/Od', '/RTC1', '/ZI']
12766+ v['CXXFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI']
12767+
12768+ # linker
12769+ v['LIB'] = []
12770+
12771+ v['LIB_ST'] = '%s.lib' # template for adding libs
12772+ v['LIBPATH_ST'] = '/LIBPATH:%s' # template for adding libpaths
12773+	v['STATICLIB_ST'] = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good practice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'
12774+ v['STATICLIBPATH_ST'] = '/LIBPATH:%s'
12775+
12776+ v['LINKFLAGS'] = ['/NOLOGO']
12777+ if v['MSVC_MANIFEST']:
12778+ v.append_value('LINKFLAGS', '/MANIFEST')
12779+ v['LINKFLAGS_DEBUG'] = ['/DEBUG']
12780+ v['LINKFLAGS_ULTRADEBUG'] = ['/DEBUG']
12781+
12782+ # shared library
12783+ v['shlib_CCFLAGS'] = ['']
12784+ v['shlib_CXXFLAGS'] = ['']
12785+ v['shlib_LINKFLAGS']= ['/DLL']
12786+ v['shlib_PATTERN'] = '%s.dll'
12787+ v['implib_PATTERN'] = '%s.lib'
12788+ v['IMPLIB_ST'] = '/IMPLIB:%s'
12789+
12790+ # static library
12791+ v['staticlib_LINKFLAGS'] = ['']
12792+	v['staticlib_PATTERN'] = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good practice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'
12793+
12794+ # program
12795+ v['program_PATTERN'] = '%s.exe'
12796+
12797+
12798+#######################################################################################################
12799+##### conf above, build below
12800+
12801+@after('apply_link')
12802+@feature('cc', 'cxx')
12803+def apply_flags_msvc(self):
12804+ if self.env.CC_NAME != 'msvc' or not self.link_task:
12805+ return
12806+
12807+ subsystem = getattr(self, 'subsystem', '')
12808+ if subsystem:
12809+ subsystem = '/subsystem:%s' % subsystem
12810+ flags = 'cstaticlib' in self.features and 'ARFLAGS' or 'LINKFLAGS'
12811+ self.env.append_value(flags, subsystem)
12812+
12813+ if getattr(self, 'link_task', None) and not 'cstaticlib' in self.features:
12814+ for f in self.env.LINKFLAGS:
12815+ d = f.lower()
12816+ if d[1:] == 'debug':
12817+ pdbnode = self.link_task.outputs[0].change_ext('.pdb')
12818+ pdbfile = pdbnode.bldpath(self.env)
12819+ self.link_task.outputs.append(pdbnode)
12820+ self.bld.install_files(self.install_path, [pdbnode], env=self.env)
12821+ break
12822+
12823+@feature('cprogram', 'cshlib', 'cstaticlib')
12824+@after('apply_lib_vars')
12825+@before('apply_obj_vars')
12826+def apply_obj_vars_msvc(self):
12827+ if self.env['CC_NAME'] != 'msvc':
12828+ return
12829+
12830+ try:
12831+ self.meths.remove('apply_obj_vars')
12832+ except ValueError:
12833+ pass
12834+
12835+ libpaths = getattr(self, 'libpaths', [])
12836+ if not libpaths: self.libpaths = libpaths
12837+
12838+ env = self.env
12839+ app = env.append_unique
12840+
12841+ cpppath_st = env['CPPPATH_ST']
12842+ lib_st = env['LIB_ST']
12843+ staticlib_st = env['STATICLIB_ST']
12844+ libpath_st = env['LIBPATH_ST']
12845+ staticlibpath_st = env['STATICLIBPATH_ST']
12846+
12847+ for i in env['LIBPATH']:
12848+ app('LINKFLAGS', libpath_st % i)
12849+ if not libpaths.count(i):
12850+ libpaths.append(i)
12851+
12852+ for i in env['LIBPATH']:
12853+ app('LINKFLAGS', staticlibpath_st % i)
12854+ if not libpaths.count(i):
12855+ libpaths.append(i)
12856+
12857+	# I doubt that anyone will make a fully static binary anyway
12858+ if not env['FULLSTATIC']:
12859+ if env['STATICLIB'] or env['LIB']:
12860+ app('LINKFLAGS', env['SHLIB_MARKER']) # TODO does SHLIB_MARKER work?
12861+
12862+ for i in env['STATICLIB']:
12863+ app('LINKFLAGS', staticlib_st % i)
12864+
12865+ for i in env['LIB']:
12866+ app('LINKFLAGS', lib_st % i)
12867+
12868+# split the manifest file processing from the link task, like for the rc processing
12869+
12870+@feature('cprogram', 'cshlib')
12871+@after('apply_link')
12872+def apply_manifest(self):
12873+ """Special linker for MSVC with support for embedding manifests into DLL's
12874+	"""Special linker support for MSVC, embedding manifests into DLLs
12875+	and executables compiled by Visual Studio 2005 and later. Without
12876+	the manifest file, the binaries are unusable.
12877+
12878+ if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST:
12879+ out_node = self.link_task.outputs[0]
12880+ man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
12881+ self.link_task.outputs.append(man_node)
12882+ self.link_task.do_manifest = True
12883+
12884+def exec_mf(self):
12885+ env = self.env
12886+ mtool = env['MT']
12887+ if not mtool:
12888+ return 0
12889+
12890+ self.do_manifest = False
12891+
12892+ outfile = self.outputs[0].bldpath(env)
12893+
12894+ manifest = None
12895+ for out_node in self.outputs:
12896+ if out_node.name.endswith('.manifest'):
12897+ manifest = out_node.bldpath(env)
12898+ break
12899+ if manifest is None:
12900+ # Should never get here. If we do, it means the manifest file was
12901+ # never added to the outputs list, thus we don't have a manifest file
12902+ # to embed, so we just return.
12903+ return 0
12904+
12905+	# embedding mode. Different for EXEs and DLLs.
12906+ # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
12907+ mode = ''
12908+ if 'cprogram' in self.generator.features:
12909+ mode = '1'
12910+ elif 'cshlib' in self.generator.features:
12911+ mode = '2'
12912+
12913+ debug('msvc: embedding manifest')
12914+ #flags = ' '.join(env['MTFLAGS'] or [])
12915+
12916+ lst = []
12917+ lst.extend([env['MT']])
12918+ lst.extend(Utils.to_list(env['MTFLAGS']))
12919+ lst.extend(Utils.to_list("-manifest"))
12920+ lst.extend(Utils.to_list(manifest))
12921+ lst.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))
12922+
12923+ #cmd='%s %s -manifest "%s" -outputresource:"%s";#%s' % (mtool, flags,
12924+ # manifest, outfile, mode)
12925+ lst = [lst]
12926+ return self.exec_command(*lst)
12927+
12928+########## stupid evil command modification: concatenate the tokens /Fx, /doc, and /x: with the next token
12929+
12930+def exec_command_msvc(self, *k, **kw):
12931+	"instead of quoting all the paths and keeping the shell, we can just join the options msvc is interested in"
12932+ if self.env['CC_NAME'] == 'msvc':
12933+ if isinstance(k[0], list):
12934+ lst = []
12935+ carry = ''
12936+ for a in k[0]:
12937+ if len(a) == 3 and a.startswith('/F') or a == '/doc' or a[-1] == ':':
12938+ carry = a
12939+ else:
12940+ lst.append(carry + a)
12941+ carry = ''
12942+ k = [lst]
12943+
12944+ env = dict(os.environ)
12945+ env.update(PATH = ';'.join(self.env['PATH']))
12946+ kw['env'] = env
12947+
12948+ ret = self.generator.bld.exec_command(*k, **kw)
12949+ if ret: return ret
12950+ if getattr(self, 'do_manifest', None):
12951+ ret = exec_mf(self)
12952+ return ret
12953+
12954+for k in 'cc cxx winrc cc_link cxx_link static_link qxx'.split():
12955+ cls = Task.TaskBase.classes.get(k, None)
12956+ if cls:
12957+ cls.exec_command = exec_command_msvc
12958+
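
The msvc tool above is driven entirely through the configuration environment. Below is a minimal wscript sketch of how a project might use it under waf 1.5; the version strings, library name and file names are illustrative assumptions, not values taken from the patch.

def configure(conf):
	# Optionally restrict detection; setup_msvc() reads both lists before probing the registry.
	conf.env['MSVC_VERSIONS'] = ['msvc 9.0', 'wsdk 7.0']   # assumed installed toolchains
	conf.env['MSVC_TARGETS'] = ['x86']
	conf.check_tool('msvc')            # runs the 'detect' chain: autodetect, find_msvc, msvc_common_flags, ...
	conf.print_all_msvc_detected()     # log every compiler/target pair that was found
	conf.check_lib_msvc('user32')      # check_lib_msvc is "the api to use"; fills LIB_USER32

def build(bld):
	bld.new_task_gen(
		features = 'cc cprogram',
		source = 'main.c',
		target = 'app',
		uselib = 'USER32',
		subsystem = 'console')         # turned into /subsystem:console by apply_flags_msvc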
12959diff --git a/buildtools/wafadmin/Tools/nasm.py b/buildtools/wafadmin/Tools/nasm.py
12960new file mode 100644
12961index 0000000..b99c3c7
12962--- /dev/null
12963+++ b/buildtools/wafadmin/Tools/nasm.py
12964@@ -0,0 +1,49 @@
12965+#!/usr/bin/env python
12966+# encoding: utf-8
12967+# Thomas Nagy, 2008
12968+
12969+"""
12970+Nasm processing
12971+"""
12972+
12973+import os
12974+import TaskGen, Task, Utils
12975+from TaskGen import taskgen, before, extension
12976+
12977+nasm_str = '${NASM} ${NASM_FLAGS} ${NASM_INCLUDES} ${SRC} -o ${TGT}'
12978+
12979+EXT_NASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']
12980+
12981+@before('apply_link')
12982+def apply_nasm_vars(self):
12983+
12984+ # flags
12985+ if hasattr(self, 'nasm_flags'):
12986+ for flag in self.to_list(self.nasm_flags):
12987+ self.env.append_value('NASM_FLAGS', flag)
12988+
12989+ # includes - well, if we suppose it works with c processing
12990+ if hasattr(self, 'includes'):
12991+ for inc in self.to_list(self.includes):
12992+ node = self.path.find_dir(inc)
12993+ if not node:
12994+				raise Utils.WafError('cannot find the dir ' + inc)
12995+ self.env.append_value('NASM_INCLUDES', '-I%s' % node.srcpath(self.env))
12996+ self.env.append_value('NASM_INCLUDES', '-I%s' % node.bldpath(self.env))
12997+
12998+@extension(EXT_NASM)
12999+def nasm_file(self, node):
13000+ try: obj_ext = self.obj_ext
13001+ except AttributeError: obj_ext = '_%d.o' % self.idx
13002+
13003+ task = self.create_task('nasm', node, node.change_ext(obj_ext))
13004+ self.compiled_tasks.append(task)
13005+
13006+ self.meths.append('apply_nasm_vars')
13007+
13008+# create our action here
13009+Task.simple_task_type('nasm', nasm_str, color='BLUE', ext_out='.o', shell=False)
13010+
13011+def detect(conf):
13012+ nasm = conf.find_program(['nasm', 'yasm'], var='NASM', mandatory=True)
13013+
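
A short usage sketch for the nasm tool above, assuming waf 1.5 with a C toolchain also loaded; the file names and the '-f elf' flag are illustrative assumptions.

def configure(conf):
	conf.check_tool('gcc nasm')            # detect() accepts either 'nasm' or 'yasm' and stores it in NASM

def build(bld):
	bld.new_task_gen(
		features = 'cc cprogram',
		source = 'main.c fast_copy.asm',   # the .asm file is routed through the 'nasm' task by its extension
		target = 'demo',
		includes = '.',                    # also turned into -I flags via NASM_INCLUDES
		nasm_flags = '-f elf')             # appended to NASM_FLAGS by apply_nasm_vars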
13014diff --git a/buildtools/wafadmin/Tools/ocaml.py b/buildtools/wafadmin/Tools/ocaml.py
13015new file mode 100644
13016index 0000000..20c9269
13017--- /dev/null
13018+++ b/buildtools/wafadmin/Tools/ocaml.py
13019@@ -0,0 +1,298 @@
13020+#!/usr/bin/env python
13021+# encoding: utf-8
13022+# Thomas Nagy, 2006 (ita)
13023+
13024+"ocaml support"
13025+
13026+import os, re
13027+import TaskGen, Utils, Task, Build
13028+from Logs import error
13029+from TaskGen import taskgen, feature, before, after, extension
13030+
13031+EXT_MLL = ['.mll']
13032+EXT_MLY = ['.mly']
13033+EXT_MLI = ['.mli']
13034+EXT_MLC = ['.c']
13035+EXT_ML = ['.ml']
13036+
13037+open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
13038+foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
13039+def filter_comments(txt):
13040+ meh = [0]
13041+ def repl(m):
13042+ if m.group(1): meh[0] += 1
13043+ elif m.group(2): meh[0] -= 1
13044+ elif not meh[0]: return m.group(0)
13045+ return ''
13046+ return foo.sub(repl, txt)
13047+
13048+def scan(self):
13049+ node = self.inputs[0]
13050+ code = filter_comments(node.read(self.env))
13051+
13052+ global open_re
13053+ names = []
13054+ import_iterator = open_re.finditer(code)
13055+ if import_iterator:
13056+ for import_match in import_iterator:
13057+ names.append(import_match.group(1))
13058+ found_lst = []
13059+ raw_lst = []
13060+ for name in names:
13061+ nd = None
13062+ for x in self.incpaths:
13063+ nd = x.find_resource(name.lower()+'.ml')
13064+ if not nd: nd = x.find_resource(name+'.ml')
13065+ if nd:
13066+ found_lst.append(nd)
13067+ break
13068+ else:
13069+ raw_lst.append(name)
13070+
13071+ return (found_lst, raw_lst)
13072+
13073+native_lst=['native', 'all', 'c_object']
13074+bytecode_lst=['bytecode', 'all']
13075+class ocaml_taskgen(TaskGen.task_gen):
13076+ def __init__(self, *k, **kw):
13077+ TaskGen.task_gen.__init__(self, *k, **kw)
13078+
13079+@feature('ocaml')
13080+def init_ml(self):
13081+ Utils.def_attrs(self,
13082+ type = 'all',
13083+ incpaths_lst = [],
13084+ bld_incpaths_lst = [],
13085+ mlltasks = [],
13086+ mlytasks = [],
13087+ mlitasks = [],
13088+ native_tasks = [],
13089+ bytecode_tasks = [],
13090+ linktasks = [],
13091+ bytecode_env = None,
13092+ native_env = None,
13093+ compiled_tasks = [],
13094+ includes = '',
13095+ uselib = '',
13096+ are_deps_set = 0)
13097+
13098+@feature('ocaml')
13099+@after('init_ml')
13100+def init_envs_ml(self):
13101+
13102+ self.islibrary = getattr(self, 'islibrary', False)
13103+
13104+ global native_lst, bytecode_lst
13105+ self.native_env = None
13106+ if self.type in native_lst:
13107+ self.native_env = self.env.copy()
13108+ if self.islibrary: self.native_env['OCALINKFLAGS'] = '-a'
13109+
13110+ self.bytecode_env = None
13111+ if self.type in bytecode_lst:
13112+ self.bytecode_env = self.env.copy()
13113+ if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a'
13114+
13115+ if self.type == 'c_object':
13116+ self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
13117+
13118+@feature('ocaml')
13119+@before('apply_vars_ml')
13120+@after('init_envs_ml')
13121+def apply_incpaths_ml(self):
13122+ inc_lst = self.includes.split()
13123+ lst = self.incpaths_lst
13124+ for dir in inc_lst:
13125+ node = self.path.find_dir(dir)
13126+ if not node:
13127+ error("node not found: " + str(dir))
13128+ continue
13129+ self.bld.rescan(node)
13130+ if not node in lst: lst.append(node)
13131+ self.bld_incpaths_lst.append(node)
13132+ # now the nodes are added to self.incpaths_lst
13133+
13134+@feature('ocaml')
13135+@before('apply_core')
13136+def apply_vars_ml(self):
13137+ for i in self.incpaths_lst:
13138+ if self.bytecode_env:
13139+ app = self.bytecode_env.append_value
13140+ app('OCAMLPATH', '-I')
13141+ app('OCAMLPATH', i.srcpath(self.env))
13142+ app('OCAMLPATH', '-I')
13143+ app('OCAMLPATH', i.bldpath(self.env))
13144+
13145+ if self.native_env:
13146+ app = self.native_env.append_value
13147+ app('OCAMLPATH', '-I')
13148+ app('OCAMLPATH', i.bldpath(self.env))
13149+ app('OCAMLPATH', '-I')
13150+ app('OCAMLPATH', i.srcpath(self.env))
13151+
13152+ varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
13153+ for name in self.uselib.split():
13154+ for vname in varnames:
13155+ cnt = self.env[vname+'_'+name]
13156+ if cnt:
13157+ if self.bytecode_env: self.bytecode_env.append_value(vname, cnt)
13158+ if self.native_env: self.native_env.append_value(vname, cnt)
13159+
13160+@feature('ocaml')
13161+@after('apply_core')
13162+def apply_link_ml(self):
13163+
13164+ if self.bytecode_env:
13165+ ext = self.islibrary and '.cma' or '.run'
13166+
13167+ linktask = self.create_task('ocalink')
13168+ linktask.bytecode = 1
13169+ linktask.set_outputs(self.path.find_or_declare(self.target + ext))
13170+ linktask.obj = self
13171+ linktask.env = self.bytecode_env
13172+ self.linktasks.append(linktask)
13173+
13174+ if self.native_env:
13175+ if self.type == 'c_object': ext = '.o'
13176+ elif self.islibrary: ext = '.cmxa'
13177+ else: ext = ''
13178+
13179+ linktask = self.create_task('ocalinkx')
13180+ linktask.set_outputs(self.path.find_or_declare(self.target + ext))
13181+ linktask.obj = self
13182+ linktask.env = self.native_env
13183+ self.linktasks.append(linktask)
13184+
13185+ # we produce a .o file to be used by gcc
13186+ self.compiled_tasks.append(linktask)
13187+
13188+@extension(EXT_MLL)
13189+def mll_hook(self, node):
13190+ mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'), env=self.native_env)
13191+ self.mlltasks.append(mll_task)
13192+
13193+ self.allnodes.append(mll_task.outputs[0])
13194+
13195+@extension(EXT_MLY)
13196+def mly_hook(self, node):
13197+ mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')], env=self.native_env)
13198+ self.mlytasks.append(mly_task)
13199+ self.allnodes.append(mly_task.outputs[0])
13200+
13201+ task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'), env=self.native_env)
13202+
13203+@extension(EXT_MLI)
13204+def mli_hook(self, node):
13205+ task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'), env=self.native_env)
13206+ self.mlitasks.append(task)
13207+
13208+@extension(EXT_MLC)
13209+def mlc_hook(self, node):
13210+ task = self.create_task('ocamlcc', node, node.change_ext('.o'), env=self.native_env)
13211+ self.compiled_tasks.append(task)
13212+
13213+@extension(EXT_ML)
13214+def ml_hook(self, node):
13215+ if self.native_env:
13216+ task = self.create_task('ocamlx', node, node.change_ext('.cmx'), env=self.native_env)
13217+ task.obj = self
13218+ task.incpaths = self.bld_incpaths_lst
13219+ self.native_tasks.append(task)
13220+
13221+ if self.bytecode_env:
13222+ task = self.create_task('ocaml', node, node.change_ext('.cmo'), env=self.bytecode_env)
13223+ task.obj = self
13224+ task.bytecode = 1
13225+ task.incpaths = self.bld_incpaths_lst
13226+ self.bytecode_tasks.append(task)
13227+
13228+def compile_may_start(self):
13229+ if not getattr(self, 'flag_deps', ''):
13230+ self.flag_deps = 1
13231+
13232+ # the evil part is that we can only compute the dependencies after the
13233+ # source files can be read (this means actually producing the source files)
13234+ if getattr(self, 'bytecode', ''): alltasks = self.obj.bytecode_tasks
13235+ else: alltasks = self.obj.native_tasks
13236+
13237+ self.signature() # ensure that files are scanned - unfortunately
13238+ tree = self.generator.bld
13239+ env = self.env
13240+ for node in self.inputs:
13241+ lst = tree.node_deps[self.unique_id()]
13242+ for depnode in lst:
13243+ for t in alltasks:
13244+ if t == self: continue
13245+ if depnode in t.inputs:
13246+ self.set_run_after(t)
13247+
13248+ # TODO necessary to get the signature right - for now
13249+ delattr(self, 'cache_sig')
13250+ self.signature()
13251+
13252+ return Task.Task.runnable_status(self)
13253+
13254+b = Task.simple_task_type
13255+cls = b('ocamlx', '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
13256+cls.runnable_status = compile_may_start
13257+cls.scan = scan
13258+
13259+b = Task.simple_task_type
13260+cls = b('ocaml', '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
13261+cls.runnable_status = compile_may_start
13262+cls.scan = scan
13263+
13264+
13265+b('ocamlcmi', '${OCAMLC} ${OCAMLPATH} ${INCLUDES} -o ${TGT} -c ${SRC}', color='BLUE', before="ocaml ocamlcc ocamlx")
13266+b('ocamlcc', 'cd ${TGT[0].bld_dir(env)} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${INCLUDES} -c ${SRC[0].abspath(env)}', color='GREEN')
13267+
13268+b('ocamllex', '${OCAMLLEX} ${SRC} -o ${TGT}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
13269+b('ocamlyacc', '${OCAMLYACC} -b ${TGT[0].bld_base(env)} ${SRC}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
13270+
13271+
13272+def link_may_start(self):
13273+ if not getattr(self, 'order', ''):
13274+
13275+ # now reorder the inputs given the task dependencies
13276+ if getattr(self, 'bytecode', 0): alltasks = self.obj.bytecode_tasks
13277+ else: alltasks = self.obj.native_tasks
13278+
13279+ # this part is difficult, we do not have a total order on the tasks
13280+ # if the dependencies are wrong, this may not stop
13281+ seen = []
13282+ pendant = []+alltasks
13283+ while pendant:
13284+ task = pendant.pop(0)
13285+ if task in seen: continue
13286+ for x in task.run_after:
13287+ if not x in seen:
13288+ pendant.append(task)
13289+ break
13290+ else:
13291+ seen.append(task)
13292+ self.inputs = [x.outputs[0] for x in seen]
13293+ self.order = 1
13294+ return Task.Task.runnable_status(self)
13295+
13296+act = b('ocalink', '${OCAMLC} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS} ${SRC}', color='YELLOW', after="ocaml ocamlcc")
13297+act.runnable_status = link_may_start
13298+act = b('ocalinkx', '${OCAMLOPT} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS_OPT} ${SRC}', color='YELLOW', after="ocamlx ocamlcc")
13299+act.runnable_status = link_may_start
13300+
13301+def detect(conf):
13302+ opt = conf.find_program('ocamlopt', var='OCAMLOPT')
13303+ occ = conf.find_program('ocamlc', var='OCAMLC')
13304+ if (not opt) or (not occ):
13305+ conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
13306+
13307+ v = conf.env
13308+ v['OCAMLC'] = occ
13309+ v['OCAMLOPT'] = opt
13310+ v['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX')
13311+ v['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC')
13312+ v['OCAMLFLAGS'] = ''
13313+ v['OCAMLLIB'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
13314+ v['LIBPATH_OCAML'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
13315+ v['CPPPATH_OCAML'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
13316+ v['LIB_OCAML'] = 'camlrun'
13317+
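
A usage sketch for the ocaml tool above, assuming waf 1.5; the source and target names are illustrative assumptions.

def configure(conf):
	conf.check_tool('ocaml')               # fatal if ocamlopt or ocamlc cannot be found

def build(bld):
	bld.new_task_gen(
		features = 'ocaml',
		type = 'all',                      # build both the bytecode and the native variants
		source = 'lexer.mll parser.mly main.ml',
		target = 'calc',
		includes = '.',
		uselib = '')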
13318diff --git a/buildtools/wafadmin/Tools/osx.py b/buildtools/wafadmin/Tools/osx.py
13319new file mode 100644
13320index 0000000..561eca4
13321--- /dev/null
13322+++ b/buildtools/wafadmin/Tools/osx.py
13323@@ -0,0 +1,188 @@
13324+#!/usr/bin/env python
13325+# encoding: utf-8
13326+# Thomas Nagy 2008
13327+
13328+"""MacOSX related tools
13329+
13330+To compile an executable into a Mac application bundle (a .app), set its 'mac_app' attribute
13331+ obj.mac_app = True
13332+
13333+To make a bundled shared library (a .bundle), set the 'mac_bundle' attribute:
13334+ obj.mac_bundle = True
13335+"""
13336+
13337+import os, shutil, sys, platform
13338+import TaskGen, Task, Build, Options, Utils
13339+from TaskGen import taskgen, feature, after, before
13340+from Logs import error, debug
13341+
13342+# plist template
13343+app_info = '''
13344+<?xml version="1.0" encoding="UTF-8"?>
13345+<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
13346+<plist version="0.9">
13347+<dict>
13348+ <key>CFBundlePackageType</key>
13349+ <string>APPL</string>
13350+ <key>CFBundleGetInfoString</key>
13351+ <string>Created by Waf</string>
13352+ <key>CFBundleSignature</key>
13353+ <string>????</string>
13354+ <key>NOTE</key>
13355+ <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
13356+ <key>CFBundleExecutable</key>
13357+ <string>%s</string>
13358+</dict>
13359+</plist>
13360+'''
13361+
13362+# see WAF issue 285
13363+# and also http://trac.macports.org/ticket/17059
13364+@feature('cc', 'cxx')
13365+@before('apply_lib_vars')
13366+def set_macosx_deployment_target(self):
13367+ if self.env['MACOSX_DEPLOYMENT_TARGET']:
13368+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env['MACOSX_DEPLOYMENT_TARGET']
13369+ elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
13370+ if sys.platform == 'darwin':
13371+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
13372+
13373+@feature('cc', 'cxx')
13374+@after('apply_lib_vars')
13375+def apply_framework(self):
13376+ for x in self.to_list(self.env['FRAMEWORKPATH']):
13377+ frameworkpath_st = '-F%s'
13378+ self.env.append_unique('CXXFLAGS', frameworkpath_st % x)
13379+ self.env.append_unique('CCFLAGS', frameworkpath_st % x)
13380+ self.env.append_unique('LINKFLAGS', frameworkpath_st % x)
13381+
13382+ for x in self.to_list(self.env['FRAMEWORK']):
13383+ self.env.append_value('LINKFLAGS', ['-framework', x])
13384+
13385+@taskgen
13386+def create_bundle_dirs(self, name, out):
13387+ bld = self.bld
13388+ dir = out.parent.get_dir(name)
13389+
13390+ if not dir:
13391+ dir = out.__class__(name, out.parent, 1)
13392+ bld.rescan(dir)
13393+ contents = out.__class__('Contents', dir, 1)
13394+ bld.rescan(contents)
13395+ macos = out.__class__('MacOS', contents, 1)
13396+ bld.rescan(macos)
13397+ return dir
13398+
13399+def bundle_name_for_output(out):
13400+ name = out.name
13401+ k = name.rfind('.')
13402+ if k >= 0:
13403+ name = name[:k] + '.app'
13404+ else:
13405+ name = name + '.app'
13406+ return name
13407+
13408+@taskgen
13409+@after('apply_link')
13410+@feature('cprogram')
13411+def create_task_macapp(self):
13412+ """Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
13413+ or use obj.mac_app = True to build specific targets as Mac apps"""
13414+ if self.env['MACAPP'] or getattr(self, 'mac_app', False):
13415+ apptask = self.create_task('macapp')
13416+ apptask.set_inputs(self.link_task.outputs)
13417+
13418+ out = self.link_task.outputs[0]
13419+
13420+ name = bundle_name_for_output(out)
13421+ dir = self.create_bundle_dirs(name, out)
13422+
13423+ n1 = dir.find_or_declare(['Contents', 'MacOS', out.name])
13424+
13425+ apptask.set_outputs([n1])
13426+ apptask.chmod = 0755
13427+ apptask.install_path = os.path.join(self.install_path, name, 'Contents', 'MacOS')
13428+ self.apptask = apptask
13429+
13430+@after('apply_link')
13431+@feature('cprogram')
13432+def create_task_macplist(self):
13433+ """Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
13434+ or use obj.mac_app = True to build specific targets as Mac apps"""
13435+ if self.env['MACAPP'] or getattr(self, 'mac_app', False):
13436+ # check if the user specified a plist before using our template
13437+ if not getattr(self, 'mac_plist', False):
13438+ self.mac_plist = app_info
13439+
13440+ plisttask = self.create_task('macplist')
13441+ plisttask.set_inputs(self.link_task.outputs)
13442+
13443+ out = self.link_task.outputs[0]
13444+ self.mac_plist = self.mac_plist % (out.name)
13445+
13446+ name = bundle_name_for_output(out)
13447+ dir = self.create_bundle_dirs(name, out)
13448+
13449+ n1 = dir.find_or_declare(['Contents', 'Info.plist'])
13450+
13451+ plisttask.set_outputs([n1])
13452+ plisttask.mac_plist = self.mac_plist
13453+ plisttask.install_path = os.path.join(self.install_path, name, 'Contents')
13454+ self.plisttask = plisttask
13455+
13456+@after('apply_link')
13457+@feature('cshlib')
13458+def apply_link_osx(self):
13459+ name = self.link_task.outputs[0].name
13460+ if not self.install_path:
13461+ return
13462+ if getattr(self, 'vnum', None):
13463+ name = name.replace('.dylib', '.%s.dylib' % self.vnum)
13464+
13465+ path = os.path.join(Utils.subst_vars(self.install_path, self.env), name)
13466+ if '-dynamiclib' in self.env['LINKFLAGS']:
13467+ self.env.append_value('LINKFLAGS', '-install_name')
13468+ self.env.append_value('LINKFLAGS', path)
13469+
13470+@before('apply_link', 'apply_lib_vars')
13471+@feature('cc', 'cxx')
13472+def apply_bundle(self):
13473+ """use env['MACBUNDLE'] to force all shlibs into mac bundles
13474+ or use obj.mac_bundle = True for specific targets only"""
13475+ if not ('cshlib' in self.features or 'shlib' in self.features): return
13476+ if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
13477+ self.env['shlib_PATTERN'] = self.env['macbundle_PATTERN']
13478+ uselib = self.uselib = self.to_list(self.uselib)
13479+ if not 'MACBUNDLE' in uselib: uselib.append('MACBUNDLE')
13480+
13481+@after('apply_link')
13482+@feature('cshlib')
13483+def apply_bundle_remove_dynamiclib(self):
13484+ if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
13485+ if not getattr(self, 'vnum', None):
13486+ try:
13487+ self.env['LINKFLAGS'].remove('-dynamiclib')
13488+ self.env['LINKFLAGS'].remove('-single_module')
13489+ except ValueError:
13490+ pass
13491+
13492+# TODO REMOVE IN 1.6 (global variable)
13493+app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
13494+
13495+def app_build(task):
13496+ env = task.env
13497+ shutil.copy2(task.inputs[0].srcpath(env), task.outputs[0].abspath(env))
13498+
13499+ return 0
13500+
13501+def plist_build(task):
13502+ env = task.env
13503+ f = open(task.outputs[0].abspath(env), "w")
13504+ f.write(task.mac_plist)
13505+ f.close()
13506+
13507+ return 0
13508+
13509+Task.task_type_from_func('macapp', vars=[], func=app_build, after="cxx_link cc_link static_link")
13510+Task.task_type_from_func('macplist', vars=[], func=plist_build, after="cxx_link cc_link static_link")
13511+
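
A usage sketch for the osx tool above, assuming waf 1.5 running on Darwin; the target names are illustrative assumptions.

def build(bld):
	app = bld.new_task_gen(
		features = 'cc cprogram',
		source = 'main.c',
		target = 'MyTool')
	app.mac_app = True            # create_task_macapp/create_task_macplist wrap the program into MyTool.app

	plugin = bld.new_task_gen(
		features = 'cc cshlib',
		source = 'plugin.c',
		target = 'plugin')
	plugin.mac_bundle = True      # apply_bundle switches shlib_PATTERN to macbundle_PATTERN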
13512diff --git a/buildtools/wafadmin/Tools/perl.py b/buildtools/wafadmin/Tools/perl.py
13513new file mode 100644
13514index 0000000..a6787a8
13515--- /dev/null
13516+++ b/buildtools/wafadmin/Tools/perl.py
13517@@ -0,0 +1,109 @@
13518+#!/usr/bin/env python
13519+# encoding: utf-8
13520+# andersg at 0x63.nu 2007
13521+
13522+import os
13523+import Task, Options, Utils
13524+from Configure import conf
13525+from TaskGen import extension, taskgen, feature, before
13526+
13527+xsubpp_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
13528+EXT_XS = ['.xs']
13529+
13530+@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars')
13531+@feature('perlext')
13532+def init_perlext(self):
13533+ self.uselib = self.to_list(getattr(self, 'uselib', ''))
13534+ if not 'PERL' in self.uselib: self.uselib.append('PERL')
13535+ if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT')
13536+ self.env['shlib_PATTERN'] = self.env['perlext_PATTERN']
13537+
13538+@extension(EXT_XS)
13539+def xsubpp_file(self, node):
13540+ outnode = node.change_ext('.c')
13541+ self.create_task('xsubpp', node, outnode)
13542+ self.allnodes.append(outnode)
13543+
13544+Task.simple_task_type('xsubpp', xsubpp_str, color='BLUE', before='cc cxx', shell=False)
13545+
13546+@conf
13547+def check_perl_version(conf, minver=None):
13548+ """
13549+ Checks if perl is installed.
13550+
13551+	If installed, the variable PERL will be set in the environment.
13552+
13553+	The perl binary can be overridden with the --with-perl-binary configuration option.
13554+
13555+ """
13556+
13557+ if getattr(Options.options, 'perlbinary', None):
13558+ conf.env.PERL = Options.options.perlbinary
13559+ else:
13560+ conf.find_program('perl', var='PERL', mandatory=True)
13561+
13562+ try:
13563+ version = Utils.cmd_output([conf.env.PERL, '-e', 'printf "%vd",$^V'])
13564+ except:
13565+ conf.fatal('could not determine the perl version')
13566+
13567+ conf.env.PERL_VERSION = version
13568+ cver = ''
13569+ if minver:
13570+ try:
13571+ ver = tuple(map(int, version.split('.')))
13572+ except:
13573+ conf.fatal('unsupported perl version %r' % version)
13574+ if ver < minver:
13575+ conf.fatal('perl is too old')
13576+
13577+ cver = '.'.join(map(str,minver))
13578+ conf.check_message('perl', cver, True, version)
13579+
13580+@conf
13581+def check_perl_module(conf, module):
13582+ """
13583+	Check if the specified perl module is installed.
13584+
13585+	A minimum version can be specified by appending it to the module name,
13586+	like this:
13587+
13588+ conf.check_perl_module("Some::Module 2.92")
13589+ """
13590+ cmd = [conf.env['PERL'], '-e', 'use %s' % module]
13591+ r = Utils.pproc.call(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE) == 0
13592+ conf.check_message("perl module %s" % module, "", r)
13593+ return r
13594+
13595+@conf
13596+def check_perl_ext_devel(conf):
13597+ """
13598+ Check for configuration needed to build perl extensions.
13599+
13600+ Sets different xxx_PERLEXT variables in the environment.
13601+
13602+	Also sets the ARCHDIR_PERL variable, which is useful as an installation path
13603+	and can be overridden by --with-perl-archdir.
13604+ """
13605+ if not conf.env.PERL:
13606+ conf.fatal('perl detection is required first')
13607+
13608+ def read_out(cmd):
13609+ return Utils.to_list(Utils.cmd_output([conf.env.PERL, '-MConfig', '-e', cmd]))
13610+
13611+ conf.env.LINKFLAGS_PERLEXT = read_out('print $Config{lddlflags}')
13612+ conf.env.CPPPATH_PERLEXT = read_out('print "$Config{archlib}/CORE"')
13613+ conf.env.CCFLAGS_PERLEXT = read_out('print "$Config{ccflags} $Config{cccdlflags}"')
13614+ conf.env.XSUBPP = read_out('print "$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}"')
13615+ conf.env.EXTUTILS_TYPEMAP = read_out('print "$Config{privlib}/ExtUtils/typemap"')
13616+ conf.env.perlext_PATTERN = '%s.' + read_out('print $Config{dlext}')[0]
13617+
13618+ if getattr(Options.options, 'perlarchdir', None):
13619+ conf.env.ARCHDIR_PERL = Options.options.perlarchdir
13620+ else:
13621+ conf.env.ARCHDIR_PERL = read_out('print $Config{sitearch}')[0]
13622+
13623+def set_options(opt):
13624+ opt.add_option("--with-perl-binary", type="string", dest="perlbinary", help = 'Specify alternate perl binary', default=None)
13625+ opt.add_option("--with-perl-archdir", type="string", dest="perlarchdir", help = 'Specify directory where to install arch specific files', default=None)
13626+
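
A usage sketch for the perl tool above, assuming waf 1.5 and a working local perl; the module name, minimum versions and file names are illustrative assumptions.

def set_options(opt):
	opt.tool_options('perl')               # adds --with-perl-binary and --with-perl-archdir

def configure(conf):
	conf.check_tool('perl')
	conf.check_perl_version((5, 8, 0))
	conf.check_perl_ext_devel()            # fills CPPPATH_PERLEXT, CCFLAGS_PERLEXT, XSUBPP, ARCHDIR_PERL, ...
	conf.check_perl_module('ExtUtils::MakeMaker 6.0')

def build(bld):
	bld.new_task_gen(
		features = 'cc cshlib perlext',    # init_perlext switches shlib_PATTERN to perlext_PATTERN
		source = 'Example.xs glue.c',      # the .xs file is run through xsubpp first
		target = 'Example')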
13627diff --git a/buildtools/wafadmin/Tools/preproc.py b/buildtools/wafadmin/Tools/preproc.py
13628new file mode 100644
13629index 0000000..5055456
13630--- /dev/null
13631+++ b/buildtools/wafadmin/Tools/preproc.py
13632@@ -0,0 +1,836 @@
13633+#!/usr/bin/env python
13634+# encoding: utf-8
13635+# Thomas Nagy, 2006-2009 (ita)
13636+
13637+"""
13638+C/C++ preprocessor for finding dependencies
13639+
13640+Reasons for using the Waf preprocessor by default
13641+1. Some c/c++ extensions (Qt) require a custom preprocessor for obtaining the dependencies (.moc files)
13642+2. Not all compilers provide .d files for obtaining the dependencies (portability)
13643+3. A naive file scanner will not catch the constructs such as "#include foo()"
13644+3. A naive file scanner will not catch constructs such as "#include foo()"
13645+
13646+Regarding the speed concerns:
13647+a. the preprocessing is performed only when files must be compiled
13648+b. the macros are evaluated only for #if/#elif/#include
13649+c. the time penalty is about 10%
13650+d. system headers are not scanned
13651+
13652+Now if you do not want the Waf preprocessor, the tool "gccdeps" uses the .d files produced
13653+during the compilation to track the dependencies (useful when used with the boost libraries).
13654+It only works with gcc though, and it cannot be used with Qt builds. A dumb
13655+file scanner will be added in the future, so most behaviours will be covered.
13656+"""
13657+# TODO: more varargs, pragma once
13658+# TODO: dumb file scanner tracking all includes
13659+
13660+import re, sys, os, string
13661+import Logs, Build, Utils
13662+from Logs import debug, error
13663+import traceback
13664+
13665+class PreprocError(Utils.WafError):
13666+ pass
13667+
13668+POPFILE = '-'
13669+
13670+
13671+recursion_limit = 5000
13672+"do not loop too much on header inclusion"
13673+
13674+go_absolute = 0
13675+"set to 1 to track headers in system directories such as /usr/include - otherwise absolute paths are ignored"
13676+
13677+standard_includes = ['/usr/include']
13678+if sys.platform == "win32":
13679+ standard_includes = []
13680+
13681+use_trigraphs = 0
13682+'apply the trigraph rules first'
13683+
13684+strict_quotes = 0
13685+"Keep <> for system includes (do not search for those includes)"
13686+
13687+g_optrans = {
13688+'not':'!',
13689+'and':'&&',
13690+'bitand':'&',
13691+'and_eq':'&=',
13692+'or':'||',
13693+'bitor':'|',
13694+'or_eq':'|=',
13695+'xor':'^',
13696+'xor_eq':'^=',
13697+'compl':'~',
13698+}
13699+"these operator aliases are for C++; to disable them, set this to an empty dict"
13700+
13701+# ignore #warning and #error
13702+re_lines = re.compile(\
13703+ '^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
13704+ re.IGNORECASE | re.MULTILINE)
13705+
13706+re_mac = re.compile("^[a-zA-Z_]\w*")
13707+re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
13708+re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE)
13709+re_nl = re.compile('\\\\\r*\n', re.MULTILINE)
13710+re_cpp = re.compile(
13711+ r"""(/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)|//[^\n]*|("(?:\\.|[^"\\])*"|'(?:\\.|[^'\\])*'|.[^/"'\\]*)""",
13712+ re.MULTILINE)
13713+trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')]
13714+chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39}
13715+
13716+NUM = 'i'
13717+OP = 'O'
13718+IDENT = 'T'
13719+STR = 's'
13720+CHAR = 'c'
13721+
13722+tok_types = [NUM, STR, IDENT, OP]
13723+exp_types = [
13724+ r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",
13725+ r'L?"([^"\\]|\\.)*"',
13726+ r'[a-zA-Z_]\w*',
13727+ r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',
13728+]
13729+re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, exp_types)]), re.M)
13730+
13731+accepted = 'a'
13732+ignored = 'i'
13733+undefined = 'u'
13734+skipped = 's'
13735+
13736+def repl(m):
13737+ if m.group(1):
13738+ return ' '
13739+ s = m.group(2)
13740+ if s is None:
13741+ return ''
13742+ return s
13743+
13744+def filter_comments(filename):
13745+ # return a list of tuples : keyword, line
13746+ code = Utils.readf(filename)
13747+ if use_trigraphs:
13748+ for (a, b) in trig_def: code = code.split(a).join(b)
13749+ code = re_nl.sub('', code)
13750+ code = re_cpp.sub(repl, code)
13751+ return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)]
13752+
13753+prec = {}
13754+# op -> precedence level, needed for expressions such as: #if 1 && 2 != 0
13755+ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
13756+for x in range(len(ops)):
13757+ syms = ops[x]
13758+ for u in syms.split():
13759+ prec[u] = x
13760+
13761+def reduce_nums(val_1, val_2, val_op):
13762+ """apply arithmetic rules and try to return an integer result"""
13763+ #print val_1, val_2, val_op
13764+
13765+ # now perform the operation, make certain a and b are numeric
13766+ try: a = 0 + val_1
13767+ except TypeError: a = int(val_1)
13768+ try: b = 0 + val_2
13769+ except TypeError: b = int(val_2)
13770+
13771+ d = val_op
13772+ if d == '%': c = a%b
13773+ elif d=='+': c = a+b
13774+ elif d=='-': c = a-b
13775+ elif d=='*': c = a*b
13776+ elif d=='/': c = a/b
13777+ elif d=='^': c = a^b
13778+ elif d=='|': c = a|b
13779+ elif d=='||': c = int(a or b)
13780+ elif d=='&': c = a&b
13781+ elif d=='&&': c = int(a and b)
13782+ elif d=='==': c = int(a == b)
13783+ elif d=='!=': c = int(a != b)
13784+ elif d=='<=': c = int(a <= b)
13785+ elif d=='<': c = int(a < b)
13786+ elif d=='>': c = int(a > b)
13787+ elif d=='>=': c = int(a >= b)
13788+ elif d=='^': c = int(a^b)
13789+ elif d=='<<': c = a<<b
13790+ elif d=='>>': c = a>>b
13791+ else: c = 0
13792+ return c
13793+
13794+def get_num(lst):
13795+ if not lst: raise PreprocError("empty list for get_num")
13796+ (p, v) = lst[0]
13797+ if p == OP:
13798+ if v == '(':
13799+ count_par = 1
13800+ i = 1
13801+ while i < len(lst):
13802+ (p, v) = lst[i]
13803+
13804+ if p == OP:
13805+ if v == ')':
13806+ count_par -= 1
13807+ if count_par == 0:
13808+ break
13809+ elif v == '(':
13810+ count_par += 1
13811+ i += 1
13812+ else:
13813+ raise PreprocError("rparen expected %r" % lst)
13814+
13815+ (num, _) = get_term(lst[1:i])
13816+ return (num, lst[i+1:])
13817+
13818+ elif v == '+':
13819+ return get_num(lst[1:])
13820+ elif v == '-':
13821+ num, lst = get_num(lst[1:])
13822+ return (reduce_nums('-1', num, '*'), lst)
13823+ elif v == '!':
13824+ num, lst = get_num(lst[1:])
13825+ return (int(not int(num)), lst)
13826+		elif v == '~':
13827+			num, lst = get_num(lst[1:]); return (~ int(num), lst)
13828+ else:
13829+ raise PreprocError("invalid op token %r for get_num" % lst)
13830+ elif p == NUM:
13831+ return v, lst[1:]
13832+ elif p == IDENT:
13833+ # all macros should have been replaced, remaining identifiers eval to 0
13834+ return 0, lst[1:]
13835+ else:
13836+ raise PreprocError("invalid token %r for get_num" % lst)
13837+
13838+def get_term(lst):
13839+ if not lst: raise PreprocError("empty list for get_term")
13840+ num, lst = get_num(lst)
13841+ if not lst:
13842+ return (num, [])
13843+ (p, v) = lst[0]
13844+ if p == OP:
13845+ if v == '&&' and not num:
13846+ return (num, [])
13847+ elif v == '||' and num:
13848+ return (num, [])
13849+ elif v == ',':
13850+ # skip
13851+ return get_term(lst[1:])
13852+ elif v == '?':
13853+ count_par = 0
13854+ i = 1
13855+ while i < len(lst):
13856+ (p, v) = lst[i]
13857+
13858+ if p == OP:
13859+ if v == ')':
13860+ count_par -= 1
13861+ elif v == '(':
13862+ count_par += 1
13863+ elif v == ':':
13864+ if count_par == 0:
13865+ break
13866+ i += 1
13867+ else:
13868+ raise PreprocError("rparen expected %r" % lst)
13869+
13870+ if int(num):
13871+ return get_term(lst[1:i])
13872+ else:
13873+ return get_term(lst[i+1:])
13874+
13875+ else:
13876+ num2, lst = get_num(lst[1:])
13877+
13878+ if not lst:
13879+ # no more tokens to process
13880+ num2 = reduce_nums(num, num2, v)
13881+ return get_term([(NUM, num2)] + lst)
13882+
13883+ # operator precedence
13884+ p2, v2 = lst[0]
13885+ if p2 != OP:
13886+ raise PreprocError("op expected %r" % lst)
13887+
13888+ if prec[v2] >= prec[v]:
13889+ num2 = reduce_nums(num, num2, v)
13890+ return get_term([(NUM, num2)] + lst)
13891+ else:
13892+ num3, lst = get_num(lst[1:])
13893+ num3 = reduce_nums(num2, num3, v2)
13894+ return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)
13895+
13896+
13897+ raise PreprocError("cannot reduce %r" % lst)
13898+
13899+def reduce_eval(lst):
13900+ """take a list of tokens and output true or false (#if/#elif conditions)"""
13901+ num, lst = get_term(lst)
13902+ return (NUM, num)
13903+
13904+def stringize(lst):
13905+ """convert a list of tokens into a string"""
13906+ lst = [str(v2) for (p2, v2) in lst]
13907+ return "".join(lst)
13908+
13909+def paste_tokens(t1, t2):
13910+ """
13911+ here is what we can paste:
13912+ a ## b -> ab
13913+ > ## = -> >=
13914+ a ## 2 -> a2
13915+ """
13916+ p1 = None
13917+ if t1[0] == OP and t2[0] == OP:
13918+ p1 = OP
13919+ elif t1[0] == IDENT and (t2[0] == IDENT or t2[0] == NUM):
13920+ p1 = IDENT
13921+ elif t1[0] == NUM and t2[0] == NUM:
13922+ p1 = NUM
13923+ if not p1:
13924+ raise PreprocError('tokens do not make a valid paste %r and %r' % (t1, t2))
13925+ return (p1, t1[1] + t2[1])
13926+
13927+def reduce_tokens(lst, defs, ban=[]):
13928+ """replace the tokens in lst using the macros provided in defs; ban lists the macro names that must not be expanded again"""
13929+ i = 0
13930+
13931+ while i < len(lst):
13932+ (p, v) = lst[i]
13933+
13934+ if p == IDENT and v == "defined":
13935+ del lst[i]
13936+ if i < len(lst):
13937+ (p2, v2) = lst[i]
13938+ if p2 == IDENT:
13939+ if v2 in defs:
13940+ lst[i] = (NUM, 1)
13941+ else:
13942+ lst[i] = (NUM, 0)
13943+ elif p2 == OP and v2 == '(':
13944+ del lst[i]
13945+ (p2, v2) = lst[i]
13946+ del lst[i] # remove the ident, then overwrite the ')' with the value
13947+ if v2 in defs:
13948+ lst[i] = (NUM, 1)
13949+ else:
13950+ lst[i] = (NUM, 0)
13951+ else:
13952+ raise PreprocError("invalid define expression %r" % lst)
13953+
13954+ elif p == IDENT and v in defs:
13955+
13956+ if isinstance(defs[v], str):
13957+ a, b = extract_macro(defs[v])
13958+ defs[v] = b
13959+ macro_def = defs[v]
13960+ to_add = macro_def[1]
13961+
13962+ if isinstance(macro_def[0], list):
13963+ # macro without arguments
13964+ del lst[i]
13965+ for x in xrange(len(to_add)):
13966+ lst.insert(i, to_add[x])
13967+ i += 1
13968+ else:
13969+ # collect the arguments for the funcall
13970+
13971+ args = []
13972+ del lst[i]
13973+
13974+ if i >= len(lst):
13975+ raise PreprocError("expected '(' after %r (got nothing)" % v)
13976+
13977+ (p2, v2) = lst[i]
13978+ if p2 != OP or v2 != '(':
13979+ raise PreprocError("expected '(' after %r" % v)
13980+
13981+ del lst[i]
13982+
13983+ one_param = []
13984+ count_paren = 0
13985+ while i < len(lst):
13986+ p2, v2 = lst[i]
13987+
13988+ del lst[i]
13989+ if p2 == OP and count_paren == 0:
13990+ if v2 == '(':
13991+ one_param.append((p2, v2))
13992+ count_paren += 1
13993+ elif v2 == ')':
13994+ if one_param: args.append(one_param)
13995+ break
13996+ elif v2 == ',':
13997+ if not one_param: raise PreprocError("empty param in funcall %s" % p)
13998+ args.append(one_param)
13999+ one_param = []
14000+ else:
14001+ one_param.append((p2, v2))
14002+ else:
14003+ one_param.append((p2, v2))
14004+ if v2 == '(': count_paren += 1
14005+ elif v2 == ')': count_paren -= 1
14006+ else:
14007+ raise PreprocError('malformed macro')
14008+
14009+ # substitute the arguments within the define expression
14010+ accu = []
14011+ arg_table = macro_def[0]
14012+ j = 0
14013+ while j < len(to_add):
14014+ (p2, v2) = to_add[j]
14015+
14016+ if p2 == OP and v2 == '#':
14017+ # stringize is for arguments only
14018+ if j+1 < len(to_add) and to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
14019+ toks = args[arg_table[to_add[j+1][1]]]
14020+ accu.append((STR, stringize(toks)))
14021+ j += 1
14022+ else:
14023+ accu.append((p2, v2))
14024+ elif p2 == OP and v2 == '##':
14025+ # token pasting ('##'); how could anyone invent such a complicated system?
14026+ if accu and j+1 < len(to_add):
14027+ # we have at least two tokens
14028+
14029+ t1 = accu[-1]
14030+
14031+ if to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
14032+ toks = args[arg_table[to_add[j+1][1]]]
14033+
14034+ if toks:
14035+ accu[-1] = paste_tokens(t1, toks[0]) #(IDENT, accu[-1][1] + toks[0][1])
14036+ accu.extend(toks[1:])
14037+ else:
14038+ # error, case "a##"
14039+ accu.append((p2, v2))
14040+ accu.extend(toks)
14041+ elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
14042+ # TODO not sure
14043+ # first collect the tokens
14044+ va_toks = []
14045+ st = len(macro_def[0])
14046+ pt = len(args)
14047+ for x in args[pt-st+1:]:
14048+ va_toks.extend(x)
14049+ va_toks.append((OP, ','))
14050+ if va_toks: va_toks.pop() # extra comma
14051+ if len(accu)>1:
14052+ (p3, v3) = accu[-1]
14053+ (p4, v4) = accu[-2]
14054+ if v3 == '##':
14055+ # remove the token paste
14056+ accu.pop()
14057+ if v4 == ',' and pt < st:
14058+ # remove the comma
14059+ accu.pop()
14060+ accu += va_toks
14061+ else:
14062+ accu[-1] = paste_tokens(t1, to_add[j+1])
14063+
14064+ j += 1
14065+ else:
14066+ # invalid paste, case "##a" or "b##"
14067+ accu.append((p2, v2))
14068+
14069+ elif p2 == IDENT and v2 in arg_table:
14070+ toks = args[arg_table[v2]]
14071+ reduce_tokens(toks, defs, ban+[v])
14072+ accu.extend(toks)
14073+ else:
14074+ accu.append((p2, v2))
14075+
14076+ j += 1
14077+
14078+
14079+ reduce_tokens(accu, defs, ban+[v])
14080+
14081+ for x in xrange(len(accu)-1, -1, -1):
14082+ lst.insert(i, accu[x])
14083+
14084+ i += 1
14085+
14086+
14087+def eval_macro(lst, adefs):
14088+ """reduce the tokens from the list lst, and try to return a 0/1 result"""
14089+ reduce_tokens(lst, adefs, [])
14090+ if not lst: raise PreprocError("missing tokens to evaluate")
14091+ (p, v) = reduce_eval(lst)
14092+ return int(v) != 0
14093+
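+# Editor's note: an illustrative sketch of how #if/#elif conditions are evaluated;
+# not part of upstream waf:
+#   eval_macro(tokenize('1 + 2 == 3'), {})  -> True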
14094+def extract_macro(txt):
14095+ """process a macro definition from "#define f(x, y) x * y" into a function or a simple macro without arguments"""
14096+ t = tokenize(txt)
14097+ if re_fun.search(txt):
14098+ p, name = t[0]
14099+
14100+ p, v = t[1]
14101+ if p != OP: raise PreprocError("expected open parenthesis")
14102+
14103+ i = 1
14104+ pindex = 0
14105+ params = {}
14106+ prev = '('
14107+
14108+ while 1:
14109+ i += 1
14110+ p, v = t[i]
14111+
14112+ if prev == '(':
14113+ if p == IDENT:
14114+ params[v] = pindex
14115+ pindex += 1
14116+ prev = p
14117+ elif p == OP and v == ')':
14118+ break
14119+ else:
14120+ raise PreprocError("unexpected token (3)")
14121+ elif prev == IDENT:
14122+ if p == OP and v == ',':
14123+ prev = v
14124+ elif p == OP and v == ')':
14125+ break
14126+ else:
14127+ raise PreprocError("comma or ... expected")
14128+ elif prev == ',':
14129+ if p == IDENT:
14130+ params[v] = pindex
14131+ pindex += 1
14132+ prev = p
14133+ elif p == OP and v == '...':
14134+ raise PreprocError("not implemented (1)")
14135+ else:
14136+ raise PreprocError("comma or ... expected (2)")
14137+ elif prev == '...':
14138+ raise PreprocError("not implemented (2)")
14139+ else:
14140+ raise PreprocError("unexpected else")
14141+
14142+ #~ print (name, [params, t[i+1:]])
14143+ return (name, [params, t[i+1:]])
14144+ else:
14145+ (p, v) = t[0]
14146+ return (v, [[], t[1:]])
14147+
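+# Editor's note, for illustration only: extract_macro('f(x, y) x * y') returns
+# ('f', [params, body_tokens]) with params == {'x': 0, 'y': 1}, while an object-like
+# macro such as 'FOO 1' comes back as ('FOO', [[], body_tokens]).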
14148+re_include = re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
14149+def extract_include(txt, defs):
14150+ """process a line in the form "#include foo" to return a string representing the file"""
14151+ m = re_include.search(txt)
14152+ if m:
14153+ if m.group('a'): return '<', m.group('a')
14154+ if m.group('b'): return '"', m.group('b')
14155+
14156+ # perform preprocessing and look at the result, it must match an include
14157+ toks = tokenize(txt)
14158+ reduce_tokens(toks, defs, ['waf_include'])
14159+
14160+ if not toks:
14161+ raise PreprocError("could not parse include %s" % txt)
14162+
14163+ if len(toks) == 1:
14164+ if toks[0][0] == STR:
14165+ return '"', toks[0][1]
14166+ else:
14167+ if toks[0][1] == '<' and toks[-1][1] == '>':
14168+ return '<', stringize(toks).lstrip('<').rstrip('>')
14169+
14170+ raise PreprocError("could not parse include %s." % txt)
14171+
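+# Editor's note, for illustration only:
+#   extract_include('<stdio.h>', {})   -> ('<', 'stdio.h')
+#   extract_include('"config.h"', {})  -> ('"', 'config.h')
+# anything else is macro-expanded first and then re-examined.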
14172+def parse_char(txt):
14173+ if not txt: raise PreprocError("attempted to parse a null char")
14174+ if txt[0] != '\\':
14175+ return ord(txt)
14176+ c = txt[1]
14177+ if c == 'x':
14178+ if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16)
14179+ return int(txt[2:], 16)
14180+ elif c.isdigit():
14181+ if c == '0' and len(txt)==2: return 0
14182+ for i in 3, 2, 1:
14183+ if len(txt) > i and txt[1:1+i].isdigit():
14184+ return (1+i, int(txt[1:1+i], 8))
14185+ else:
14186+ try: return chr_esc[c]
14187+ except KeyError: raise PreprocError("could not parse char literal '%s'" % txt)
14188+
14189+@Utils.run_once
14190+def tokenize(s):
14191+ """convert a string into a list of tokens (shlex.split does not apply to c/c++/d)"""
14192+ ret = []
14193+ for match in re_clexer.finditer(s):
14194+ m = match.group
14195+ for name in tok_types:
14196+ v = m(name)
14197+ if v:
14198+ if name == IDENT:
14199+ try: v = g_optrans[v]; name = OP
14200+ except KeyError:
14201+ # c++ specific
14202+ if v.lower() == "true":
14203+ v = 1
14204+ name = NUM
14205+ elif v.lower() == "false":
14206+ v = 0
14207+ name = NUM
14208+ elif name == NUM:
14209+ if m('oct'): v = int(v, 8)
14210+ elif m('hex'): v = int(m('hex'), 16)
14211+ elif m('n0'): v = m('n0')
14212+ else:
14213+ v = m('char')
14214+ if v: v = parse_char(v)
14215+ else: v = m('n2') or m('n4')
14216+ elif name == OP:
14217+ if v == '%:': v = '#'
14218+ elif v == '%:%:': v = '##'
14219+ elif name == STR:
14220+ # remove the quotes around the string
14221+ v = v[1:-1]
14222+ ret.append((name, v))
14223+ break
14224+ return ret
14225+
14226+@Utils.run_once
14227+def define_name(line):
14228+ return re_mac.match(line).group(0)
14229+
14230+class c_parser(object):
14231+ def __init__(self, nodepaths=None, defines=None):
14232+ #self.lines = txt.split('\n')
14233+ self.lines = []
14234+
14235+ if defines is None:
14236+ self.defs = {}
14237+ else:
14238+ self.defs = dict(defines) # make a copy
14239+ self.state = []
14240+
14241+ self.env = None # needed for the variant when searching for files
14242+
14243+ self.count_files = 0
14244+ self.currentnode_stack = []
14245+
14246+ self.nodepaths = nodepaths or []
14247+
14248+ self.nodes = []
14249+ self.names = []
14250+
14251+ # file added
14252+ self.curfile = ''
14253+ self.ban_includes = set([])
14254+
14255+ def cached_find_resource(self, node, filename):
14256+ try:
14257+ nd = node.bld.cache_nd
14258+ except:
14259+ nd = node.bld.cache_nd = {}
14260+
14261+ tup = (node.id, filename)
14262+ try:
14263+ return nd[tup]
14264+ except KeyError:
14265+ ret = node.find_resource(filename)
14266+ nd[tup] = ret
14267+ return ret
14268+
14269+ def tryfind(self, filename):
14270+ self.curfile = filename
14271+
14272+ # for msvc this should be a loop over the whole include stack
14273+ found = self.cached_find_resource(self.currentnode_stack[-1], filename)
14274+
14275+ for n in self.nodepaths:
14276+ if found:
14277+ break
14278+ found = self.cached_find_resource(n, filename)
14279+
14280+ if found:
14281+ self.nodes.append(found)
14282+ if filename[-4:] != '.moc':
14283+ self.addlines(found)
14284+ else:
14285+ if not filename in self.names:
14286+ self.names.append(filename)
14287+ return found
14288+
14289+ def addlines(self, node):
14290+
14291+ self.currentnode_stack.append(node.parent)
14292+ filepath = node.abspath(self.env)
14293+
14294+ self.count_files += 1
14295+ if self.count_files > recursion_limit: raise PreprocError("recursion limit exceeded")
14296+ pc = self.parse_cache
14297+ debug('preproc: reading file %r', filepath)
14298+ try:
14299+ lns = pc[filepath]
14300+ except KeyError:
14301+ pass
14302+ else:
14303+ self.lines.extend(lns)
14304+ return
14305+
14306+ try:
14307+ lines = filter_comments(filepath)
14308+ lines.append((POPFILE, ''))
14309+ lines.reverse()
14310+ pc[filepath] = lines # cache the lines filtered
14311+ self.lines.extend(lines)
14312+ except IOError:
14313+ raise PreprocError("could not read the file %s" % filepath)
14314+ except Exception:
14315+ if Logs.verbose > 0:
14316+ error("parsing %s failed" % filepath)
14317+ traceback.print_exc()
14318+
14319+ def start(self, node, env):
14320+ debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
14321+
14322+ self.env = env
14323+ variant = node.variant(env)
14324+ bld = node.__class__.bld
14325+ try:
14326+ self.parse_cache = bld.parse_cache
14327+ except AttributeError:
14328+ bld.parse_cache = {}
14329+ self.parse_cache = bld.parse_cache
14330+
14331+ self.addlines(node)
14332+ if env['DEFLINES']:
14333+ lst = [('define', x) for x in env['DEFLINES']]
14334+ lst.reverse()
14335+ self.lines.extend(lst)
14336+
14337+ while self.lines:
14338+ (kind, line) = self.lines.pop()
14339+ if kind == POPFILE:
14340+ self.currentnode_stack.pop()
14341+ continue
14342+ try:
14343+ self.process_line(kind, line)
14344+ except Exception, e:
14345+ if Logs.verbose:
14346+ debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
14347+
14348+ def process_line(self, token, line):
14349+ """
14350+ WARNING: a new state must be pushed for each #if/#ifdef/#ifndef because the matching #endif pops it
14351+ """
14352+ ve = Logs.verbose
14353+ if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state)
14354+ state = self.state
14355+
14356+ # make certain we define the state if we are about to enter in an if block
14357+ if token in ['ifdef', 'ifndef', 'if']:
14358+ state.append(undefined)
14359+ elif token == 'endif':
14360+ state.pop()
14361+
14362+ # skip lines when in a dead 'if' branch, wait for the endif
14363+ if not token in ['else', 'elif', 'endif']:
14364+ if skipped in self.state or ignored in self.state:
14365+ return
14366+
14367+ if token == 'if':
14368+ ret = eval_macro(tokenize(line), self.defs)
14369+ if ret: state[-1] = accepted
14370+ else: state[-1] = ignored
14371+ elif token == 'ifdef':
14372+ m = re_mac.match(line)
14373+ if m and m.group(0) in self.defs: state[-1] = accepted
14374+ else: state[-1] = ignored
14375+ elif token == 'ifndef':
14376+ m = re_mac.match(line)
14377+ if m and m.group(0) in self.defs: state[-1] = ignored
14378+ else: state[-1] = accepted
14379+ elif token == 'include' or token == 'import':
14380+ (kind, inc) = extract_include(line, self.defs)
14381+ if inc in self.ban_includes: return
14382+ if token == 'import': self.ban_includes.add(inc)
14383+ if ve: debug('preproc: include found %s (%s) ', inc, kind)
14384+ if kind == '"' or not strict_quotes:
14385+ self.tryfind(inc)
14386+ elif token == 'elif':
14387+ if state[-1] == accepted:
14388+ state[-1] = skipped
14389+ elif state[-1] == ignored:
14390+ if eval_macro(tokenize(line), self.defs):
14391+ state[-1] = accepted
14392+ elif token == 'else':
14393+ if state[-1] == accepted: state[-1] = skipped
14394+ elif state[-1] == ignored: state[-1] = accepted
14395+ elif token == 'define':
14396+ try:
14397+ self.defs[define_name(line)] = line
14398+ except:
14399+ raise PreprocError("invalid define line %s" % line)
14400+ elif token == 'undef':
14401+ m = re_mac.match(line)
14402+ if m and m.group(0) in self.defs:
14403+ self.defs.__delitem__(m.group(0))
14404+ #print "undef %s" % name
14405+ elif token == 'pragma':
14406+ if re_pragma_once.match(line.lower()):
14407+ self.ban_includes.add(self.curfile)
14408+
14409+def get_deps(node, env, nodepaths=[]):
14410+ """
14411+ Get the dependencies using a c/c++ preprocessor; this is required for finding dependencies of the kind
14412+ #include some_macro()
14413+ """
14414+
14415+ gruik = c_parser(nodepaths)
14416+ gruik.start(node, env)
14417+ return (gruik.nodes, gruik.names)
14418+
14419+#################### dumb dependency scanner
14420+
14421+re_inc = re.compile(\
14422+ '^[ \t]*(#|%:)[ \t]*(include)[ \t]*(.*)\r*$',
14423+ re.IGNORECASE | re.MULTILINE)
14424+
14425+def lines_includes(filename):
14426+ code = Utils.readf(filename)
14427+ if use_trigraphs:
14428+ for (a, b) in trig_def: code = code.split(a).join(b)
14429+ code = re_nl.sub('', code)
14430+ code = re_cpp.sub(repl, code)
14431+ return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
14432+
14433+def get_deps_simple(node, env, nodepaths=[], defines={}):
14434+ """
14435+ Get the dependencies by just looking recursively at the #include statements
14436+ """
14437+
14438+ nodes = []
14439+ names = []
14440+
14441+ def find_deps(node):
14442+ lst = lines_includes(node.abspath(env))
14443+
14444+ for (_, line) in lst:
14445+ (t, filename) = extract_include(line, defines)
14446+ if filename in names:
14447+ continue
14448+
14449+ if filename.endswith('.moc'):
14450+ names.append(filename)
14451+
14452+ found = None
14453+ for n in nodepaths:
14454+ if found:
14455+ break
14456+ found = n.find_resource(filename)
14457+
14458+ if not found:
14459+ if not filename in names:
14460+ names.append(filename)
14461+ elif not found in nodes:
14462+ nodes.append(found)
14463+ find_deps(node)
14464+
14465+ find_deps(node)
14466+ return (nodes, names)
14467+
14468+
14469diff --git a/buildtools/wafadmin/Tools/python.py b/buildtools/wafadmin/Tools/python.py
14470new file mode 100644
14471index 0000000..4f73081
14472--- /dev/null
14473+++ b/buildtools/wafadmin/Tools/python.py
14474@@ -0,0 +1,413 @@
14475+#!/usr/bin/env python
14476+# encoding: utf-8
14477+# Thomas Nagy, 2007 (ita)
14478+# Gustavo Carneiro (gjc), 2007
14479+
14480+"Python support"
14481+
14482+import os, sys
14483+import TaskGen, Utils, Runner, Options, Build
14484+from Logs import debug, warn, info
14485+from TaskGen import extension, taskgen, before, after, feature
14486+from Configure import conf
14487+
14488+EXT_PY = ['.py']
14489+FRAG_2 = '''
14490+#include "Python.h"
14491+#ifdef __cplusplus
14492+extern "C" {
14493+#endif
14494+ void Py_Initialize(void);
14495+ void Py_Finalize(void);
14496+#ifdef __cplusplus
14497+}
14498+#endif
14499+int main()
14500+{
14501+ Py_Initialize();
14502+ Py_Finalize();
14503+ return 0;
14504+}
14505+'''
14506+
14507+@feature('pyext')
14508+@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars', 'apply_bundle')
14509+@after('vars_target_cshlib')
14510+def init_pyext(self):
14511+ self.default_install_path = '${PYTHONARCHDIR}'
14512+ self.uselib = self.to_list(getattr(self, 'uselib', ''))
14513+ if not 'PYEXT' in self.uselib:
14514+ self.uselib.append('PYEXT')
14515+ self.env['MACBUNDLE'] = True
14516+
14517+@before('apply_link', 'apply_lib_vars', 'apply_type_vars')
14518+@after('apply_bundle')
14519+@feature('pyext')
14520+def pyext_shlib_ext(self):
14521+ # override shlib_PATTERN set by the osx module
14522+ self.env['shlib_PATTERN'] = self.env['pyext_PATTERN']
14523+
14524+@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars')
14525+@feature('pyembed')
14526+def init_pyembed(self):
14527+ self.uselib = self.to_list(getattr(self, 'uselib', ''))
14528+ if not 'PYEMBED' in self.uselib:
14529+ self.uselib.append('PYEMBED')
14530+
14531+@extension(EXT_PY)
14532+def process_py(self, node):
14533+ if not (self.bld.is_install and self.install_path):
14534+ return
14535+ def inst_py(ctx):
14536+ install_pyfile(self, node)
14537+ self.bld.add_post_fun(inst_py)
14538+
14539+def install_pyfile(self, node):
14540+ path = self.bld.get_install_path(self.install_path + os.sep + node.name, self.env)
14541+
14542+ self.bld.install_files(self.install_path, [node], self.env, self.chmod, postpone=False)
14543+ if self.bld.is_install < 0:
14544+ info("* removing byte compiled python files")
14545+ for x in 'co':
14546+ try:
14547+ os.remove(path + x)
14548+ except OSError:
14549+ pass
14550+
14551+ if self.bld.is_install > 0:
14552+ if self.env['PYC'] or self.env['PYO']:
14553+ info("* byte compiling %r" % path)
14554+
14555+ if self.env['PYC']:
14556+ program = ("""
14557+import sys, py_compile
14558+for pyfile in sys.argv[1:]:
14559+ py_compile.compile(pyfile, pyfile + 'c')
14560+""")
14561+ argv = [self.env['PYTHON'], '-c', program, path]
14562+ ret = Utils.pproc.Popen(argv).wait()
14563+ if ret:
14564+ raise Utils.WafError('bytecode compilation failed %r' % path)
14565+
14566+ if self.env['PYO']:
14567+ program = ("""
14568+import sys, py_compile
14569+for pyfile in sys.argv[1:]:
14570+ py_compile.compile(pyfile, pyfile + 'o')
14571+""")
14572+ argv = [self.env['PYTHON'], self.env['PYFLAGS_OPT'], '-c', program, path]
14573+ ret = Utils.pproc.Popen(argv).wait()
14574+ if ret:
14575+ raise Utils.WafError('bytecode compilation failed %r' % path)
14576+
14577+# COMPAT
14578+class py_taskgen(TaskGen.task_gen):
14579+ def __init__(self, *k, **kw):
14580+ TaskGen.task_gen.__init__(self, *k, **kw)
14581+
14582+@before('apply_core')
14583+@after('vars_target_cprogram', 'vars_target_cshlib')
14584+@feature('py')
14585+def init_py(self):
14586+ self.default_install_path = '${PYTHONDIR}'
14587+
14588+def _get_python_variables(python_exe, variables, imports=['import sys']):
14589+ """Run a python interpreter and print some variables"""
14590+ program = list(imports)
14591+ program.append('')
14592+ for v in variables:
14593+ program.append("print(repr(%s))" % v)
14594+ os_env = dict(os.environ)
14595+ try:
14596+ del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
14597+ except KeyError:
14598+ pass
14599+ proc = Utils.pproc.Popen([python_exe, "-c", '\n'.join(program)], stdout=Utils.pproc.PIPE, env=os_env)
14600+ output = proc.communicate()[0].split("\n") # do not touch, python3
14601+ if proc.returncode:
14602+ if Options.options.verbose:
14603+ warn("Python program to extract python configuration variables failed:\n%s"
14604+ % '\n'.join(["line %03i: %s" % (lineno+1, line) for lineno, line in enumerate(program)]))
14605+ raise RuntimeError
14606+ return_values = []
14607+ for s in output:
14608+ s = s.strip()
14609+ if not s:
14610+ continue
14611+ if s == 'None':
14612+ return_values.append(None)
14613+ elif s[0] == "'" and s[-1] == "'":
14614+ return_values.append(s[1:-1])
14615+ elif s[0].isdigit():
14616+ return_values.append(int(s))
14617+ else: break
14618+ return return_values
14619+
14620+@conf
14621+def check_python_headers(conf, mandatory=True):
14622+ """Check for headers and libraries necessary to extend or embed python.
14623+
14624+ On success the environment variables xxx_PYEXT and xxx_PYEMBED are added for uselib
14625+
14626+ PYEXT: for compiling python extensions
14627+ PYEMBED: for embedding a python interpreter"""
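+ # Editor's note: typical configure-time usage from a project wscript (illustrative
+ # sketch; only functions defined in this tool plus conf.check_tool are assumed):
+ #   conf.check_tool('python')
+ #   conf.check_python_version((2, 4, 2))
+ #   conf.check_python_headers()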
14628+
14629+ if not conf.env['CC_NAME'] and not conf.env['CXX_NAME']:
14630+ conf.fatal('load a compiler first (gcc, g++, ..)')
14631+
14632+ if not conf.env['PYTHON_VERSION']:
14633+ conf.check_python_version()
14634+
14635+ env = conf.env
14636+ python = env['PYTHON']
14637+ if not python:
14638+ conf.fatal('could not find the python executable')
14639+
14640+ ## On Mac OSX we need to use mac bundles for python plugins
14641+ if Options.platform == 'darwin':
14642+ conf.check_tool('osx')
14643+
14644+ try:
14645+ # Get some python configuration variables using distutils
14646+ v = 'prefix SO SYSLIBS LDFLAGS SHLIBS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET'.split()
14647+ (python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
14648+ python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED,
14649+ python_MACOSX_DEPLOYMENT_TARGET) = \
14650+ _get_python_variables(python, ["get_config_var('%s')" % x for x in v],
14651+ ['from distutils.sysconfig import get_config_var'])
14652+ except RuntimeError:
14653+ conf.fatal("Python development headers not found (-v for details).")
14654+
14655+ conf.log.write("""Configuration returned from %r:
14656+python_prefix = %r
14657+python_SO = %r
14658+python_SYSLIBS = %r
14659+python_LDFLAGS = %r
14660+python_SHLIBS = %r
14661+python_LIBDIR = %r
14662+python_LIBPL = %r
14663+INCLUDEPY = %r
14664+Py_ENABLE_SHARED = %r
14665+MACOSX_DEPLOYMENT_TARGET = %r
14666+""" % (python, python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
14667+ python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED, python_MACOSX_DEPLOYMENT_TARGET))
14668+
14669+ if python_MACOSX_DEPLOYMENT_TARGET:
14670+ conf.env['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
14671+ conf.environ['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
14672+
14673+ env['pyext_PATTERN'] = '%s'+python_SO
14674+
14675+ # Check for python libraries for embedding
14676+ if python_SYSLIBS is not None:
14677+ for lib in python_SYSLIBS.split():
14678+ if lib.startswith('-l'):
14679+ lib = lib[2:] # strip '-l'
14680+ env.append_value('LIB_PYEMBED', lib)
14681+
14682+ if python_SHLIBS is not None:
14683+ for lib in python_SHLIBS.split():
14684+ if lib.startswith('-l'):
14685+ env.append_value('LIB_PYEMBED', lib[2:]) # strip '-l'
14686+ else:
14687+ env.append_value('LINKFLAGS_PYEMBED', lib)
14688+
14689+ if Options.platform != 'darwin' and python_LDFLAGS:
14690+ env.append_value('LINKFLAGS_PYEMBED', python_LDFLAGS.split())
14691+
14692+ result = False
14693+ name = 'python' + env['PYTHON_VERSION']
14694+
14695+ if python_LIBDIR is not None:
14696+ path = [python_LIBDIR]
14697+ conf.log.write("\n\n# Trying LIBDIR: %r\n" % path)
14698+ result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
14699+
14700+ if not result and python_LIBPL is not None:
14701+ conf.log.write("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
14702+ path = [python_LIBPL]
14703+ result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
14704+
14705+ if not result:
14706+ conf.log.write("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
14707+ path = [os.path.join(python_prefix, "libs")]
14708+ name = 'python' + env['PYTHON_VERSION'].replace('.', '')
14709+ result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
14710+
14711+ if result:
14712+ env['LIBPATH_PYEMBED'] = path
14713+ env.append_value('LIB_PYEMBED', name)
14714+ else:
14715+ conf.log.write("\n\n### LIB NOT FOUND\n")
14716+
14717+ # under certain conditions, python extensions must link to
14718+ # python libraries, not just python embedding programs.
14719+ if (sys.platform == 'win32' or sys.platform.startswith('os2')
14720+ or sys.platform == 'darwin' or Py_ENABLE_SHARED):
14721+ env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED']
14722+ env['LIB_PYEXT'] = env['LIB_PYEMBED']
14723+
14724+ # We check that pythonX.Y-config exists, and if it exists we
14725+ # use it to get only the includes, else fall back to distutils.
14726+ python_config = conf.find_program(
14727+ 'python%s-config' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
14728+ var='PYTHON_CONFIG')
14729+ if not python_config:
14730+ python_config = conf.find_program(
14731+ 'python-config-%s' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
14732+ var='PYTHON_CONFIG')
14733+
14734+ includes = []
14735+ if python_config:
14736+ for incstr in Utils.cmd_output("%s %s --includes" % (python, python_config)).strip().split():
14737+ # strip the -I or /I
14738+ if (incstr.startswith('-I')
14739+ or incstr.startswith('/I')):
14740+ incstr = incstr[2:]
14741+ # append include path, unless already given
14742+ if incstr not in includes:
14743+ includes.append(incstr)
14744+ conf.log.write("Include path for Python extensions "
14745+ "(found via python-config --includes): %r\n" % (includes,))
14746+ env['CPPPATH_PYEXT'] = includes
14747+ env['CPPPATH_PYEMBED'] = includes
14748+ else:
14749+ conf.log.write("Include path for Python extensions "
14750+ "(found via distutils module): %r\n" % (INCLUDEPY,))
14751+ env['CPPPATH_PYEXT'] = [INCLUDEPY]
14752+ env['CPPPATH_PYEMBED'] = [INCLUDEPY]
14753+
14754+ # Code using the Python API needs to be compiled with -fno-strict-aliasing
14755+ if env['CC_NAME'] == 'gcc':
14756+ env.append_value('CCFLAGS_PYEMBED', '-fno-strict-aliasing')
14757+ env.append_value('CCFLAGS_PYEXT', '-fno-strict-aliasing')
14758+ if env['CXX_NAME'] == 'gcc':
14759+ env.append_value('CXXFLAGS_PYEMBED', '-fno-strict-aliasing')
14760+ env.append_value('CXXFLAGS_PYEXT', '-fno-strict-aliasing')
14761+
14762+ # See if it compiles
14763+ conf.check(define_name='HAVE_PYTHON_H',
14764+ uselib='PYEMBED', fragment=FRAG_2,
14765+ errmsg='Could not find the python development headers', mandatory=mandatory)
14766+
14767+@conf
14768+def check_python_version(conf, minver=None):
14769+ """
14770+ Check if the python interpreter is found matching a given minimum version.
14771+ minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.
14772+
14773+ If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR'
14774+ (eg. '2.4') of the actual python version found, and PYTHONDIR is
14775+ defined, pointing to the site-packages directory appropriate for
14776+ this python version, where modules/packages/extensions should be
14777+ installed.
14778+ """
14779+ assert minver is None or isinstance(minver, tuple)
14780+ python = conf.env['PYTHON']
14781+ if not python:
14782+ conf.fatal('could not find the python executable')
14783+
14784+ # Get python version string
14785+ cmd = [python, "-c", "import sys\nfor x in sys.version_info: print(str(x))"]
14786+ debug('python: Running python command %r' % cmd)
14787+ proc = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE)
14788+ lines = proc.communicate()[0].split()
14789+ assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
14790+ pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
14791+
14792+ # compare python version with the minimum required
14793+ result = (minver is None) or (pyver_tuple >= minver)
14794+
14795+ if result:
14796+ # define useful environment variables
14797+ pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
14798+ conf.env['PYTHON_VERSION'] = pyver
14799+
14800+ if 'PYTHONDIR' in conf.environ:
14801+ pydir = conf.environ['PYTHONDIR']
14802+ else:
14803+ if sys.platform == 'win32':
14804+ (python_LIBDEST, pydir) = \
14805+ _get_python_variables(python,
14806+ ["get_config_var('LIBDEST')",
14807+ "get_python_lib(standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
14808+ ['from distutils.sysconfig import get_config_var, get_python_lib'])
14809+ else:
14810+ python_LIBDEST = None
14811+ (pydir,) = \
14812+ _get_python_variables(python,
14813+ ["get_python_lib(standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
14814+ ['from distutils.sysconfig import get_config_var, get_python_lib'])
14815+ if python_LIBDEST is None:
14816+ if conf.env['LIBDIR']:
14817+ python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
14818+ else:
14819+ python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)
14820+
14821+ if 'PYTHONARCHDIR' in conf.environ:
14822+ pyarchdir = conf.environ['PYTHONARCHDIR']
14823+ else:
14824+ (pyarchdir,) = _get_python_variables(python,
14825+ ["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
14826+ ['from distutils.sysconfig import get_config_var, get_python_lib'])
14827+ if not pyarchdir:
14828+ pyarchdir = pydir
14829+
14830+ if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
14831+ conf.define('PYTHONDIR', pydir)
14832+ conf.define('PYTHONARCHDIR', pyarchdir)
14833+
14834+ conf.env['PYTHONDIR'] = pydir
14835+
14836+ # Feedback
14837+ pyver_full = '.'.join(map(str, pyver_tuple[:3]))
14838+ if minver is None:
14839+ conf.check_message_custom('Python version', '', pyver_full)
14840+ else:
14841+ minver_str = '.'.join(map(str, minver))
14842+ conf.check_message('Python version', ">= %s" % minver_str, result, option=pyver_full)
14843+
14844+ if not result:
14845+ conf.fatal('The python version is too old (%r)' % pyver_full)
14846+
14847+@conf
14848+def check_python_module(conf, module_name):
14849+ """
14850+ Check if the selected python interpreter can import the given python module.
14851+ """
14852+ result = not Utils.pproc.Popen([conf.env['PYTHON'], "-c", "import %s" % module_name],
14853+ stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE).wait()
14854+ conf.check_message('Python module', module_name, result)
14855+ if not result:
14856+ conf.fatal('Could not find the python module %r' % module_name)
14857+
14858+def detect(conf):
14859+
14860+ if not conf.env.PYTHON:
14861+ conf.env.PYTHON = sys.executable
14862+
14863+ python = conf.find_program('python', var='PYTHON')
14864+ if not python:
14865+ conf.fatal('Could not find the path of the python executable')
14866+
14867+ v = conf.env
14868+
14869+ v['PYCMD'] = '"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
14870+ v['PYFLAGS'] = ''
14871+ v['PYFLAGS_OPT'] = '-O'
14872+
14873+ v['PYC'] = getattr(Options.options, 'pyc', 1)
14874+ v['PYO'] = getattr(Options.options, 'pyo', 1)
14875+
14876+def set_options(opt):
14877+ opt.add_option('--nopyc',
14878+ action='store_false',
14879+ default=1,
14880+ help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]',
14881+ dest = 'pyc')
14882+ opt.add_option('--nopyo',
14883+ action='store_false',
14884+ default=1,
14885+ help='Do not install optimised compiled .pyo files (configuration) [Default:install]',
14886+ dest='pyo')
14887+
14888diff --git a/buildtools/wafadmin/Tools/qt4.py b/buildtools/wafadmin/Tools/qt4.py
14889new file mode 100644
14890index 0000000..84d121a
14891--- /dev/null
14892+++ b/buildtools/wafadmin/Tools/qt4.py
14893@@ -0,0 +1,505 @@
14894+#!/usr/bin/env python
14895+# encoding: utf-8
14896+# Thomas Nagy, 2006 (ita)
14897+
14898+"""
14899+Qt4 support
14900+
14901+If QT4_ROOT is given (absolute path), the configuration will look in it first
14902+
14903+This module also demonstrates how to add tasks dynamically (when the build has started)
14904+"""
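+# Editor's note: a minimal wscript sketch for using this tool (illustrative only;
+# the option table is populated by set_options below):
+#   def set_options(opt): opt.tool_options('qt4')
+#   def configure(conf): conf.check_tool('qt4')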
14905+
14906+try:
14907+ from xml.sax import make_parser
14908+ from xml.sax.handler import ContentHandler
14909+except ImportError:
14910+ has_xml = False
14911+ ContentHandler = object
14912+else:
14913+ has_xml = True
14914+
14915+import os, sys
14916+import ccroot, cxx
14917+import TaskGen, Task, Utils, Runner, Options, Node, Configure
14918+from TaskGen import taskgen, feature, after, extension
14919+from Logs import error
14920+from Constants import *
14921+
14922+MOC_H = ['.h', '.hpp', '.hxx', '.hh']
14923+EXT_RCC = ['.qrc']
14924+EXT_UI = ['.ui']
14925+EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
14926+
14927+class qxx_task(Task.Task):
14928+ "A cpp task that may create a moc task dynamically"
14929+
14930+ before = ['cxx_link', 'static_link']
14931+
14932+ def __init__(self, *k, **kw):
14933+ Task.Task.__init__(self, *k, **kw)
14934+ self.moc_done = 0
14935+
14936+ def scan(self):
14937+ (nodes, names) = ccroot.scan(self)
14938+ # for some reason (variants) the moc node may end up in the list of node deps
14939+ for x in nodes:
14940+ if x.name.endswith('.moc'):
14941+ nodes.remove(x)
14942+ names.append(x.relpath_gen(self.inputs[0].parent))
14943+ return (nodes, names)
14944+
14945+ def runnable_status(self):
14946+ if self.moc_done:
14947+ # if there is a moc task, delay the computation of the file signature
14948+ for t in self.run_after:
14949+ if not t.hasrun:
14950+ return ASK_LATER
14951+ # the moc file enters in the dependency calculation
14952+ # so we need to recompute the signature when the moc file is present
14953+ self.signature()
14954+ return Task.Task.runnable_status(self)
14955+ else:
14956+ # yes, really, there are people who generate cxx files
14957+ for t in self.run_after:
14958+ if not t.hasrun:
14959+ return ASK_LATER
14960+ self.add_moc_tasks()
14961+ return ASK_LATER
14962+
14963+ def add_moc_tasks(self):
14964+
14965+ node = self.inputs[0]
14966+ tree = node.__class__.bld
14967+
14968+ try:
14969+ # compute the signature once to know if there is a moc file to create
14970+ self.signature()
14971+ except KeyError:
14972+ # the moc file may be referenced somewhere else
14973+ pass
14974+ else:
14975+ # remove the signature, it must be recomputed with the moc task
14976+ delattr(self, 'cache_sig')
14977+
14978+ moctasks=[]
14979+ mocfiles=[]
14980+ variant = node.variant(self.env)
14981+ try:
14982+ tmp_lst = tree.raw_deps[self.unique_id()]
14983+ tree.raw_deps[self.unique_id()] = []
14984+ except KeyError:
14985+ tmp_lst = []
14986+ for d in tmp_lst:
14987+ if not d.endswith('.moc'): continue
14988+ # paranoid check
14989+ if d in mocfiles:
14990+ error("paranoia owns")
14991+ continue
14992+
14993+ # process that base.moc only once
14994+ mocfiles.append(d)
14995+
14996+ # find the extension (performed only when the .cpp has changed)
14997+ base2 = d[:-4]
14998+ for path in [node.parent] + self.generator.env['INC_PATHS']:
14999+ tree.rescan(path)
15000+ vals = getattr(Options.options, 'qt_header_ext', '') or MOC_H
15001+ for ex in vals:
15002+ h_node = path.find_resource(base2 + ex)
15003+ if h_node:
15004+ break
15005+ else:
15006+ continue
15007+ break
15008+ else:
15009+ raise Utils.WafError("no header found for %s which is a moc file" % str(d))
15010+
15011+ m_node = h_node.change_ext('.moc')
15012+ tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), m_node.name)] = h_node
15013+
15014+ # create the task
15015+ task = Task.TaskBase.classes['moc'](self.env, normal=0)
15016+ task.set_inputs(h_node)
15017+ task.set_outputs(m_node)
15018+
15019+ generator = tree.generator
15020+ generator.outstanding.insert(0, task)
15021+ generator.total += 1
15022+
15023+ moctasks.append(task)
15024+
15025+ # remove raw deps except the moc files to save space (optimization)
15026+ tmp_lst = tree.raw_deps[self.unique_id()] = mocfiles
15027+
15028+ # look at the file inputs, it is set right above
15029+ lst = tree.node_deps.get(self.unique_id(), ())
15030+ for d in lst:
15031+ name = d.name
15032+ if name.endswith('.moc'):
15033+ task = Task.TaskBase.classes['moc'](self.env, normal=0)
15034+ task.set_inputs(tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), name)]) # 1st element in a tuple
15035+ task.set_outputs(d)
15036+
15037+ generator = tree.generator
15038+ generator.outstanding.insert(0, task)
15039+ generator.total += 1
15040+
15041+ moctasks.append(task)
15042+
15043+ # simple scheduler dependency: run the moc task before others
15044+ self.run_after = moctasks
15045+ self.moc_done = 1
15046+
15047+ run = Task.TaskBase.classes['cxx'].__dict__['run']
15048+
15049+def translation_update(task):
15050+ outs = [a.abspath(task.env) for a in task.outputs]
15051+ outs = " ".join(outs)
15052+ lupdate = task.env['QT_LUPDATE']
15053+
15054+ for x in task.inputs:
15055+ file = x.abspath(task.env)
15056+ cmd = "%s %s -ts %s" % (lupdate, file, outs)
15057+ Utils.pprint('BLUE', cmd)
15058+ task.generator.bld.exec_command(cmd)
15059+
15060+class XMLHandler(ContentHandler):
15061+ def __init__(self):
15062+ self.buf = []
15063+ self.files = []
15064+ def startElement(self, name, attrs):
15065+ if name == 'file':
15066+ self.buf = []
15067+ def endElement(self, name):
15068+ if name == 'file':
15069+ self.files.append(''.join(self.buf))
15070+ def characters(self, cars):
15071+ self.buf.append(cars)
15072+
15073+def scan(self):
15074+ "add the dependency on the files referenced in the qrc"
15075+ node = self.inputs[0]
15076+ parser = make_parser()
15077+ curHandler = XMLHandler()
15078+ parser.setContentHandler(curHandler)
15079+ fi = open(self.inputs[0].abspath(self.env))
15080+ parser.parse(fi)
15081+ fi.close()
15082+
15083+ nodes = []
15084+ names = []
15085+ root = self.inputs[0].parent
15086+ for x in curHandler.files:
15087+ nd = root.find_resource(x)
15088+ if nd: nodes.append(nd)
15089+ else: names.append(x)
15090+
15091+ return (nodes, names)
15092+
15093+@extension(EXT_RCC)
15094+def create_rcc_task(self, node):
15095+ "hook for rcc files"
15096+ rcnode = node.change_ext('_rc.cpp')
15097+ rcctask = self.create_task('rcc', node, rcnode)
15098+ cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
15099+ self.compiled_tasks.append(cpptask)
15100+ return cpptask
15101+
15102+@extension(EXT_UI)
15103+def create_uic_task(self, node):
15104+ "hook for uic tasks"
15105+ uictask = self.create_task('ui4', node)
15106+ uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])]
15107+ return uictask
15108+
15109+class qt4_taskgen(cxx.cxx_taskgen):
15110+ def __init__(self, *k, **kw):
15111+ cxx.cxx_taskgen.__init__(self, *k, **kw)
15112+ self.features.append('qt4')
15113+
15114+@extension('.ts')
15115+def add_lang(self, node):
15116+ """add all the .ts file into self.lang"""
15117+ self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
15118+
15119+@feature('qt4')
15120+@after('apply_link')
15121+def apply_qt4(self):
15122+ if getattr(self, 'lang', None):
15123+ update = getattr(self, 'update', None)
15124+ lst=[]
15125+ trans=[]
15126+ for l in self.to_list(self.lang):
15127+
15128+ if not isinstance(l, Node.Node):
15129+ l = self.path.find_resource(l+'.ts')
15130+
15131+ t = self.create_task('ts2qm', l, l.change_ext('.qm'))
15132+ lst.append(t.outputs[0])
15133+
15134+ if update:
15135+ trans.append(t.inputs[0])
15136+
15137+ trans_qt4 = getattr(Options.options, 'trans_qt4', False)
15138+ if update and trans_qt4:
15139+ # we need the cpp files given, except the rcc task we create after
15140+ # FIXME may be broken
15141+ u = Task.TaskCmd(translation_update, self.env, 2)
15142+ u.inputs = [a.inputs[0] for a in self.compiled_tasks]
15143+ u.outputs = trans
15144+
15145+ if getattr(self, 'langname', None):
15146+ t = Task.TaskBase.classes['qm2rcc'](self.env)
15147+ t.set_inputs(lst)
15148+ t.set_outputs(self.path.find_or_declare(self.langname+'.qrc'))
15149+ t.path = self.path
15150+ k = create_rcc_task(self, t.outputs[0])
15151+ self.link_task.inputs.append(k.outputs[0])
15152+
15153+ self.env.append_value('MOC_FLAGS', self.env._CXXDEFFLAGS)
15154+ self.env.append_value('MOC_FLAGS', self.env._CXXINCFLAGS)
15155+
15156+@extension(EXT_QT4)
15157+def cxx_hook(self, node):
15158+ # create the compilation task: cpp or cc
15159+ try: obj_ext = self.obj_ext
15160+ except AttributeError: obj_ext = '_%d.o' % self.idx
15161+
15162+ task = self.create_task('qxx', node, node.change_ext(obj_ext))
15163+ self.compiled_tasks.append(task)
15164+ return task
15165+
15166+def process_qm2rcc(task):
15167+ outfile = task.outputs[0].abspath(task.env)
15168+ f = open(outfile, 'w')
15169+ f.write('<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n')
15170+ for k in task.inputs:
15171+ f.write(' <file>')
15172+ #f.write(k.name)
15173+ f.write(k.path_to_parent(task.path))
15174+ f.write('</file>\n')
15175+ f.write('</qresource>\n</RCC>')
15176+ f.close()
15177+
15178+b = Task.simple_task_type
15179+b('moc', '${QT_MOC} ${MOC_FLAGS} ${SRC} ${MOC_ST} ${TGT}', color='BLUE', vars=['QT_MOC', 'MOC_FLAGS'], shell=False)
15180+cls = b('rcc', '${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath(env)} ${RCC_ST} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', after="qm2rcc", shell=False)
15181+cls.scan = scan
15182+b('ui4', '${QT_UIC} ${SRC} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', shell=False)
15183+b('ts2qm', '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}', color='BLUE', before='qm2rcc', shell=False)
15184+
15185+Task.task_type_from_func('qm2rcc', vars=[], func=process_qm2rcc, color='BLUE', before='rcc', after='ts2qm')
15186+
15187+def detect_qt4(conf):
15188+ env = conf.env
15189+ opt = Options.options
15190+
15191+ qtdir = getattr(opt, 'qtdir', '')
15192+ qtbin = getattr(opt, 'qtbin', '')
15193+ qtlibs = getattr(opt, 'qtlibs', '')
15194+ useframework = getattr(opt, 'use_qt4_osxframework', True)
15195+
15196+ paths = []
15197+
15198+ # the path to qmake has been given explicitly
15199+ if qtbin:
15200+ paths = [qtbin]
15201+
15202+ # the qt directory has been given - we deduce the qt binary path
15203+ if not qtdir:
15204+ qtdir = conf.environ.get('QT4_ROOT', '')
15205+ qtbin = os.path.join(qtdir, 'bin')
15206+ paths = [qtbin]
15207+
15208+ # no qtdir, look in the path and in /usr/local/Trolltech
15209+ if not qtdir:
15210+ paths = os.environ.get('PATH', '').split(os.pathsep)
15211+ paths.append('/usr/share/qt4/bin/')
15212+ try:
15213+ lst = os.listdir('/usr/local/Trolltech/')
15214+ except OSError:
15215+ pass
15216+ else:
15217+ if lst:
15218+ lst.sort()
15219+ lst.reverse()
15220+
15221+ # keep the highest version
15222+ qtdir = '/usr/local/Trolltech/%s/' % lst[0]
15223+ qtbin = os.path.join(qtdir, 'bin')
15224+ paths.append(qtbin)
15225+
15226+ # at the end, try to find qmake in the paths given
15227+ # keep the one with the highest version
15228+ cand = None
15229+ prev_ver = ['4', '0', '0']
15230+ for qmk in ['qmake-qt4', 'qmake4', 'qmake']:
15231+ qmake = conf.find_program(qmk, path_list=paths)
15232+ if qmake:
15233+ try:
15234+ version = Utils.cmd_output([qmake, '-query', 'QT_VERSION']).strip()
15235+ except ValueError:
15236+ pass
15237+ else:
15238+ if version:
15239+ new_ver = version.split('.')
15240+ if new_ver > prev_ver:
15241+ cand = qmake
15242+ prev_ver = new_ver
15243+ if cand:
15244+ qmake = cand
15245+ else:
15246+ conf.fatal('could not find qmake for qt4')
15247+
15248+ conf.env.QMAKE = qmake
15249+ qtincludes = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_HEADERS']).strip()
15250+ qtdir = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
15251+ qtbin = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_BINS']).strip() + os.sep
15252+
15253+ if not qtlibs:
15254+ try:
15255+ qtlibs = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_LIBS']).strip() + os.sep
15256+ except ValueError:
15257+ qtlibs = os.path.join(qtdir, 'lib')
15258+
15259+ def find_bin(lst, var):
15260+ for f in lst:
15261+ ret = conf.find_program(f, path_list=paths)
15262+ if ret:
15263+ env[var]=ret
15264+ break
15265+
15266+ vars = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtWebKit Qt3Support".split()
15267+
15268+ find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
15269+ find_bin(['uic-qt4', 'uic'], 'QT_UIC')
15270+ if not env['QT_UIC']:
15271+ conf.fatal('cannot find the uic compiler for qt4')
15272+
15273+ try:
15274+ version = Utils.cmd_output(env['QT_UIC'] + " -version 2>&1").strip()
15275+ except ValueError:
15276+ conf.fatal('your uic compiler is for qt3, add uic for qt4 to your path')
15277+
15278+ version = version.replace('Qt User Interface Compiler ','')
15279+ version = version.replace('User Interface Compiler for Qt', '')
15280+ if version.find(" 3.") != -1:
15281+ conf.check_message('uic version', '(too old)', 0, option='(%s)'%version)
15282+ sys.exit(1)
15283+ conf.check_message('uic version', '', 1, option='(%s)'%version)
15284+
15285+ find_bin(['moc-qt4', 'moc'], 'QT_MOC')
15286+ find_bin(['rcc'], 'QT_RCC')
15287+ find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
15288+ find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')
15289+
15290+ env['UIC3_ST']= '%s -o %s'
15291+ env['UIC_ST'] = '%s -o %s'
15292+ env['MOC_ST'] = '-o'
15293+ env['ui_PATTERN'] = 'ui_%s.h'
15294+ env['QT_LRELEASE_FLAGS'] = ['-silent']
15295+
15296+ vars_debug = [a+'_debug' for a in vars]
15297+
15298+ try:
15299+ conf.find_program('pkg-config', var='pkgconfig', path_list=paths, mandatory=True)
15300+
15301+ except Configure.ConfigurationError:
15302+
15303+ for lib in vars_debug+vars:
15304+ uselib = lib.upper()
15305+
15306+ d = (lib.find('_debug') > 0) and 'd' or ''
15307+
15308+ # original author seems to prefer static to shared libraries
15309+ for (pat, kind) in ((conf.env.staticlib_PATTERN, 'STATIC'), (conf.env.shlib_PATTERN, '')):
15310+
15311+ conf.check_message_1('Checking for %s %s' % (lib, kind))
15312+
15313+ for ext in ['', '4']:
15314+ path = os.path.join(qtlibs, pat % (lib + d + ext))
15315+ if os.path.exists(path):
15316+ env.append_unique(kind + 'LIB_' + uselib, lib + d + ext)
15317+ conf.check_message_2('ok ' + path, 'GREEN')
15318+ break
15319+ path = os.path.join(qtbin, pat % (lib + d + ext))
15320+ if os.path.exists(path):
15321+ env.append_unique(kind + 'LIB_' + uselib, lib + d + ext)
15322+ conf.check_message_2('ok ' + path, 'GREEN')
15323+ break
15324+ else:
15325+ conf.check_message_2('not found', 'YELLOW')
15326+ continue
15327+ break
15328+
15329+ env.append_unique('LIBPATH_' + uselib, qtlibs)
15330+ env.append_unique('CPPPATH_' + uselib, qtincludes)
15331+ env.append_unique('CPPPATH_' + uselib, qtincludes + os.sep + lib)
15332+ else:
15333+ for i in vars_debug+vars:
15334+ try:
15335+ conf.check_cfg(package=i, args='--cflags --libs --silence-errors', path=conf.env.pkgconfig)
15336+ except ValueError:
15337+ pass
15338+
15339+ # the libpaths are set nicely; unfortunately they make for really long command lines
15340+ # remove the qtcore ones from qtgui, etc
15341+ def process_lib(vars_, coreval):
15342+ for d in vars_:
15343+ var = d.upper()
15344+ if var == 'QTCORE': continue
15345+
15346+ value = env['LIBPATH_'+var]
15347+ if value:
15348+ core = env[coreval]
15349+ accu = []
15350+ for lib in value:
15351+ if lib in core: continue
15352+ accu.append(lib)
15353+ env['LIBPATH_'+var] = accu
15354+
15355+ process_lib(vars, 'LIBPATH_QTCORE')
15356+ process_lib(vars_debug, 'LIBPATH_QTCORE_DEBUG')
15357+
15358+ # rpath if wanted
15359+ want_rpath = getattr(Options.options, 'want_rpath', 1)
15360+ if want_rpath:
15361+ def process_rpath(vars_, coreval):
15362+ for d in vars_:
15363+ var = d.upper()
15364+ value = env['LIBPATH_'+var]
15365+ if value:
15366+ core = env[coreval]
15367+ accu = []
15368+ for lib in value:
15369+ if var != 'QTCORE':
15370+ if lib in core:
15371+ continue
15372+ accu.append('-Wl,--rpath='+lib)
15373+ env['RPATH_'+var] = accu
15374+ process_rpath(vars, 'LIBPATH_QTCORE')
15375+ process_rpath(vars_debug, 'LIBPATH_QTCORE_DEBUG')
15376+
15377+ env['QTLOCALE'] = str(env['PREFIX'])+'/share/locale'
15378+
15379+def detect(conf):
15380+ detect_qt4(conf)
15381+
15382+def set_options(opt):
15383+ opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
15384+
15385+ opt.add_option('--header-ext',
15386+ type='string',
15387+ default='',
15388+ help='header extension for moc files',
15389+ dest='qt_header_ext')
15390+
15391+ for i in 'qtdir qtbin qtlibs'.split():
15392+ opt.add_option('--'+i, type='string', default='', dest=i)
15393+
15394+ if sys.platform == "darwin":
15395+ opt.add_option('--no-qt4-framework', action="store_false", help='do not use the framework version of Qt4 in OS X', dest='use_qt4_osxframework',default=True)
15396+
15397+ opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)
15398+
15399diff --git a/buildtools/wafadmin/Tools/ruby.py b/buildtools/wafadmin/Tools/ruby.py
15400new file mode 100644
15401index 0000000..d3b7569
15402--- /dev/null
15403+++ b/buildtools/wafadmin/Tools/ruby.py
15404@@ -0,0 +1,120 @@
15405+#!/usr/bin/env python
15406+# encoding: utf-8
15407+# daniel.svensson at purplescout.se 2008
15408+
15409+import os
15410+import Task, Options, Utils
15411+from TaskGen import before, feature, after
15412+from Configure import conf
15413+
15414+@feature('rubyext')
15415+@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars', 'apply_bundle')
15416+@after('default_cc', 'vars_target_cshlib')
15417+def init_rubyext(self):
15418+ Check whether ruby is installed.
15419+ If it is, the RUBY variable is set in the environment.
15420+ The ruby binary can be overridden with the --with-ruby-binary option.
15421+ self.uselib.append('RUBY')
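+ # Editor's note: illustrative configure usage (sketch): conf.check_tool('ruby'),
+ # then conf.check_ruby_version((1, 8, 0)) and, when building extensions,
+ # conf.check_ruby_ext_devel().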
15422+ if not 'RUBYEXT' in self.uselib:
15423+ self.uselib.append('RUBYEXT')
15424+
15425+@feature('rubyext')
15426+@before('apply_link')
15427+def apply_ruby_so_name(self):
15428+ self.env['shlib_PATTERN'] = self.env['rubyext_PATTERN']
15429+
15430+@conf
15431+def check_ruby_version(conf, minver=()):
15432+ """
15433+ Checks if ruby is installed.
15434+ If installed the variable RUBY will be set in environment.
15435+ Ruby binary can be overridden by --with-ruby-binary config variable
15436+ """
15437+
15438+ if Options.options.rubybinary:
15439+ conf.env.RUBY = Options.options.rubybinary
15440+ else:
15441+ conf.find_program("ruby", var="RUBY", mandatory=True)
15442+
15443+ ruby = conf.env.RUBY
15444+
15445+ try:
15446+ version = Utils.cmd_output([ruby, '-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
15447+ except:
15448+ conf.fatal('could not determine ruby version')
15449+ conf.env.RUBY_VERSION = version
15450+
15451+ try:
15452+ ver = tuple(map(int, version.split(".")))
15453+ except:
15454+ conf.fatal('unsupported ruby version %r' % version)
15455+
15456+ cver = ''
15457+ if minver:
15458+ if ver < minver:
15459+ conf.fatal('ruby is too old')
15460+ cver = ".".join([str(x) for x in minver])
15461+
15462+ conf.check_message('ruby', cver, True, version)
15463+
15464+@conf
15465+def check_ruby_ext_devel(conf):
15466+ if not conf.env.RUBY:
15467+ conf.fatal('ruby detection is required first')
15468+
15469+ if not conf.env.CC_NAME and not conf.env.CXX_NAME:
15470+ conf.fatal('load a c/c++ compiler first')
15471+
15472+ version = tuple(map(int, conf.env.RUBY_VERSION.split(".")))
15473+
15474+ def read_out(cmd):
15475+ return Utils.to_list(Utils.cmd_output([conf.env.RUBY, '-rrbconfig', '-e', cmd]))
15476+
15477+ def read_config(key):
15478+ return read_out('puts Config::CONFIG[%r]' % key)
15479+
15480+ ruby = conf.env['RUBY']
15481+ archdir = read_config('archdir')
15482+ cpppath = archdir
15483+ if version >= (1, 9, 0):
15484+ ruby_hdrdir = read_config('rubyhdrdir')
15485+ cpppath += ruby_hdrdir
15486+ cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]
15487+
15488+ conf.check(header_name='ruby.h', includes=cpppath, mandatory=True, errmsg='could not find ruby header file')
15489+
15490+ conf.env.LIBPATH_RUBYEXT = read_config('libdir')
15491+ conf.env.LIBPATH_RUBYEXT += archdir
15492+ conf.env.CPPPATH_RUBYEXT = cpppath
15493+ conf.env.CCFLAGS_RUBYEXT = read_config("CCDLFLAGS")
15494+ conf.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]
15495+
15496+ # ok this is really stupid, but the command and flags are combined.
15497+ # so we try to find the first argument...
15498+ flags = read_config('LDSHARED')
15499+ while flags and flags[0][0] != '-':
15500+ flags = flags[1:]
15501+
15502+ # we also want to strip out the deprecated ppc flags
15503+ if len(flags) > 1 and flags[1] == "ppc":
15504+ flags = flags[2:]
15505+
15506+ conf.env.LINKFLAGS_RUBYEXT = flags
15507+ conf.env.LINKFLAGS_RUBYEXT += read_config("LIBS")
15508+ conf.env.LINKFLAGS_RUBYEXT += read_config("LIBRUBYARG_SHARED")
15509+
15510+ if Options.options.rubyarchdir:
15511+ conf.env.ARCHDIR_RUBY = Options.options.rubyarchdir
15512+ else:
15513+ conf.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]
15514+
15515+ if Options.options.rubylibdir:
15516+ conf.env.LIBDIR_RUBY = Options.options.rubylibdir
15517+ else:
15518+ conf.env.LIBDIR_RUBY = read_config('sitelibdir')[0]
15519+
15520+def set_options(opt):
15521+ opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
15522+ opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
15523+ opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')
15524+
15525diff --git a/buildtools/wafadmin/Tools/suncc.py b/buildtools/wafadmin/Tools/suncc.py
15526new file mode 100644
15527index 0000000..b1a2aad
15528--- /dev/null
15529+++ b/buildtools/wafadmin/Tools/suncc.py
15530@@ -0,0 +1,76 @@
15531+#!/usr/bin/env python
15532+# encoding: utf-8
15533+# Thomas Nagy, 2006 (ita)
15534+# Ralf Habacker, 2006 (rh)
15535+
15536+import os, optparse
15537+import Utils, Options, Configure
15538+import ccroot, ar
15539+from Configure import conftest
15540+
15541+@conftest
15542+def find_scc(conf):
15543+ v = conf.env
15544+ cc = None
15545+ if v['CC']: cc = v['CC']
15546+ elif 'CC' in conf.environ: cc = conf.environ['CC']
15547+ #if not cc: cc = conf.find_program('gcc', var='CC')
15548+ if not cc: cc = conf.find_program('cc', var='CC')
15549+ if not cc: conf.fatal('suncc was not found')
15550+ cc = conf.cmd_to_list(cc)
15551+
15552+ try:
15553+ if not Utils.cmd_output(cc + ['-flags']):
15554+ conf.fatal('suncc %r was not found' % cc)
15555+ except ValueError:
15556+ conf.fatal('suncc -flags could not be executed')
15557+
15558+ v['CC'] = cc
15559+ v['CC_NAME'] = 'sun'
15560+
15561+@conftest
15562+def scc_common_flags(conf):
15563+ v = conf.env
15564+
15565+ # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
15566+
15567+ v['CC_SRC_F'] = ''
15568+ v['CC_TGT_F'] = ['-c', '-o', '']
15569+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
15570+
15571+ # linker
15572+ if not v['LINK_CC']: v['LINK_CC'] = v['CC']
15573+ v['CCLNK_SRC_F'] = ''
15574+ v['CCLNK_TGT_F'] = ['-o', ''] # solaris hack, separate the -o from the target
15575+
15576+ v['LIB_ST'] = '-l%s' # template for adding libs
15577+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
15578+ v['STATICLIB_ST'] = '-l%s'
15579+ v['STATICLIBPATH_ST'] = '-L%s'
15580+ v['CCDEFINES_ST'] = '-D%s'
15581+
15582+ v['SONAME_ST'] = '-Wl,-h -Wl,%s'
15583+ v['SHLIB_MARKER'] = '-Bdynamic'
15584+ v['STATICLIB_MARKER'] = '-Bstatic'
15585+
15586+ # program
15587+ v['program_PATTERN'] = '%s'
15588+
15589+ # shared library
15590+ v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC']
15591+ v['shlib_LINKFLAGS'] = ['-G']
15592+ v['shlib_PATTERN'] = 'lib%s.so'
15593+
15594+ # static lib
15595+ v['staticlib_LINKFLAGS'] = ['-Bstatic']
15596+ v['staticlib_PATTERN'] = 'lib%s.a'
15597+
15598+detect = '''
15599+find_scc
15600+find_cpp
15601+find_ar
15602+scc_common_flags
15603+cc_load_tools
15604+cc_add_flags
15605+link_add_flags
15606+'''
15607diff --git a/buildtools/wafadmin/Tools/suncxx.py b/buildtools/wafadmin/Tools/suncxx.py
15608new file mode 100644
15609index 0000000..8754b6c
15610--- /dev/null
15611+++ b/buildtools/wafadmin/Tools/suncxx.py
15612@@ -0,0 +1,75 @@
15613+#!/usr/bin/env python
15614+# encoding: utf-8
15615+# Thomas Nagy, 2006 (ita)
15616+# Ralf Habacker, 2006 (rh)
15617+
15618+import os, optparse
15619+import Utils, Options, Configure
15620+import ccroot, ar
15621+from Configure import conftest
15622+
15623+@conftest
15624+def find_sxx(conf):
15625+ v = conf.env
15626+ cc = None
15627+ if v['CXX']: cc = v['CXX']
15628+ elif 'CXX' in conf.environ: cc = conf.environ['CXX']
15629+ if not cc: cc = conf.find_program('c++', var='CXX')
15630+ if not cc: conf.fatal('sunc++ was not found')
15631+ cc = conf.cmd_to_list(cc)
15632+
15633+ try:
15634+ if not Utils.cmd_output(cc + ['-flags']):
15635+ conf.fatal('sunc++ %r was not found' % cc)
15636+ except ValueError:
15637+ conf.fatal('sunc++ -flags could not be executed')
15638+
15639+ v['CXX'] = cc
15640+ v['CXX_NAME'] = 'sun'
15641+
15642+@conftest
15643+def sxx_common_flags(conf):
15644+ v = conf.env
15645+
15646+ # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
15647+
15648+ v['CXX_SRC_F'] = ''
15649+ v['CXX_TGT_F'] = ['-c', '-o', '']
15650+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
15651+
15652+ # linker
15653+ if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
15654+ v['CXXLNK_SRC_F'] = ''
15655+ v['CXXLNK_TGT_F'] = ['-o', ''] # solaris hack, separate the -o from the target
15656+
15657+ v['LIB_ST'] = '-l%s' # template for adding libs
15658+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
15659+ v['STATICLIB_ST'] = '-l%s'
15660+ v['STATICLIBPATH_ST'] = '-L%s'
15661+ v['CXXDEFINES_ST'] = '-D%s'
15662+
15663+ v['SONAME_ST'] = '-Wl,-h -Wl,%s'
15664+ v['SHLIB_MARKER'] = '-Bdynamic'
15665+ v['STATICLIB_MARKER'] = '-Bstatic'
15666+
15667+ # program
15668+ v['program_PATTERN'] = '%s'
15669+
15670+ # shared library
15671+ v['shlib_CXXFLAGS'] = ['-Kpic', '-DPIC']
15672+ v['shlib_LINKFLAGS'] = ['-G']
15673+ v['shlib_PATTERN'] = 'lib%s.so'
15674+
15675+ # static lib
15676+ v['staticlib_LINKFLAGS'] = ['-Bstatic']
15677+ v['staticlib_PATTERN'] = 'lib%s.a'
15678+
15679+detect = '''
15680+find_sxx
15681+find_cpp
15682+find_ar
15683+sxx_common_flags
15684+cxx_load_tools
15685+cxx_add_flags
15686+link_add_flags
15687+'''
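Both Sun tools follow the same pattern: the configuration helpers are registered with @conftest and the detect string lists the ones run when the tool is loaded with conf.check_tool(). A configuration sketch, assuming waf 1.5 and an illustrative target name; normally the generic compiler_cc/compiler_cxx tools would select these automatically on Solaris:

def configure(conf):
	conf.check_tool('suncc')    # runs find_scc, find_cpp, find_ar, scc_common_flags, ...
	conf.check_tool('suncxx')   # same chain for the C++ compiler

def build(bld):
	bld(features='cxx cprogram', source='demo.cc', target='demo')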
15688diff --git a/buildtools/wafadmin/Tools/tex.py b/buildtools/wafadmin/Tools/tex.py
15689new file mode 100644
15690index 0000000..2dd748b
15691--- /dev/null
15692+++ b/buildtools/wafadmin/Tools/tex.py
15693@@ -0,0 +1,251 @@
15694+#!/usr/bin/env python
15695+# encoding: utf-8
15696+# Thomas Nagy, 2006 (ita)
15697+
15698+"TeX/LaTeX/PDFLaTeX support"
15699+
15700+import os, re
15701+import Utils, TaskGen, Task, Runner, Build
15702+from TaskGen import feature, before
15703+from Logs import error, warn, debug
15704+
15705+re_tex = re.compile(r'\\(?P<type>include|input|import|bringin|lstinputlisting){(?P<file>[^{}]*)}', re.M)
15706+def scan(self):
15707+ node = self.inputs[0]
15708+ env = self.env
15709+
15710+ nodes = []
15711+ names = []
15712+ if not node: return (nodes, names)
15713+
15714+ code = Utils.readf(node.abspath(env))
15715+
15716+ curdirnode = self.curdirnode
15717+ abs = curdirnode.abspath()
15718+ for match in re_tex.finditer(code):
15719+ path = match.group('file')
15720+ if path:
15721+ for k in ['', '.tex', '.ltx']:
15722+ # add another loop for the tex include paths?
15723+ debug('tex: trying %s%s' % (path, k))
15724+ try:
15725+ os.stat(abs+os.sep+path+k)
15726+ except OSError:
15727+ continue
15728+ found = path+k
15729+ node = curdirnode.find_resource(found)
15730+ if node:
15731+ nodes.append(node)
15732+ else:
15733+ debug('tex: could not find %s' % path)
15734+ names.append(path)
15735+
15736+ debug("tex: found the following : %s and names %s" % (nodes, names))
15737+ return (nodes, names)
15738+
15739+latex_fun, _ = Task.compile_fun('latex', '${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
15740+pdflatex_fun, _ = Task.compile_fun('pdflatex', '${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
15741+bibtex_fun, _ = Task.compile_fun('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
15742+makeindex_fun, _ = Task.compile_fun('makeindex', '${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
15743+
15744+g_bibtex_re = re.compile('bibdata', re.M)
15745+def tex_build(task, command='LATEX'):
15746+ env = task.env
15747+ bld = task.generator.bld
15748+
15749+ if not env['PROMPT_LATEX']:
15750+ env.append_value('LATEXFLAGS', '-interaction=batchmode')
15751+ env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
15752+
15753+ fun = latex_fun
15754+ if command == 'PDFLATEX':
15755+ fun = pdflatex_fun
15756+
15757+ node = task.inputs[0]
15758+ reldir = node.bld_dir(env)
15759+
15760+ #lst = []
15761+ #for c in Utils.split_path(reldir):
15762+ # if c: lst.append('..')
15763+ #srcfile = os.path.join(*(lst + [node.srcpath(env)]))
15764+ #sr2 = os.path.join(*(lst + [node.parent.srcpath(env)]))
15765+ srcfile = node.abspath(env)
15766+ sr2 = node.parent.abspath() + os.pathsep + node.parent.abspath(env) + os.pathsep
15767+
15768+ aux_node = node.change_ext('.aux')
15769+ idx_node = node.change_ext('.idx')
15770+
15771+ nm = aux_node.name
15772+ docuname = nm[ : len(nm) - 4 ] # 4 is the size of ".aux"
15773+
15774+ # important, set the cwd for everybody
15775+ task.cwd = task.inputs[0].parent.abspath(task.env)
15776+
15777+
15778+ warn('first pass on %s' % command)
15779+
15780+ task.env.env = {'TEXINPUTS': sr2}
15781+ task.env.SRCFILE = srcfile
15782+ ret = fun(task)
15783+ if ret:
15784+ return ret
15785+
15786+ # look in the .aux file if there is a bibfile to process
15787+ try:
15788+ ct = Utils.readf(aux_node.abspath(env))
15789+ except (OSError, IOError):
15790+ error('error bibtex scan')
15791+ else:
15792+ fo = g_bibtex_re.findall(ct)
15793+
15794+ # there is a .aux file to process
15795+ if fo:
15796+ warn('calling bibtex')
15797+
15798+ task.env.env = {'BIBINPUTS': sr2, 'BSTINPUTS': sr2}
15799+ task.env.SRCFILE = docuname
15800+ ret = bibtex_fun(task)
15801+ if ret:
15802+ error('error when calling bibtex %s' % docuname)
15803+ return ret
15804+
15805+ # look on the filesystem if there is a .idx file to process
15806+ try:
15807+ idx_path = idx_node.abspath(env)
15808+ os.stat(idx_path)
15809+ except OSError:
15810+ error('error file.idx scan')
15811+ else:
15812+ warn('calling makeindex')
15813+
15814+ task.env.SRCFILE = idx_node.name
15815+ task.env.env = {}
15816+ ret = makeindex_fun(task)
15817+ if ret:
15818+ error('error when calling makeindex %s' % idx_path)
15819+ return ret
15820+
15821+
15822+ hash = ''
15823+ i = 0
15824+ while i < 10:
15825+		# guard against infinite loops - one never knows
15826+ i += 1
15827+
15828+ # watch the contents of file.aux
15829+ prev_hash = hash
15830+ try:
15831+ hash = Utils.h_file(aux_node.abspath(env))
15832+		except (IOError, OSError):
15833+			error('could not read the aux file %s' % aux_node.abspath(env))
15834+ pass
15835+
15836+ # debug
15837+ #print "hash is, ", hash, " ", old_hash
15838+
15839+ # stop if file.aux does not change anymore
15840+ if hash and hash == prev_hash:
15841+ break
15842+
15843+ # run the command
15844+ warn('calling %s' % command)
15845+
15846+ task.env.env = {'TEXINPUTS': sr2 + os.pathsep}
15847+ task.env.SRCFILE = srcfile
15848+ ret = fun(task)
15849+ if ret:
15850+			error('error when calling %s' % command)
15851+ return ret
15852+
15853+ return None # ok
15854+
15855+latex_vardeps = ['LATEX', 'LATEXFLAGS']
15856+def latex_build(task):
15857+ return tex_build(task, 'LATEX')
15858+
15859+pdflatex_vardeps = ['PDFLATEX', 'PDFLATEXFLAGS']
15860+def pdflatex_build(task):
15861+ return tex_build(task, 'PDFLATEX')
15862+
15863+class tex_taskgen(TaskGen.task_gen):
15864+ def __init__(self, *k, **kw):
15865+ TaskGen.task_gen.__init__(self, *k, **kw)
15866+
15867+@feature('tex')
15868+@before('apply_core')
15869+def apply_tex(self):
15870+ if not getattr(self, 'type', None) in ['latex', 'pdflatex']:
15871+ self.type = 'pdflatex'
15872+
15873+ tree = self.bld
15874+ outs = Utils.to_list(getattr(self, 'outs', []))
15875+
15876+ # prompt for incomplete files (else the batchmode is used)
15877+ self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1)
15878+
15879+ deps_lst = []
15880+
15881+ if getattr(self, 'deps', None):
15882+ deps = self.to_list(self.deps)
15883+ for filename in deps:
15884+ n = self.path.find_resource(filename)
15885+ if not n in deps_lst: deps_lst.append(n)
15886+
15887+ self.source = self.to_list(self.source)
15888+ for filename in self.source:
15889+ base, ext = os.path.splitext(filename)
15890+
15891+ node = self.path.find_resource(filename)
15892+ if not node: raise Utils.WafError('cannot find %s' % filename)
15893+
15894+ if self.type == 'latex':
15895+ task = self.create_task('latex', node, node.change_ext('.dvi'))
15896+ elif self.type == 'pdflatex':
15897+ task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
15898+
15899+ task.env = self.env
15900+ task.curdirnode = self.path
15901+
15902+ # add the manual dependencies
15903+ if deps_lst:
15904+ variant = node.variant(self.env)
15905+ try:
15906+ lst = tree.node_deps[task.unique_id()]
15907+ for n in deps_lst:
15908+ if not n in lst:
15909+ lst.append(n)
15910+ except KeyError:
15911+ tree.node_deps[task.unique_id()] = deps_lst
15912+
15913+ if self.type == 'latex':
15914+ if 'ps' in outs:
15915+ tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
15916+ tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)}
15917+ if 'pdf' in outs:
15918+ tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
15919+ tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)}
15920+ elif self.type == 'pdflatex':
15921+ if 'ps' in outs:
15922+ self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
15923+ self.source = []
15924+
15925+def detect(conf):
15926+ v = conf.env
15927+ for p in 'tex latex pdflatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
15928+ conf.find_program(p, var=p.upper())
15929+ v[p.upper()+'FLAGS'] = ''
15930+ v['DVIPSFLAGS'] = '-Ppdf'
15931+
15932+b = Task.simple_task_type
15933+b('tex', '${TEX} ${TEXFLAGS} ${SRC}', color='BLUE', shell=False) # not used anywhere
15934+b('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRC}', color='BLUE', shell=False) # not used anywhere
15935+b('dvips', '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
15936+b('dvipdf', '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
15937+b('pdf2ps', '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}', color='BLUE', after="dvipdf pdflatex", shell=False)
15938+
15939+b = Task.task_type_from_func
15940+cls = b('latex', latex_build, vars=latex_vardeps)
15941+cls.scan = scan
15942+cls = b('pdflatex', pdflatex_build, vars=pdflatex_vardeps)
15943+cls.scan = scan
15944+
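The apply_tex method above reads a handful of optional attributes from the task generator (type, outs, prompt, deps), so a document is typically declared as in the following sketch; the file names are illustrative:

def configure(conf):
	conf.check_tool('tex')

def build(bld):
	bld(features='tex',
		type='pdflatex',               # or 'latex'; defaults to 'pdflatex'
		source='report.tex',
		outs='ps',                     # extra conversions: 'ps' and/or 'pdf'
		prompt=0,                      # 0 -> run in -interaction=batchmode
		deps='chapter1.tex logo.eps')  # manual dependencies, in addition to the scanned \include/\input files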
15945diff --git a/buildtools/wafadmin/Tools/unittestw.py b/buildtools/wafadmin/Tools/unittestw.py
15946new file mode 100644
15947index 0000000..0e30a51
15948--- /dev/null
15949+++ b/buildtools/wafadmin/Tools/unittestw.py
15950@@ -0,0 +1,310 @@
15951+#!/usr/bin/env python
15952+# encoding: utf-8
15953+# Carlos Rafael Giani, 2006
15954+
15955+"""
15956+Unit tests run in the shutdown() method, and for c/c++ programs
15957+
15958+The programs to execute should NOT require any command-line parameters
15959+
15960+In the shutdown method, add the following code:
15961+
15962+ >>> def shutdown():
15963+ ... ut = UnitTest.unit_test()
15964+ ... ut.run()
15965+ ... ut.print_results()
15966+
15967+
15968+Each object to use as a unit test must be a program and must have X{obj.unit_test=1}
15969+"""
15970+import os, sys
15971+import Build, TaskGen, Utils, Options, Logs, Task
15972+from TaskGen import before, after, feature
15973+from Constants import *
15974+
15975+class unit_test(object):
15976+ "Unit test representation"
15977+ def __init__(self):
15978+ self.returncode_ok = 0 # Unit test returncode considered OK. All returncodes differing from this one
15979+ # will cause the unit test to be marked as "FAILED".
15980+
15981+ # The following variables are filled with data by run().
15982+
15983+ # print_results() uses these for printing the unit test summary,
15984+ # but if there is need for direct access to the results,
15985+ # they can be retrieved here, after calling run().
15986+
15987+ self.num_tests_ok = 0 # Number of successful unit tests
15988+ self.num_tests_failed = 0 # Number of failed unit tests
15989+ self.num_tests_err = 0 # Tests that have not even run
15990+ self.total_num_tests = 0 # Total amount of unit tests
15991+ self.max_label_length = 0 # Maximum label length (pretty-print the output)
15992+
15993+ self.unit_tests = Utils.ordered_dict() # Unit test dictionary. Key: the label (unit test filename relative
15994+ # to the build dir), value: unit test filename with absolute path
15995+ self.unit_test_results = {} # Dictionary containing the unit test results.
15996+ # Key: the label, value: result (true = success false = failure)
15997+ self.unit_test_erroneous = {} # Dictionary indicating erroneous unit tests.
15998+ # Key: the label, value: true = unit test has an error false = unit test is ok
15999+ self.change_to_testfile_dir = False #True if the test file needs to be executed from the same dir
16000+ self.want_to_see_test_output = False #True to see the stdout from the testfile (for example check suites)
16001+ self.want_to_see_test_error = False #True to see the stderr from the testfile (for example check suites)
16002+ self.run_if_waf_does = 'check' #build was the old default
16003+
16004+ def run(self):
16005+ "Run the unit tests and gather results (note: no output here)"
16006+
16007+ self.num_tests_ok = 0
16008+ self.num_tests_failed = 0
16009+ self.num_tests_err = 0
16010+ self.total_num_tests = 0
16011+ self.max_label_length = 0
16012+
16013+ self.unit_tests = Utils.ordered_dict()
16014+ self.unit_test_results = {}
16015+ self.unit_test_erroneous = {}
16016+
16017+ ld_library_path = []
16018+
16019+ # If waf is not building, don't run anything
16020+ if not Options.commands[self.run_if_waf_does]: return
16021+
16022+ # Get the paths for the shared libraries, and obtain the unit tests to execute
16023+ for obj in Build.bld.all_task_gen:
16024+ try:
16025+ link_task = obj.link_task
16026+ except AttributeError:
16027+ pass
16028+ else:
16029+ lib_path = link_task.outputs[0].parent.abspath(obj.env)
16030+ if lib_path not in ld_library_path:
16031+ ld_library_path.append(lib_path)
16032+
16033+ unit_test = getattr(obj, 'unit_test', '')
16034+ if unit_test and 'cprogram' in obj.features:
16035+ try:
16036+ output = obj.path
16037+ filename = os.path.join(output.abspath(obj.env), obj.target)
16038+ srcdir = output.abspath()
16039+ label = os.path.join(output.bldpath(obj.env), obj.target)
16040+ self.max_label_length = max(self.max_label_length, len(label))
16041+ self.unit_tests[label] = (filename, srcdir)
16042+ except KeyError:
16043+ pass
16044+ self.total_num_tests = len(self.unit_tests)
16045+ # Now run the unit tests
16046+ Utils.pprint('GREEN', 'Running the unit tests')
16047+ count = 0
16048+ result = 1
16049+
16050+ for label in self.unit_tests.allkeys:
16051+ file_and_src = self.unit_tests[label]
16052+ filename = file_and_src[0]
16053+ srcdir = file_and_src[1]
16054+ count += 1
16055+ line = Build.bld.progress_line(count, self.total_num_tests, Logs.colors.GREEN, Logs.colors.NORMAL)
16056+ if Options.options.progress_bar and line:
16057+ sys.stderr.write(line)
16058+ sys.stderr.flush()
16059+ try:
16060+ kwargs = {}
16061+ kwargs['env'] = os.environ.copy()
16062+ if self.change_to_testfile_dir:
16063+ kwargs['cwd'] = srcdir
16064+ if not self.want_to_see_test_output:
16065+ kwargs['stdout'] = Utils.pproc.PIPE # PIPE for ignoring output
16066+ if not self.want_to_see_test_error:
16067+ kwargs['stderr'] = Utils.pproc.PIPE # PIPE for ignoring output
16068+ if ld_library_path:
16069+ v = kwargs['env']
16070+ def add_path(dct, path, var):
16071+ dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
16072+ if sys.platform == 'win32':
16073+ add_path(v, ld_library_path, 'PATH')
16074+ elif sys.platform == 'darwin':
16075+ add_path(v, ld_library_path, 'DYLD_LIBRARY_PATH')
16076+ add_path(v, ld_library_path, 'LD_LIBRARY_PATH')
16077+ else:
16078+ add_path(v, ld_library_path, 'LD_LIBRARY_PATH')
16079+
16080+ pp = Utils.pproc.Popen(filename, **kwargs)
16081+ (out, err) = pp.communicate() # uh, and the output is ignored?? - fortunately this is going to disappear
16082+
16083+ result = int(pp.returncode == self.returncode_ok)
16084+
16085+ if result:
16086+ self.num_tests_ok += 1
16087+ else:
16088+ self.num_tests_failed += 1
16089+
16090+ self.unit_test_results[label] = result
16091+ self.unit_test_erroneous[label] = 0
16092+ except OSError:
16093+ self.unit_test_erroneous[label] = 1
16094+ self.num_tests_err += 1
16095+ except KeyboardInterrupt:
16096+ pass
16097+ if Options.options.progress_bar: sys.stdout.write(Logs.colors.cursor_on)
16098+
16099+ def print_results(self):
16100+ "Pretty-prints a summary of all unit tests, along with some statistics"
16101+
16102+ # If waf is not building, don't output anything
16103+ if not Options.commands[self.run_if_waf_does]: return
16104+
16105+ p = Utils.pprint
16106+ # Early quit if no tests were performed
16107+ if self.total_num_tests == 0:
16108+ p('YELLOW', 'No unit tests present')
16109+ return
16110+
16111+ for label in self.unit_tests.allkeys:
16112+ filename = self.unit_tests[label]
16113+ err = 0
16114+ result = 0
16115+
16116+ try: err = self.unit_test_erroneous[label]
16117+ except KeyError: pass
16118+
16119+ try: result = self.unit_test_results[label]
16120+ except KeyError: pass
16121+
16122+ n = self.max_label_length - len(label)
16123+ if err: n += 4
16124+ elif result: n += 7
16125+ else: n += 3
16126+
16127+ line = '%s %s' % (label, '.' * n)
16128+
16129+ if err: p('RED', '%sERROR' % line)
16130+ elif result: p('GREEN', '%sOK' % line)
16131+ else: p('YELLOW', '%sFAILED' % line)
16132+
16133+ percentage_ok = float(self.num_tests_ok) / float(self.total_num_tests) * 100.0
16134+ percentage_failed = float(self.num_tests_failed) / float(self.total_num_tests) * 100.0
16135+ percentage_erroneous = float(self.num_tests_err) / float(self.total_num_tests) * 100.0
16136+
16137+ p('NORMAL', '''
16138+Successful tests: %i (%.1f%%)
16139+Failed tests: %i (%.1f%%)
16140+Erroneous tests: %i (%.1f%%)
16141+
16142+Total number of tests: %i
16143+''' % (self.num_tests_ok, percentage_ok, self.num_tests_failed, percentage_failed,
16144+ self.num_tests_err, percentage_erroneous, self.total_num_tests))
16145+ p('GREEN', 'Unit tests finished')
16146+
16147+
16148+############################################################################################
16149+
16150+"""
16151+New unit test system
16152+
16153+The targets with feature 'test' are executed after they are built
16154+bld(features='cprogram cc test', ...)
16155+
16156+To display the results:
16157+import UnitTest
16158+bld.add_post_fun(UnitTest.summary)
16159+"""
16160+
16161+import threading
16162+testlock = threading.Lock()
16163+
16164+def set_options(opt):
16165+ opt.add_option('--alltests', action='store_true', default=True, help='Exec all unit tests', dest='all_tests')
16166+
16167+@feature('test')
16168+@after('apply_link', 'vars_target_cprogram')
16169+def make_test(self):
16170+ if not 'cprogram' in self.features:
16171+ Logs.error('test cannot be executed %s' % self)
16172+ return
16173+
16174+ self.default_install_path = None
16175+ self.create_task('utest', self.link_task.outputs)
16176+
16177+def exec_test(self):
16178+
16179+ status = 0
16180+
16181+ variant = self.env.variant()
16182+
16183+ filename = self.inputs[0].abspath(self.env)
16184+ self.ut_exec = getattr(self, 'ut_exec', [filename])
16185+ if getattr(self.generator, 'ut_fun', None):
16186+ self.generator.ut_fun(self)
16187+
16188+ try:
16189+ fu = getattr(self.generator.bld, 'all_test_paths')
16190+ except AttributeError:
16191+ fu = os.environ.copy()
16192+ self.generator.bld.all_test_paths = fu
16193+
16194+ lst = []
16195+ for obj in self.generator.bld.all_task_gen:
16196+ link_task = getattr(obj, 'link_task', None)
16197+ if link_task and link_task.env.variant() == variant:
16198+ lst.append(link_task.outputs[0].parent.abspath(obj.env))
16199+
16200+ def add_path(dct, path, var):
16201+ dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
16202+
16203+ if sys.platform == 'win32':
16204+ add_path(fu, lst, 'PATH')
16205+ elif sys.platform == 'darwin':
16206+ add_path(fu, lst, 'DYLD_LIBRARY_PATH')
16207+ add_path(fu, lst, 'LD_LIBRARY_PATH')
16208+ else:
16209+ add_path(fu, lst, 'LD_LIBRARY_PATH')
16210+
16211+
16212+ cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath(self.env)
16213+ proc = Utils.pproc.Popen(self.ut_exec, cwd=cwd, env=fu, stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE)
16214+ (stdout, stderr) = proc.communicate()
16215+
16216+ tup = (filename, proc.returncode, stdout, stderr)
16217+ self.generator.utest_result = tup
16218+
16219+ testlock.acquire()
16220+ try:
16221+ bld = self.generator.bld
16222+ Logs.debug("ut: %r", tup)
16223+ try:
16224+ bld.utest_results.append(tup)
16225+ except AttributeError:
16226+ bld.utest_results = [tup]
16227+ finally:
16228+ testlock.release()
16229+
16230+cls = Task.task_type_from_func('utest', func=exec_test, color='PINK', ext_in='.bin')
16231+
16232+old = cls.runnable_status
16233+def test_status(self):
16234+ ret = old(self)
16235+ if ret == SKIP_ME and getattr(Options.options, 'all_tests', False):
16236+ return RUN_ME
16237+ return ret
16238+
16239+cls.runnable_status = test_status
16240+cls.quiet = 1
16241+
16242+def summary(bld):
16243+ lst = getattr(bld, 'utest_results', [])
16244+ if lst:
16245+ Utils.pprint('CYAN', 'execution summary')
16246+
16247+ total = len(lst)
16248+ tfail = len([x for x in lst if x[1]])
16249+
16250+ Utils.pprint('CYAN', ' tests that pass %d/%d' % (total-tfail, total))
16251+ for (f, code, out, err) in lst:
16252+ if not code:
16253+ Utils.pprint('CYAN', ' %s' % f)
16254+
16255+ Utils.pprint('CYAN', ' tests that fail %d/%d' % (tfail, total))
16256+ for (f, code, out, err) in lst:
16257+ if code:
16258+ Utils.pprint('CYAN', ' %s' % f)
16259+
16260+
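Putting the feature-based flavour described in the docstring above into a complete wscript might look like the sketch below; the test program name is illustrative, and the module is imported under the file name it has in this tree (unittestw) rather than the older UnitTest name used in the docstring:

import unittestw

def set_options(opt):
	opt.tool_options('unittestw')   # adds --alltests

def configure(conf):
	conf.check_tool('compiler_cc unittestw')

def build(bld):
	bld(features='cc cprogram test', source='test_foo.c', target='test_foo')
	bld.add_post_fun(unittestw.summary)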
16261diff --git a/buildtools/wafadmin/Tools/vala.py b/buildtools/wafadmin/Tools/vala.py
16262new file mode 100644
16263index 0000000..753ee8d
16264--- /dev/null
16265+++ b/buildtools/wafadmin/Tools/vala.py
16266@@ -0,0 +1,308 @@
16267+#!/usr/bin/env python
16268+# encoding: utf-8
16269+# Ali Sabil, 2007
16270+
16271+import os.path, shutil
16272+import Task, Runner, Utils, Logs, Build, Node, Options
16273+from TaskGen import extension, after, before
16274+
16275+EXT_VALA = ['.vala', '.gs']
16276+
16277+class valac_task(Task.Task):
16278+
16279+ vars = ("VALAC", "VALAC_VERSION", "VALAFLAGS")
16280+ before = ("cc", "cxx")
16281+
16282+ def run(self):
16283+ env = self.env
16284+ inputs = [a.srcpath(env) for a in self.inputs]
16285+ valac = env['VALAC']
16286+ vala_flags = env.get_flat('VALAFLAGS')
16287+ top_src = self.generator.bld.srcnode.abspath()
16288+ top_bld = self.generator.bld.srcnode.abspath(env)
16289+
16290+ if env['VALAC_VERSION'] > (0, 1, 6):
16291+ cmd = [valac, '-C', '--quiet', vala_flags]
16292+ else:
16293+ cmd = [valac, '-C', vala_flags]
16294+
16295+ if self.threading:
16296+ cmd.append('--thread')
16297+
16298+ if self.profile:
16299+ cmd.append('--profile=%s' % self.profile)
16300+
16301+ if self.target_glib:
16302+ cmd.append('--target-glib=%s' % self.target_glib)
16303+
16304+ features = self.generator.features
16305+
16306+ if 'cshlib' in features or 'cstaticlib' in features:
16307+ output_dir = self.outputs[0].bld_dir(env)
16308+ cmd.append('--library ' + self.target)
16309+ if env['VALAC_VERSION'] >= (0, 7, 0):
16310+ for x in self.outputs:
16311+ if x.name.endswith('.h'):
16312+ cmd.append('--header ' + x.bldpath(self.env))
16313+ cmd.append('--basedir ' + top_src)
16314+ cmd.append('-d ' + top_bld)
16315+ if env['VALAC_VERSION'] > (0, 7, 2) and hasattr(self, 'gir'):
16316+ cmd.append('--gir=%s.gir' % self.gir)
16317+
16318+ else:
16319+ output_dir = self.outputs[0].bld_dir(env)
16320+ cmd.append('-d %s' % output_dir)
16321+
16322+ for vapi_dir in self.vapi_dirs:
16323+ cmd.append('--vapidir=%s' % vapi_dir)
16324+
16325+ for package in self.packages:
16326+ cmd.append('--pkg %s' % package)
16327+
16328+ for package in self.packages_private:
16329+ cmd.append('--pkg %s' % package)
16330+
16331+ cmd.append(" ".join(inputs))
16332+ result = self.generator.bld.exec_command(" ".join(cmd))
16333+
16334+ if not 'cprogram' in features:
16335+ # generate the .deps file
16336+ if self.packages:
16337+ filename = os.path.join(self.generator.path.abspath(env), "%s.deps" % self.target)
16338+ deps = open(filename, 'w')
16339+ for package in self.packages:
16340+ deps.write(package + '\n')
16341+ deps.close()
16342+
16343+			# handle vala 0.1.6, which doesn't honor --directory for the generated .vapi
16344+			self._fix_output("../%s.vapi" % self.target)
16345+			# handle vala >= 0.1.7, which has a weird definition of --directory
16346+			self._fix_output("%s.vapi" % self.target)
16347+			# handle vala >= 0.2.0, which doesn't honor --directory for the generated .gidl
16348+			self._fix_output("%s.gidl" % self.target)
16349+			# handle vala >= 0.3.6, which doesn't honor --directory for the generated .gir
16350+ self._fix_output("%s.gir" % self.target)
16351+ if hasattr(self, 'gir'):
16352+ self._fix_output("%s.gir" % self.gir)
16353+
16354+ first = None
16355+ for node in self.outputs:
16356+ if not first:
16357+ first = node
16358+ else:
16359+ if first.parent.id != node.parent.id:
16360+ # issue #483
16361+ if env['VALAC_VERSION'] < (0, 7, 0):
16362+ shutil.move(first.parent.abspath(self.env) + os.sep + node.name, node.abspath(self.env))
16363+ return result
16364+
16365+ def install(self):
16366+ bld = self.generator.bld
16367+ features = self.generator.features
16368+
16369+ if self.attr("install_path") and ("cshlib" in features or "cstaticlib" in features):
16370+ headers_list = [o for o in self.outputs if o.suffix() == ".h"]
16371+ vapi_list = [o for o in self.outputs if (o.suffix() in (".vapi", ".deps"))]
16372+ gir_list = [o for o in self.outputs if o.suffix() == ".gir"]
16373+
16374+ for header in headers_list:
16375+ top_src = self.generator.bld.srcnode
16376+ package = self.env['PACKAGE']
16377+ try:
16378+ api_version = Utils.g_module.API_VERSION
16379+ except AttributeError:
16380+ version = Utils.g_module.VERSION.split(".")
16381+ if version[0] == "0":
16382+ api_version = "0." + version[1]
16383+ else:
16384+ api_version = version[0] + ".0"
16385+ install_path = '${INCLUDEDIR}/%s-%s/%s' % (package, api_version, header.relpath_gen(top_src))
16386+ bld.install_as(install_path, header, self.env)
16387+ bld.install_files('${DATAROOTDIR}/vala/vapi', vapi_list, self.env)
16388+ bld.install_files('${DATAROOTDIR}/gir-1.0', gir_list, self.env)
16389+
16390+ def _fix_output(self, output):
16391+ top_bld = self.generator.bld.srcnode.abspath(self.env)
16392+ try:
16393+ src = os.path.join(top_bld, output)
16394+ dst = self.generator.path.abspath (self.env)
16395+ shutil.move(src, dst)
16396+ except:
16397+ pass
16398+
16399+@extension(EXT_VALA)
16400+def vala_file(self, node):
16401+ valatask = getattr(self, "valatask", None)
16402+ # there is only one vala task and it compiles all vala files .. :-/
16403+ if not valatask:
16404+ valatask = self.create_task('valac')
16405+ self.valatask = valatask
16406+ self.includes = Utils.to_list(getattr(self, 'includes', []))
16407+ self.uselib = self.to_list(self.uselib)
16408+ valatask.packages = []
16409+ valatask.packages_private = Utils.to_list(getattr(self, 'packages_private', []))
16410+ valatask.vapi_dirs = []
16411+ valatask.target = self.target
16412+ valatask.threading = False
16413+ valatask.install_path = self.install_path
16414+ valatask.profile = getattr (self, 'profile', 'gobject')
16415+ valatask.target_glib = None #Deprecated
16416+
16417+ packages = Utils.to_list(getattr(self, 'packages', []))
16418+ vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
16419+ includes = []
16420+
16421+ if hasattr(self, 'uselib_local'):
16422+ local_packages = Utils.to_list(self.uselib_local)
16423+ seen = []
16424+ while len(local_packages) > 0:
16425+ package = local_packages.pop()
16426+ if package in seen:
16427+ continue
16428+ seen.append(package)
16429+
16430+ # check if the package exists
16431+ package_obj = self.name_to_obj(package)
16432+ if not package_obj:
16433+ raise Utils.WafError("object '%s' was not found in uselib_local (required by '%s')" % (package, self.name))
16434+
16435+ package_name = package_obj.target
16436+ package_node = package_obj.path
16437+ package_dir = package_node.relpath_gen(self.path)
16438+
16439+ for task in package_obj.tasks:
16440+ for output in task.outputs:
16441+ if output.name == package_name + ".vapi":
16442+ valatask.set_run_after(task)
16443+ if package_name not in packages:
16444+ packages.append(package_name)
16445+ if package_dir not in vapi_dirs:
16446+ vapi_dirs.append(package_dir)
16447+ if package_dir not in includes:
16448+ includes.append(package_dir)
16449+
16450+ if hasattr(package_obj, 'uselib_local'):
16451+ lst = self.to_list(package_obj.uselib_local)
16452+ lst.reverse()
16453+ local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages
16454+
16455+ valatask.packages = packages
16456+ for vapi_dir in vapi_dirs:
16457+ try:
16458+ valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
16459+ valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath(self.env))
16460+ except AttributeError:
16461+ Logs.warn("Unable to locate Vala API directory: '%s'" % vapi_dir)
16462+
16463+ self.includes.append(node.bld.srcnode.abspath())
16464+ self.includes.append(node.bld.srcnode.abspath(self.env))
16465+ for include in includes:
16466+ try:
16467+ self.includes.append(self.path.find_dir(include).abspath())
16468+ self.includes.append(self.path.find_dir(include).abspath(self.env))
16469+ except AttributeError:
16470+ Logs.warn("Unable to locate include directory: '%s'" % include)
16471+
16472+ if valatask.profile == 'gobject':
16473+ if hasattr(self, 'target_glib'):
16474+			Logs.warn ('target_glib on vala tasks is deprecated, use --vala-target-glib=MAJOR.MINOR from the vala tool options')
16475+
16476+ if getattr(Options.options, 'vala_target_glib', None):
16477+ valatask.target_glib = Options.options.vala_target_glib
16478+
16479+ if not 'GOBJECT' in self.uselib:
16480+ self.uselib.append('GOBJECT')
16481+
16482+ if hasattr(self, 'threading'):
16483+ if valatask.profile == 'gobject':
16484+ valatask.threading = self.threading
16485+ if not 'GTHREAD' in self.uselib:
16486+ self.uselib.append('GTHREAD')
16487+ else:
16488+			# Vala does not have threading support in the dova or posix profiles
16489+ Logs.warn("Profile %s does not have threading support" % valatask.profile)
16490+
16491+ if hasattr(self, 'gir'):
16492+ valatask.gir = self.gir
16493+
16494+ env = valatask.env
16495+
16496+ output_nodes = []
16497+
16498+ c_node = node.change_ext('.c')
16499+ output_nodes.append(c_node)
16500+ self.allnodes.append(c_node)
16501+
16502+ if env['VALAC_VERSION'] < (0, 7, 0):
16503+ output_nodes.append(node.change_ext('.h'))
16504+ else:
16505+ if not 'cprogram' in self.features:
16506+ output_nodes.append(self.path.find_or_declare('%s.h' % self.target))
16507+
16508+ if not 'cprogram' in self.features:
16509+ output_nodes.append(self.path.find_or_declare('%s.vapi' % self.target))
16510+ if env['VALAC_VERSION'] > (0, 7, 2):
16511+ if hasattr(self, 'gir'):
16512+ output_nodes.append(self.path.find_or_declare('%s.gir' % self.gir))
16513+ elif env['VALAC_VERSION'] > (0, 3, 5):
16514+ output_nodes.append(self.path.find_or_declare('%s.gir' % self.target))
16515+ elif env['VALAC_VERSION'] > (0, 1, 7):
16516+ output_nodes.append(self.path.find_or_declare('%s.gidl' % self.target))
16517+ if valatask.packages:
16518+ output_nodes.append(self.path.find_or_declare('%s.deps' % self.target))
16519+
16520+ valatask.inputs.append(node)
16521+ valatask.outputs.extend(output_nodes)
16522+
16523+def detect(conf):
16524+ min_version = (0, 1, 6)
16525+ min_version_str = "%d.%d.%d" % min_version
16526+
16527+ valac = conf.find_program('valac', var='VALAC', mandatory=True)
16528+
16529+ if not conf.env["HAVE_GOBJECT"]:
16530+ pkg_args = {'package': 'gobject-2.0',
16531+ 'uselib_store': 'GOBJECT',
16532+ 'args': '--cflags --libs'}
16533+ if getattr(Options.options, 'vala_target_glib', None):
16534+ pkg_args['atleast_version'] = Options.options.vala_target_glib
16535+
16536+ conf.check_cfg(**pkg_args)
16537+
16538+ if not conf.env["HAVE_GTHREAD"]:
16539+ pkg_args = {'package': 'gthread-2.0',
16540+ 'uselib_store': 'GTHREAD',
16541+ 'args': '--cflags --libs'}
16542+ if getattr(Options.options, 'vala_target_glib', None):
16543+ pkg_args['atleast_version'] = Options.options.vala_target_glib
16544+
16545+ conf.check_cfg(**pkg_args)
16546+
16547+ try:
16548+ output = Utils.cmd_output(valac + " --version", silent=True)
16549+ version = output.split(' ', 1)[-1].strip().split(".")[0:3]
16550+ version = [int(x) for x in version]
16551+ valac_version = tuple(version)
16552+ except Exception:
16553+ valac_version = (0, 0, 0)
16554+
16555+ conf.check_message('program version',
16556+ 'valac >= ' + min_version_str,
16557+ valac_version >= min_version,
16558+ "%d.%d.%d" % valac_version)
16559+
16560+ conf.check_tool('gnu_dirs')
16561+
16562+ if valac_version < min_version:
16563+ conf.fatal("valac version too old to be used with this tool")
16564+ return
16565+
16566+ conf.env['VALAC_VERSION'] = valac_version
16567+ conf.env['VALAFLAGS'] = ''
16568+
16569+def set_options (opt):
16570+ valaopts = opt.add_option_group('Vala Compiler Options')
16571+ valaopts.add_option ('--vala-target-glib', default=None,
16572+ dest='vala_target_glib', metavar='MAJOR.MINOR',
16573+ help='Target version of glib for Vala GObject code generation')
16574+
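A usage sketch for the attributes read in vala_file above (packages, vapi_dirs, threading, and so on); the package, target and file names are illustrative, and GTK is only an example of a uselib configured through check_cfg:

def set_options(opt):
	opt.tool_options('vala')

def configure(conf):
	conf.check_tool('compiler_cc vala')
	conf.check_cfg(package='gtk+-2.0', uselib_store='GTK', args='--cflags --libs', mandatory=True)

def build(bld):
	bld(features='cc cprogram',
		source='main.vala',
		target='hello',
		install_path='${PREFIX}/bin',  # set explicitly, the vala task records self.install_path
		packages='gtk+-2.0',           # forwarded to valac as --pkg
		uselib='GTK GOBJECT',          # C-level flags for the generated .c files
		vapi_dirs='vapi',              # extra --vapidir entries, relative to the wscript
		threading=True)                # adds --thread and the GTHREAD uselib (gobject profile only)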
16575diff --git a/buildtools/wafadmin/Tools/winres.py b/buildtools/wafadmin/Tools/winres.py
16576new file mode 100644
16577index 0000000..2500d43
16578--- /dev/null
16579+++ b/buildtools/wafadmin/Tools/winres.py
16580@@ -0,0 +1,45 @@
16581+#!/usr/bin/env python
16582+# encoding: utf-8
16583+# Brant Young, 2007
16584+
16585+"This hook is called when the class cpp/cc task generator encounters a '.rc' file: X{.rc -> [.res|.rc.o]}"
16586+
16587+import os, sys, re
16588+import TaskGen, Task
16589+from Utils import quote_whitespace
16590+from TaskGen import extension
16591+
16592+EXT_WINRC = ['.rc']
16593+
16594+winrc_str = '${WINRC} ${_CPPDEFFLAGS} ${_CCDEFFLAGS} ${WINRCFLAGS} ${_CPPINCFLAGS} ${_CCINCFLAGS} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
16595+
16596+@extension(EXT_WINRC)
16597+def rc_file(self, node):
16598+ obj_ext = '.rc.o'
16599+ if self.env['WINRC_TGT_F'] == '/fo': obj_ext = '.res'
16600+
16601+ rctask = self.create_task('winrc', node, node.change_ext(obj_ext))
16602+ self.compiled_tasks.append(rctask)
16603+
16604+# create our action, for use with rc file
16605+Task.simple_task_type('winrc', winrc_str, color='BLUE', before='cc cxx', shell=False)
16606+
16607+def detect(conf):
16608+ v = conf.env
16609+
16610+ winrc = v['WINRC']
16611+ v['WINRC_TGT_F'] = '-o'
16612+ v['WINRC_SRC_F'] = '-i'
16613+ # find rc.exe
16614+ if not winrc:
16615+ if v['CC_NAME'] in ['gcc', 'cc', 'g++', 'c++']:
16616+ winrc = conf.find_program('windres', var='WINRC', path_list = v['PATH'])
16617+ elif v['CC_NAME'] == 'msvc':
16618+ winrc = conf.find_program('RC', var='WINRC', path_list = v['PATH'])
16619+ v['WINRC_TGT_F'] = '/fo'
16620+ v['WINRC_SRC_F'] = ''
16621+ if not winrc:
16622+ conf.fatal('winrc was not found!')
16623+
16624+ v['WINRCFLAGS'] = ''
16625+
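Because the extension hook above registers itself for '.rc' files, resource scripts are simply listed next to the C sources; the names below are illustrative:

def configure(conf):
	conf.check_tool('compiler_cc winres')

def build(bld):
	bld(features='cc cprogram',
		source='main.c app.rc',   # app.rc is compiled by the winrc task defined above
		target='app')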
16626diff --git a/buildtools/wafadmin/Tools/xlc.py b/buildtools/wafadmin/Tools/xlc.py
16627new file mode 100644
16628index 0000000..e33b7a1
16629--- /dev/null
16630+++ b/buildtools/wafadmin/Tools/xlc.py
16631@@ -0,0 +1,78 @@
16632+#!/usr/bin/env python
16633+# encoding: utf-8
16634+# Thomas Nagy, 2006-2008 (ita)
16635+# Ralf Habacker, 2006 (rh)
16636+# Yinon Ehrlich, 2009
16637+# Michael Kuhn, 2009
16638+
16639+import os, sys
16640+import Configure, Options, Utils
16641+import ccroot, ar
16642+from Configure import conftest
16643+
16644+@conftest
16645+def find_xlc(conf):
16646+ cc = conf.find_program(['xlc_r', 'xlc'], var='CC', mandatory=True)
16647+ cc = conf.cmd_to_list(cc)
16648+ conf.env.CC_NAME = 'xlc'
16649+ conf.env.CC = cc
16650+
16651+@conftest
16652+def find_cpp(conf):
16653+ v = conf.env
16654+ cpp = None
16655+ if v['CPP']: cpp = v['CPP']
16656+ elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
16657+ #if not cpp: cpp = v['CC']
16658+ v['CPP'] = cpp
16659+
16660+@conftest
16661+def xlc_common_flags(conf):
16662+ v = conf.env
16663+
16664+ # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
16665+ v['CCFLAGS_DEBUG'] = ['-g']
16666+ v['CCFLAGS_RELEASE'] = ['-O2']
16667+
16668+ v['CC_SRC_F'] = ''
16669+ v['CC_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
16670+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
16671+
16672+ # linker
16673+ if not v['LINK_CC']: v['LINK_CC'] = v['CC']
16674+ v['CCLNK_SRC_F'] = ''
16675+ v['CCLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
16676+
16677+ v['LIB_ST'] = '-l%s' # template for adding libs
16678+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
16679+ v['STATICLIB_ST'] = '-l%s'
16680+ v['STATICLIBPATH_ST'] = '-L%s'
16681+ v['RPATH_ST'] = '-Wl,-rpath,%s'
16682+ v['CCDEFINES_ST'] = '-D%s'
16683+
16684+ v['SONAME_ST'] = ''
16685+ v['SHLIB_MARKER'] = ''
16686+ v['STATICLIB_MARKER'] = ''
16687+ v['FULLSTATIC_MARKER'] = '-static'
16688+
16689+ # program
16690+ v['program_LINKFLAGS'] = ['-Wl,-brtl']
16691+ v['program_PATTERN'] = '%s'
16692+
16693+ # shared library
16694+	v['shlib_CCFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
16695+ v['shlib_LINKFLAGS'] = ['-G', '-Wl,-brtl,-bexpfull']
16696+ v['shlib_PATTERN'] = 'lib%s.so'
16697+
16698+ # static lib
16699+ v['staticlib_LINKFLAGS'] = ''
16700+ v['staticlib_PATTERN'] = 'lib%s.a'
16701+
16702+def detect(conf):
16703+ conf.find_xlc()
16704+ conf.find_cpp()
16705+ conf.find_ar()
16706+ conf.xlc_common_flags()
16707+ conf.cc_load_tools()
16708+ conf.cc_add_flags()
16709+ conf.link_add_flags()
16710diff --git a/buildtools/wafadmin/Tools/xlcxx.py b/buildtools/wafadmin/Tools/xlcxx.py
16711new file mode 100644
16712index 0000000..6e84662
16713--- /dev/null
16714+++ b/buildtools/wafadmin/Tools/xlcxx.py
16715@@ -0,0 +1,78 @@
16716+#!/usr/bin/env python
16717+# encoding: utf-8
16718+# Thomas Nagy, 2006 (ita)
16719+# Ralf Habacker, 2006 (rh)
16720+# Yinon Ehrlich, 2009
16721+# Michael Kuhn, 2009
16722+
16723+import os, sys
16724+import Configure, Options, Utils
16725+import ccroot, ar
16726+from Configure import conftest
16727+
16728+@conftest
16729+def find_xlcxx(conf):
16730+ cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX', mandatory=True)
16731+ cxx = conf.cmd_to_list(cxx)
16732+ conf.env.CXX_NAME = 'xlc++'
16733+ conf.env.CXX = cxx
16734+
16735+@conftest
16736+def find_cpp(conf):
16737+ v = conf.env
16738+ cpp = None
16739+ if v['CPP']: cpp = v['CPP']
16740+ elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
16741+ #if not cpp: cpp = v['CXX']
16742+ v['CPP'] = cpp
16743+
16744+@conftest
16745+def xlcxx_common_flags(conf):
16746+ v = conf.env
16747+
16748+ # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
16749+ v['CXXFLAGS_DEBUG'] = ['-g']
16750+ v['CXXFLAGS_RELEASE'] = ['-O2']
16751+
16752+ v['CXX_SRC_F'] = ''
16753+ v['CXX_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
16754+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
16755+
16756+ # linker
16757+ if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
16758+ v['CXXLNK_SRC_F'] = ''
16759+ v['CXXLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
16760+
16761+ v['LIB_ST'] = '-l%s' # template for adding libs
16762+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
16763+ v['STATICLIB_ST'] = '-l%s'
16764+ v['STATICLIBPATH_ST'] = '-L%s'
16765+ v['RPATH_ST'] = '-Wl,-rpath,%s'
16766+ v['CXXDEFINES_ST'] = '-D%s'
16767+
16768+ v['SONAME_ST'] = ''
16769+ v['SHLIB_MARKER'] = ''
16770+ v['STATICLIB_MARKER'] = ''
16771+ v['FULLSTATIC_MARKER'] = '-static'
16772+
16773+ # program
16774+ v['program_LINKFLAGS'] = ['-Wl,-brtl']
16775+ v['program_PATTERN'] = '%s'
16776+
16777+ # shared library
16778+	v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
16779+ v['shlib_LINKFLAGS'] = ['-G', '-Wl,-brtl,-bexpfull']
16780+ v['shlib_PATTERN'] = 'lib%s.so'
16781+
16782+ # static lib
16783+ v['staticlib_LINKFLAGS'] = ''
16784+ v['staticlib_PATTERN'] = 'lib%s.a'
16785+
16786+def detect(conf):
16787+ conf.find_xlcxx()
16788+ conf.find_cpp()
16789+ conf.find_ar()
16790+ conf.xlcxx_common_flags()
16791+ conf.cxx_load_tools()
16792+ conf.cxx_add_flags()
16793+ conf.link_add_flags()
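xlc and xlcxx mirror the structure of the Sun tools earlier in this patch: detect() simply chains the conftests defined above. An explicit configuration sketch for AIX, with an illustrative target; normally compiler_cc/compiler_cxx would select these tools automatically there:

def configure(conf):
	conf.check_tool('xlc')     # find_xlc, find_cpp, find_ar, xlc_common_flags, ...
	conf.check_tool('xlcxx')

def build(bld):
	bld(features='cxx cshlib', source='foo.cpp', target='foo')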
16794diff --git a/buildtools/wafadmin/Utils.py b/buildtools/wafadmin/Utils.py
16795new file mode 100644
16796index 0000000..41dad57
16797--- /dev/null
16798+++ b/buildtools/wafadmin/Utils.py
16799@@ -0,0 +1,726 @@
16800+#!/usr/bin/env python
16801+# encoding: utf-8
16802+# Thomas Nagy, 2005 (ita)
16803+
16804+"""
16805+Utilities, the stable ones are the following:
16806+
16807+* h_file: compute a unique value for a file (hash), it uses
16808+ the module fnv if it is installed (see waf/utils/fnv & http://code.google.com/p/waf/wiki/FAQ)
16809+ else, md5 (see the python docs)
16810+
16811+ For large projects (projects with more than 15000 files) or slow hard disks and filesystems (HFS)
16812+ it is possible to use a hashing based on the path and the size (may give broken cache results)
16813+ The method h_file MUST raise an OSError if the file is a folder
16814+
16815+ import stat
16816+ def h_file(filename):
16817+ st = os.stat(filename)
16818+ if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
16819+ m = Utils.md5()
16820+ m.update(str(st.st_mtime))
16821+ m.update(str(st.st_size))
16822+ m.update(filename)
16823+ return m.digest()
16824+
16825+ To replace the function in your project, use something like this:
16826+ import Utils
16827+ Utils.h_file = h_file
16828+
16829+* h_list
16830+* h_fun
16831+* get_term_cols
16832+* ordered_dict
16833+
16834+"""
16835+
16836+import os, sys, imp, string, errno, traceback, inspect, re, shutil, datetime, gc
16837+
16838+# In python 3.0 we can get rid of all this
16839+try: from UserDict import UserDict
16840+except ImportError: from collections import UserDict
16841+if sys.hexversion >= 0x2060000 or os.name == 'java':
16842+ import subprocess as pproc
16843+else:
16844+ import pproc
16845+import Logs
16846+from Constants import *
16847+
16848+try:
16849+ from collections import deque
16850+except ImportError:
16851+ class deque(list):
16852+ def popleft(self):
16853+ return self.pop(0)
16854+
16855+is_win32 = sys.platform == 'win32'
16856+
16857+try:
16858+ # defaultdict in python 2.5
16859+ from collections import defaultdict as DefaultDict
16860+except ImportError:
16861+ class DefaultDict(dict):
16862+ def __init__(self, default_factory):
16863+ super(DefaultDict, self).__init__()
16864+ self.default_factory = default_factory
16865+ def __getitem__(self, key):
16866+ try:
16867+ return super(DefaultDict, self).__getitem__(key)
16868+ except KeyError:
16869+ value = self.default_factory()
16870+ self[key] = value
16871+ return value
16872+
16873+class WafError(Exception):
16874+ def __init__(self, *args):
16875+ self.args = args
16876+ try:
16877+ self.stack = traceback.extract_stack()
16878+ except:
16879+ pass
16880+ Exception.__init__(self, *args)
16881+ def __str__(self):
16882+ return str(len(self.args) == 1 and self.args[0] or self.args)
16883+
16884+class WscriptError(WafError):
16885+ def __init__(self, message, wscript_file=None):
16886+ if wscript_file:
16887+ self.wscript_file = wscript_file
16888+ self.wscript_line = None
16889+ else:
16890+ try:
16891+ (self.wscript_file, self.wscript_line) = self.locate_error()
16892+ except:
16893+ (self.wscript_file, self.wscript_line) = (None, None)
16894+
16895+ msg_file_line = ''
16896+ if self.wscript_file:
16897+ msg_file_line = "%s:" % self.wscript_file
16898+ if self.wscript_line:
16899+ msg_file_line += "%s:" % self.wscript_line
16900+ err_message = "%s error: %s" % (msg_file_line, message)
16901+ WafError.__init__(self, err_message)
16902+
16903+ def locate_error(self):
16904+ stack = traceback.extract_stack()
16905+ stack.reverse()
16906+ for frame in stack:
16907+ file_name = os.path.basename(frame[0])
16908+ is_wscript = (file_name == WSCRIPT_FILE or file_name == WSCRIPT_BUILD_FILE)
16909+ if is_wscript:
16910+ return (frame[0], frame[1])
16911+ return (None, None)
16912+
16913+indicator = is_win32 and '\x1b[A\x1b[K%s%s%s\r' or '\x1b[K%s%s%s\r'
16914+
16915+try:
16916+ from fnv import new as md5
16917+ import Constants
16918+ Constants.SIG_NIL = 'signofnv'
16919+
16920+ def h_file(filename):
16921+ m = md5()
16922+ try:
16923+ m.hfile(filename)
16924+ x = m.digest()
16925+ if x is None: raise OSError("not a file")
16926+ return x
16927+ except SystemError:
16928+			raise OSError("not a file: " + filename)
16929+
16930+except ImportError:
16931+ try:
16932+ try:
16933+ from hashlib import md5
16934+ except ImportError:
16935+ from md5 import md5
16936+
16937+ def h_file(filename):
16938+ f = open(filename, 'rb')
16939+ m = md5()
16940+ while (filename):
16941+ filename = f.read(100000)
16942+ m.update(filename)
16943+ f.close()
16944+ return m.digest()
16945+ except ImportError:
16946+ # portability fixes may be added elsewhere (although, md5 should be everywhere by now)
16947+ md5 = None
16948+
16949+class ordered_dict(UserDict):
16950+ def __init__(self, dict = None):
16951+ self.allkeys = []
16952+ UserDict.__init__(self, dict)
16953+
16954+ def __delitem__(self, key):
16955+ self.allkeys.remove(key)
16956+ UserDict.__delitem__(self, key)
16957+
16958+ def __setitem__(self, key, item):
16959+ if key not in self.allkeys: self.allkeys.append(key)
16960+ UserDict.__setitem__(self, key, item)
16961+
16962+def exec_command(s, **kw):
16963+ if 'log' in kw:
16964+ kw['stdout'] = kw['stderr'] = kw['log']
16965+ del(kw['log'])
16966+ kw['shell'] = isinstance(s, str)
16967+
16968+ try:
16969+ proc = pproc.Popen(s, **kw)
16970+ return proc.wait()
16971+ except OSError:
16972+ return -1
16973+
16974+if is_win32:
16975+ def exec_command(s, **kw):
16976+ if 'log' in kw:
16977+ kw['stdout'] = kw['stderr'] = kw['log']
16978+ del(kw['log'])
16979+ kw['shell'] = isinstance(s, str)
16980+
16981+ if len(s) > 2000:
16982+ startupinfo = pproc.STARTUPINFO()
16983+ startupinfo.dwFlags |= pproc.STARTF_USESHOWWINDOW
16984+ kw['startupinfo'] = startupinfo
16985+
16986+ try:
16987+ if 'stdout' not in kw:
16988+ kw['stdout'] = pproc.PIPE
16989+ kw['stderr'] = pproc.PIPE
16990+ kw['universal_newlines'] = True
16991+ proc = pproc.Popen(s,**kw)
16992+ (stdout, stderr) = proc.communicate()
16993+ Logs.info(stdout)
16994+ if stderr:
16995+ Logs.error(stderr)
16996+ return proc.returncode
16997+ else:
16998+ proc = pproc.Popen(s,**kw)
16999+ return proc.wait()
17000+ except OSError:
17001+ return -1
17002+
17003+listdir = os.listdir
17004+if is_win32:
17005+ def listdir_win32(s):
17006+ if re.match('^[A-Za-z]:$', s):
17007+ # os.path.isdir fails if s contains only the drive name... (x:)
17008+ s += os.sep
17009+ if not os.path.isdir(s):
17010+ e = OSError()
17011+ e.errno = errno.ENOENT
17012+ raise e
17013+ return os.listdir(s)
17014+ listdir = listdir_win32
17015+
17016+def waf_version(mini = 0x010000, maxi = 0x100000):
17017+ "Halts if the waf version is wrong"
17018+ ver = HEXVERSION
17019+ try: min_val = mini + 0
17020+ except TypeError: min_val = int(mini.replace('.', '0'), 16)
17021+
17022+ if min_val > ver:
17023+ Logs.error("waf version should be at least %s (%s found)" % (mini, ver))
17024+ sys.exit(1)
17025+
17026+ try: max_val = maxi + 0
17027+ except TypeError: max_val = int(maxi.replace('.', '0'), 16)
17028+
17029+ if max_val < ver:
17030+ Logs.error("waf version should be at most %s (%s found)" % (maxi, ver))
17031+ sys.exit(1)
17032+
17033+def python_24_guard():
17034+ if sys.hexversion < 0x20400f0 or sys.hexversion >= 0x3000000:
17035+ raise ImportError("Waf requires Python >= 2.3 but the raw source requires Python 2.4, 2.5 or 2.6")
17036+
17037+def ex_stack():
17038+ exc_type, exc_value, tb = sys.exc_info()
17039+ if Logs.verbose > 1:
17040+ exc_lines = traceback.format_exception(exc_type, exc_value, tb)
17041+ return ''.join(exc_lines)
17042+ return str(exc_value)
17043+
17044+def to_list(sth):
17045+ if isinstance(sth, str):
17046+ return sth.split()
17047+ else:
17048+ return sth
17049+
17050+g_loaded_modules = {}
17051+"index modules by absolute path"
17052+
17053+g_module=None
17054+"the main module is special"
17055+
17056+def load_module(file_path, name=WSCRIPT_FILE):
17057+ "this function requires an absolute path"
17058+ try:
17059+ return g_loaded_modules[file_path]
17060+ except KeyError:
17061+ pass
17062+
17063+ module = imp.new_module(name)
17064+
17065+ try:
17066+ code = readf(file_path, m='rU')
17067+ except (IOError, OSError):
17068+ raise WscriptError('Could not read the file %r' % file_path)
17069+
17070+ module.waf_hash_val = code
17071+
17072+ dt = os.path.dirname(file_path)
17073+ sys.path.insert(0, dt)
17074+ try:
17075+ exec(compile(code, file_path, 'exec'), module.__dict__)
17076+ except Exception:
17077+ exc_type, exc_value, tb = sys.exc_info()
17078+ raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), file_path)
17079+ sys.path.remove(dt)
17080+
17081+ g_loaded_modules[file_path] = module
17082+
17083+ return module
17084+
17085+def set_main_module(file_path):
17086+ "Load custom options, if defined"
17087+ global g_module
17088+ g_module = load_module(file_path, 'wscript_main')
17089+ g_module.root_path = file_path
17090+
17091+ try:
17092+ g_module.APPNAME
17093+ except:
17094+ g_module.APPNAME = 'noname'
17095+ try:
17096+ g_module.VERSION
17097+ except:
17098+ g_module.VERSION = '1.0'
17099+
17100+ # note: to register the module globally, use the following:
17101+ # sys.modules['wscript_main'] = g_module
17102+
17103+def to_hashtable(s):
17104+ "used for importing env files"
17105+ tbl = {}
17106+ lst = s.split('\n')
17107+ for line in lst:
17108+ if not line: continue
17109+ mems = line.split('=')
17110+ tbl[mems[0]] = mems[1]
17111+ return tbl
17112+
17113+def get_term_cols():
17114+ "console width"
17115+ return 80
17116+try:
17117+ import struct, fcntl, termios
17118+except ImportError:
17119+ pass
17120+else:
17121+ if Logs.got_tty:
17122+ def myfun():
17123+ dummy_lines, cols = struct.unpack("HHHH", \
17124+ fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ , \
17125+ struct.pack("HHHH", 0, 0, 0, 0)))[:2]
17126+ return cols
17127+ # we actually try the function once to see if it is suitable
17128+ try:
17129+ myfun()
17130+ except:
17131+ pass
17132+ else:
17133+ get_term_cols = myfun
17134+
17135+rot_idx = 0
17136+rot_chr = ['\\', '|', '/', '-']
17137+"the rotation character in the progress bar"
17138+
17139+
17140+def split_path(path):
17141+ return path.split('/')
17142+
17143+def split_path_cygwin(path):
17144+ if path.startswith('//'):
17145+ ret = path.split('/')[2:]
17146+ ret[0] = '/' + ret[0]
17147+ return ret
17148+ return path.split('/')
17149+
17150+re_sp = re.compile('[/\\\\]')
17151+def split_path_win32(path):
17152+ if path.startswith('\\\\'):
17153+ ret = re.split(re_sp, path)[2:]
17154+ ret[0] = '\\' + ret[0]
17155+ return ret
17156+ return re.split(re_sp, path)
17157+
17158+if sys.platform == 'cygwin':
17159+ split_path = split_path_cygwin
17160+elif is_win32:
17161+ split_path = split_path_win32
17162+
17163+def copy_attrs(orig, dest, names, only_if_set=False):
17164+ for a in to_list(names):
17165+ u = getattr(orig, a, ())
17166+ if u or not only_if_set:
17167+ setattr(dest, a, u)
17168+
17169+def def_attrs(cls, **kw):
17170+ '''
17171+ set attributes for class.
17172+ @param cls [any class]: the class to update the given attributes in.
17173+ @param kw [dictionary]: dictionary of attributes names and values.
17174+
17175+ if the given class hasn't one (or more) of these attributes, add the attribute with its value to the class.
17176+ '''
17177+ for k, v in kw.iteritems():
17178+ if not hasattr(cls, k):
17179+ setattr(cls, k, v)
17180+
17181+def quote_define_name(path):
17182+ fu = re.compile("[^a-zA-Z0-9]").sub("_", path)
17183+ fu = fu.upper()
17184+ return fu
17185+
17186+def quote_whitespace(path):
17187+ return (path.strip().find(' ') > 0 and '"%s"' % path or path).replace('""', '"')
17188+
17189+def trimquotes(s):
17190+ if not s: return ''
17191+ s = s.rstrip()
17192+ if s[0] == "'" and s[-1] == "'": return s[1:-1]
17193+ return s
17194+
17195+def h_list(lst):
17196+ m = md5()
17197+ m.update(str(lst))
17198+ return m.digest()
17199+
17200+def h_fun(fun):
17201+ try:
17202+ return fun.code
17203+ except AttributeError:
17204+ try:
17205+ h = inspect.getsource(fun)
17206+ except IOError:
17207+ h = "nocode"
17208+ try:
17209+ fun.code = h
17210+ except AttributeError:
17211+ pass
17212+ return h
17213+
17214+def pprint(col, str, label='', sep='\n'):
17215+ "print messages in color"
17216+ sys.stderr.write("%s%s%s %s%s" % (Logs.colors(col), str, Logs.colors.NORMAL, label, sep))
17217+
17218+def check_dir(dir):
17219+ """If a folder doesn't exists, create it."""
17220+ try:
17221+ os.stat(dir)
17222+ except OSError:
17223+ try:
17224+ os.makedirs(dir)
17225+ except OSError, e:
17226+ raise WafError("Cannot create folder '%s' (original error: %s)" % (dir, e))
17227+
17228+def cmd_output(cmd, **kw):
17229+
17230+ silent = False
17231+ if 'silent' in kw:
17232+ silent = kw['silent']
17233+ del(kw['silent'])
17234+
17235+ if 'e' in kw:
17236+ tmp = kw['e']
17237+ del(kw['e'])
17238+ kw['env'] = tmp
17239+
17240+ kw['shell'] = isinstance(cmd, str)
17241+ kw['stdout'] = pproc.PIPE
17242+ if silent:
17243+ kw['stderr'] = pproc.PIPE
17244+
17245+ try:
17246+ p = pproc.Popen(cmd, **kw)
17247+ output = p.communicate()[0]
17248+ except OSError, e:
17249+ raise ValueError(str(e))
17250+
17251+ if p.returncode:
17252+ if not silent:
17253+ msg = "command execution failed: %s -> %r" % (cmd, str(output))
17254+ raise ValueError(msg)
17255+ output = ''
17256+ return output
17257+
17258+reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
17259+def subst_vars(expr, params):
17260+ "substitute ${PREFIX}/bin in /usr/local/bin"
17261+ def repl_var(m):
17262+ if m.group(1):
17263+ return '\\'
17264+ if m.group(2):
17265+ return '$'
17266+ try:
17267+ # environments may contain lists
17268+ return params.get_flat(m.group(3))
17269+ except AttributeError:
17270+ return params[m.group(3)]
17271+ return reg_subst.sub(repl_var, expr)
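As a quick illustration of subst_vars (the values here are made up for the example), both Environment objects and plain dictionaries work, since repl_var falls back to item access when get_flat() is not available:

import Utils
print(Utils.subst_vars('${PREFIX}/bin', {'PREFIX': '/usr/local'}))  # -> /usr/local/bin
print(Utils.subst_vars('$$HOME stays literal', {}))                 # -> $HOME stays literal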
17272+
17273+def unversioned_sys_platform_to_binary_format(unversioned_sys_platform):
17274+ "infers the binary format from the unversioned_sys_platform name."
17275+
17276+ if unversioned_sys_platform in ('linux', 'freebsd', 'netbsd', 'openbsd', 'sunos', 'gnu'):
17277+ return 'elf'
17278+ elif unversioned_sys_platform == 'darwin':
17279+ return 'mac-o'
17280+ elif unversioned_sys_platform in ('win32', 'cygwin', 'uwin', 'msys'):
17281+ return 'pe'
17282+ # TODO we assume all other operating systems are elf, which is not true.
17283+ # we may set this to 'unknown' and have ccroot and other tools handle the case "gracefully" (whatever that means).
17284+ return 'elf'
17285+
17286+def unversioned_sys_platform():
17287+ """returns an unversioned name from sys.platform.
17288+ sys.platform is not very well defined and depends directly on the python source tree.
17289+ The version appended to the names is unreliable as it's taken from the build environment at the time python was built,
17290+ i.e., it's possible to get freebsd7 on a freebsd8 system.
17291+ So we remove the version from the name, except for special cases where the os has a stupid name like os2 or win32.
17292+ Some possible values of sys.platform are, amongst others:
17293+ aix3 aix4 atheos beos5 darwin freebsd2 freebsd3 freebsd4 freebsd5 freebsd6 freebsd7
17294+ generic gnu0 irix5 irix6 linux2 mac netbsd1 next3 os2emx riscos sunos5 unixware7
17295+ Investigating the python source tree may reveal more values.
17296+ """
17297+ s = sys.platform
17298+ if s == 'java':
17299+ # The real OS is hidden under the JVM.
17300+ from java.lang import System
17301+ s = System.getProperty('os.name')
17302+ # see http://lopica.sourceforge.net/os.html for a list of possible values
17303+ if s == 'Mac OS X':
17304+ return 'darwin'
17305+ elif s.startswith('Windows '):
17306+ return 'win32'
17307+ elif s == 'OS/2':
17308+ return 'os2'
17309+ elif s == 'HP-UX':
17310+ return 'hpux'
17311+ elif s in ('SunOS', 'Solaris'):
17312+ return 'sunos'
17313+ else: s = s.lower()
17314+ if s == 'win32' or s.endswith('os2') and s != 'sunos2': return s
17315+ return re.split('\d+$', s)[0]
17316+
17317+#@deprecated('use unversioned_sys_platform instead')
17318+def detect_platform():
17319+ """this function has been in the Utils module for some time.
17320+ It's hard to guess what people have used it for.
17321+ It seems its goal is to return an unversioned sys.platform, but it's not handling all platforms.
17322+ For example, the version is not removed on freebsd and netbsd, amongst others.
17323+ """
17324+ s = sys.platform
17325+
17326+ # known POSIX
17327+ for x in 'cygwin linux irix sunos hpux aix darwin gnu'.split():
17328+ # sys.platform may be linux2
17329+ if s.find(x) >= 0:
17330+ return x
17331+
17332+ # unknown POSIX
17333+ if os.name in 'posix java os2'.split():
17334+ return os.name
17335+
17336+ return s
17337+
17338+def load_tool(tool, tooldir=None):
17339+ '''
17340+ load_tool: import a Python module, optionally using several directories.
17341+ @param tool [string]: name of tool to import.
17342+ @param tooldir [list]: directories to look for the tool.
17343+ @return: the loaded module.
17344+
17345+ Warning: this function is not thread-safe: it plays with sys.path,
17346+ so it must run in sequence.
17347+ '''
17348+ if tooldir:
17349+ assert isinstance(tooldir, list)
17350+ sys.path = tooldir + sys.path
17351+ else:
17352+ tooldir = []
17353+ try:
17354+ return __import__(tool)
17355+ finally:
17356+ for dt in tooldir:
17357+ sys.path.remove(dt)
17358+
17359+def readf(fname, m='r'):
17360+ "get the contents of a file, it is not used anywhere for the moment"
17361+ f = open(fname, m)
17362+ try:
17363+ txt = f.read()
17364+ finally:
17365+ f.close()
17366+ return txt
17367+
17368+def nada(*k, **kw):
17369+ """A function that does nothing"""
17370+ pass
17371+
17372+def diff_path(top, subdir):
17373+ """difference between two absolute paths"""
17374+ top = os.path.normpath(top).replace('\\', '/').split('/')
17375+ subdir = os.path.normpath(subdir).replace('\\', '/').split('/')
17376+ if len(top) == len(subdir): return ''
17377+ diff = subdir[len(top) - len(subdir):]
17378+ return os.path.join(*diff)
17379+
17380+class Context(object):
17381+ """A base class for commands to be executed from Waf scripts"""
17382+
17383+ def set_curdir(self, dir):
17384+ self.curdir_ = dir
17385+
17386+ def get_curdir(self):
17387+ try:
17388+ return self.curdir_
17389+ except AttributeError:
17390+ self.curdir_ = os.getcwd()
17391+ return self.get_curdir()
17392+
17393+ curdir = property(get_curdir, set_curdir)
17394+
17395+ def recurse(self, dirs, name=''):
17396+ """The function for calling scripts from folders, it tries to call wscript + function_name
17397+ and if that file does not exist, it will call the method 'function_name' from a file named wscript
17398+ the dirs can be a list of folders or a string containing space-separated folder paths
17399+ """
17400+ if not name:
17401+ name = inspect.stack()[1][3]
17402+
17403+ if isinstance(dirs, str):
17404+ dirs = to_list(dirs)
17405+
17406+ for x in dirs:
17407+ if os.path.isabs(x):
17408+ nexdir = x
17409+ else:
17410+ nexdir = os.path.join(self.curdir, x)
17411+
17412+ base = os.path.join(nexdir, WSCRIPT_FILE)
17413+ file_path = base + '_' + name
17414+
17415+ try:
17416+ txt = readf(file_path, m='rU')
17417+ except (OSError, IOError):
17418+ try:
17419+ module = load_module(base)
17420+ except OSError:
17421+ raise WscriptError('No such script %s' % base)
17422+
17423+ try:
17424+ f = module.__dict__[name]
17425+ except KeyError:
17426+ raise WscriptError('No function %s defined in %s' % (name, base))
17427+
17428+ if getattr(self.__class__, 'pre_recurse', None):
17429+ self.pre_recurse(f, base, nexdir)
17430+ old = self.curdir
17431+ self.curdir = nexdir
17432+ try:
17433+ f(self)
17434+ finally:
17435+ self.curdir = old
17436+ if getattr(self.__class__, 'post_recurse', None):
17437+ self.post_recurse(module, base, nexdir)
17438+ else:
17439+ dc = {'ctx': self}
17440+ if getattr(self.__class__, 'pre_recurse', None):
17441+ dc = self.pre_recurse(txt, file_path, nexdir)
17442+ old = self.curdir
17443+ self.curdir = nexdir
17444+ try:
17445+ try:
17446+ exec(compile(txt, file_path, 'exec'), dc)
17447+ except Exception:
17448+ exc_type, exc_value, tb = sys.exc_info()
17449+ raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), base)
17450+ finally:
17451+ self.curdir = old
17452+ if getattr(self.__class__, 'post_recurse', None):
17453+ self.post_recurse(txt, file_path, nexdir)
17454+
17455+if is_win32:
17456+ old = shutil.copy2
17457+ def copy2(src, dst):
17458+ old(src, dst)
17459+ shutil.copystat(src, src)
17460+ setattr(shutil, 'copy2', copy2)
17461+
17462+def zip_folder(dir, zip_file_name, prefix):
17463+ """
17464+ prefix represents the app to add in the archive
17465+ """
17466+ import zipfile
17467+ zip = zipfile.ZipFile(zip_file_name, 'w', compression=zipfile.ZIP_DEFLATED)
17468+ base = os.path.abspath(dir)
17469+
17470+ if prefix:
17471+ if prefix[-1] != os.sep:
17472+ prefix += os.sep
17473+
17474+ n = len(base)
17475+ for root, dirs, files in os.walk(base):
17476+ for f in files:
17477+ archive_name = prefix + root[n:] + os.sep + f
17478+ zip.write(root + os.sep + f, archive_name, zipfile.ZIP_DEFLATED)
17479+ zip.close()
17480+
17481+def get_elapsed_time(start):
17482+ "Format a time delta (datetime.timedelta) using the format DdHhMmS.MSs"
17483+ delta = datetime.datetime.now() - start
17484+ # cast to int necessary for python 3.0
17485+ days = int(delta.days)
17486+ hours = int(delta.seconds / 3600)
17487+ minutes = int((delta.seconds - hours * 3600) / 60)
17488+ seconds = delta.seconds - hours * 3600 - minutes * 60 \
17489+ + float(delta.microseconds) / 1000 / 1000
17490+ result = ''
17491+ if days:
17492+ result += '%dd' % days
17493+ if days or hours:
17494+ result += '%dh' % hours
17495+ if days or hours or minutes:
17496+ result += '%dm' % minutes
17497+ return '%s%.3fs' % (result, seconds)
17498+
17499+if os.name == 'java':
17500+ # For Jython (they should really fix the inconsistency)
17501+ try:
17502+ gc.disable()
17503+ gc.enable()
17504+ except NotImplementedError:
17505+ gc.disable = gc.enable
17506+
17507+def run_once(fun):
17508+ """
17509+ decorator: make a function cache its results; use like this:
17510+
17511+ @run_once
17512+ def foo(k):
17513+ return 345*2343
17514+ """
17515+ cache = {}
17516+ def wrap(k):
17517+ try:
17518+ return cache[k]
17519+ except KeyError:
17520+ ret = fun(k)
17521+ cache[k] = ret
17522+ return ret
17523+ wrap.__cache__ = cache
17524+ return wrap
17525+
17526diff --git a/buildtools/wafadmin/__init__.py b/buildtools/wafadmin/__init__.py
17527new file mode 100644
17528index 0000000..01273cf
17529--- /dev/null
17530+++ b/buildtools/wafadmin/__init__.py
17531@@ -0,0 +1,3 @@
17532+#!/usr/bin/env python
17533+# encoding: utf-8
17534+# Thomas Nagy, 2005 (ita)
17535diff --git a/buildtools/wafadmin/ansiterm.py b/buildtools/wafadmin/ansiterm.py
17536new file mode 100644
17537index 0000000..720b79c
17538--- /dev/null
17539+++ b/buildtools/wafadmin/ansiterm.py
17540@@ -0,0 +1,236 @@
17541+import sys, os
17542+try:
17543+ if (not sys.stderr.isatty()) or (not sys.stdout.isatty()):
17544+ raise ValueError('not a tty')
17545+
17546+ from ctypes import *
17547+
17548+ class COORD(Structure):
17549+ _fields_ = [("X", c_short), ("Y", c_short)]
17550+
17551+ class SMALL_RECT(Structure):
17552+ _fields_ = [("Left", c_short), ("Top", c_short), ("Right", c_short), ("Bottom", c_short)]
17553+
17554+ class CONSOLE_SCREEN_BUFFER_INFO(Structure):
17555+ _fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_short), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)]
17556+
17557+ class CONSOLE_CURSOR_INFO(Structure):
17558+ _fields_ = [('dwSize',c_ulong), ('bVisible', c_int)]
17559+
17560+ sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
17561+ csinfo = CONSOLE_CURSOR_INFO()
17562+ hconsole = windll.kernel32.GetStdHandle(-11)
17563+ windll.kernel32.GetConsoleScreenBufferInfo(hconsole, byref(sbinfo))
17564+ if sbinfo.Size.X < 10 or sbinfo.Size.Y < 10: raise Exception('small console')
17565+ windll.kernel32.GetConsoleCursorInfo(hconsole, byref(csinfo))
17566+except Exception:
17567+ pass
17568+else:
17569+ import re, threading
17570+
17571+ to_int = lambda number, default: number and int(number) or default
17572+ wlock = threading.Lock()
17573+
17574+ STD_OUTPUT_HANDLE = -11
17575+ STD_ERROR_HANDLE = -12
17576+
17577+ class AnsiTerm(object):
17578+ def __init__(self):
17579+ self.hconsole = windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
17580+ self.cursor_history = []
17581+ self.orig_sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
17582+ self.orig_csinfo = CONSOLE_CURSOR_INFO()
17583+ windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self.orig_sbinfo))
17584+ windll.kernel32.GetConsoleCursorInfo(hconsole, byref(self.orig_csinfo))
17585+
17586+
17587+ def screen_buffer_info(self):
17588+ sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
17589+ windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(sbinfo))
17590+ return sbinfo
17591+
17592+ def clear_line(self, param):
17593+ mode = param and int(param) or 0
17594+ sbinfo = self.screen_buffer_info()
17595+ if mode == 1: # Clear from beginning of line to cursor position
17596+ line_start = COORD(0, sbinfo.CursorPosition.Y)
17597+ line_length = sbinfo.Size.X
17598+ elif mode == 2: # Clear entire line
17599+ line_start = COORD(sbinfo.CursorPosition.X, sbinfo.CursorPosition.Y)
17600+ line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
17601+ else: # Clear from cursor position to end of line
17602+ line_start = sbinfo.CursorPosition
17603+ line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
17604+ chars_written = c_int()
17605+ windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_char(' '), line_length, line_start, byref(chars_written))
17606+ windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))
17607+
17608+ def clear_screen(self, param):
17609+ mode = to_int(param, 0)
17610+ sbinfo = self.screen_buffer_info()
17611+ if mode == 1: # Clear from beginning of screen to cursor position
17612+ clear_start = COORD(0, 0)
17613+ clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y
17614+ elif mode == 2: # Clear entire screen and return cursor to home
17615+ clear_start = COORD(0, 0)
17616+ clear_length = sbinfo.Size.X * sbinfo.Size.Y
17617+ windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
17618+ else: # Clear from cursor position to end of screen
17619+ clear_start = sbinfo.CursorPosition
17620+ clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
17621+ chars_written = c_int()
17622+ windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_char(' '), clear_length, clear_start, byref(chars_written))
17623+ windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
17624+
17625+ def push_cursor(self, param):
17626+ sbinfo = self.screen_buffer_info()
17627+ self.cursor_history.push(sbinfo.CursorPosition)
17628+
17629+ def pop_cursor(self, param):
17630+ if self.cursor_history:
17631+ old_pos = self.cursor_history.pop()
17632+ windll.kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)
17633+
17634+ def set_cursor(self, param):
17635+ x, sep, y = param.partition(';')
17636+ x = to_int(x, 1) - 1
17637+ y = to_int(y, 1) - 1
17638+ sbinfo = self.screen_buffer_info()
17639+ new_pos = COORD(
17640+ min(max(0, x), sbinfo.Size.X),
17641+ min(max(0, y), sbinfo.Size.Y)
17642+ )
17643+ windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
17644+
17645+ def set_column(self, param):
17646+ x = to_int(param, 1) - 1
17647+ sbinfo = self.screen_buffer_info()
17648+ new_pos = COORD(
17649+ min(max(0, x), sbinfo.Size.X),
17650+ sbinfo.CursorPosition.Y
17651+ )
17652+ windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
17653+
17654+ def move_cursor(self, x_offset=0, y_offset=0):
17655+ sbinfo = self.screen_buffer_info()
17656+ new_pos = COORD(
17657+ min(max(0, sbinfo.CursorPosition.X + x_offset), sbinfo.Size.X),
17658+ min(max(0, sbinfo.CursorPosition.Y + y_offset), sbinfo.Size.Y)
17659+ )
17660+ windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
17661+
17662+ def move_up(self, param):
17663+ self.move_cursor(y_offset = -to_int(param, 1))
17664+
17665+ def move_down(self, param):
17666+ self.move_cursor(y_offset = to_int(param, 1))
17667+
17668+ def move_left(self, param):
17669+ self.move_cursor(x_offset = -to_int(param, 1))
17670+
17671+ def move_right(self, param):
17672+ self.move_cursor(x_offset = to_int(param, 1))
17673+
17674+ def next_line(self, param):
17675+ sbinfo = self.screen_buffer_info()
17676+ self.move_cursor(
17677+ x_offset = -sbinfo.CursorPosition.X,
17678+ y_offset = to_int(param, 1)
17679+ )
17680+
17681+ def prev_line(self, param):
17682+ sbinfo = self.screen_buffer_info()
17683+ self.move_cursor(
17684+ x_offset = -sbinfo.CursorPosition.X,
17685+ y_offset = -to_int(param, 1)
17686+ )
17687+
17688+ escape_to_color = { (0, 30): 0x0, #black
17689+ (0, 31): 0x4, #red
17690+ (0, 32): 0x2, #green
17691+ (0, 33): 0x4+0x2, #dark yellow
17692+ (0, 34): 0x1, #blue
17693+ (0, 35): 0x1+0x4, #purple
17694+ (0, 36): 0x2+0x4, #cyan
17695+ (0, 37): 0x1+0x2+0x4, #grey
17696+ (1, 30): 0x1+0x2+0x4, #dark gray
17697+ (1, 31): 0x4+0x8, #red
17698+ (1, 32): 0x2+0x8, #light green
17699+ (1, 33): 0x4+0x2+0x8, #yellow
17700+ (1, 34): 0x1+0x8, #light blue
17701+ (1, 35): 0x1+0x4+0x8, #light purple
17702+ (1, 36): 0x1+0x2+0x8, #light cyan
17703+ (1, 37): 0x1+0x2+0x4+0x8, #white
17704+ }
17705+
17706+ def set_color(self, param):
17707+ cols = param.split(';')
17708+ attr = self.orig_sbinfo.Attributes
17709+ for c in cols:
17710+ c = to_int(c, 0)
17711+ if c in range(30,38):
17712+ attr = (attr & 0xf0) | (self.escape_to_color.get((0,c), 0x7))
17713+ elif c in range(40,48):
17714+ attr = (attr & 0x0f) | (self.escape_to_color.get((0,c), 0x7) << 8)
17715+ elif c in range(90,98):
17716+ attr = (attr & 0xf0) | (self.escape_to_color.get((1,c-60), 0x7))
17717+ elif c in range(100,108):
17718+ attr = (attr & 0x0f) | (self.escape_to_color.get((1,c-60), 0x7) << 8)
17719+ elif c == 1:
17720+ attr |= 0x08
17721+ windll.kernel32.SetConsoleTextAttribute(self.hconsole, attr)
17722+
17723+ def show_cursor(self,param):
17724+ csinfo.bVisible = 1
17725+ windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
17726+
17727+ def hide_cursor(self,param):
17728+ csinfo.bVisible = 0
17729+ windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
17730+
17731+ ansi_command_table = {
17732+ 'A': move_up,
17733+ 'B': move_down,
17734+ 'C': move_right,
17735+ 'D': move_left,
17736+ 'E': next_line,
17737+ 'F': prev_line,
17738+ 'G': set_column,
17739+ 'H': set_cursor,
17740+ 'f': set_cursor,
17741+ 'J': clear_screen,
17742+ 'K': clear_line,
17743+ 'h': show_cursor,
17744+ 'l': hide_cursor,
17745+ 'm': set_color,
17746+ 's': push_cursor,
17747+ 'u': pop_cursor,
17748+ }
17749+ # Match either the escape sequence or text not containing escape sequence
17750+ ansi_tokans = re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
17751+ def write(self, text):
17752+ try:
17753+ wlock.acquire()
17754+ for param, cmd, txt in self.ansi_tokans.findall(text):
17755+ if cmd:
17756+ cmd_func = self.ansi_command_table.get(cmd)
17757+ if cmd_func:
17758+ cmd_func(self, param)
17759+ else:
17760+ chars_written = c_int()
17761+ if isinstance(txt, unicode):
17762+ windll.kernel32.WriteConsoleW(self.hconsole, txt, len(txt), byref(chars_written), None)
17763+ else:
17764+ windll.kernel32.WriteConsoleA(self.hconsole, txt, len(txt), byref(chars_written), None)
17765+ finally:
17766+ wlock.release()
17767+
17768+ def flush(self):
17769+ pass
17770+
17771+ def isatty(self):
17772+ return True
17773+
17774+ sys.stderr = sys.stdout = AnsiTerm()
17775+ os.environ['TERM'] = 'vt100'
17776+
17777diff --git a/buildtools/wafadmin/pproc.py b/buildtools/wafadmin/pproc.py
17778new file mode 100644
17779index 0000000..cb15178
17780--- /dev/null
17781+++ b/buildtools/wafadmin/pproc.py
17782@@ -0,0 +1,620 @@
17783+# borrowed from python 2.5.2c1
17784+# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
17785+# Licensed to PSF under a Contributor Agreement.
17786+
17787+import sys
17788+mswindows = (sys.platform == "win32")
17789+
17790+import os
17791+import types
17792+import traceback
17793+import gc
17794+
17795+class CalledProcessError(Exception):
17796+ def __init__(self, returncode, cmd):
17797+ self.returncode = returncode
17798+ self.cmd = cmd
17799+ def __str__(self):
17800+ return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
17801+
17802+if mswindows:
17803+ import threading
17804+ import msvcrt
17805+ if 0:
17806+ import pywintypes
17807+ from win32api import GetStdHandle, STD_INPUT_HANDLE, \
17808+ STD_OUTPUT_HANDLE, STD_ERROR_HANDLE
17809+ from win32api import GetCurrentProcess, DuplicateHandle, \
17810+ GetModuleFileName, GetVersion
17811+ from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE
17812+ from win32pipe import CreatePipe
17813+ from win32process import CreateProcess, STARTUPINFO, \
17814+ GetExitCodeProcess, STARTF_USESTDHANDLES, \
17815+ STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE
17816+ from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0
17817+ else:
17818+ from _subprocess import *
17819+ class STARTUPINFO:
17820+ dwFlags = 0
17821+ hStdInput = None
17822+ hStdOutput = None
17823+ hStdError = None
17824+ wShowWindow = 0
17825+ class pywintypes:
17826+ error = IOError
17827+else:
17828+ import select
17829+ import errno
17830+ import fcntl
17831+ import pickle
17832+
17833+__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"]
17834+
17835+try:
17836+ MAXFD = os.sysconf("SC_OPEN_MAX")
17837+except:
17838+ MAXFD = 256
17839+
17840+try:
17841+ False
17842+except NameError:
17843+ False = 0
17844+ True = 1
17845+
17846+_active = []
17847+
17848+def _cleanup():
17849+ for inst in _active[:]:
17850+ if inst.poll(_deadstate=sys.maxint) >= 0:
17851+ try:
17852+ _active.remove(inst)
17853+ except ValueError:
17854+ pass
17855+
17856+PIPE = -1
17857+STDOUT = -2
17858+
17859+
17860+def call(*popenargs, **kwargs):
17861+ return Popen(*popenargs, **kwargs).wait()
17862+
17863+def check_call(*popenargs, **kwargs):
17864+ retcode = call(*popenargs, **kwargs)
17865+ cmd = kwargs.get("args")
17866+ if cmd is None:
17867+ cmd = popenargs[0]
17868+ if retcode:
17869+ raise CalledProcessError(retcode, cmd)
17870+ return retcode
17871+
17872+
17873+def list2cmdline(seq):
17874+ result = []
17875+ needquote = False
17876+ for arg in seq:
17877+ bs_buf = []
17878+
17879+ if result:
17880+ result.append(' ')
17881+
17882+ needquote = (" " in arg) or ("\t" in arg) or arg == ""
17883+ if needquote:
17884+ result.append('"')
17885+
17886+ for c in arg:
17887+ if c == '\\':
17888+ bs_buf.append(c)
17889+ elif c == '"':
17890+ result.append('\\' * len(bs_buf)*2)
17891+ bs_buf = []
17892+ result.append('\\"')
17893+ else:
17894+ if bs_buf:
17895+ result.extend(bs_buf)
17896+ bs_buf = []
17897+ result.append(c)
17898+
17899+ if bs_buf:
17900+ result.extend(bs_buf)
17901+
17902+ if needquote:
17903+ result.extend(bs_buf)
17904+ result.append('"')
17905+
17906+ return ''.join(result)
17907+
17908+class Popen(object):
17909+ def __init__(self, args, bufsize=0, executable=None,
17910+ stdin=None, stdout=None, stderr=None,
17911+ preexec_fn=None, close_fds=False, shell=False,
17912+ cwd=None, env=None, universal_newlines=False,
17913+ startupinfo=None, creationflags=0):
17914+ _cleanup()
17915+
17916+ self._child_created = False
17917+ if not isinstance(bufsize, (int, long)):
17918+ raise TypeError("bufsize must be an integer")
17919+
17920+ if mswindows:
17921+ if preexec_fn is not None:
17922+ raise ValueError("preexec_fn is not supported on Windows platforms")
17923+ if close_fds:
17924+ raise ValueError("close_fds is not supported on Windows platforms")
17925+ else:
17926+ if startupinfo is not None:
17927+ raise ValueError("startupinfo is only supported on Windows platforms")
17928+ if creationflags != 0:
17929+ raise ValueError("creationflags is only supported on Windows platforms")
17930+
17931+ self.stdin = None
17932+ self.stdout = None
17933+ self.stderr = None
17934+ self.pid = None
17935+ self.returncode = None
17936+ self.universal_newlines = universal_newlines
17937+
17938+ (p2cread, p2cwrite,
17939+ c2pread, c2pwrite,
17940+ errread, errwrite) = self._get_handles(stdin, stdout, stderr)
17941+
17942+ self._execute_child(args, executable, preexec_fn, close_fds,
17943+ cwd, env, universal_newlines,
17944+ startupinfo, creationflags, shell,
17945+ p2cread, p2cwrite,
17946+ c2pread, c2pwrite,
17947+ errread, errwrite)
17948+
17949+ if mswindows:
17950+ if stdin is None and p2cwrite is not None:
17951+ os.close(p2cwrite)
17952+ p2cwrite = None
17953+ if stdout is None and c2pread is not None:
17954+ os.close(c2pread)
17955+ c2pread = None
17956+ if stderr is None and errread is not None:
17957+ os.close(errread)
17958+ errread = None
17959+
17960+ if p2cwrite:
17961+ self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
17962+ if c2pread:
17963+ if universal_newlines:
17964+ self.stdout = os.fdopen(c2pread, 'rU', bufsize)
17965+ else:
17966+ self.stdout = os.fdopen(c2pread, 'rb', bufsize)
17967+ if errread:
17968+ if universal_newlines:
17969+ self.stderr = os.fdopen(errread, 'rU', bufsize)
17970+ else:
17971+ self.stderr = os.fdopen(errread, 'rb', bufsize)
17972+
17973+
17974+ def _translate_newlines(self, data):
17975+ data = data.replace("\r\n", "\n")
17976+ data = data.replace("\r", "\n")
17977+ return data
17978+
17979+
17980+ def __del__(self, sys=sys):
17981+ if not self._child_created:
17982+ return
17983+ self.poll(_deadstate=sys.maxint)
17984+ if self.returncode is None and _active is not None:
17985+ _active.append(self)
17986+
17987+
17988+ def communicate(self, input=None):
17989+ if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
17990+ stdout = None
17991+ stderr = None
17992+ if self.stdin:
17993+ if input:
17994+ self.stdin.write(input)
17995+ self.stdin.close()
17996+ elif self.stdout:
17997+ stdout = self.stdout.read()
17998+ elif self.stderr:
17999+ stderr = self.stderr.read()
18000+ self.wait()
18001+ return (stdout, stderr)
18002+
18003+ return self._communicate(input)
18004+
18005+
18006+ if mswindows:
18007+ def _get_handles(self, stdin, stdout, stderr):
18008+ if stdin is None and stdout is None and stderr is None:
18009+ return (None, None, None, None, None, None)
18010+
18011+ p2cread, p2cwrite = None, None
18012+ c2pread, c2pwrite = None, None
18013+ errread, errwrite = None, None
18014+
18015+ if stdin is None:
18016+ p2cread = GetStdHandle(STD_INPUT_HANDLE)
18017+ if p2cread is not None:
18018+ pass
18019+ elif stdin is None or stdin == PIPE:
18020+ p2cread, p2cwrite = CreatePipe(None, 0)
18021+ p2cwrite = p2cwrite.Detach()
18022+ p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0)
18023+ elif isinstance(stdin, int):
18024+ p2cread = msvcrt.get_osfhandle(stdin)
18025+ else:
18026+ p2cread = msvcrt.get_osfhandle(stdin.fileno())
18027+ p2cread = self._make_inheritable(p2cread)
18028+
18029+ if stdout is None:
18030+ c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE)
18031+ if c2pwrite is not None:
18032+ pass
18033+ elif stdout is None or stdout == PIPE:
18034+ c2pread, c2pwrite = CreatePipe(None, 0)
18035+ c2pread = c2pread.Detach()
18036+ c2pread = msvcrt.open_osfhandle(c2pread, 0)
18037+ elif isinstance(stdout, int):
18038+ c2pwrite = msvcrt.get_osfhandle(stdout)
18039+ else:
18040+ c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
18041+ c2pwrite = self._make_inheritable(c2pwrite)
18042+
18043+ if stderr is None:
18044+ errwrite = GetStdHandle(STD_ERROR_HANDLE)
18045+ if errwrite is not None:
18046+ pass
18047+ elif stderr is None or stderr == PIPE:
18048+ errread, errwrite = CreatePipe(None, 0)
18049+ errread = errread.Detach()
18050+ errread = msvcrt.open_osfhandle(errread, 0)
18051+ elif stderr == STDOUT:
18052+ errwrite = c2pwrite
18053+ elif isinstance(stderr, int):
18054+ errwrite = msvcrt.get_osfhandle(stderr)
18055+ else:
18056+ errwrite = msvcrt.get_osfhandle(stderr.fileno())
18057+ errwrite = self._make_inheritable(errwrite)
18058+
18059+ return (p2cread, p2cwrite,
18060+ c2pread, c2pwrite,
18061+ errread, errwrite)
18062+ def _make_inheritable(self, handle):
18063+ return DuplicateHandle(GetCurrentProcess(), handle, GetCurrentProcess(), 0, 1, DUPLICATE_SAME_ACCESS)
18064+
18065+ def _find_w9xpopen(self):
18066+ w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), "w9xpopen.exe")
18067+ if not os.path.exists(w9xpopen):
18068+ w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), "w9xpopen.exe")
18069+ if not os.path.exists(w9xpopen):
18070+ raise RuntimeError("Cannot locate w9xpopen.exe, which is needed for Popen to work with your shell or platform.")
18071+ return w9xpopen
18072+
18073+ def _execute_child(self, args, executable, preexec_fn, close_fds,
18074+ cwd, env, universal_newlines,
18075+ startupinfo, creationflags, shell,
18076+ p2cread, p2cwrite,
18077+ c2pread, c2pwrite,
18078+ errread, errwrite):
18079+
18080+ if not isinstance(args, types.StringTypes):
18081+ args = list2cmdline(args)
18082+
18083+ if startupinfo is None:
18084+ startupinfo = STARTUPINFO()
18085+ if None not in (p2cread, c2pwrite, errwrite):
18086+ startupinfo.dwFlags |= STARTF_USESTDHANDLES
18087+ startupinfo.hStdInput = p2cread
18088+ startupinfo.hStdOutput = c2pwrite
18089+ startupinfo.hStdError = errwrite
18090+
18091+ if shell:
18092+ startupinfo.dwFlags |= STARTF_USESHOWWINDOW
18093+ startupinfo.wShowWindow = SW_HIDE
18094+ comspec = os.environ.get("COMSPEC", "cmd.exe")
18095+ args = comspec + " /c " + args
18096+ if (GetVersion() >= 0x80000000L or
18097+ os.path.basename(comspec).lower() == "command.com"):
18098+ w9xpopen = self._find_w9xpopen()
18099+ args = '"%s" %s' % (w9xpopen, args)
18100+ creationflags |= CREATE_NEW_CONSOLE
18101+
18102+ try:
18103+ hp, ht, pid, tid = CreateProcess(executable, args, None, None, 1, creationflags, env, cwd, startupinfo)
18104+ except pywintypes.error, e:
18105+ raise WindowsError(*e.args)
18106+
18107+ self._child_created = True
18108+ self._handle = hp
18109+ self.pid = pid
18110+ ht.Close()
18111+
18112+ if p2cread is not None:
18113+ p2cread.Close()
18114+ if c2pwrite is not None:
18115+ c2pwrite.Close()
18116+ if errwrite is not None:
18117+ errwrite.Close()
18118+
18119+
18120+ def poll(self, _deadstate=None):
18121+ if self.returncode is None:
18122+ if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
18123+ self.returncode = GetExitCodeProcess(self._handle)
18124+ return self.returncode
18125+
18126+
18127+ def wait(self):
18128+ if self.returncode is None:
18129+ obj = WaitForSingleObject(self._handle, INFINITE)
18130+ self.returncode = GetExitCodeProcess(self._handle)
18131+ return self.returncode
18132+
18133+ def _readerthread(self, fh, buffer):
18134+ buffer.append(fh.read())
18135+
18136+ def _communicate(self, input):
18137+ stdout = None
18138+ stderr = None
18139+
18140+ if self.stdout:
18141+ stdout = []
18142+ stdout_thread = threading.Thread(target=self._readerthread, args=(self.stdout, stdout))
18143+ stdout_thread.setDaemon(True)
18144+ stdout_thread.start()
18145+ if self.stderr:
18146+ stderr = []
18147+ stderr_thread = threading.Thread(target=self._readerthread, args=(self.stderr, stderr))
18148+ stderr_thread.setDaemon(True)
18149+ stderr_thread.start()
18150+
18151+ if self.stdin:
18152+ if input is not None:
18153+ self.stdin.write(input)
18154+ self.stdin.close()
18155+
18156+ if self.stdout:
18157+ stdout_thread.join()
18158+ if self.stderr:
18159+ stderr_thread.join()
18160+
18161+ if stdout is not None:
18162+ stdout = stdout[0]
18163+ if stderr is not None:
18164+ stderr = stderr[0]
18165+
18166+ if self.universal_newlines and hasattr(file, 'newlines'):
18167+ if stdout:
18168+ stdout = self._translate_newlines(stdout)
18169+ if stderr:
18170+ stderr = self._translate_newlines(stderr)
18171+
18172+ self.wait()
18173+ return (stdout, stderr)
18174+
18175+ else:
18176+ def _get_handles(self, stdin, stdout, stderr):
18177+ p2cread, p2cwrite = None, None
18178+ c2pread, c2pwrite = None, None
18179+ errread, errwrite = None, None
18180+
18181+ if stdin is None:
18182+ pass
18183+ elif stdin == PIPE:
18184+ p2cread, p2cwrite = os.pipe()
18185+ elif isinstance(stdin, int):
18186+ p2cread = stdin
18187+ else:
18188+ p2cread = stdin.fileno()
18189+
18190+ if stdout is None:
18191+ pass
18192+ elif stdout == PIPE:
18193+ c2pread, c2pwrite = os.pipe()
18194+ elif isinstance(stdout, int):
18195+ c2pwrite = stdout
18196+ else:
18197+ c2pwrite = stdout.fileno()
18198+
18199+ if stderr is None:
18200+ pass
18201+ elif stderr == PIPE:
18202+ errread, errwrite = os.pipe()
18203+ elif stderr == STDOUT:
18204+ errwrite = c2pwrite
18205+ elif isinstance(stderr, int):
18206+ errwrite = stderr
18207+ else:
18208+ errwrite = stderr.fileno()
18209+
18210+ return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite)
18211+
18212+ def _set_cloexec_flag(self, fd):
18213+ try:
18214+ cloexec_flag = fcntl.FD_CLOEXEC
18215+ except AttributeError:
18216+ cloexec_flag = 1
18217+
18218+ old = fcntl.fcntl(fd, fcntl.F_GETFD)
18219+ fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag)
18220+
18221+ def _close_fds(self, but):
18222+ for i in xrange(3, MAXFD):
18223+ if i == but:
18224+ continue
18225+ try:
18226+ os.close(i)
18227+ except:
18228+ pass
18229+
18230+ def _execute_child(self, args, executable, preexec_fn, close_fds,
18231+ cwd, env, universal_newlines, startupinfo, creationflags, shell,
18232+ p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite):
18233+
18234+ if isinstance(args, types.StringTypes):
18235+ args = [args]
18236+ else:
18237+ args = list(args)
18238+
18239+ if shell:
18240+ args = ["/bin/sh", "-c"] + args
18241+
18242+ if executable is None:
18243+ executable = args[0]
18244+
18245+ errpipe_read, errpipe_write = os.pipe()
18246+ self._set_cloexec_flag(errpipe_write)
18247+
18248+ gc_was_enabled = gc.isenabled()
18249+ gc.disable()
18250+ try:
18251+ self.pid = os.fork()
18252+ except:
18253+ if gc_was_enabled:
18254+ gc.enable()
18255+ raise
18256+ self._child_created = True
18257+ if self.pid == 0:
18258+ try:
18259+ if p2cwrite:
18260+ os.close(p2cwrite)
18261+ if c2pread:
18262+ os.close(c2pread)
18263+ if errread:
18264+ os.close(errread)
18265+ os.close(errpipe_read)
18266+
18267+ if p2cread:
18268+ os.dup2(p2cread, 0)
18269+ if c2pwrite:
18270+ os.dup2(c2pwrite, 1)
18271+ if errwrite:
18272+ os.dup2(errwrite, 2)
18273+
18274+ if p2cread and p2cread not in (0,):
18275+ os.close(p2cread)
18276+ if c2pwrite and c2pwrite not in (p2cread, 1):
18277+ os.close(c2pwrite)
18278+ if errwrite and errwrite not in (p2cread, c2pwrite, 2):
18279+ os.close(errwrite)
18280+
18281+ if close_fds:
18282+ self._close_fds(but=errpipe_write)
18283+
18284+ if cwd is not None:
18285+ os.chdir(cwd)
18286+
18287+ if preexec_fn:
18288+ apply(preexec_fn)
18289+
18290+ if env is None:
18291+ os.execvp(executable, args)
18292+ else:
18293+ os.execvpe(executable, args, env)
18294+
18295+ except:
18296+ exc_type, exc_value, tb = sys.exc_info()
18297+ exc_lines = traceback.format_exception(exc_type, exc_value, tb)
18298+ exc_value.child_traceback = ''.join(exc_lines)
18299+ os.write(errpipe_write, pickle.dumps(exc_value))
18300+
18301+ os._exit(255)
18302+
18303+ if gc_was_enabled:
18304+ gc.enable()
18305+ os.close(errpipe_write)
18306+ if p2cread and p2cwrite:
18307+ os.close(p2cread)
18308+ if c2pwrite and c2pread:
18309+ os.close(c2pwrite)
18310+ if errwrite and errread:
18311+ os.close(errwrite)
18312+
18313+ data = os.read(errpipe_read, 1048576)
18314+ os.close(errpipe_read)
18315+ if data != "":
18316+ os.waitpid(self.pid, 0)
18317+ child_exception = pickle.loads(data)
18318+ raise child_exception
18319+
18320+ def _handle_exitstatus(self, sts):
18321+ if os.WIFSIGNALED(sts):
18322+ self.returncode = -os.WTERMSIG(sts)
18323+ elif os.WIFEXITED(sts):
18324+ self.returncode = os.WEXITSTATUS(sts)
18325+ else:
18326+ raise RuntimeError("Unknown child exit status!")
18327+
18328+ def poll(self, _deadstate=None):
18329+ if self.returncode is None:
18330+ try:
18331+ pid, sts = os.waitpid(self.pid, os.WNOHANG)
18332+ if pid == self.pid:
18333+ self._handle_exitstatus(sts)
18334+ except os.error:
18335+ if _deadstate is not None:
18336+ self.returncode = _deadstate
18337+ return self.returncode
18338+
18339+ def wait(self):
18340+ if self.returncode is None:
18341+ pid, sts = os.waitpid(self.pid, 0)
18342+ self._handle_exitstatus(sts)
18343+ return self.returncode
18344+
18345+ def _communicate(self, input):
18346+ read_set = []
18347+ write_set = []
18348+ stdout = None
18349+ stderr = None
18350+
18351+ if self.stdin:
18352+ self.stdin.flush()
18353+ if input:
18354+ write_set.append(self.stdin)
18355+ else:
18356+ self.stdin.close()
18357+ if self.stdout:
18358+ read_set.append(self.stdout)
18359+ stdout = []
18360+ if self.stderr:
18361+ read_set.append(self.stderr)
18362+ stderr = []
18363+
18364+ input_offset = 0
18365+ while read_set or write_set:
18366+ rlist, wlist, xlist = select.select(read_set, write_set, [])
18367+
18368+ if self.stdin in wlist:
18369+ bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512))
18370+ input_offset += bytes_written
18371+ if input_offset >= len(input):
18372+ self.stdin.close()
18373+ write_set.remove(self.stdin)
18374+
18375+ if self.stdout in rlist:
18376+ data = os.read(self.stdout.fileno(), 1024)
18377+ if data == "":
18378+ self.stdout.close()
18379+ read_set.remove(self.stdout)
18380+ stdout.append(data)
18381+
18382+ if self.stderr in rlist:
18383+ data = os.read(self.stderr.fileno(), 1024)
18384+ if data == "":
18385+ self.stderr.close()
18386+ read_set.remove(self.stderr)
18387+ stderr.append(data)
18388+
18389+ if stdout is not None:
18390+ stdout = ''.join(stdout)
18391+ if stderr is not None:
18392+ stderr = ''.join(stderr)
18393+
18394+ if self.universal_newlines and hasattr(file, 'newlines'):
18395+ if stdout:
18396+ stdout = self._translate_newlines(stdout)
18397+ if stderr:
18398+ stderr = self._translate_newlines(stderr)
18399+
18400+ self.wait()
18401+ return (stdout, stderr)
18402+
18403diff --git a/buildtools/wafadmin/py3kfixes.py b/buildtools/wafadmin/py3kfixes.py
18404new file mode 100644
18405index 0000000..2f3c9c2
18406--- /dev/null
18407+++ b/buildtools/wafadmin/py3kfixes.py
18408@@ -0,0 +1,130 @@
18409+#!/usr/bin/env python
18410+# encoding: utf-8
18411+# Thomas Nagy, 2009 (ita)
18412+
18413+"""
18414+Fixes for py3k go here
18415+"""
18416+
18417+import os
18418+
18419+all_modifs = {}
18420+
18421+def modif(dir, name, fun):
18422+ if name == '*':
18423+ lst = []
18424+ for y in '. Tools 3rdparty'.split():
18425+ for x in os.listdir(os.path.join(dir, y)):
18426+ if x.endswith('.py'):
18427+ lst.append(y + os.sep + x)
18428+ #lst = [y + os.sep + x for x in os.listdir(os.path.join(dir, y)) for y in '. Tools 3rdparty'.split() if x.endswith('.py')]
18429+ for x in lst:
18430+ modif(dir, x, fun)
18431+ return
18432+
18433+ filename = os.path.join(dir, name)
18434+ f = open(filename, 'r')
18435+ txt = f.read()
18436+ f.close()
18437+
18438+ txt = fun(txt)
18439+
18440+ f = open(filename, 'w')
18441+ f.write(txt)
18442+ f.close()
18443+
18444+def subst(filename):
18445+ def do_subst(fun):
18446+ global all_modifs
18447+ try:
18448+ all_modifs[filename] += fun
18449+ except KeyError:
18450+ all_modifs[filename] = [fun]
18451+ return fun
18452+ return do_subst
18453+
18454+@subst('Constants.py')
18455+def r1(code):
18456+ code = code.replace("'iluvcuteoverload'", "b'iluvcuteoverload'")
18457+ code = code.replace("ABI=7", "ABI=37")
18458+ return code
18459+
18460+@subst('Tools/ccroot.py')
18461+def r2(code):
18462+ code = code.replace("p.stdin.write('\\n')", "p.stdin.write(b'\\n')")
18463+ code = code.replace('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")')
18464+ return code
18465+
18466+@subst('Utils.py')
18467+def r3(code):
18468+ code = code.replace("m.update(str(lst))", "m.update(str(lst).encode())")
18469+ code = code.replace('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")')
18470+ return code
18471+
18472+@subst('ansiterm.py')
18473+def r33(code):
18474+ code = code.replace('unicode', 'str')
18475+ return code
18476+
18477+@subst('Task.py')
18478+def r4(code):
18479+ code = code.replace("up(self.__class__.__name__)", "up(self.__class__.__name__.encode())")
18480+ code = code.replace("up(self.env.variant())", "up(self.env.variant().encode())")
18481+ code = code.replace("up(x.parent.abspath())", "up(x.parent.abspath().encode())")
18482+ code = code.replace("up(x.name)", "up(x.name.encode())")
18483+ code = code.replace('class TaskBase(object):\n\t__metaclass__=store_task_type', 'import binascii\n\nclass TaskBase(object, metaclass=store_task_type):')
18484+ code = code.replace('keys=self.cstr_groups.keys()', 'keys=list(self.cstr_groups.keys())')
18485+ code = code.replace("sig.encode('hex')", 'binascii.hexlify(sig)')
18486+ code = code.replace("os.path.join(Options.cache_global,ssig)", "os.path.join(Options.cache_global,ssig.decode())")
18487+ return code
18488+
18489+@subst('Build.py')
18490+def r5(code):
18491+ code = code.replace("cPickle.dump(data,file,-1)", "cPickle.dump(data,file)")
18492+ code = code.replace('for node in src_dir_node.childs.values():', 'for node in list(src_dir_node.childs.values()):')
18493+ return code
18494+
18495+@subst('*')
18496+def r6(code):
18497+ code = code.replace('xrange', 'range')
18498+ code = code.replace('iteritems', 'items')
18499+ code = code.replace('maxint', 'maxsize')
18500+ code = code.replace('iterkeys', 'keys')
18501+ code = code.replace('Error,e:', 'Error as e:')
18502+ code = code.replace('Exception,e:', 'Exception as e:')
18503+ return code
18504+
18505+@subst('TaskGen.py')
18506+def r7(code):
18507+ code = code.replace('class task_gen(object):\n\t__metaclass__=register_obj', 'class task_gen(object, metaclass=register_obj):')
18508+ return code
18509+
18510+@subst('Tools/python.py')
18511+def r8(code):
18512+ code = code.replace('proc.communicate()[0]', 'proc.communicate()[0].decode("utf-8")')
18513+ return code
18514+
18515+@subst('Tools/glib2.py')
18516+def r9(code):
18517+ code = code.replace('f.write(c)', 'f.write(c.encode("utf-8"))')
18518+ return code
18519+
18520+@subst('Tools/config_c.py')
18521+def r10(code):
18522+ code = code.replace("key=kw['success']", "key=kw['success']\n\t\t\t\ttry:\n\t\t\t\t\tkey=key.decode('utf-8')\n\t\t\t\texcept:\n\t\t\t\t\tpass")
18523+ code = code.replace('out=str(out)','out=out.decode("utf-8")')
18524+ code = code.replace('err=str(err)','err=err.decode("utf-8")')
18525+ return code
18526+
18527+@subst('Tools/d.py')
18528+def r11(code):
18529+ code = code.replace('ret.strip()', 'ret.strip().decode("utf-8")')
18530+ return code
18531+
18532+def fixdir(dir):
18533+ global all_modifs
18534+ for k in all_modifs:
18535+ for v in all_modifs[k]:
18536+ modif(os.path.join(dir, 'wafadmin'), k, v)
18537+ #print('substitutions finished')
18538+