Diffstat (limited to 'meta-oe/recipes-connectivity/samba/samba-3.6.24/waf-as-source.patch')
-rw-r--r--  meta-oe/recipes-connectivity/samba/samba-3.6.24/waf-as-source.patch | 18534
1 file changed, 18534 insertions, 0 deletions
diff --git a/meta-oe/recipes-connectivity/samba/samba-3.6.24/waf-as-source.patch b/meta-oe/recipes-connectivity/samba/samba-3.6.24/waf-as-source.patch
new file mode 100644
index 000000000..985ed5af1
--- /dev/null
+++ b/meta-oe/recipes-connectivity/samba/samba-3.6.24/waf-as-source.patch
@@ -0,0 +1,18534 @@
1Description: Include waf as an extracted source directory, rather than as a one-in-a-file script.
2Author: Jelmer Vernooij <jelmer@samba.org>
3Bug-Debian: http://bugs.debian.org/654499
4Forwarded: yes
5Bug: https://bugzilla.samba.org/show_bug.cgi?id=8923
6
7diff --git a/buildtools/README b/buildtools/README
8new file mode 100644
9index 0000000..eab0382
10--- /dev/null
11+++ b/buildtools/README
12@@ -0,0 +1,12 @@
13+See http://code.google.com/p/waf/ for more information on waf
14+
15+You can get a svn copy of the upstream source with:
16+
17+ svn checkout http://waf.googlecode.com/svn/trunk/ waf-read-only
18+
19+Samba currently uses waf 1.5, which can be found at:
20+
21+ http://waf.googlecode.com/svn/branches/waf-1.5
22+
23+To update the current copy of waf, use the update-waf.sh script in this
24+directory.
25diff --git a/buildtools/bin/README b/buildtools/bin/README
26deleted file mode 100644
27index 9ef8a1f..0000000
28--- a/buildtools/bin/README
29+++ /dev/null
30@@ -1,16 +0,0 @@
31-This copy of waf-svn is taken from the git mirror of waf
32-at:
33-
34- git://git.samba.org/tridge/waf-svn.git
35-
36-using the waf-samba branch
37-
38-It was built using the command:
39-
40- ./waf-light --zip-type=gz --make-waf
41-
42-See http://code.google.com/p/waf/ for more information on waf
43-
44-You can get a svn copy of the upstream source with:
45-
46- svn checkout http://waf.googlecode.com/svn/trunk/ waf-read-only
47diff --git a/buildtools/update-waf.sh b/buildtools/update-waf.sh
48new file mode 100755
49index 0000000..bb3a4bf
50--- /dev/null
51+++ b/buildtools/update-waf.sh
52@@ -0,0 +1,13 @@
53+#!/bin/sh
54+# Update our copy of waf
55+
56+TARGETDIR="`dirname $0`"
57+WORKDIR="`mktemp -d`"
58+
59+mkdir -p "$WORKDIR"
60+
61+svn checkout http://waf.googlecode.com/svn/branches/waf-1.5/wafadmin "$WORKDIR/wafadmin"
62+
63+rsync -C -avz --delete "$WORKDIR/wafadmin/" "$TARGETDIR/wafadmin/"
64+
65+rm -rf "$WORKDIR"
66diff --git a/buildtools/wafadmin/3rdparty/ParallelDebug.py b/buildtools/wafadmin/3rdparty/ParallelDebug.py
67new file mode 100644
68index 0000000..9d0493e
69--- /dev/null
70+++ b/buildtools/wafadmin/3rdparty/ParallelDebug.py
71@@ -0,0 +1,299 @@
72+#! /usr/bin/env python
73+# encoding: utf-8
74+# Thomas Nagy, 2007-2010 (ita)
75+
76+"""
77+debugging helpers for parallel compilation, outputs
78+a svg file in the build directory
79+"""
80+
81+import os, time, sys, threading
82+try: from Queue import Queue
83+except: from queue import Queue
84+import Runner, Options, Utils, Task, Logs
85+from Constants import *
86+
87+#import random
88+#random.seed(100)
89+
90+def set_options(opt):
91+ opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
92+ help='title for the svg diagram', dest='dtitle')
93+ opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=1000, dest='dwidth')
94+ opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
95+ opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
96+ opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
97+
98+# red #ff4d4d
99+# green #4da74d
100+# lila #a751ff
101+
102+color2code = {
103+ 'GREEN' : '#4da74d',
104+ 'YELLOW' : '#fefe44',
105+ 'PINK' : '#a751ff',
106+ 'RED' : '#cc1d1d',
107+ 'BLUE' : '#6687bb',
108+ 'CYAN' : '#34e2e2',
109+
110+}
111+
112+mp = {}
113+info = [] # list of (text,color)
114+
115+def map_to_color(name):
116+ if name in mp:
117+ return mp[name]
118+ try:
119+ cls = Task.TaskBase.classes[name]
120+ except KeyError:
121+ return color2code['RED']
122+ if cls.color in mp:
123+ return mp[cls.color]
124+ if cls.color in color2code:
125+ return color2code[cls.color]
126+ return color2code['RED']
127+
128+def loop(self):
129+ while 1:
130+ tsk=Runner.TaskConsumer.ready.get()
131+ tsk.master.set_running(1, id(threading.currentThread()), tsk)
132+ Runner.process_task(tsk)
133+ tsk.master.set_running(-1, id(threading.currentThread()), tsk)
134+Runner.TaskConsumer.loop = loop
135+
136+
137+old_start = Runner.Parallel.start
138+def do_start(self):
139+ print Options.options
140+ try:
141+ Options.options.dband
142+ except AttributeError:
143+ raise ValueError('use def options(opt): opt.load("parallel_debug")!')
144+
145+ self.taskinfo = Queue()
146+ old_start(self)
147+ process_colors(self)
148+Runner.Parallel.start = do_start
149+
150+def set_running(self, by, i, tsk):
151+ self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by) )
152+Runner.Parallel.set_running = set_running
153+
154+def name2class(name):
155+ return name.replace(' ', '_').replace('.', '_')
156+
157+def process_colors(producer):
158+ # first, cast the parameters
159+ tmp = []
160+ try:
161+ while True:
162+ tup = producer.taskinfo.get(False)
163+ tmp.append(list(tup))
164+ except:
165+ pass
166+
167+ try:
168+ ini = float(tmp[0][2])
169+ except:
170+ return
171+
172+ if not info:
173+ seen = []
174+ for x in tmp:
175+ name = x[3]
176+ if not name in seen:
177+ seen.append(name)
178+ else:
179+ continue
180+
181+ info.append((name, map_to_color(name)))
182+ info.sort(key=lambda x: x[0])
183+
184+ thread_count = 0
185+ acc = []
186+ for x in tmp:
187+ thread_count += x[6]
188+ acc.append("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count))
189+ f = open('pdebug.dat', 'w')
190+ #Utils.write('\n'.join(acc))
191+ f.write('\n'.join(acc))
192+
193+ tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]
194+
195+ st = {}
196+ for l in tmp:
197+ if not l[0] in st:
198+ st[l[0]] = len(st.keys())
199+ tmp = [ [st[lst[0]]] + lst[1:] for lst in tmp ]
200+ THREAD_AMOUNT = len(st.keys())
201+
202+ st = {}
203+ for l in tmp:
204+ if not l[1] in st:
205+ st[l[1]] = len(st.keys())
206+ tmp = [ [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ]
207+
208+
209+ BAND = Options.options.dband
210+
211+ seen = {}
212+ acc = []
213+ for x in range(len(tmp)):
214+ line = tmp[x]
215+ id = line[1]
216+
217+ if id in seen:
218+ continue
219+ seen[id] = True
220+
221+ begin = line[2]
222+ thread_id = line[0]
223+ for y in range(x + 1, len(tmp)):
224+ line = tmp[y]
225+ if line[1] == id:
226+ end = line[2]
227+ #print id, thread_id, begin, end
228+ #acc.append( ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
229+ acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3]) )
230+ break
231+
232+ if Options.options.dmaxtime < 0.1:
233+ gwidth = 1
234+ for x in tmp:
235+ m = BAND * x[2]
236+ if m > gwidth:
237+ gwidth = m
238+ else:
239+ gwidth = BAND * Options.options.dmaxtime
240+
241+ ratio = float(Options.options.dwidth) / gwidth
242+ gwidth = Options.options.dwidth
243+
244+ gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)
245+
246+ out = []
247+
248+ out.append("""<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>
249+<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.0//EN\"
250+\"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd\">
251+<svg xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" version=\"1.0\"
252+ x=\"%r\" y=\"%r\" width=\"%r\" height=\"%r\"
253+ id=\"svg602\" xml:space=\"preserve\">
254+
255+<style type='text/css' media='screen'>
256+ g.over rect { stroke:#FF0000; fill-opacity:0.4 }
257+</style>
258+
259+<script type='text/javascript'><![CDATA[
260+ var svg = document.getElementsByTagName('svg')[0];
261+ var svgNS = svg.getAttribute('xmlns');
262+ svg.addEventListener('mouseover',function(e){
263+ var g = e.target.parentNode;
264+ var x = document.getElementById('r_'+g.id);
265+ if (x) {
266+ g.setAttribute('class', g.getAttribute('class')+' over');
267+ x.setAttribute('class', x.getAttribute('class')+' over');
268+ showInfo(e, g.id);
269+ }
270+ },false);
271+ svg.addEventListener('mouseout',function(e){
272+ var g = e.target.parentNode;
273+ var x = document.getElementById('r_'+g.id);
274+ if (x) {
275+ g.setAttribute('class',g.getAttribute('class').replace(' over',''));
276+ x.setAttribute('class',x.getAttribute('class').replace(' over',''));
277+ hideInfo(e);
278+ }
279+ },false);
280+
281+function showInfo(evt, txt) {
282+ tooltip = document.getElementById('tooltip');
283+
284+ var t = document.getElementById('tooltiptext');
285+ t.firstChild.data = txt;
286+
287+ var x = evt.clientX+10;
288+ if (x > 200) { x -= t.getComputedTextLength() + 16; }
289+ var y = evt.clientY+30;
290+ tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
291+ tooltip.setAttributeNS(null,"visibility","visible");
292+
293+ var r = document.getElementById('tooltiprect');
294+ r.setAttribute('width', t.getComputedTextLength()+6)
295+}
296+
297+
298+function hideInfo(evt) {
299+ tooltip = document.getElementById('tooltip');
300+ tooltip.setAttributeNS(null,"visibility","hidden");
301+}
302+
303+]]></script>
304+
305+<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
306+<rect
307+ x='%r' y='%r'
308+ width='%r' height='%r' z-index='10'
309+ style=\"font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;\"
310+ />\n
311+
312+""" % (0, 0, gwidth + 4, gheight + 4, 0, 0, gwidth + 4, gheight + 4))
313+
314+ # main title
315+ if Options.options.dtitle:
316+ out.append("""<text x="%d" y="%d" style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">%s</text>
317+""" % (gwidth/2, gheight - 5, Options.options.dtitle))
318+
319+ # the rectangles
320+ groups = {}
321+ for (x, y, w, h, clsname) in acc:
322+ try:
323+ groups[clsname].append((x, y, w, h))
324+ except:
325+ groups[clsname] = [(x, y, w, h)]
326+
327+ for cls in groups:
328+
329+ out.append("<g id='%s'>\n" % name2class(cls))
330+
331+ for (x, y, w, h) in groups[cls]:
332+ out.append(""" <rect
333+ x='%r' y='%r'
334+ width='%r' height='%r' z-index='11'
335+ style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
336+ />\n""" % (2 + x*ratio, 2 + y, w*ratio, h, map_to_color(cls)))
337+
338+ out.append("</g>\n")
339+
340+ # output the caption
341+ cnt = THREAD_AMOUNT
342+
343+ for (text, color) in info:
344+ # caption box
345+ b = BAND/2
346+ out.append("""<g id='r_%s'><rect
347+ x='%r' y='%r'
348+ width='%r' height='%r'
349+ style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
350+ />\n""" % (name2class(text), 2 + BAND, 5 + (cnt + 0.5) * BAND, b, b, color))
351+
352+ # caption text
353+ out.append("""<text
354+ style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
355+ x="%r" y="%d">%s</text></g>\n""" % (2 + 2 * BAND, 5 + (cnt + 0.5) * BAND + 10, text))
356+ cnt += 1
357+
358+ out.append("""
359+<g transform="translate(0,0)" visibility="hidden" id="tooltip">
360+ <rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
361+ <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;"> </text>
362+</g>""")
363+
364+ out.append("\n</svg>")
365+
366+ #node = producer.bld.path.make_node('pdebug.svg')
367+ f = open('pdebug.svg', 'w')
368+ f.write("".join(out))
369+
370+
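Illustrative sketch (standalone Python, not part of the patch): the record format that process_colors() above writes to pdebug.dat, demonstrated with a made-up entry from the taskinfo queue (thread id, task id, timestamp, class name, processed, count, running delta):

    ini = 100.0                                   # assumed start time
    x = [1, 140213, 100.25, 'cc', 3, 20, 1]       # assumed taskinfo tuple
    print("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], 1))
    # -> 1 140213 0.250000 'cc' 3 20 1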
371diff --git a/buildtools/wafadmin/3rdparty/batched_cc.py b/buildtools/wafadmin/3rdparty/batched_cc.py
372new file mode 100644
373index 0000000..8e31074
374--- /dev/null
375+++ b/buildtools/wafadmin/3rdparty/batched_cc.py
376@@ -0,0 +1,183 @@
377+#!/usr/bin/env python
378+# encoding: utf-8
379+# Thomas Nagy, 2006 (ita)
380+
381+"""
382+Batched builds - compile faster
383+instead of compiling object files one by one, c/c++ compilers are often able to compile at once:
384+cc -c ../file1.c ../file2.c ../file3.c
385+
386+Files are output on the directory where the compiler is called, and dependencies are more difficult
387+to track (do not run the command on all source files if only one file changes)
388+
389+As such, we proceed as if the files were compiled one by one, but no command is actually run:
390+replace each cc/cpp Task by a TaskSlave
391+A new task called TaskMaster collects the signatures from each slave and finds out the command-line
392+to run.
393+
394+To set this up, the method ccroot::create_task is replaced by a new version, to enable batched builds
395+it is only necessary to import this module in the configuration (no other change required)
396+"""
397+
398+MAX_BATCH = 50
399+MAXPARALLEL = False
400+
401+EXT_C = ['.c', '.cc', '.cpp', '.cxx']
402+
403+import os, threading
404+import TaskGen, Task, ccroot, Build, Logs
405+from TaskGen import extension, feature, before
406+from Constants import *
407+
408+cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} -c ${SRCLST}'
409+cc_fun = Task.compile_fun_noshell('batched_cc', cc_str)[0]
410+
411+cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} -c ${SRCLST}'
412+cxx_fun = Task.compile_fun_noshell('batched_cxx', cxx_str)[0]
413+
414+count = 70000
415+class batch_task(Task.Task):
416+ color = 'RED'
417+
418+ after = 'cc cxx'
419+ before = 'cc_link cxx_link static_link'
420+
421+ def __str__(self):
422+ return '(batch compilation for %d slaves)\n' % len(self.slaves)
423+
424+ def __init__(self, *k, **kw):
425+ Task.Task.__init__(self, *k, **kw)
426+ self.slaves = []
427+ self.inputs = []
428+ self.hasrun = 0
429+
430+ global count
431+ count += 1
432+ self.idx = count
433+
434+ def add_slave(self, slave):
435+ self.slaves.append(slave)
436+ self.set_run_after(slave)
437+
438+ def runnable_status(self):
439+ for t in self.run_after:
440+ if not t.hasrun:
441+ return ASK_LATER
442+
443+ for t in self.slaves:
444+ #if t.executed:
445+ if t.hasrun != SKIPPED:
446+ return RUN_ME
447+
448+ return SKIP_ME
449+
450+ def run(self):
451+ outputs = []
452+ self.outputs = []
453+
454+ srclst = []
455+ slaves = []
456+ for t in self.slaves:
457+ if t.hasrun != SKIPPED:
458+ slaves.append(t)
459+ srclst.append(t.inputs[0].abspath(self.env))
460+
461+ self.env.SRCLST = srclst
462+ self.cwd = slaves[0].inputs[0].parent.abspath(self.env)
463+
464+ env = self.env
465+ app = env.append_unique
466+ cpppath_st = env['CPPPATH_ST']
467+ env._CCINCFLAGS = env.CXXINCFLAGS = []
468+
469+ # local flags come first
470+ # set the user-defined includes paths
471+ for i in env['INC_PATHS']:
472+ app('_CCINCFLAGS', cpppath_st % i.abspath())
473+ app('_CXXINCFLAGS', cpppath_st % i.abspath())
474+ app('_CCINCFLAGS', cpppath_st % i.abspath(env))
475+ app('_CXXINCFLAGS', cpppath_st % i.abspath(env))
476+
477+ # set the library include paths
478+ for i in env['CPPPATH']:
479+ app('_CCINCFLAGS', cpppath_st % i)
480+ app('_CXXINCFLAGS', cpppath_st % i)
481+
482+ if self.slaves[0].__class__.__name__ == 'cc':
483+ ret = cc_fun(self)
484+ else:
485+ ret = cxx_fun(self)
486+
487+ if ret:
488+ return ret
489+
490+ for t in slaves:
491+ t.old_post_run()
492+
493+from TaskGen import extension, feature, after
494+
495+import cc, cxx
496+def wrap(fun):
497+ def foo(self, node):
498+ # we cannot control the extension, this sucks
499+ self.obj_ext = '.o'
500+
501+ task = fun(self, node)
502+ if not getattr(self, 'masters', None):
503+ self.masters = {}
504+ self.allmasters = []
505+
506+ if not node.parent.id in self.masters:
507+ m = self.masters[node.parent.id] = self.master = self.create_task('batch')
508+ self.allmasters.append(m)
509+ else:
510+ m = self.masters[node.parent.id]
511+ if len(m.slaves) > MAX_BATCH:
512+ m = self.masters[node.parent.id] = self.master = self.create_task('batch')
513+ self.allmasters.append(m)
514+
515+ m.add_slave(task)
516+ return task
517+ return foo
518+
519+c_hook = wrap(cc.c_hook)
520+extension(cc.EXT_CC)(c_hook)
521+
522+cxx_hook = wrap(cxx.cxx_hook)
523+extension(cxx.EXT_CXX)(cxx_hook)
524+
525+
526+@feature('cprogram', 'cshlib', 'cstaticlib')
527+@after('apply_link')
528+def link_after_masters(self):
529+ if getattr(self, 'allmasters', None):
530+ for m in self.allmasters:
531+ self.link_task.set_run_after(m)
532+
533+for c in ['cc', 'cxx']:
534+ t = Task.TaskBase.classes[c]
535+ def run(self):
536+ pass
537+
538+ def post_run(self):
539+ #self.executed=1
540+ pass
541+
542+ def can_retrieve_cache(self):
543+ if self.old_can_retrieve_cache():
544+ for m in self.generator.allmasters:
545+ try:
546+ m.slaves.remove(self)
547+ except ValueError:
548+ pass #this task wasn't included in that master
549+ return 1
550+ else:
551+ return None
552+
553+ setattr(t, 'oldrun', t.__dict__['run'])
554+ setattr(t, 'run', run)
555+ setattr(t, 'old_post_run', t.post_run)
556+ setattr(t, 'post_run', post_run)
557+ setattr(t, 'old_can_retrieve_cache', t.can_retrieve_cache)
558+ setattr(t, 'can_retrieve_cache', can_retrieve_cache)
559+
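Illustrative sketch (assumed wscript, not part of the patch): as the batched_cc docstring above notes, enabling batched builds only requires importing the module during configuration; the tool choice and source/target names here are invented:

    def configure(conf):
        conf.check_tool('gcc')
        import batched_cc    # assumes wafadmin/3rdparty is importable; the import alone activates batching

    def build(bld):
        bld(features='cc cprogram', source='a.c b.c c.c', target='demo')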
560diff --git a/buildtools/wafadmin/3rdparty/boost.py b/buildtools/wafadmin/3rdparty/boost.py
561new file mode 100644
562index 0000000..e690a4e
563--- /dev/null
564+++ b/buildtools/wafadmin/3rdparty/boost.py
565@@ -0,0 +1,343 @@
566+#!/usr/bin/env python
567+# encoding: utf-8
568+#
569+# partially based on boost.py written by Gernot Vormayr
570+# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
571+# modified by Bjoern Michaelsen, 2008
572+# modified by Luca Fossati, 2008
573+# rewritten for waf 1.5.1, Thomas Nagy, 2008
574+#
575+#def set_options(opt):
576+# opt.tool_options('boost')
577+# # ...
578+#
579+#def configure(conf):
580+# # ... (e.g. conf.check_tool('g++'))
581+# conf.check_tool('boost')
582+# conf.check_boost(lib='signals filesystem', static='onlystatic', score_version=(-1000, 1000), tag_minscore=1000)
583+#
584+#def build(bld):
585+# bld(source='main.c', target='bar', uselib="BOOST BOOST_SYSTEM")
586+#
587+#ISSUES:
588+# * find_includes should be called only once!
589+# * support mandatory
590+
591+######## boost update ###########
592+## ITA: * the method get_boost_version_number does work
593+## * the rest of the code has not really been tried
594+# * make certain a demo is provided (in demos/adv for example)
595+
596+# TODO: bad and underdocumented code -> boost.py will be removed in waf 1.6 to be rewritten later
597+
598+import os.path, glob, types, re, sys
599+import Configure, config_c, Options, Utils, Logs
600+from Logs import warn, debug
601+from Configure import conf
602+
603+boost_code = '''
604+#include <iostream>
605+#include <boost/version.hpp>
606+int main() { std::cout << BOOST_VERSION << std::endl; }
607+'''
608+
609+boost_libpath = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
610+boost_cpppath = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
611+
612+STATIC_NOSTATIC = 'nostatic'
613+STATIC_BOTH = 'both'
614+STATIC_ONLYSTATIC = 'onlystatic'
615+
616+is_versiontag = re.compile('^\d+_\d+_?\d*$')
617+is_threadingtag = re.compile('^mt$')
618+is_abitag = re.compile('^[sgydpn]+$')
619+is_toolsettag = re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|vc|mgw|qcc|sun|vacpp)\d*$')
620+is_pythontag=re.compile('^py[0-9]{2}$')
621+
622+def set_options(opt):
623+ opt.add_option('--boost-includes', type='string', default='', dest='boostincludes', help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35')
624+ opt.add_option('--boost-libs', type='string', default='', dest='boostlibs', help='path to the directory where the boost libs are e.g. /usr/local/lib')
625+
626+def string_to_version(s):
627+ version = s.split('.')
628+ if len(version) < 3: return 0
629+ return int(version[0])*100000 + int(version[1])*100 + int(version[2])
630+
631+def version_string(version):
632+ major = version / 100000
633+ minor = version / 100 % 1000
634+ minor_minor = version % 100
635+ if minor_minor == 0:
636+ return "%d_%d" % (major, minor)
637+ else:
638+ return "%d_%d_%d" % (major, minor, minor_minor)
639+
640+def libfiles(lib, pattern, lib_paths):
641+ result = []
642+ for lib_path in lib_paths:
643+ libname = pattern % ('boost_%s[!_]*' % lib)
644+ result += glob.glob(os.path.join(lib_path, libname))
645+ return result
646+
647+@conf
648+def get_boost_version_number(self, dir):
649+ """silently retrieve the boost version number"""
650+ try:
651+ return self.run_c_code(compiler='cxx', code=boost_code, includes=dir, execute=1, env=self.env.copy(), type='cprogram', compile_mode='cxx', compile_filename='test.cpp')
652+ except Configure.ConfigurationError, e:
653+ return -1
654+
655+def set_default(kw, var, val):
656+ if not var in kw:
657+ kw[var] = val
658+
659+def tags_score(tags, kw):
660+ """
661+ checks library tags
662+
663+ see http://www.boost.org/doc/libs/1_35_0/more/getting_started/unix-variants.html 6.1
664+ """
665+ score = 0
666+ needed_tags = {
667+ 'threading': kw['tag_threading'],
668+ 'abi': kw['tag_abi'],
669+ 'toolset': kw['tag_toolset'],
670+ 'version': kw['tag_version'],
671+ 'python': kw['tag_python']
672+ }
673+
674+ if kw['tag_toolset'] is None:
675+ v = kw['env']
676+ toolset = v['CXX_NAME']
677+ if v['CXX_VERSION']:
678+ version_no = v['CXX_VERSION'].split('.')
679+ toolset += version_no[0]
680+ if len(version_no) > 1:
681+ toolset += version_no[1]
682+ needed_tags['toolset'] = toolset
683+
684+ found_tags = {}
685+ for tag in tags:
686+ if is_versiontag.match(tag): found_tags['version'] = tag
687+ if is_threadingtag.match(tag): found_tags['threading'] = tag
688+ if is_abitag.match(tag): found_tags['abi'] = tag
689+ if is_toolsettag.match(tag): found_tags['toolset'] = tag
690+ if is_pythontag.match(tag): found_tags['python'] = tag
691+
692+ for tagname in needed_tags.iterkeys():
693+ if needed_tags[tagname] is not None and tagname in found_tags:
694+ if re.compile(needed_tags[tagname]).match(found_tags[tagname]):
695+ score += kw['score_' + tagname][0]
696+ else:
697+ score += kw['score_' + tagname][1]
698+ return score
699+
700+@conf
701+def validate_boost(self, kw):
702+ ver = kw.get('version', '')
703+
704+ for x in 'min_version max_version version'.split():
705+ set_default(kw, x, ver)
706+
707+ set_default(kw, 'lib', '')
708+ kw['lib'] = Utils.to_list(kw['lib'])
709+
710+ set_default(kw, 'env', self.env)
711+
712+ set_default(kw, 'libpath', boost_libpath)
713+ set_default(kw, 'cpppath', boost_cpppath)
714+
715+ for x in 'tag_threading tag_version tag_toolset'.split():
716+ set_default(kw, x, None)
717+ set_default(kw, 'tag_abi', '^[^d]*$')
718+
719+ set_default(kw, 'python', str(sys.version_info[0]) + str(sys.version_info[1]) )
720+ set_default(kw, 'tag_python', '^py' + kw['python'] + '$')
721+
722+ set_default(kw, 'score_threading', (10, -10))
723+ set_default(kw, 'score_abi', (10, -10))
724+ set_default(kw, 'score_python', (10,-10))
725+ set_default(kw, 'score_toolset', (1, -1))
726+ set_default(kw, 'score_version', (100, -100))
727+
728+ set_default(kw, 'score_min', 0)
729+ set_default(kw, 'static', STATIC_NOSTATIC)
730+ set_default(kw, 'found_includes', False)
731+ set_default(kw, 'min_score', 0)
732+
733+ set_default(kw, 'errmsg', 'not found')
734+ set_default(kw, 'okmsg', 'ok')
735+
736+@conf
737+def find_boost_includes(self, kw):
738+ """
739+ check every path in kw['cpppath'] for subdir
740+ that either starts with boost- or is named boost.
741+
742+ Then the version is checked and selected according to
743+ min_version/max_version. The highest possible version number is
744+ selected!
745+
746+ If no versiontag is set the versiontag is set according to the
747+ selected library and CPPPATH_BOOST is set.
748+ """
749+ boostPath = getattr(Options.options, 'boostincludes', '')
750+ if boostPath:
751+ boostPath = [os.path.normpath(os.path.expandvars(os.path.expanduser(boostPath)))]
752+ else:
753+ boostPath = Utils.to_list(kw['cpppath'])
754+
755+ min_version = string_to_version(kw.get('min_version', ''))
756+ max_version = string_to_version(kw.get('max_version', '')) or (sys.maxint - 1)
757+
758+ version = 0
759+ for include_path in boostPath:
760+ boost_paths = [p for p in glob.glob(os.path.join(include_path, 'boost*')) if os.path.isdir(p)]
761+ debug('BOOST Paths: %r' % boost_paths)
762+ for path in boost_paths:
763+ pathname = os.path.split(path)[-1]
764+ ret = -1
765+ if pathname == 'boost':
766+ path = include_path
767+ ret = self.get_boost_version_number(path)
768+ elif pathname.startswith('boost-'):
769+ ret = self.get_boost_version_number(path)
770+ ret = int(ret)
771+
772+ if ret != -1 and ret >= min_version and ret <= max_version and ret > version:
773+ boost_path = path
774+ version = ret
775+ if not version:
776+ self.fatal('boost headers not found! (required version min: %s max: %s)'
777+ % (kw['min_version'], kw['max_version']))
778+ return False
779+
780+ found_version = version_string(version)
781+ versiontag = '^' + found_version + '$'
782+ if kw['tag_version'] is None:
783+ kw['tag_version'] = versiontag
784+ elif kw['tag_version'] != versiontag:
785+ warn('boost header version %r and tag_version %r do not match!' % (versiontag, kw['tag_version']))
786+ env = self.env
787+ env['CPPPATH_BOOST'] = boost_path
788+ env['BOOST_VERSION'] = found_version
789+ self.found_includes = 1
790+ ret = 'Version %s (%s)' % (found_version, boost_path)
791+ return ret
792+
793+@conf
794+def find_boost_library(self, lib, kw):
795+
796+ def find_library_from_list(lib, files):
797+ lib_pattern = re.compile('.*boost_(.*?)\..*')
798+ result = (None, None)
799+ resultscore = kw['min_score'] - 1
800+ for file in files:
801+ m = lib_pattern.search(file, 1)
802+ if m:
803+ libname = m.group(1)
804+ libtags = libname.split('-')[1:]
805+ currentscore = tags_score(libtags, kw)
806+ if currentscore > resultscore:
807+ result = (libname, file)
808+ resultscore = currentscore
809+ return result
810+
811+ lib_paths = getattr(Options.options, 'boostlibs', '')
812+ if lib_paths:
813+ lib_paths = [os.path.normpath(os.path.expandvars(os.path.expanduser(lib_paths)))]
814+ else:
815+ lib_paths = Utils.to_list(kw['libpath'])
816+
817+ v = kw.get('env', self.env)
818+
819+ (libname, file) = (None, None)
820+ if kw['static'] in [STATIC_NOSTATIC, STATIC_BOTH]:
821+ st_env_prefix = 'LIB'
822+ files = libfiles(lib, v['shlib_PATTERN'], lib_paths)
823+ (libname, file) = find_library_from_list(lib, files)
824+ if libname is None and kw['static'] in [STATIC_ONLYSTATIC, STATIC_BOTH]:
825+ st_env_prefix = 'STATICLIB'
826+ staticLibPattern = v['staticlib_PATTERN']
827+ if self.env['CC_NAME'] == 'msvc':
828+ staticLibPattern = 'lib' + staticLibPattern
829+ files = libfiles(lib, staticLibPattern, lib_paths)
830+ (libname, file) = find_library_from_list(lib, files)
831+ if libname is not None:
832+ v['LIBPATH_BOOST_' + lib.upper()] = [os.path.split(file)[0]]
833+ if self.env['CC_NAME'] == 'msvc' and os.path.splitext(file)[1] == '.lib':
834+ v[st_env_prefix + '_BOOST_' + lib.upper()] = ['libboost_'+libname]
835+ else:
836+ v[st_env_prefix + '_BOOST_' + lib.upper()] = ['boost_'+libname]
837+ return
838+ self.fatal('lib boost_' + lib + ' not found!')
839+
840+@conf
841+def check_boost(self, *k, **kw):
842+ """
843+ This should be the main entry point
844+
845+- min_version
846+- max_version
847+- version
848+- include_path
849+- lib_path
850+- lib
851+- toolsettag - None or a regexp
852+- threadingtag - None or a regexp
853+- abitag - None or a regexp
854+- versiontag - WARNING: you should rather use version or min_version/max_version
855+- static - look for static libs (values:
856+ 'nostatic' or STATIC_NOSTATIC - ignore static libs (default)
857+ 'both' or STATIC_BOTH - find static libs, too
858+ 'onlystatic' or STATIC_ONLYSTATIC - find only static libs
859+- score_version
860+- score_abi
861+- score_threading
862+- score_toolset
863+ * the scores are tuples (match_score, nomatch_score)
864+ match_score is added to the score if the tag is matched
865+ nomatch_score is added when a tag is found and does not match
866+- min_score
867+ """
868+
869+ if not self.env['CXX']:
870+ self.fatal('load a c++ compiler tool first, for example conf.check_tool("g++")')
871+ self.validate_boost(kw)
872+ ret = None
873+ try:
874+ if not kw.get('found_includes', None):
875+ self.check_message_1(kw.get('msg_includes', 'boost headers'))
876+ ret = self.find_boost_includes(kw)
877+
878+ except Configure.ConfigurationError, e:
879+ if 'errmsg' in kw:
880+ self.check_message_2(kw['errmsg'], 'YELLOW')
881+ if 'mandatory' in kw:
882+ if Logs.verbose > 1:
883+ raise
884+ else:
885+ self.fatal('the configuration failed (see %r)' % self.log.name)
886+ else:
887+ if 'okmsg' in kw:
888+ self.check_message_2(kw.get('okmsg_includes', ret))
889+
890+ for lib in kw['lib']:
891+ self.check_message_1('library boost_'+lib)
892+ try:
893+ self.find_boost_library(lib, kw)
894+ except Configure.ConfigurationError, e:
895+ ret = False
896+ if 'errmsg' in kw:
897+ self.check_message_2(kw['errmsg'], 'YELLOW')
898+ if 'mandatory' in kw:
899+ if Logs.verbose > 1:
900+ raise
901+ else:
902+ self.fatal('the configuration failed (see %r)' % self.log.name)
903+ else:
904+ if 'okmsg' in kw:
905+ self.check_message_2(kw['okmsg'])
906+
907+ return ret
908+
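Illustrative sketch: the usage outlined in the header comments of boost.py above, assembled into one assumed wscript (the library names and the g++ tool choice are taken from those comments, not verified here):

    def set_options(opt):
        opt.tool_options('boost')

    def configure(conf):
        conf.check_tool('g++')
        conf.check_tool('boost')
        conf.check_boost(lib='signals filesystem', static='onlystatic',
                         score_version=(-1000, 1000), tag_minscore=1000)

    def build(bld):
        bld(source='main.c', target='bar', uselib='BOOST BOOST_SYSTEM')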
909diff --git a/buildtools/wafadmin/3rdparty/fluid.py b/buildtools/wafadmin/3rdparty/fluid.py
910new file mode 100644
911index 0000000..117edef
912--- /dev/null
913+++ b/buildtools/wafadmin/3rdparty/fluid.py
914@@ -0,0 +1,27 @@
915+#!/usr/bin/python
916+# encoding: utf-8
917+# Grygoriy Fuchedzhy 2009
918+
919+"""
920+Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjunction with the 'cxx' feature.
921+"""
922+
923+import Task
924+from TaskGen import extension
925+
926+Task.simple_task_type('fluid', '${FLUID} -c -o ${TGT[0].abspath(env)} -h ${TGT[1].abspath(env)} ${SRC}', 'BLUE', shell=False, ext_out='.cxx')
927+
928+@extension('.fl')
929+def fluid(self, node):
930+ """add the .fl to the source list; the cxx file generated will be compiled when possible"""
931+ cpp = node.change_ext('.cpp')
932+ hpp = node.change_ext('.hpp')
933+ self.create_task('fluid', node, [cpp, hpp])
934+
935+ if 'cxx' in self.features:
936+ self.allnodes.append(cpp)
937+
938+def detect(conf):
939+ fluid = conf.find_program('fluid', var='FLUID', mandatory=True)
940+ conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True)
941+
942diff --git a/buildtools/wafadmin/3rdparty/gccdeps.py b/buildtools/wafadmin/3rdparty/gccdeps.py
943new file mode 100644
944index 0000000..6600c9c
945--- /dev/null
946+++ b/buildtools/wafadmin/3rdparty/gccdeps.py
947@@ -0,0 +1,128 @@
948+#!/usr/bin/env python
949+# encoding: utf-8
950+# Thomas Nagy, 2008-2010 (ita)
951+
952+"""
953+Execute the tasks with gcc -MD, read the dependencies from the .d file
954+and prepare the dependency calculation for the next run
955+"""
956+
957+import os, re, threading
958+import Task, Logs, Utils, preproc
959+from TaskGen import before, after, feature
960+
961+lock = threading.Lock()
962+
963+preprocessor_flag = '-MD'
964+
965+@feature('cc')
966+@before('apply_core')
967+def add_mmd_cc(self):
968+ if self.env.get_flat('CCFLAGS').find(preprocessor_flag) < 0:
969+ self.env.append_value('CCFLAGS', preprocessor_flag)
970+
971+@feature('cxx')
972+@before('apply_core')
973+def add_mmd_cxx(self):
974+ if self.env.get_flat('CXXFLAGS').find(preprocessor_flag) < 0:
975+ self.env.append_value('CXXFLAGS', preprocessor_flag)
976+
977+def scan(self):
978+ "the scanner does not do anything initially"
979+ nodes = self.generator.bld.node_deps.get(self.unique_id(), [])
980+ names = []
981+ return (nodes, names)
982+
983+re_o = re.compile("\.o$")
984+re_src = re.compile("^(\.\.)[\\/](.*)$")
985+
986+def post_run(self):
987+ # The following code is executed by threads, it is not safe, so a lock is needed...
988+
989+ if getattr(self, 'cached', None):
990+ return Task.Task.post_run(self)
991+
992+ name = self.outputs[0].abspath(self.env)
993+ name = re_o.sub('.d', name)
994+ txt = Utils.readf(name)
995+ #os.unlink(name)
996+
997+ txt = txt.replace('\\\n', '')
998+
999+ lst = txt.strip().split(':')
1000+ val = ":".join(lst[1:])
1001+ val = val.split()
1002+
1003+ nodes = []
1004+ bld = self.generator.bld
1005+
1006+ f = re.compile("^("+self.env.variant()+"|\.\.)[\\/](.*)$")
1007+ for x in val:
1008+ if os.path.isabs(x):
1009+
1010+ if not preproc.go_absolute:
1011+ continue
1012+
1013+ lock.acquire()
1014+ try:
1015+ node = bld.root.find_resource(x)
1016+ finally:
1017+ lock.release()
1018+ else:
1019+ g = re.search(re_src, x)
1020+ if g:
1021+ x = g.group(2)
1022+ lock.acquire()
1023+ try:
1024+ node = bld.bldnode.parent.find_resource(x)
1025+ finally:
1026+ lock.release()
1027+ else:
1028+ g = re.search(f, x)
1029+ if g:
1030+ x = g.group(2)
1031+ lock.acquire()
1032+ try:
1033+ node = bld.srcnode.find_resource(x)
1034+ finally:
1035+ lock.release()
1036+
1037+ if id(node) == id(self.inputs[0]):
1038+ # ignore the source file, it is already in the dependencies
1039+ # this way, successful config tests may be retrieved from the cache
1040+ continue
1041+
1042+ if not node:
1043+ raise ValueError('could not find %r for %r' % (x, self))
1044+ else:
1045+ nodes.append(node)
1046+
1047+ Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
1048+
1049+ bld.node_deps[self.unique_id()] = nodes
1050+ bld.raw_deps[self.unique_id()] = []
1051+
1052+ try:
1053+ del self.cache_sig
1054+ except:
1055+ pass
1056+
1057+ Task.Task.post_run(self)
1058+
1059+import Constants, Utils
1060+def sig_implicit_deps(self):
1061+ try:
1062+ return Task.Task.sig_implicit_deps(self)
1063+ except Utils.WafError:
1064+ return Constants.SIG_NIL
1065+
1066+for name in 'cc cxx'.split():
1067+ try:
1068+ cls = Task.TaskBase.classes[name]
1069+ except KeyError:
1070+ pass
1071+ else:
1072+ cls.post_run = post_run
1073+ cls.scan = scan
1074+ cls.sig_implicit_deps = sig_implicit_deps
1075+
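Illustrative sketch (standalone Python, not part of the patch): the .d-file parsing that post_run() above performs, fed with an assumed example of gcc -MD output:

    txt = "main.o: main.c include/app.h \\\n include/util.h\n"   # assumed contents of a .d file
    txt = txt.replace('\\\n', '')                                 # join backslash-continued lines
    lst = txt.strip().split(':')
    deps = ":".join(lst[1:]).split()                              # everything after the target name
    print(deps)    # ['main.c', 'include/app.h', 'include/util.h']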
1076diff --git a/buildtools/wafadmin/3rdparty/go.py b/buildtools/wafadmin/3rdparty/go.py
1077new file mode 100644
1078index 0000000..2d8df0d
1079--- /dev/null
1080+++ b/buildtools/wafadmin/3rdparty/go.py
1081@@ -0,0 +1,111 @@
1082+#!/usr/bin/env python
1083+# encoding: utf-8
1084+# go.py - Waf tool for the Go programming language
1085+# By: Tom Wambold <tom5760@gmail.com>
1086+
1087+import platform, os
1088+
1089+import Task
1090+import Utils
1091+from TaskGen import feature, extension, after
1092+
1093+Task.simple_task_type('gocompile', '${GOC} ${GOCFLAGS} -o ${TGT} ${SRC}', shell=False)
1094+Task.simple_task_type('gopack', '${GOP} grc ${TGT} ${SRC}', shell=False)
1095+Task.simple_task_type('golink', '${GOL} ${GOLFLAGS} -o ${TGT} ${SRC}', shell=False)
1096+
1097+def detect(conf):
1098+
1099+ def set_def(var, val):
1100+ if not conf.env[var]:
1101+ conf.env[var] = val
1102+
1103+ goarch = os.getenv("GOARCH")
1104+
1105+ if goarch == '386':
1106+ set_def('GO_PLATFORM', 'i386')
1107+ elif goarch == 'amd64':
1108+ set_def('GO_PLATFORM', 'x86_64')
1109+ elif goarch == 'arm':
1110+ set_def('GO_PLATFORM', 'arm')
1111+ else:
1112+ set_def('GO_PLATFORM', platform.machine())
1113+
1114+ if conf.env.GO_PLATFORM == 'x86_64':
1115+ set_def('GO_COMPILER', '6g')
1116+ set_def('GO_LINKER', '6l')
1117+ set_def('GO_EXTENSION', '.6')
1118+ elif conf.env.GO_PLATFORM in ['i386', 'i486', 'i586', 'i686']:
1119+ set_def('GO_COMPILER', '8g')
1120+ set_def('GO_LINKER', '8l')
1121+ set_def('GO_EXTENSION', '.8')
1122+ elif conf.env.GO_PLATFORM == 'arm':
1123+ set_def('GO_COMPILER', '5g')
1124+ set_def('GO_LINKER', '5l')
1125+ set_def('GO_EXTENSION', '.5')
1126+
1127+ if not (conf.env.GO_COMPILER or conf.env.GO_LINKER or conf.env.GO_EXTENSION):
1128+ raise conf.fatal('Unsupported platform ' + platform.machine())
1129+
1130+ set_def('GO_PACK', 'gopack')
1131+ set_def('GO_PACK_EXTENSION', '.a')
1132+
1133+ conf.find_program(conf.env.GO_COMPILER, var='GOC', mandatory=True)
1134+ conf.find_program(conf.env.GO_LINKER, var='GOL', mandatory=True)
1135+ conf.find_program(conf.env.GO_PACK, var='GOP', mandatory=True)
1136+ conf.find_program('cgo', var='CGO', mandatory=True)
1137+
1138+@extension('.go')
1139+def compile_go(self, node):
1140+ try:
1141+ self.go_nodes.append(node)
1142+ except AttributeError:
1143+ self.go_nodes = [node]
1144+
1145+@feature('go')
1146+@after('apply_core')
1147+def apply_compile_go(self):
1148+ try:
1149+ nodes = self.go_nodes
1150+ except AttributeError:
1151+ self.go_compile_task = None
1152+ else:
1153+ self.go_compile_task = self.create_task('gocompile',
1154+ nodes,
1155+ [self.path.find_or_declare(self.target + self.env.GO_EXTENSION)])
1156+
1157+@feature('gopackage', 'goprogram')
1158+@after('apply_compile_go')
1159+def apply_goinc(self):
1160+ if not getattr(self, 'go_compile_task', None):
1161+ return
1162+
1163+ names = self.to_list(getattr(self, 'uselib_local', []))
1164+ for name in names:
1165+ obj = self.name_to_obj(name)
1166+ if not obj:
1167+ raise Utils.WafError('object %r was not found in uselib_local '
1168+ '(required by %r)' % (lib_name, self.name))
1169+ obj.post()
1170+ self.go_compile_task.set_run_after(obj.go_package_task)
1171+ self.go_compile_task.dep_nodes.extend(obj.go_package_task.outputs)
1172+ self.env.append_unique('GOCFLAGS', '-I' + obj.path.abspath(obj.env))
1173+ self.env.append_unique('GOLFLAGS', '-L' + obj.path.abspath(obj.env))
1174+
1175+@feature('gopackage')
1176+@after('apply_goinc')
1177+def apply_gopackage(self):
1178+ self.go_package_task = self.create_task('gopack',
1179+ self.go_compile_task.outputs[0],
1180+ self.path.find_or_declare(self.target + self.env.GO_PACK_EXTENSION))
1181+ self.go_package_task.set_run_after(self.go_compile_task)
1182+ self.go_package_task.dep_nodes.extend(self.go_compile_task.outputs)
1183+
1184+@feature('goprogram')
1185+@after('apply_goinc')
1186+def apply_golink(self):
1187+ self.go_link_task = self.create_task('golink',
1188+ self.go_compile_task.outputs[0],
1189+ self.path.find_or_declare(self.target))
1190+ self.go_link_task.set_run_after(self.go_compile_task)
1191+ self.go_link_task.dep_nodes.extend(self.go_compile_task.outputs)
1192+
1193diff --git a/buildtools/wafadmin/3rdparty/lru_cache.py b/buildtools/wafadmin/3rdparty/lru_cache.py
1194new file mode 100644
1195index 0000000..5b00abc
1196--- /dev/null
1197+++ b/buildtools/wafadmin/3rdparty/lru_cache.py
1198@@ -0,0 +1,97 @@
1199+#! /usr/bin/env python
1200+# encoding: utf-8
1201+# Thomas Nagy 2011
1202+
1203+import os, shutil, re
1204+import Options, Build, Logs
1205+
1206+"""
1207+Apply a least recently used policy to the Waf cache.
1208+
1209+For performance reasons, it is called after the build is complete.
1210+
1211+We assume that the folders are written atomically
1212+
1213+Do export WAFCACHE=/tmp/foo-xyz where xyz represents the cache size in bytes
1214+If missing, the default cache size will be set to 10GB
1215+"""
1216+
1217+re_num = re.compile('[a-zA-Z_]+(\d+)')
1218+
1219+CACHESIZE = 10*1024*1024*1024 # in bytes
1220+CLEANRATIO = 0.8
1221+DIRSIZE = 4096
1222+
1223+def compile(self):
1224+ if Options.cache_global and not Options.options.nocache:
1225+ try:
1226+ os.makedirs(Options.cache_global)
1227+ except:
1228+ pass
1229+
1230+ try:
1231+ self.raw_compile()
1232+ finally:
1233+ if Options.cache_global and not Options.options.nocache:
1234+ self.sweep()
1235+
1236+def sweep(self):
1237+ global CACHESIZE
1238+ CACHEDIR = Options.cache_global
1239+
1240+ # get the cache max size from the WAFCACHE filename
1241+ re_num = re.compile('[a-zA-Z_]+(\d+)')
1242+ val = re_num.sub('\\1', os.path.basename(Options.cache_global))
1243+ try:
1244+ CACHESIZE = int(val)
1245+ except:
1246+ pass
1247+
1248+ # map folder names to timestamps
1249+ flist = {}
1250+ for x in os.listdir(CACHEDIR):
1251+ j = os.path.join(CACHEDIR, x)
1252+ if os.path.isdir(j) and len(x) == 32: # dir names are md5 hexdigests
1253+ flist[x] = [os.stat(j).st_mtime, 0]
1254+
1255+ for (x, v) in flist.items():
1256+ cnt = DIRSIZE # each entry takes 4kB
1257+ d = os.path.join(CACHEDIR, x)
1258+ for k in os.listdir(d):
1259+ cnt += os.stat(os.path.join(d, k)).st_size
1260+ flist[x][1] = cnt
1261+
1262+ total = sum([x[1] for x in flist.values()])
1263+ Logs.debug('lru: Cache size is %r' % total)
1264+
1265+ if total >= CACHESIZE:
1266+ Logs.debug('lru: Trimming the cache since %r > %r' % (total, CACHESIZE))
1267+
1268+ # make a list to sort the folders by timestamp
1269+ lst = [(p, v[0], v[1]) for (p, v) in flist.items()]
1270+ lst.sort(key=lambda x: x[1]) # sort by timestamp
1271+ lst.reverse()
1272+
1273+ while total >= CACHESIZE * CLEANRATIO:
1274+ (k, t, s) = lst.pop()
1275+ p = os.path.join(CACHEDIR, k)
1276+ v = p + '.del'
1277+ try:
1278+ os.rename(p, v)
1279+ except:
1280+ # someone already did it
1281+ pass
1282+ else:
1283+ try:
1284+ shutil.rmtree(v)
1285+ except:
1286+ # this should not happen, but who knows?
1287+ Logs.warn('If you ever see this message, report it (%r)' % v)
1288+ total -= s
1289+ del flist[k]
1290+ Logs.debug('lru: Total at the end %r' % total)
1291+
1292+Build.BuildContext.raw_compile = Build.BuildContext.compile
1293+Build.BuildContext.compile = compile
1294+Build.BuildContext.sweep = sweep
1295+
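Illustrative sketch (standalone Python): how sweep() above derives the cache size limit from the digits in the WAFCACHE folder name; the cache path used here is an assumption:

    import os, re

    re_num = re.compile('[a-zA-Z_]+(\d+)')
    cache_global = '/tmp/wafcache_2000000000'        # assumed WAFCACHE value
    val = re_num.sub('\\1', os.path.basename(cache_global))
    print(val)    # '2000000000' -> assigned to CACHESIZE and compared against the summed entry sizes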
1296diff --git a/buildtools/wafadmin/3rdparty/paranoid.py b/buildtools/wafadmin/3rdparty/paranoid.py
1297new file mode 100644
1298index 0000000..ead64ea
1299--- /dev/null
1300+++ b/buildtools/wafadmin/3rdparty/paranoid.py
1301@@ -0,0 +1,35 @@
1302+#!/usr/bin/env python
1303+# encoding: utf-8
1304+# ita 2010
1305+
1306+import Logs, Utils, Build, Task
1307+
1308+def say(txt):
1309+ Logs.warn("^o^: %s" % txt)
1310+
1311+try:
1312+ ret = Utils.cmd_output('which cowsay 2> /dev/null').strip()
1313+except Exception, e:
1314+ pass
1315+else:
1316+ def say(txt):
1317+ f = Utils.cmd_output([ret, txt])
1318+ Utils.pprint('PINK', f)
1319+
1320+say('you make the errors, we detect them')
1321+
1322+def check_task_classes(self):
1323+ for x in Task.TaskBase.classes:
1324+ if isinstance(x, Task.Task):
1325+ if not getattr(cls, 'ext_in', None) or getattr(cls, 'before', None):
1326+ say('class %s has no precedence constraints (ext_in/before)')
1327+ if not getattr(cls, 'ext_out', None) or getattr(cls, 'after', None):
1328+ say('class %s has no precedence constraints (ext_out/after)')
1329+
1330+comp = Build.BuildContext.compile
1331+def compile(self):
1332+ if not getattr(self, 'magic', None):
1333+ check_task_classes(self)
1334+ return comp(self)
1335+Build.BuildContext.compile = compile
1336+
1337diff --git a/buildtools/wafadmin/3rdparty/swig.py b/buildtools/wafadmin/3rdparty/swig.py
1338new file mode 100644
1339index 0000000..c0a4108
1340--- /dev/null
1341+++ b/buildtools/wafadmin/3rdparty/swig.py
1342@@ -0,0 +1,190 @@
1343+#! /usr/bin/env python
1344+# encoding: UTF-8
1345+# Petar Forai
1346+# Thomas Nagy 2008
1347+
1348+import re
1349+import Task, Utils, Logs
1350+from TaskGen import extension
1351+from Configure import conf
1352+import preproc
1353+
1354+"""
1355+Welcome to the hell of adding tasks dynamically
1356+
1357+swig interface files may be created at runtime, the module name may be unknown in advance
1358+
1359+rev 5859 is much more simple
1360+"""
1361+
1362+SWIG_EXTS = ['.swig', '.i']
1363+
1364+swig_str = '${SWIG} ${SWIGFLAGS} ${_CCINCFLAGS} ${_CXXINCFLAGS} ${_CCDEFFLAGS} ${_CXXDEFFLAGS} ${SRC}'
1365+cls = Task.simple_task_type('swig', swig_str, color='BLUE', ext_in='.i .h', ext_out='.o .c .cxx', shell=False)
1366+
1367+def runnable_status(self):
1368+ for t in self.run_after:
1369+ if not t.hasrun:
1370+ return ASK_LATER
1371+
1372+ if not getattr(self, 'init_outputs', None):
1373+ self.init_outputs = True
1374+ if not getattr(self, 'module', None):
1375+ # search the module name
1376+ txt = self.inputs[0].read(self.env)
1377+ m = re_module.search(txt)
1378+ if not m:
1379+ raise ValueError("could not find the swig module name")
1380+ self.module = m.group(1)
1381+
1382+ swig_c(self)
1383+
1384+ # add the language-specific output files as nodes
1385+ # call funs in the dict swig_langs
1386+ for x in self.env['SWIGFLAGS']:
1387+ # obtain the language
1388+ x = x[1:]
1389+ try:
1390+ fun = swig_langs[x]
1391+ except KeyError:
1392+ pass
1393+ else:
1394+ fun(self)
1395+
1396+ return Task.Task.runnable_status(self)
1397+setattr(cls, 'runnable_status', runnable_status)
1398+
1399+re_module = re.compile('%module(?:\s*\(.*\))?\s+(.+)', re.M)
1400+
1401+re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
1402+re_2 = re.compile('%include "(.*)"', re.M)
1403+re_3 = re.compile('#include "(.*)"', re.M)
1404+
1405+def scan(self):
1406+ "scan for swig dependencies, climb the .i files"
1407+ env = self.env
1408+
1409+ lst_src = []
1410+
1411+ seen = []
1412+ to_see = [self.inputs[0]]
1413+
1414+ while to_see:
1415+ node = to_see.pop(0)
1416+ if node.id in seen:
1417+ continue
1418+ seen.append(node.id)
1419+ lst_src.append(node)
1420+
1421+ # read the file
1422+ code = node.read(env)
1423+ code = preproc.re_nl.sub('', code)
1424+ code = preproc.re_cpp.sub(preproc.repl, code)
1425+
1426+ # find .i files and project headers
1427+ names = re_2.findall(code) + re_3.findall(code)
1428+ for n in names:
1429+ for d in self.generator.env.INC_PATHS + [node.parent]:
1430+ u = d.find_resource(n)
1431+ if u:
1432+ to_see.append(u)
1433+ break
1434+ else:
1435+ Logs.warn('could not find %r' % n)
1436+
1437+ # list of nodes this one depends on, and module name if present
1438+ if Logs.verbose:
1439+ Logs.debug('deps: deps for %s: %s' % (str(self), str(lst_src)))
1440+ return (lst_src, [])
1441+cls.scan = scan
1442+
1443+# provide additional language processing
1444+swig_langs = {}
1445+def swig(fun):
1446+ swig_langs[fun.__name__.replace('swig_', '')] = fun
1447+
1448+def swig_c(self):
1449+ ext = '.swigwrap_%d.c' % self.generator.idx
1450+ flags = self.env['SWIGFLAGS']
1451+ if '-c++' in flags:
1452+ ext += 'xx'
1453+ out_node = self.inputs[0].parent.find_or_declare(self.module + ext)
1454+
1455+ try:
1456+ if '-c++' in flags:
1457+ fun = self.generator.cxx_hook
1458+ else:
1459+ fun = self.generator.c_hook
1460+ except AttributeError:
1461+ raise Utils.WafError('No c%s compiler was found to process swig files' % ('-c++' in flags and '++' or ''))
1462+
1463+ task = fun(out_node)
1464+ task.set_run_after(self)
1465+
1466+ ge = self.generator.bld.generator
1467+ ge.outstanding.insert(0, task)
1468+ ge.total += 1
1469+
1470+ try:
1471+ ltask = self.generator.link_task
1472+ except AttributeError:
1473+ pass
1474+ else:
1475+ ltask.inputs.append(task.outputs[0])
1476+
1477+ self.outputs.append(out_node)
1478+
1479+ if not '-o' in self.env['SWIGFLAGS']:
1480+ self.env.append_value('SWIGFLAGS', '-o')
1481+ self.env.append_value('SWIGFLAGS', self.outputs[0].abspath(self.env))
1482+
1483+@swig
1484+def swig_python(tsk):
1485+ tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.py'))
1486+
1487+@swig
1488+def swig_ocaml(tsk):
1489+ tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.ml'))
1490+ tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.mli'))
1491+
1492+@extension(SWIG_EXTS)
1493+def i_file(self, node):
1494+ # the task instance
1495+ tsk = self.create_task('swig')
1496+ tsk.set_inputs(node)
1497+ tsk.module = getattr(self, 'swig_module', None)
1498+
1499+ flags = self.to_list(getattr(self, 'swig_flags', []))
1500+ self.env.append_value('SWIGFLAGS', flags)
1501+
1502+ if not '-outdir' in flags:
1503+ flags.append('-outdir')
1504+ flags.append(node.parent.abspath(self.env))
1505+
1506+@conf
1507+def check_swig_version(conf, minver=None):
1508+ """Check for a minimum swig version like conf.check_swig_version('1.3.28')
1509+ or conf.check_swig_version((1,3,28)) """
1510+ reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
1511+
1512+ swig_out = Utils.cmd_output('%s -version' % conf.env['SWIG'])
1513+
1514+ swigver = [int(s) for s in reg_swig.findall(swig_out)[0].split('.')]
1515+ if isinstance(minver, basestring):
1516+ minver = [int(s) for s in minver.split(".")]
1517+ if isinstance(minver, tuple):
1518+ minver = [int(s) for s in minver]
1519+ result = (minver is None) or (minver[:3] <= swigver[:3])
1520+ swigver_full = '.'.join(map(str, swigver))
1521+ if result:
1522+ conf.env['SWIG_VERSION'] = swigver_full
1523+ minver_str = '.'.join(map(str, minver))
1524+ if minver is None:
1525+ conf.check_message_custom('swig version', '', swigver_full)
1526+ else:
1527+ conf.check_message('swig version', '>= %s' % (minver_str,), result, option=swigver_full)
1528+ return result
1529+
1530+def detect(conf):
1531+ swig = conf.find_program('swig', var='SWIG', mandatory=True)
1532+
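Illustrative sketch (standalone Python): the version parsing used by check_swig_version() above, fed with assumed 'swig -version' output:

    import re

    reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
    swig_out = "SWIG Version 1.3.40\n\nCompiled with g++ [x86_64-pc-linux-gnu]\n"   # assumed output
    swigver = [int(s) for s in reg_swig.findall(swig_out)[0].split('.')]
    minver = [int(s) for s in '1.3.28'.split('.')]
    print(swigver)                     # [1, 3, 40]
    print(minver[:3] <= swigver[:3])   # True -> requirement satisfied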
1533diff --git a/buildtools/wafadmin/3rdparty/valadoc.py b/buildtools/wafadmin/3rdparty/valadoc.py
1534new file mode 100644
1535index 0000000..d0a9fe8
1536--- /dev/null
1537+++ b/buildtools/wafadmin/3rdparty/valadoc.py
1538@@ -0,0 +1,113 @@
1539+#! /usr/bin/env python
1540+# encoding: UTF-8
1541+# Nicolas Joseph 2009
1542+
1543+from fnmatch import fnmatchcase
1544+import os, os.path, re, stat
1545+import Task, Utils, Node, Constants
1546+from TaskGen import feature, extension, after
1547+from Logs import debug, warn, error
1548+
1549+VALADOC_STR = '${VALADOC}'
1550+
1551+class valadoc_task(Task.Task):
1552+
1553+ vars = ['VALADOC', 'VALADOCFLAGS']
1554+ color = 'BLUE'
1555+ after = 'cxx_link cc_link'
1556+ quiet = True
1557+
1558+ output_dir = ''
1559+ doclet = ''
1560+ package_name = ''
1561+ package_version = ''
1562+ files = []
1563+ protected = True
1564+ private = False
1565+ inherit = False
1566+ deps = False
1567+ enable_non_null_experimental = False
1568+ force = False
1569+
1570+ def runnable_status(self):
1571+ return True
1572+
1573+ def run(self):
1574+ if self.env['VALADOC']:
1575+ if not self.env['VALADOCFLAGS']:
1576+ self.env['VALADOCFLAGS'] = ''
1577+ cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
1578+ cmd.append ('-o %s' % self.output_dir)
1579+ if getattr(self, 'doclet', None):
1580+ cmd.append ('--doclet %s' % self.doclet)
1581+ cmd.append ('--package-name %s' % self.package_name)
1582+ if getattr(self, 'version', None):
1583+ cmd.append ('--package-version %s' % self.package_version)
1584+ if getattr(self, 'packages', None):
1585+ for package in self.packages:
1586+ cmd.append ('--pkg %s' % package)
1587+ if getattr(self, 'vapi_dirs', None):
1588+ for vapi_dir in self.vapi_dirs:
1589+ cmd.append ('--vapidir %s' % vapi_dir)
1590+ if not getattr(self, 'protected', None):
1591+ cmd.append ('--no-protected')
1592+ if getattr(self, 'private', None):
1593+ cmd.append ('--private')
1594+ if getattr(self, 'inherit', None):
1595+ cmd.append ('--inherit')
1596+ if getattr(self, 'deps', None):
1597+ cmd.append ('--deps')
1598+ if getattr(self, 'enable_non_null_experimental', None):
1599+ cmd.append ('--enable-non-null-experimental')
1600+ if getattr(self, 'force', None):
1601+ cmd.append ('--force')
1602+ cmd.append (' '.join ([x.relpath_gen (self.generator.bld.bldnode) for x in self.files]))
1603+ return self.generator.bld.exec_command(' '.join(cmd))
1604+ else:
1605+ error ('You must install valadoc <http://live.gnome.org/Valadoc> to generate the API documentation')
1606+ return -1
1607+
1608+@feature('valadoc')
1609+def process_valadoc(self):
1610+ task = getattr(self, 'task', None)
1611+ if not task:
1612+ task = self.create_task('valadoc')
1613+ self.task = task
1614+ if getattr(self, 'output_dir', None):
1615+ task.output_dir = self.output_dir
1616+ else:
1617+ Utils.WafError('no output directory')
1618+ if getattr(self, 'doclet', None):
1619+ task.doclet = self.doclet
1620+ else:
1621+ Utils.WafError('no doclet directory')
1622+ if getattr(self, 'package_name', None):
1623+ task.package_name = self.package_name
1624+ else:
1625+ Utils.WafError('no package name')
1626+ if getattr(self, 'package_version', None):
1627+ task.package_version = self.package_version
1628+ if getattr(self, 'packages', None):
1629+ task.packages = Utils.to_list(self.packages)
1630+ if getattr(self, 'vapi_dirs', None):
1631+ task.vapi_dirs = Utils.to_list(self.vapi_dirs)
1632+ if getattr(self, 'files', None):
1633+ task.files = self.files
1634+ else:
1635+ Utils.WafError('no input file')
1636+ if getattr(self, 'protected', None):
1637+ task.protected = self.protected
1638+ if getattr(self, 'private', None):
1639+ task.private = self.private
1640+ if getattr(self, 'inherit', None):
1641+ task.inherit = self.inherit
1642+ if getattr(self, 'deps', None):
1643+ task.deps = self.deps
1644+ if getattr(self, 'enable_non_null_experimental', None):
1645+ task.enable_non_null_experimental = self.enable_non_null_experimental
1646+ if getattr(self, 'force', None):
1647+ task.force = self.force
1648+
1649+def detect(conf):
1650+ conf.find_program('valadoc', var='VALADOC', mandatory=False)
1651+
1652diff --git a/buildtools/wafadmin/Build.py b/buildtools/wafadmin/Build.py
1653new file mode 100644
1654index 0000000..8e7c72c
1655--- /dev/null
1656+++ b/buildtools/wafadmin/Build.py
1657@@ -0,0 +1,1033 @@
1658+#!/usr/bin/env python
1659+# encoding: utf-8
1660+# Thomas Nagy, 2005 (ita)
1661+
1662+"""
1663+Dependency tree holder
1664+
1665+The class Build holds all the info related to a build:
1666+* file system representation (tree of Node instances)
1667+* various cached objects (task signatures, file scan results, ..)
1668+
1669+There is only one Build object at a time (bld singleton)
1670+"""
1671+
1672+import os, sys, errno, re, glob, gc, datetime, shutil
1673+try: import cPickle
1674+except: import pickle as cPickle
1675+import Runner, TaskGen, Node, Scripting, Utils, Environment, Task, Logs, Options
1676+from Logs import debug, error, info
1677+from Constants import *
1678+
1679+SAVED_ATTRS = 'root srcnode bldnode node_sigs node_deps raw_deps task_sigs id_nodes'.split()
1680+"Build class members to save"
1681+
1682+bld = None
1683+"singleton - safe to use when Waf is not used as a library"
1684+
1685+class BuildError(Utils.WafError):
1686+ def __init__(self, b=None, t=[]):
1687+ self.bld = b
1688+ self.tasks = t
1689+ self.ret = 1
1690+ Utils.WafError.__init__(self, self.format_error())
1691+
1692+ def format_error(self):
1693+ lst = ['Build failed:']
1694+ for tsk in self.tasks:
1695+ txt = tsk.format_error()
1696+ if txt: lst.append(txt)
1697+ sep = ' '
1698+ if len(lst) > 2:
1699+ sep = '\n'
1700+ return sep.join(lst)
1701+
1702+def group_method(fun):
1703+ """
1704+ sets a build context method to execute after the current group has finished executing
1705+ this is useful for installing build files:
1706+ * calling install_files/install_as will fail if called too early
1707+ * people do not want to define install method in their task classes
1708+
1709+ TODO: try it
1710+ """
1711+ def f(*k, **kw):
1712+ if not k[0].is_install:
1713+ return False
1714+
1715+ postpone = True
1716+ if 'postpone' in kw:
1717+ postpone = kw['postpone']
1718+ del kw['postpone']
1719+
1720+ # TODO waf 1.6 in theory there should be no reference to the TaskManager internals here
1721+ if postpone:
1722+ m = k[0].task_manager
1723+ if not m.groups: m.add_group()
1724+ m.groups[m.current_group].post_funs.append((fun, k, kw))
1725+ if not 'cwd' in kw:
1726+ kw['cwd'] = k[0].path
1727+ else:
1728+ fun(*k, **kw)
1729+ return f
1730+
1731+class BuildContext(Utils.Context):
1732+ "holds the dependency tree"
1733+ def __init__(self):
1734+
1735+ # not a singleton, but provided for compatibility
1736+ global bld
1737+ bld = self
1738+
1739+ self.task_manager = Task.TaskManager()
1740+
1741+ # instead of hashing the nodes, we assign them a unique id when they are created
1742+ self.id_nodes = 0
1743+ self.idx = {}
1744+
1745+ # map names to environments, the 'default' must be defined
1746+ self.all_envs = {}
1747+
1748+ # ======================================= #
1749+ # code for reading the scripts
1750+
1751+ # project build directory - do not reset() from load_dirs()
1752+ self.bdir = ''
1753+
1754+ # the current directory from which the code is run
1755+ # the folder changes every time a wscript is read
1756+ self.path = None
1757+
1758+ # Manual dependencies.
1759+ self.deps_man = Utils.DefaultDict(list)
1760+
1761+ # ======================================= #
1762+ # cache variables
1763+
1764+ # local cache for absolute paths - cache_node_abspath[variant][node]
1765+ self.cache_node_abspath = {}
1766+
1767+ # list of folders that are already scanned
1768+ # so that we do not need to stat them one more time
1769+ self.cache_scanned_folders = {}
1770+
1771+ # list of targets to uninstall for removing the empty folders after uninstalling
1772+ self.uninstall = []
1773+
1774+ # ======================================= #
1775+ # tasks and objects
1776+
1777+ # build dir variants (release, debug, ..)
1778+ for v in 'cache_node_abspath task_sigs node_deps raw_deps node_sigs'.split():
1779+ var = {}
1780+ setattr(self, v, var)
1781+
1782+ self.cache_dir_contents = {}
1783+
1784+ self.all_task_gen = []
1785+ self.task_gen_cache_names = {}
1786+ self.cache_sig_vars = {}
1787+ self.log = None
1788+
1789+ self.root = None
1790+ self.srcnode = None
1791+ self.bldnode = None
1792+
1793+ # bind the build context to the nodes in use
1794+ # this means better encapsulation and no build context singleton
1795+ class node_class(Node.Node):
1796+ pass
1797+ self.node_class = node_class
1798+ self.node_class.__module__ = "Node"
1799+ self.node_class.__name__ = "Nodu"
1800+ self.node_class.bld = self
1801+
1802+ self.is_install = None
1803+
1804+ def __copy__(self):
1805+ "nodes are not supposed to be copied"
1806+ raise Utils.WafError('build contexts are not supposed to be cloned')
1807+
1808+ def load(self):
1809+ "load the cache from the disk"
1810+ try:
1811+ env = Environment.Environment(os.path.join(self.cachedir, 'build.config.py'))
1812+ except (IOError, OSError):
1813+ pass
1814+ else:
1815+ if env['version'] < HEXVERSION:
1816+ raise Utils.WafError('Version mismatch! reconfigure the project')
1817+ for t in env['tools']:
1818+ self.setup(**t)
1819+
1820+ try:
1821+ gc.disable()
1822+ f = data = None
1823+
1824+ Node.Nodu = self.node_class
1825+
1826+ try:
1827+ f = open(os.path.join(self.bdir, DBFILE), 'rb')
1828+ except (IOError, EOFError):
1829+ # handle missing file/empty file
1830+ pass
1831+
1832+ try:
1833+ if f: data = cPickle.load(f)
1834+ except AttributeError:
1835+ # handle file of an old Waf version
1836+ # that has an attribute which no longer exist
1837+ # (e.g. AttributeError: 'module' object has no attribute 'BuildDTO')
1838+ if Logs.verbose > 1: raise
1839+
1840+ if data:
1841+ for x in SAVED_ATTRS: setattr(self, x, data[x])
1842+ else:
1843+ debug('build: Build cache loading failed')
1844+
1845+ finally:
1846+ if f: f.close()
1847+ gc.enable()
1848+
1849+ def save(self):
1850+ "store the cache on disk, see self.load"
1851+ gc.disable()
1852+ self.root.__class__.bld = None
1853+
1854+ # some people are very nervous with ctrl+c so we have to make a temporary file
1855+ Node.Nodu = self.node_class
1856+ db = os.path.join(self.bdir, DBFILE)
1857+ file = open(db + '.tmp', 'wb')
1858+ data = {}
1859+ for x in SAVED_ATTRS: data[x] = getattr(self, x)
1860+ cPickle.dump(data, file, -1)
1861+ file.close()
1862+
1863+ # do not use shutil.move
1864+ try: os.unlink(db)
1865+ except OSError: pass
1866+ os.rename(db + '.tmp', db)
1867+ self.root.__class__.bld = self
1868+ gc.enable()
1869+
1870+ # ======================================= #
1871+
1872+ def clean(self):
1873+ debug('build: clean called')
1874+
1875+ # does not clean files created during the configuration
1876+ precious = set([])
1877+ for env in self.all_envs.values():
1878+ for x in env[CFG_FILES]:
1879+ node = self.srcnode.find_resource(x)
1880+ if node:
1881+ precious.add(node.id)
1882+
1883+ def clean_rec(node):
1884+ for x in list(node.childs.keys()):
1885+ nd = node.childs[x]
1886+
1887+ tp = nd.id & 3
1888+ if tp == Node.DIR:
1889+ clean_rec(nd)
1890+ elif tp == Node.BUILD:
1891+ if nd.id in precious: continue
1892+ for env in self.all_envs.values():
1893+ try: os.remove(nd.abspath(env))
1894+ except OSError: pass
1895+ node.childs.__delitem__(x)
1896+
1897+ clean_rec(self.srcnode)
1898+
1899+ for v in 'node_sigs node_deps task_sigs raw_deps cache_node_abspath'.split():
1900+ setattr(self, v, {})
1901+
1902+ def compile(self):
1903+		"""The cache file is not written if nothing was built at all (build is up to date)"""
1904+ debug('build: compile called')
1905+
1906+ """
1907+ import cProfile, pstats
1908+ cProfile.run("import Build\nBuild.bld.flush()", 'profi.txt')
1909+ p = pstats.Stats('profi.txt')
1910+ p.sort_stats('cumulative').print_stats(80)
1911+ """
1912+ self.flush()
1913+ #"""
1914+
1915+ self.generator = Runner.Parallel(self, Options.options.jobs)
1916+
1917+ def dw(on=True):
1918+ if Options.options.progress_bar:
1919+ if on: sys.stderr.write(Logs.colors.cursor_on)
1920+ else: sys.stderr.write(Logs.colors.cursor_off)
1921+
1922+ debug('build: executor starting')
1923+
1924+ back = os.getcwd()
1925+ os.chdir(self.bldnode.abspath())
1926+
1927+ try:
1928+ try:
1929+ dw(on=False)
1930+ self.generator.start()
1931+ except KeyboardInterrupt:
1932+ dw()
1933+ # if self.generator.processed != 1: TODO
1934+ self.save()
1935+ raise
1936+ except Exception:
1937+ dw()
1938+				# do not store anything, because something bad happened
1939+ raise
1940+ else:
1941+ dw()
1942+ #if self.generator.processed != 1: TODO
1943+ self.save()
1944+
1945+ if self.generator.error:
1946+ raise BuildError(self, self.task_manager.tasks_done)
1947+
1948+ finally:
1949+ os.chdir(back)
1950+
1951+ def install(self):
1952+ "this function is called for both install and uninstall"
1953+ debug('build: install called')
1954+
1955+ self.flush()
1956+
1957+ # remove empty folders after uninstalling
1958+ if self.is_install < 0:
1959+ lst = []
1960+ for x in self.uninstall:
1961+ dir = os.path.dirname(x)
1962+ if not dir in lst: lst.append(dir)
1963+ lst.sort()
1964+ lst.reverse()
1965+
1966+ nlst = []
1967+ for y in lst:
1968+ x = y
1969+ while len(x) > 4:
1970+ if not x in nlst: nlst.append(x)
1971+ x = os.path.dirname(x)
1972+
1973+ nlst.sort()
1974+ nlst.reverse()
1975+ for x in nlst:
1976+ try: os.rmdir(x)
1977+ except OSError: pass
1978+
1979+ def new_task_gen(self, *k, **kw):
1980+ if self.task_gen_cache_names:
1981+ self.task_gen_cache_names = {}
1982+
1983+ kw['bld'] = self
1984+ if len(k) == 0:
1985+ ret = TaskGen.task_gen(*k, **kw)
1986+ else:
1987+ cls_name = k[0]
1988+
1989+ try: cls = TaskGen.task_gen.classes[cls_name]
1990+ except KeyError: raise Utils.WscriptError('%s is not a valid task generator -> %s' %
1991+ (cls_name, [x for x in TaskGen.task_gen.classes]))
1992+ ret = cls(*k, **kw)
1993+ return ret
1994+
1995+ def __call__(self, *k, **kw):
1996+ if self.task_gen_cache_names:
1997+ self.task_gen_cache_names = {}
1998+
1999+ kw['bld'] = self
2000+ return TaskGen.task_gen(*k, **kw)
2001+
2002+ def load_envs(self):
2003+ try:
2004+ lst = Utils.listdir(self.cachedir)
2005+ except OSError, e:
2006+ if e.errno == errno.ENOENT:
2007+ raise Utils.WafError('The project was not configured: run "waf configure" first!')
2008+ else:
2009+ raise
2010+
2011+ if not lst:
2012+ raise Utils.WafError('The cache directory is empty: reconfigure the project')
2013+
2014+ for file in lst:
2015+ if file.endswith(CACHE_SUFFIX):
2016+ env = Environment.Environment(os.path.join(self.cachedir, file))
2017+ name = file[:-len(CACHE_SUFFIX)]
2018+
2019+ self.all_envs[name] = env
2020+
2021+ self.init_variants()
2022+
2023+ for env in self.all_envs.values():
2024+ for f in env[CFG_FILES]:
2025+ newnode = self.path.find_or_declare(f)
2026+ try:
2027+ hash = Utils.h_file(newnode.abspath(env))
2028+ except (IOError, AttributeError):
2029+ error("cannot find "+f)
2030+ hash = SIG_NIL
2031+ self.node_sigs[env.variant()][newnode.id] = hash
2032+
2033+ # TODO: hmmm, these nodes are removed from the tree when calling rescan()
2034+ self.bldnode = self.root.find_dir(self.bldnode.abspath())
2035+ self.path = self.srcnode = self.root.find_dir(self.srcnode.abspath())
2036+ self.cwd = self.bldnode.abspath()
2037+
2038+ def setup(self, tool, tooldir=None, funs=None):
2039+ "setup tools for build process"
2040+ if isinstance(tool, list):
2041+ for i in tool: self.setup(i, tooldir)
2042+ return
2043+
2044+ if not tooldir: tooldir = Options.tooldir
2045+
2046+ module = Utils.load_tool(tool, tooldir)
2047+ if hasattr(module, "setup"): module.setup(self)
2048+
2049+ def init_variants(self):
2050+ debug('build: init variants')
2051+
2052+ lstvariants = []
2053+ for env in self.all_envs.values():
2054+ if not env.variant() in lstvariants:
2055+ lstvariants.append(env.variant())
2056+ self.lst_variants = lstvariants
2057+
2058+ debug('build: list of variants is %r', lstvariants)
2059+
2060+ for name in lstvariants+[0]:
2061+ for v in 'node_sigs cache_node_abspath'.split():
2062+ var = getattr(self, v)
2063+ if not name in var:
2064+ var[name] = {}
2065+
2066+ # ======================================= #
2067+ # node and folder handling
2068+
2069+ # this should be the main entry point
2070+ def load_dirs(self, srcdir, blddir, load_cache=1):
2071+		"this function should be the start of everything"
2072+
2073+ assert(os.path.isabs(srcdir))
2074+ assert(os.path.isabs(blddir))
2075+
2076+ self.cachedir = os.path.join(blddir, CACHE_DIR)
2077+
2078+ if srcdir == blddir:
2079+ raise Utils.WafError("build dir must be different from srcdir: %s <-> %s " % (srcdir, blddir))
2080+
2081+ self.bdir = blddir
2082+
2083+ # try to load the cache file, if it does not exist, nothing happens
2084+ self.load()
2085+
2086+ if not self.root:
2087+ Node.Nodu = self.node_class
2088+ self.root = Node.Nodu('', None, Node.DIR)
2089+
2090+ if not self.srcnode:
2091+ self.srcnode = self.root.ensure_dir_node_from_path(srcdir)
2092+ debug('build: srcnode is %s and srcdir %s', self.srcnode.name, srcdir)
2093+
2094+ self.path = self.srcnode
2095+
2096+ # create this build dir if necessary
2097+ try: os.makedirs(blddir)
2098+ except OSError: pass
2099+
2100+ if not self.bldnode:
2101+ self.bldnode = self.root.ensure_dir_node_from_path(blddir)
2102+
2103+ self.init_variants()
2104+
2105+ def rescan(self, src_dir_node):
2106+ """
2107+		look at the contents of a (folder) node and update its list of childs
2108+
2109+ The intent is to perform the following steps
2110+ * remove the nodes for the files that have disappeared
2111+ * remove the signatures for the build files that have disappeared
2112+ * cache the results of os.listdir
2113+ * create the build folder equivalent (mkdir) for each variant
2114+ src/bar -> build/default/src/bar, build/release/src/bar
2115+
2116+ when a folder in the source directory is removed, we do not check recursively
2117+ to remove the unused nodes. To do that, call 'waf clean' and build again.
2118+ """
2119+
2120+ # do not rescan over and over again
2121+ # TODO use a single variable in waf 1.6
2122+ if self.cache_scanned_folders.get(src_dir_node.id, None): return
2123+ self.cache_scanned_folders[src_dir_node.id] = True
2124+
2125+ # TODO remove in waf 1.6
2126+ if hasattr(self, 'repository'): self.repository(src_dir_node)
2127+
2128+ if not src_dir_node.name and sys.platform == 'win32':
2129+ # the root has no name, contains drive letters, and cannot be listed
2130+ return
2131+
2132+
2133+ # first, take the case of the source directory
2134+ parent_path = src_dir_node.abspath()
2135+ try:
2136+ lst = set(Utils.listdir(parent_path))
2137+ except OSError:
2138+ lst = set([])
2139+
2140+ # TODO move this at the bottom
2141+ self.cache_dir_contents[src_dir_node.id] = lst
2142+
2143+ # hash the existing source files, remove the others
2144+ cache = self.node_sigs[0]
2145+ for x in src_dir_node.childs.values():
2146+ if x.id & 3 != Node.FILE: continue
2147+ if x.name in lst:
2148+ try:
2149+ cache[x.id] = Utils.h_file(x.abspath())
2150+ except IOError:
2151+ raise Utils.WafError('The file %s is not readable or has become a dir' % x.abspath())
2152+ else:
2153+ try: del cache[x.id]
2154+ except KeyError: pass
2155+
2156+ del src_dir_node.childs[x.name]
2157+
2158+
2159+ # first obtain the differences between srcnode and src_dir_node
2160+ h1 = self.srcnode.height()
2161+ h2 = src_dir_node.height()
2162+
2163+ lst = []
2164+ child = src_dir_node
2165+ while h2 > h1:
2166+ lst.append(child.name)
2167+ child = child.parent
2168+ h2 -= 1
2169+ lst.reverse()
2170+
2171+ # list the files in the build dirs
2172+ try:
2173+ for variant in self.lst_variants:
2174+ sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
2175+ self.listdir_bld(src_dir_node, sub_path, variant)
2176+ except OSError:
2177+
2178+ # listdir failed, remove the build node signatures for all variants
2179+ for node in src_dir_node.childs.values():
2180+ if node.id & 3 != Node.BUILD:
2181+ continue
2182+
2183+ for dct in self.node_sigs.values():
2184+ if node.id in dct:
2185+ dct.__delitem__(node.id)
2186+
2187+ # the policy is to avoid removing nodes representing directories
2188+ src_dir_node.childs.__delitem__(node.name)
2189+
2190+ for variant in self.lst_variants:
2191+ sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
2192+ try:
2193+ os.makedirs(sub_path)
2194+ except OSError:
2195+ pass
2196+
2197+ # ======================================= #
2198+ def listdir_src(self, parent_node):
2199+ """do not use, kept for compatibility"""
2200+ pass
2201+
2202+ def remove_node(self, node):
2203+ """do not use, kept for compatibility"""
2204+ pass
2205+
2206+ def listdir_bld(self, parent_node, path, variant):
2207+ """in this method we do not add timestamps but we remove them
2208+ when the files no longer exist (file removed in the build dir)"""
2209+
2210+ i_existing_nodes = [x for x in parent_node.childs.values() if x.id & 3 == Node.BUILD]
2211+
2212+ lst = set(Utils.listdir(path))
2213+ node_names = set([x.name for x in i_existing_nodes])
2214+ remove_names = node_names - lst
2215+
2216+ # remove the stamps of the build nodes that no longer exist on the filesystem
2217+ ids_to_remove = [x.id for x in i_existing_nodes if x.name in remove_names]
2218+ cache = self.node_sigs[variant]
2219+ for nid in ids_to_remove:
2220+ if nid in cache:
2221+ cache.__delitem__(nid)
2222+
2223+ def get_env(self):
2224+ return self.env_of_name('default')
2225+ def set_env(self, name, val):
2226+ self.all_envs[name] = val
2227+
2228+ env = property(get_env, set_env)
2229+
2230+ def add_manual_dependency(self, path, value):
2231+ if isinstance(path, Node.Node):
2232+ node = path
2233+ elif os.path.isabs(path):
2234+ node = self.root.find_resource(path)
2235+ else:
2236+ node = self.path.find_resource(path)
2237+ self.deps_man[node.id].append(value)
2238+
2239+ def launch_node(self):
2240+ """return the launch directory as a node"""
2241+ # p_ln is kind of private, but public in case if
2242+ try:
2243+ return self.p_ln
2244+ except AttributeError:
2245+ self.p_ln = self.root.find_dir(Options.launch_dir)
2246+ return self.p_ln
2247+
2248+ def glob(self, pattern, relative=True):
2249+ "files matching the pattern, seen from the current folder"
2250+ path = self.path.abspath()
2251+ files = [self.root.find_resource(x) for x in glob.glob(path+os.sep+pattern)]
2252+ if relative:
2253+ files = [x.path_to_parent(self.path) for x in files if x]
2254+ else:
2255+ files = [x.abspath() for x in files if x]
2256+ return files
2257+
2258+ ## the following methods are candidates for the stable apis ##
2259+
2260+ def add_group(self, *k):
2261+ self.task_manager.add_group(*k)
2262+
2263+ def set_group(self, *k, **kw):
2264+ self.task_manager.set_group(*k, **kw)
2265+
2266+ def hash_env_vars(self, env, vars_lst):
2267+ """hash environment variables
2268+ ['CXX', ..] -> [env['CXX'], ..] -> md5()"""
2269+
2270+ # ccroot objects use the same environment for building the .o at once
2271+ # the same environment and the same variables are used
2272+
2273+ idx = str(id(env)) + str(vars_lst)
2274+ try: return self.cache_sig_vars[idx]
2275+ except KeyError: pass
2276+
2277+ lst = [str(env[a]) for a in vars_lst]
2278+ ret = Utils.h_list(lst)
2279+ debug('envhash: %r %r', ret, lst)
2280+
2281+ # next time
2282+ self.cache_sig_vars[idx] = ret
2283+ return ret
2284+
2285+ def name_to_obj(self, name, env):
2286+ """retrieve a task generator from its name or its target name
2287+ remember that names must be unique"""
2288+ cache = self.task_gen_cache_names
2289+ if not cache:
2290+ # create the index lazily
2291+ for x in self.all_task_gen:
2292+ vt = x.env.variant() + '_'
2293+ if x.name:
2294+ cache[vt + x.name] = x
2295+ else:
2296+ if isinstance(x.target, str):
2297+ target = x.target
2298+ else:
2299+ target = ' '.join(x.target)
2300+ v = vt + target
2301+ if not cache.get(v, None):
2302+ cache[v] = x
2303+ return cache.get(env.variant() + '_' + name, None)
2304+
2305+ def flush(self, all=1):
2306+ """tell the task generators to create the tasks"""
2307+
2308+ self.ini = datetime.datetime.now()
2309+ # force the initialization of the mapping name->object in flush
2310+ # name_to_obj can be used in userland scripts, in that case beware of incomplete mapping
2311+ self.task_gen_cache_names = {}
2312+ self.name_to_obj('', self.env)
2313+
2314+ debug('build: delayed operation TaskGen.flush() called')
2315+
2316+ if Options.options.compile_targets:
2317+ debug('task_gen: posting objects %r listed in compile_targets', Options.options.compile_targets)
2318+
2319+ mana = self.task_manager
2320+ to_post = []
2321+ min_grp = 0
2322+
2323+ # ensure the target names exist, fail before any post()
2324+ target_objects = Utils.DefaultDict(list)
2325+ for target_name in Options.options.compile_targets.split(','):
2326+ # trim target_name (handle cases when the user added spaces to targets)
2327+ target_name = target_name.strip()
2328+ for env in self.all_envs.values():
2329+ tg = self.name_to_obj(target_name, env)
2330+ if tg:
2331+ target_objects[target_name].append(tg)
2332+
2333+ m = mana.group_idx(tg)
2334+ if m > min_grp:
2335+ min_grp = m
2336+ to_post = [tg]
2337+ elif m == min_grp:
2338+ to_post.append(tg)
2339+
2340+ if not target_name in target_objects and all:
2341+ raise Utils.WafError("target '%s' does not exist" % target_name)
2342+
2343+ debug('group: Forcing up to group %s for target %s', mana.group_name(min_grp), Options.options.compile_targets)
2344+
2345+ # post all the task generators in previous groups
2346+ for i in xrange(len(mana.groups)):
2347+ mana.current_group = i
2348+ if i == min_grp:
2349+ break
2350+ g = mana.groups[i]
2351+ debug('group: Forcing group %s', mana.group_name(g))
2352+ for t in g.tasks_gen:
2353+ debug('group: Posting %s', t.name or t.target)
2354+ t.post()
2355+
2356+ # then post the task generators listed in compile_targets in the last group
2357+ for t in to_post:
2358+ t.post()
2359+
2360+ else:
2361+ debug('task_gen: posting objects (normal)')
2362+ ln = self.launch_node()
2363+ # if the build is started from the build directory, do as if it was started from the top-level
2364+ # for the pretty-printing (Node.py), the two lines below cannot be moved to Build::launch_node
2365+ if ln.is_child_of(self.bldnode) or not ln.is_child_of(self.srcnode):
2366+ ln = self.srcnode
2367+
2368+ # if the project file is located under the source directory, build all targets by default
2369+ # else 'waf configure build' does nothing
2370+ proj_node = self.root.find_dir(os.path.split(Utils.g_module.root_path)[0])
2371+ if proj_node.id != self.srcnode.id:
2372+ ln = self.srcnode
2373+
2374+ for i in xrange(len(self.task_manager.groups)):
2375+ g = self.task_manager.groups[i]
2376+ self.task_manager.current_group = i
2377+ if Logs.verbose:
2378+ groups = [x for x in self.task_manager.groups_names if id(self.task_manager.groups_names[x]) == id(g)]
2379+ name = groups and groups[0] or 'unnamed'
2380+					Logs.debug('group: group %s', name)
2381+ for tg in g.tasks_gen:
2382+ if not tg.path.is_child_of(ln):
2383+ continue
2384+ if Logs.verbose:
2385+ Logs.debug('group: %s' % tg)
2386+ tg.post()
2387+
2388+ def env_of_name(self, name):
2389+ try:
2390+ return self.all_envs[name]
2391+ except KeyError:
2392+ error('no such environment: '+name)
2393+ return None
2394+
2395+ def progress_line(self, state, total, col1, col2):
2396+ n = len(str(total))
2397+
2398+ Utils.rot_idx += 1
2399+ ind = Utils.rot_chr[Utils.rot_idx % 4]
2400+
2401+ ini = self.ini
2402+
2403+ pc = (100.*state)/total
2404+ eta = Utils.get_elapsed_time(ini)
2405+ fs = "[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s][" % (n, n, ind)
2406+ left = fs % (state, total, col1, pc, col2)
2407+ right = '][%s%s%s]' % (col1, eta, col2)
2408+
2409+ cols = Utils.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2)
2410+ if cols < 7: cols = 7
2411+
2412+ ratio = int((cols*state)/total) - 1
2413+
2414+ bar = ('='*ratio+'>').ljust(cols)
2415+ msg = Utils.indicator % (left, bar, right)
2416+
2417+ return msg
2418+
2419+
2420+ # do_install is not used anywhere
2421+ def do_install(self, src, tgt, chmod=O644):
2422+ """returns true if the file was effectively installed or uninstalled, false otherwise"""
2423+ if self.is_install > 0:
2424+ if not Options.options.force:
2425+ # check if the file is already there to avoid a copy
2426+ try:
2427+ st1 = os.stat(tgt)
2428+ st2 = os.stat(src)
2429+ except OSError:
2430+ pass
2431+ else:
2432+ # same size and identical timestamps -> make no copy
2433+ if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
2434+ return False
2435+
2436+ srclbl = src.replace(self.srcnode.abspath(None)+os.sep, '')
2437+ info("* installing %s as %s" % (srclbl, tgt))
2438+
2439+ # following is for shared libs and stale inodes (-_-)
2440+ try: os.remove(tgt)
2441+ except OSError: pass
2442+
2443+ try:
2444+ shutil.copy2(src, tgt)
2445+ os.chmod(tgt, chmod)
2446+ except IOError:
2447+ try:
2448+ os.stat(src)
2449+ except (OSError, IOError):
2450+ error('File %r does not exist' % src)
2451+ raise Utils.WafError('Could not install the file %r' % tgt)
2452+ return True
2453+
2454+ elif self.is_install < 0:
2455+ info("* uninstalling %s" % tgt)
2456+
2457+ self.uninstall.append(tgt)
2458+
2459+ try:
2460+ os.remove(tgt)
2461+ except OSError, e:
2462+ if e.errno != errno.ENOENT:
2463+ if not getattr(self, 'uninstall_error', None):
2464+ self.uninstall_error = True
2465+ Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
2466+ if Logs.verbose > 1:
2467+ Logs.warn('could not remove %s (error code %r)' % (e.filename, e.errno))
2468+ return True
2469+
2470+ red = re.compile(r"^([A-Za-z]:)?[/\\\\]*")
2471+ def get_install_path(self, path, env=None):
2472+ "installation path prefixed by the destdir, the variables like in '${PREFIX}/bin' are substituted"
2473+ if not env: env = self.env
2474+ destdir = env.get_destdir()
2475+ path = path.replace('/', os.sep)
2476+ destpath = Utils.subst_vars(path, env)
2477+ if destdir:
2478+ destpath = os.path.join(destdir, self.red.sub('', destpath))
2479+ return destpath
2480+
2481+ def install_dir(self, path, env=None):
2482+ """
2483+ create empty folders for the installation (very rarely used)
2484+ """
2485+ if env:
2486+ assert isinstance(env, Environment.Environment), "invalid parameter"
2487+ else:
2488+ env = self.env
2489+
2490+ if not path:
2491+ return []
2492+
2493+ destpath = self.get_install_path(path, env)
2494+
2495+ if self.is_install > 0:
2496+ info('* creating %s' % destpath)
2497+ Utils.check_dir(destpath)
2498+ elif self.is_install < 0:
2499+ info('* removing %s' % destpath)
2500+ self.uninstall.append(destpath + '/xxx') # yes, ugly
2501+
2502+ def install_files(self, path, files, env=None, chmod=O644, relative_trick=False, cwd=None):
2503+ """To install files only after they have been built, put the calls in a method named
2504+ post_build on the top-level wscript
2505+
2506+ The files must be a list and contain paths as strings or as Nodes
2507+
2508+ The relative_trick flag can be set to install folders, use bld.path.ant_glob() with it
2509+ """
2510+ if env:
2511+ assert isinstance(env, Environment.Environment), "invalid parameter"
2512+ else:
2513+ env = self.env
2514+
2515+ if not path: return []
2516+
2517+ if not cwd:
2518+ cwd = self.path
2519+
2520+ if isinstance(files, str) and '*' in files:
2521+ gl = cwd.abspath() + os.sep + files
2522+ lst = glob.glob(gl)
2523+ else:
2524+ lst = Utils.to_list(files)
2525+
2526+ if not getattr(lst, '__iter__', False):
2527+ lst = [lst]
2528+
2529+ destpath = self.get_install_path(path, env)
2530+
2531+ Utils.check_dir(destpath)
2532+
2533+ installed_files = []
2534+ for filename in lst:
2535+ if isinstance(filename, str) and os.path.isabs(filename):
2536+ alst = Utils.split_path(filename)
2537+ destfile = os.path.join(destpath, alst[-1])
2538+ else:
2539+ if isinstance(filename, Node.Node):
2540+ nd = filename
2541+ else:
2542+ nd = cwd.find_resource(filename)
2543+ if not nd:
2544+ raise Utils.WafError("Unable to install the file %r (not found in %s)" % (filename, cwd))
2545+
2546+ if relative_trick:
2547+ destfile = os.path.join(destpath, filename)
2548+ Utils.check_dir(os.path.dirname(destfile))
2549+ else:
2550+ destfile = os.path.join(destpath, nd.name)
2551+
2552+ filename = nd.abspath(env)
2553+
2554+ if self.do_install(filename, destfile, chmod):
2555+ installed_files.append(destfile)
2556+ return installed_files
2557+
2558+ def install_as(self, path, srcfile, env=None, chmod=O644, cwd=None):
2559+ """
2560+ srcfile may be a string or a Node representing the file to install
2561+
2562+ returns True if the file was effectively installed, False otherwise
2563+ """
2564+ if env:
2565+ assert isinstance(env, Environment.Environment), "invalid parameter"
2566+ else:
2567+ env = self.env
2568+
2569+ if not path:
2570+ raise Utils.WafError("where do you want to install %r? (%r?)" % (srcfile, path))
2571+
2572+ if not cwd:
2573+ cwd = self.path
2574+
2575+ destpath = self.get_install_path(path, env)
2576+
2577+ dir, name = os.path.split(destpath)
2578+ Utils.check_dir(dir)
2579+
2580+ # the source path
2581+ if isinstance(srcfile, Node.Node):
2582+ src = srcfile.abspath(env)
2583+ else:
2584+ src = srcfile
2585+ if not os.path.isabs(srcfile):
2586+ node = cwd.find_resource(srcfile)
2587+ if not node:
2588+ raise Utils.WafError("Unable to install the file %r (not found in %s)" % (srcfile, cwd))
2589+ src = node.abspath(env)
2590+
2591+ return self.do_install(src, destpath, chmod)
2592+
2593+ def symlink_as(self, path, src, env=None, cwd=None):
2594+ """example: bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3') """
2595+
2596+ if sys.platform == 'win32':
2597+ # well, this *cannot* work
2598+ return
2599+
2600+ if not path:
2601+ raise Utils.WafError("where do you want to install %r? (%r?)" % (src, path))
2602+
2603+ tgt = self.get_install_path(path, env)
2604+
2605+ dir, name = os.path.split(tgt)
2606+ Utils.check_dir(dir)
2607+
2608+ if self.is_install > 0:
2609+ link = False
2610+ if not os.path.islink(tgt):
2611+ link = True
2612+ elif os.readlink(tgt) != src:
2613+ link = True
2614+
2615+ if link:
2616+ try: os.remove(tgt)
2617+ except OSError: pass
2618+
2619+ info('* symlink %s (-> %s)' % (tgt, src))
2620+ os.symlink(src, tgt)
2621+ return 0
2622+
2623+ else: # UNINSTALL
2624+ try:
2625+ info('* removing %s' % (tgt))
2626+ os.remove(tgt)
2627+ return 0
2628+ except OSError:
2629+ return 1
2630+
2631+ def exec_command(self, cmd, **kw):
2632+ # 'runner' zone is printed out for waf -v, see wafadmin/Options.py
2633+ debug('runner: system command -> %s', cmd)
2634+ if self.log:
2635+ self.log.write('%s\n' % cmd)
2636+ kw['log'] = self.log
2637+ try:
2638+ if not kw.get('cwd', None):
2639+ kw['cwd'] = self.cwd
2640+ except AttributeError:
2641+ self.cwd = kw['cwd'] = self.bldnode.abspath()
2642+ return Utils.exec_command(cmd, **kw)
2643+
2644+ def printout(self, s):
2645+ f = self.log or sys.stderr
2646+ f.write(s)
2647+ f.flush()
2648+
2649+ def add_subdirs(self, dirs):
2650+ self.recurse(dirs, 'build')
2651+
2652+ def pre_recurse(self, name_or_mod, path, nexdir):
2653+ if not hasattr(self, 'oldpath'):
2654+ self.oldpath = []
2655+ self.oldpath.append(self.path)
2656+ self.path = self.root.find_dir(nexdir)
2657+ return {'bld': self, 'ctx': self}
2658+
2659+ def post_recurse(self, name_or_mod, path, nexdir):
2660+ self.path = self.oldpath.pop()
2661+
2662+ ###### user-defined behaviour
2663+
2664+ def pre_build(self):
2665+ if hasattr(self, 'pre_funs'):
2666+ for m in self.pre_funs:
2667+ m(self)
2668+
2669+ def post_build(self):
2670+ if hasattr(self, 'post_funs'):
2671+ for m in self.post_funs:
2672+ m(self)
2673+
2674+ def add_pre_fun(self, meth):
2675+ try: self.pre_funs.append(meth)
2676+ except AttributeError: self.pre_funs = [meth]
2677+
2678+ def add_post_fun(self, meth):
2679+ try: self.post_funs.append(meth)
2680+ except AttributeError: self.post_funs = [meth]
2681+
2682+ def use_the_magic(self):
2683+ Task.algotype = Task.MAXPARALLEL
2684+ Task.file_deps = Task.extract_deps
2685+ self.magic = True
2686+
2687+ install_as = group_method(install_as)
2688+ install_files = group_method(install_files)
2689+ symlink_as = group_method(symlink_as)
2690+
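For orientation, a minimal wscript build() sketch against the BuildContext API added above; the feature string, file names and install paths are hypothetical, and it assumes a stock waf 1.5 C tool (cc/cprogram) was loaded at configure time.

    def report(ctx):
        # ctx is the BuildContext; bdir is the build directory set in load_dirs()
        print('build finished under %s' % ctx.bdir)

    def build(bld):
        # bld(...) is BuildContext.__call__ and creates a task generator
        bld(features='cc cprogram', source='main.c', target='demo')

        # install_files/symlink_as are wrapped by group_method, so they are
        # postponed until the current task group has finished, and they are
        # no-ops unless bld.is_install is set (waf install / waf uninstall)
        bld.install_files('${PREFIX}/share/demo', 'README')
        bld.symlink_as('${PREFIX}/lib/libdemo.so', 'libdemo.so.1.0.0')

        # callbacks registered here are run from post_build()
        bld.add_post_fun(report)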
2691diff --git a/buildtools/wafadmin/Configure.py b/buildtools/wafadmin/Configure.py
2692new file mode 100644
2693index 0000000..35b4e51
2694--- /dev/null
2695+++ b/buildtools/wafadmin/Configure.py
2696@@ -0,0 +1,444 @@
2697+#!/usr/bin/env python
2698+# encoding: utf-8
2699+# Thomas Nagy, 2005-2008 (ita)
2700+
2701+"""
2702+Configuration system
2703+
2704+A configuration instance is created when "waf configure" is called, it is used to:
2705+* create data dictionaries (Environment instances)
2706+* store the list of modules to import
2707+
2708+The old model (copied from Scons) was to store logic (mapping file extensions to functions)
2709+along with the data. In Waf a way was found to separate that logic by adding an indirection
2710+layer (storing the names in the Environment instances)
2711+
2712+In the new model, the logic is more object-oriented, and the user scripts provide the
2713+logic. The data files (Environments) must contain configuration data only (flags, ..).
2714+
2715+Note: the c/c++ related code is in the module config_c
2716+"""
2717+
2718+import os, shlex, sys, time
2719+try: import cPickle
2720+except ImportError: import pickle as cPickle
2721+import Environment, Utils, Options, Logs
2722+from Logs import warn
2723+from Constants import *
2724+
2725+try:
2726+ from urllib import request
2727+except:
2728+ from urllib import urlopen
2729+else:
2730+ urlopen = request.urlopen
2731+
2732+conf_template = '''# project %(app)s configured on %(now)s by
2733+# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
2734+# using %(args)s
2735+#
2736+'''
2737+
2738+class ConfigurationError(Utils.WscriptError):
2739+ pass
2740+
2741+autoconfig = False
2742+"reconfigure the project automatically"
2743+
2744+def find_file(filename, path_list):
2745+ """find a file in a list of paths
2746+ @param filename: name of the file to search for
2747+ @param path_list: list of directories to search
2748+	@return: the directory containing the first occurrence of filename, or '' if it could not be found
2749+"""
2750+ for directory in Utils.to_list(path_list):
2751+ if os.path.exists(os.path.join(directory, filename)):
2752+ return directory
2753+ return ''
2754+
2755+def find_program_impl(env, filename, path_list=[], var=None, environ=None):
2756+ """find a program in folders path_lst, and sets env[var]
2757+ @param env: environment
2758+ @param filename: name of the program to search for
2759+ @param path_list: list of directories to search for filename
2760+ @param var: environment value to be checked for in env or os.environ
2761+ @return: either the value that is referenced with [var] in env or os.environ
2762+ or the first occurrence filename or '' if filename could not be found
2763+"""
2764+
2765+ if not environ:
2766+ environ = os.environ
2767+
2768+ try: path_list = path_list.split()
2769+ except AttributeError: pass
2770+
2771+ if var:
2772+ if env[var]: return env[var]
2773+ if var in environ: env[var] = environ[var]
2774+
2775+ if not path_list: path_list = environ.get('PATH', '').split(os.pathsep)
2776+
2777+ ext = (Options.platform == 'win32') and '.exe,.com,.bat,.cmd' or ''
2778+ for y in [filename+x for x in ext.split(',')]:
2779+ for directory in path_list:
2780+ x = os.path.join(directory, y)
2781+ if os.path.isfile(x):
2782+ if var: env[var] = x
2783+ return x
2784+ return ''
2785+
2786+class ConfigurationContext(Utils.Context):
2787+ tests = {}
2788+ error_handlers = []
2789+ def __init__(self, env=None, blddir='', srcdir=''):
2790+ self.env = None
2791+ self.envname = ''
2792+
2793+ self.environ = dict(os.environ)
2794+
2795+ self.line_just = 40
2796+
2797+ self.blddir = blddir
2798+ self.srcdir = srcdir
2799+ self.all_envs = {}
2800+
2801+ # curdir: necessary for recursion
2802+ self.cwd = self.curdir = os.getcwd()
2803+
2804+ self.tools = [] # tools loaded in the configuration, and that will be loaded when building
2805+
2806+ self.setenv(DEFAULT)
2807+
2808+ self.lastprog = ''
2809+
2810+ self.hash = 0
2811+ self.files = []
2812+
2813+ self.tool_cache = []
2814+
2815+ if self.blddir:
2816+ self.post_init()
2817+
2818+ def post_init(self):
2819+
2820+ self.cachedir = os.path.join(self.blddir, CACHE_DIR)
2821+
2822+ path = os.path.join(self.blddir, WAF_CONFIG_LOG)
2823+ try: os.unlink(path)
2824+ except (OSError, IOError): pass
2825+
2826+ try:
2827+ self.log = open(path, 'w')
2828+ except (OSError, IOError):
2829+ self.fatal('could not open %r for writing' % path)
2830+
2831+ app = Utils.g_module.APPNAME
2832+ if app:
2833+ ver = getattr(Utils.g_module, 'VERSION', '')
2834+ if ver:
2835+ app = "%s (%s)" % (app, ver)
2836+
2837+ now = time.ctime()
2838+ pyver = sys.hexversion
2839+ systype = sys.platform
2840+ args = " ".join(sys.argv)
2841+ wafver = WAFVERSION
2842+ abi = ABI
2843+ self.log.write(conf_template % vars())
2844+
2845+ def __del__(self):
2846+ """cleanup function: close config.log"""
2847+
2848+		# may be run by the gc, not always after initialization
2849+ if hasattr(self, 'log') and self.log:
2850+ self.log.close()
2851+
2852+ def fatal(self, msg):
2853+ raise ConfigurationError(msg)
2854+
2855+ def check_tool(self, input, tooldir=None, funs=None):
2856+ "load a waf tool"
2857+
2858+ tools = Utils.to_list(input)
2859+ if tooldir: tooldir = Utils.to_list(tooldir)
2860+ for tool in tools:
2861+ tool = tool.replace('++', 'xx')
2862+ if tool == 'java': tool = 'javaw'
2863+ if tool.lower() == 'unittest': tool = 'unittestw'
2864+ # avoid loading the same tool more than once with the same functions
2865+ # used by composite projects
2866+
2867+ mag = (tool, id(self.env), funs)
2868+ if mag in self.tool_cache:
2869+ continue
2870+ self.tool_cache.append(mag)
2871+
2872+ module = None
2873+ try:
2874+ module = Utils.load_tool(tool, tooldir)
2875+ except Exception, e:
2876+ ex = e
2877+ if Options.options.download:
2878+ _3rdparty = os.path.normpath(Options.tooldir[0] + os.sep + '..' + os.sep + '3rdparty')
2879+
2880+ # try to download the tool from the repository then
2881+ # the default is set to false
2882+ for x in Utils.to_list(Options.remote_repo):
2883+ for sub in ['branches/waf-%s/wafadmin/3rdparty' % WAFVERSION, 'trunk/wafadmin/3rdparty']:
2884+ url = '/'.join((x, sub, tool + '.py'))
2885+ try:
2886+ web = urlopen(url)
2887+ if web.getcode() != 200:
2888+ continue
2889+ except Exception, e:
2890+ # on python3 urlopen throws an exception
2891+ continue
2892+ else:
2893+ loc = None
2894+ try:
2895+ loc = open(_3rdparty + os.sep + tool + '.py', 'wb')
2896+ loc.write(web.read())
2897+ web.close()
2898+ finally:
2899+ if loc:
2900+ loc.close()
2901+ Logs.warn('downloaded %s from %s' % (tool, url))
2902+ try:
2903+ module = Utils.load_tool(tool, tooldir)
2904+ except:
2905+ Logs.warn('module %s from %s is unusable' % (tool, url))
2906+ try:
2907+ os.unlink(_3rdparty + os.sep + tool + '.py')
2908+ except:
2909+ pass
2910+ continue
2911+ else:
2912+ break
2913+
2914+ if not module:
2915+ Logs.error('Could not load the tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
2916+ raise ex
2917+ else:
2918+ Logs.error('Could not load the tool %r in %r (try the --download option?):\n%s' % (tool, sys.path, e))
2919+ raise ex
2920+
2921+ if funs is not None:
2922+ self.eval_rules(funs)
2923+ else:
2924+ func = getattr(module, 'detect', None)
2925+ if func:
2926+ if type(func) is type(find_file): func(self)
2927+ else: self.eval_rules(func)
2928+
2929+ self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
2930+
2931+ def sub_config(self, k):
2932+ "executes the configure function of a wscript module"
2933+ self.recurse(k, name='configure')
2934+
2935+ def pre_recurse(self, name_or_mod, path, nexdir):
2936+ return {'conf': self, 'ctx': self}
2937+
2938+ def post_recurse(self, name_or_mod, path, nexdir):
2939+ if not autoconfig:
2940+ return
2941+ self.hash = hash((self.hash, getattr(name_or_mod, 'waf_hash_val', name_or_mod)))
2942+ self.files.append(path)
2943+
2944+ def store(self, file=''):
2945+ "save the config results into the cache file"
2946+ if not os.path.isdir(self.cachedir):
2947+ os.makedirs(self.cachedir)
2948+
2949+ if not file:
2950+ file = open(os.path.join(self.cachedir, 'build.config.py'), 'w')
2951+ file.write('version = 0x%x\n' % HEXVERSION)
2952+ file.write('tools = %r\n' % self.tools)
2953+ file.close()
2954+
2955+ if not self.all_envs:
2956+ self.fatal('nothing to store in the configuration context!')
2957+ for key in self.all_envs:
2958+ tmpenv = self.all_envs[key]
2959+ tmpenv.store(os.path.join(self.cachedir, key + CACHE_SUFFIX))
2960+
2961+ def set_env_name(self, name, env):
2962+ "add a new environment called name"
2963+ self.all_envs[name] = env
2964+ return env
2965+
2966+ def retrieve(self, name, fromenv=None):
2967+ "retrieve an environment called name"
2968+ try:
2969+ env = self.all_envs[name]
2970+ except KeyError:
2971+ env = Environment.Environment()
2972+ env['PREFIX'] = os.path.abspath(os.path.expanduser(Options.options.prefix))
2973+ self.all_envs[name] = env
2974+ else:
2975+ if fromenv: warn("The environment %s may have been configured already" % name)
2976+ return env
2977+
2978+ def setenv(self, name):
2979+ "enable the environment called name"
2980+ self.env = self.retrieve(name)
2981+ self.envname = name
2982+
2983+ def add_os_flags(self, var, dest=None):
2984+ # do not use 'get' to make certain the variable is not defined
2985+ try: self.env.append_value(dest or var, Utils.to_list(self.environ[var]))
2986+ except KeyError: pass
2987+
2988+ def check_message_1(self, sr):
2989+ self.line_just = max(self.line_just, len(sr))
2990+ for x in ('\n', self.line_just * '-', '\n', sr, '\n'):
2991+ self.log.write(x)
2992+ Utils.pprint('NORMAL', "%s :" % sr.ljust(self.line_just), sep='')
2993+
2994+ def check_message_2(self, sr, color='GREEN'):
2995+ self.log.write(sr)
2996+ self.log.write('\n')
2997+ Utils.pprint(color, sr)
2998+
2999+ def check_message(self, th, msg, state, option=''):
3000+ sr = 'Checking for %s %s' % (th, msg)
3001+ self.check_message_1(sr)
3002+ p = self.check_message_2
3003+ if state: p('ok ' + str(option))
3004+ else: p('not found', 'YELLOW')
3005+
3006+ # FIXME remove in waf 1.6
3007+ # the parameter 'option' is not used (kept for compatibility)
3008+ def check_message_custom(self, th, msg, custom, option='', color='PINK'):
3009+ sr = 'Checking for %s %s' % (th, msg)
3010+ self.check_message_1(sr)
3011+ self.check_message_2(custom, color)
3012+
3013+ def msg(self, msg, result, color=None):
3014+ """Prints a configuration message 'Checking for xxx: ok'"""
3015+ self.start_msg('Checking for ' + msg)
3016+
3017+ if not isinstance(color, str):
3018+ color = result and 'GREEN' or 'YELLOW'
3019+
3020+ self.end_msg(result, color)
3021+
3022+ def start_msg(self, msg):
3023+ try:
3024+ if self.in_msg:
3025+ return
3026+ except:
3027+ self.in_msg = 0
3028+ self.in_msg += 1
3029+
3030+ self.line_just = max(self.line_just, len(msg))
3031+ for x in ('\n', self.line_just * '-', '\n', msg, '\n'):
3032+ self.log.write(x)
3033+ Utils.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='')
3034+
3035+ def end_msg(self, result, color):
3036+ self.in_msg -= 1
3037+ if self.in_msg:
3038+ return
3039+
3040+ if not color:
3041+ color = 'GREEN'
3042+ if result == True:
3043+ msg = 'ok'
3044+ elif result == False:
3045+ msg = 'not found'
3046+ color = 'YELLOW'
3047+ else:
3048+ msg = str(result)
3049+
3050+ self.log.write(msg)
3051+ self.log.write('\n')
3052+ Utils.pprint(color, msg)
3053+
3054+ def find_program(self, filename, path_list=[], var=None, mandatory=False):
3055+ "wrapper that adds a configuration message"
3056+
3057+ ret = None
3058+ if var:
3059+ if self.env[var]:
3060+ ret = self.env[var]
3061+ elif var in os.environ:
3062+ ret = os.environ[var]
3063+
3064+ if not isinstance(filename, list): filename = [filename]
3065+ if not ret:
3066+ for x in filename:
3067+ ret = find_program_impl(self.env, x, path_list, var, environ=self.environ)
3068+ if ret: break
3069+
3070+ self.check_message_1('Checking for program %s' % ' or '.join(filename))
3071+ self.log.write(' find program=%r paths=%r var=%r\n -> %r\n' % (filename, path_list, var, ret))
3072+ if ret:
3073+ Utils.pprint('GREEN', str(ret))
3074+ else:
3075+ Utils.pprint('YELLOW', 'not found')
3076+ if mandatory:
3077+ self.fatal('The program %r is required' % filename)
3078+
3079+ if var:
3080+ self.env[var] = ret
3081+ return ret
3082+
3083+ def cmd_to_list(self, cmd):
3084+ "commands may be written in pseudo shell like 'ccache g++'"
3085+ if isinstance(cmd, str) and cmd.find(' '):
3086+ try:
3087+ os.stat(cmd)
3088+ except OSError:
3089+ return shlex.split(cmd)
3090+ else:
3091+ return [cmd]
3092+ return cmd
3093+
3094+ def __getattr__(self, name):
3095+ r = self.__class__.__dict__.get(name, None)
3096+ if r: return r
3097+ if name and name.startswith('require_'):
3098+
3099+ for k in ['check_', 'find_']:
3100+ n = name.replace('require_', k)
3101+ ret = self.__class__.__dict__.get(n, None)
3102+ if ret:
3103+ def run(*k, **kw):
3104+ r = ret(self, *k, **kw)
3105+ if not r:
3106+ self.fatal('requirement failure')
3107+ return r
3108+ return run
3109+ self.fatal('No such method %r' % name)
3110+
3111+ def eval_rules(self, rules):
3112+ self.rules = Utils.to_list(rules)
3113+ for x in self.rules:
3114+ f = getattr(self, x)
3115+ if not f: self.fatal("No such method '%s'." % x)
3116+ try:
3117+ f()
3118+ except Exception, e:
3119+ ret = self.err_handler(x, e)
3120+ if ret == BREAK:
3121+ break
3122+ elif ret == CONTINUE:
3123+ continue
3124+ else:
3125+ self.fatal(e)
3126+
3127+ def err_handler(self, fun, error):
3128+ pass
3129+
3130+def conf(f):
3131+ "decorator: attach new configuration functions"
3132+ setattr(ConfigurationContext, f.__name__, f)
3133+ return f
3134+
3135+def conftest(f):
3136+ "decorator: attach new configuration tests (registered as strings)"
3137+ ConfigurationContext.tests[f.__name__] = f
3138+ return conf(f)
3139+
3140+
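The configure() side of a wscript drives the ConfigurationContext above in the same way; a small sketch, where 'gcc' is only an example tool name and the CCFLAGS value is arbitrary. New checks are attached with the conf decorator defined at the end of the module.

    from Configure import conf

    @conf
    def check_something(self):
        # reuses the check_message helpers defined on ConfigurationContext
        self.check_message('feature', 'something', True)
        return True

    def configure(conf):
        conf.check_tool('gcc')                   # loads a tool, recorded for Build.setup()
        conf.find_program('pkg-config', var='PKGCONFIG', mandatory=False)
        conf.env.append_value('CCFLAGS', '-O2')  # conf.env is the 'default' Environment
        conf.check_something()                   # method added by the @conf decorator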
3141diff --git a/buildtools/wafadmin/Constants.py b/buildtools/wafadmin/Constants.py
3142new file mode 100644
3143index 0000000..e67dda6
3144--- /dev/null
3145+++ b/buildtools/wafadmin/Constants.py
3146@@ -0,0 +1,76 @@
3147+#!/usr/bin/env python
3148+# encoding: utf-8
3149+# Yinon dot me gmail 2008
3150+
3151+"""
3152+these constants are somewhat public, try not to mess them
3153+
3154+maintainer: the version number is updated from the top-level wscript file
3155+"""
3156+
3157+# do not touch these three lines, they are updated automatically
3158+HEXVERSION=0x105019
3159+WAFVERSION="1.5.19"
3160+WAFREVISION = "9709M"
3161+ABI = 7
3162+
3163+# permissions
3164+O644 = 420
3165+O755 = 493
3166+
3167+MAXJOBS = 99999999
3168+
3169+CACHE_DIR = 'c4che'
3170+CACHE_SUFFIX = '.cache.py'
3171+DBFILE = '.wafpickle-%d' % ABI
3172+WSCRIPT_FILE = 'wscript'
3173+WSCRIPT_BUILD_FILE = 'wscript_build'
3174+WAF_CONFIG_LOG = 'config.log'
3175+WAF_CONFIG_H = 'config.h'
3176+
3177+SIG_NIL = 'iluvcuteoverload'
3178+
3179+VARIANT = '_VARIANT_'
3180+DEFAULT = 'default'
3181+
3182+SRCDIR = 'srcdir'
3183+BLDDIR = 'blddir'
3184+APPNAME = 'APPNAME'
3185+VERSION = 'VERSION'
3186+
3187+DEFINES = 'defines'
3188+UNDEFINED = ()
3189+
3190+BREAK = "break"
3191+CONTINUE = "continue"
3192+
3193+# task scheduler options
3194+JOBCONTROL = "JOBCONTROL"
3195+MAXPARALLEL = "MAXPARALLEL"
3196+NORMAL = "NORMAL"
3197+
3198+# task state
3199+NOT_RUN = 0
3200+MISSING = 1
3201+CRASHED = 2
3202+EXCEPTION = 3
3203+SKIPPED = 8
3204+SUCCESS = 9
3205+
3206+ASK_LATER = -1
3207+SKIP_ME = -2
3208+RUN_ME = -3
3209+
3210+
3211+LOG_FORMAT = "%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
3212+HOUR_FORMAT = "%H:%M:%S"
3213+
3214+TEST_OK = True
3215+
3216+CFG_FILES = 'cfg_files'
3217+
3218+# positive '->' install
3219+# negative '<-' uninstall
3220+INSTALL = 1337
3221+UNINSTALL = -1337
3222+
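The INSTALL/UNINSTALL values above are deliberately odd sentinels; the build code only ever tests their sign (compare is_install > 0 / < 0 in Build.do_install and Build.install earlier in this patch). A tiny illustration, with a hypothetical helper name:

    from Constants import INSTALL, UNINSTALL

    def describe(is_install):
        # only the sign matters, never the magic numbers themselves
        if is_install > 0:
            return 'installing'    # is_install == INSTALL (1337)
        if is_install < 0:
            return 'uninstalling'  # is_install == UNINSTALL (-1337)
        return 'plain build'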
3223diff --git a/buildtools/wafadmin/Environment.py b/buildtools/wafadmin/Environment.py
3224new file mode 100644
3225index 0000000..52c83b4
3226--- /dev/null
3227+++ b/buildtools/wafadmin/Environment.py
3228@@ -0,0 +1,210 @@
3229+#!/usr/bin/env python
3230+# encoding: utf-8
3231+# Thomas Nagy, 2005 (ita)
3232+
3233+"""Environment representation
3234+
3235+There is one gotcha: getitem returns [] if the contents evaluate to False
3236+This means env['foo'] = {}; print env['foo'] will print [] not {}
3237+"""
3238+
3239+import os, copy, re
3240+import Logs, Options, Utils
3241+from Constants import *
3242+re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
3243+
3244+class Environment(object):
3245+ """A safe-to-use dictionary, but do not attach functions to it please (break cPickle)
3246+ An environment instance can be stored into a file and loaded easily
3247+ """
3248+ __slots__ = ("table", "parent")
3249+ def __init__(self, filename=None):
3250+ self.table = {}
3251+ #self.parent = None
3252+
3253+ if filename:
3254+ self.load(filename)
3255+
3256+ def __contains__(self, key):
3257+ if key in self.table: return True
3258+ try: return self.parent.__contains__(key)
3259+ except AttributeError: return False # parent may not exist
3260+
3261+ def __str__(self):
3262+ keys = set()
3263+ cur = self
3264+ while cur:
3265+ keys.update(cur.table.keys())
3266+ cur = getattr(cur, 'parent', None)
3267+ keys = list(keys)
3268+ keys.sort()
3269+ return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in keys])
3270+
3271+ def __getitem__(self, key):
3272+ try:
3273+ while 1:
3274+ x = self.table.get(key, None)
3275+ if not x is None:
3276+ return x
3277+ self = self.parent
3278+ except AttributeError:
3279+ return []
3280+
3281+ def __setitem__(self, key, value):
3282+ self.table[key] = value
3283+
3284+ def __delitem__(self, key):
3285+ del self.table[key]
3286+
3287+ def pop(self, key, *args):
3288+ if len(args):
3289+ return self.table.pop(key, *args)
3290+ return self.table.pop(key)
3291+
3292+ def set_variant(self, name):
3293+ self.table[VARIANT] = name
3294+
3295+ def variant(self):
3296+ try:
3297+ while 1:
3298+ x = self.table.get(VARIANT, None)
3299+ if not x is None:
3300+ return x
3301+ self = self.parent
3302+ except AttributeError:
3303+ return DEFAULT
3304+
3305+ def copy(self):
3306+ # TODO waf 1.6 rename this method derive, #368
3307+ newenv = Environment()
3308+ newenv.parent = self
3309+ return newenv
3310+
3311+ def detach(self):
3312+ """TODO try it
3313+ modifying the original env will not change the copy"""
3314+ tbl = self.get_merged_dict()
3315+ try:
3316+ delattr(self, 'parent')
3317+ except AttributeError:
3318+ pass
3319+ else:
3320+ keys = tbl.keys()
3321+ for x in keys:
3322+ tbl[x] = copy.deepcopy(tbl[x])
3323+ self.table = tbl
3324+
3325+ def get_flat(self, key):
3326+ s = self[key]
3327+ if isinstance(s, str): return s
3328+ return ' '.join(s)
3329+
3330+ def _get_list_value_for_modification(self, key):
3331+ """Gets a value that must be a list for further modification. The
3332+ list may be modified inplace and there is no need to
3333+ "self.table[var] = value" afterwards.
3334+ """
3335+ try:
3336+ value = self.table[key]
3337+ except KeyError:
3338+ try: value = self.parent[key]
3339+ except AttributeError: value = []
3340+ if isinstance(value, list):
3341+ value = value[:]
3342+ else:
3343+ value = [value]
3344+ else:
3345+ if not isinstance(value, list):
3346+ value = [value]
3347+ self.table[key] = value
3348+ return value
3349+
3350+ def append_value(self, var, value):
3351+ current_value = self._get_list_value_for_modification(var)
3352+
3353+ if isinstance(value, list):
3354+ current_value.extend(value)
3355+ else:
3356+ current_value.append(value)
3357+
3358+ def prepend_value(self, var, value):
3359+ current_value = self._get_list_value_for_modification(var)
3360+
3361+ if isinstance(value, list):
3362+ current_value = value + current_value
3363+ # a new list: update the dictionary entry
3364+ self.table[var] = current_value
3365+ else:
3366+ current_value.insert(0, value)
3367+
3368+ # prepend unique would be ambiguous
3369+ def append_unique(self, var, value):
3370+ current_value = self._get_list_value_for_modification(var)
3371+
3372+ if isinstance(value, list):
3373+ for value_item in value:
3374+ if value_item not in current_value:
3375+ current_value.append(value_item)
3376+ else:
3377+ if value not in current_value:
3378+ current_value.append(value)
3379+
3380+ def get_merged_dict(self):
3381+ """compute a merged table"""
3382+ table_list = []
3383+ env = self
3384+ while 1:
3385+ table_list.insert(0, env.table)
3386+ try: env = env.parent
3387+ except AttributeError: break
3388+ merged_table = {}
3389+ for table in table_list:
3390+ merged_table.update(table)
3391+ return merged_table
3392+
3393+ def store(self, filename):
3394+ "Write the variables into a file"
3395+ file = open(filename, 'w')
3396+ merged_table = self.get_merged_dict()
3397+ keys = list(merged_table.keys())
3398+ keys.sort()
3399+ for k in keys: file.write('%s = %r\n' % (k, merged_table[k]))
3400+ file.close()
3401+
3402+ def load(self, filename):
3403+ "Retrieve the variables from a file"
3404+ tbl = self.table
3405+ code = Utils.readf(filename)
3406+ for m in re_imp.finditer(code):
3407+ g = m.group
3408+ tbl[g(2)] = eval(g(3))
3409+ Logs.debug('env: %s', self.table)
3410+
3411+ def get_destdir(self):
3412+ "return the destdir, useful for installing"
3413+ if self.__getitem__('NOINSTALL'): return ''
3414+ return Options.options.destdir
3415+
3416+ def update(self, d):
3417+ for k, v in d.iteritems():
3418+ self[k] = v
3419+
3420+
3421+ def __getattr__(self, name):
3422+ if name in self.__slots__:
3423+ return object.__getattr__(self, name)
3424+ else:
3425+ return self[name]
3426+
3427+ def __setattr__(self, name, value):
3428+ if name in self.__slots__:
3429+ object.__setattr__(self, name, value)
3430+ else:
3431+ self[name] = value
3432+
3433+ def __delattr__(self, name):
3434+ if name in self.__slots__:
3435+ object.__delattr__(self, name)
3436+ else:
3437+ del self[name]
3438+
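A short sketch of the derivation behaviour described in the docstring above, assuming wafadmin is on sys.path; the variable names are arbitrary.

    import Environment

    base = Environment.Environment()
    base['CCFLAGS'] = ['-g']

    child = base.copy()                    # derived env: lookups fall through to the parent
    child.append_value('CCFLAGS', '-O2')   # copy-on-write: the parent list stays untouched

    print(base['CCFLAGS'])                 # ['-g']
    print(child['CCFLAGS'])                # ['-g', '-O2']
    print(base['NOT_SET'])                 # []  <- the getitem gotcha from the docstring

    child.detach()                         # deep-copies the merged table, drops the parent link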
3439diff --git a/buildtools/wafadmin/Logs.py b/buildtools/wafadmin/Logs.py
3440new file mode 100644
3441index 0000000..c160b37
3442--- /dev/null
3443+++ b/buildtools/wafadmin/Logs.py
3444@@ -0,0 +1,134 @@
3445+#!/usr/bin/env python
3446+# encoding: utf-8
3447+# Thomas Nagy, 2005 (ita)
3448+
3449+import ansiterm
3450+import os, re, logging, traceback, sys
3451+from Constants import *
3452+
3453+zones = ''
3454+verbose = 0
3455+
3456+colors_lst = {
3457+'USE' : True,
3458+'BOLD' :'\x1b[01;1m',
3459+'RED' :'\x1b[01;31m',
3460+'GREEN' :'\x1b[32m',
3461+'YELLOW':'\x1b[33m',
3462+'PINK' :'\x1b[35m',
3463+'BLUE' :'\x1b[01;34m',
3464+'CYAN' :'\x1b[36m',
3465+'NORMAL':'\x1b[0m',
3466+'cursor_on' :'\x1b[?25h',
3467+'cursor_off' :'\x1b[?25l',
3468+}
3469+
3470+got_tty = False
3471+term = os.environ.get('TERM', 'dumb')
3472+if not term in ['dumb', 'emacs']:
3473+ try:
3474+ got_tty = sys.stderr.isatty() or (sys.platform == 'win32' and term in ['xterm', 'msys'])
3475+ except AttributeError:
3476+ pass
3477+
3478+import Utils
3479+
3480+if not got_tty or 'NOCOLOR' in os.environ:
3481+ colors_lst['USE'] = False
3482+
3483+# test
3484+#if sys.platform == 'win32':
3485+# colors_lst['USE'] = True
3486+
3487+def get_color(cl):
3488+ if not colors_lst['USE']: return ''
3489+ return colors_lst.get(cl, '')
3490+
3491+class foo(object):
3492+ def __getattr__(self, a):
3493+ return get_color(a)
3494+ def __call__(self, a):
3495+ return get_color(a)
3496+
3497+colors = foo()
3498+
3499+re_log = re.compile(r'(\w+): (.*)', re.M)
3500+class log_filter(logging.Filter):
3501+ def __init__(self, name=None):
3502+ pass
3503+
3504+ def filter(self, rec):
3505+ rec.c1 = colors.PINK
3506+ rec.c2 = colors.NORMAL
3507+ rec.zone = rec.module
3508+ if rec.levelno >= logging.INFO:
3509+ if rec.levelno >= logging.ERROR:
3510+ rec.c1 = colors.RED
3511+ elif rec.levelno >= logging.WARNING:
3512+ rec.c1 = colors.YELLOW
3513+ else:
3514+ rec.c1 = colors.GREEN
3515+ return True
3516+
3517+ zone = ''
3518+ m = re_log.match(rec.msg)
3519+ if m:
3520+ zone = rec.zone = m.group(1)
3521+ rec.msg = m.group(2)
3522+
3523+ if zones:
3524+ return getattr(rec, 'zone', '') in zones or '*' in zones
3525+ elif not verbose > 2:
3526+ return False
3527+ return True
3528+
3529+class formatter(logging.Formatter):
3530+ def __init__(self):
3531+ logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)
3532+
3533+ def format(self, rec):
3534+ if rec.levelno >= logging.WARNING or rec.levelno == logging.INFO:
3535+ try:
3536+ return '%s%s%s' % (rec.c1, rec.msg.decode('utf-8'), rec.c2)
3537+ except:
3538+ return rec.c1+rec.msg+rec.c2
3539+ return logging.Formatter.format(self, rec)
3540+
3541+def debug(*k, **kw):
3542+ if verbose:
3543+ k = list(k)
3544+ k[0] = k[0].replace('\n', ' ')
3545+ logging.debug(*k, **kw)
3546+
3547+def error(*k, **kw):
3548+ logging.error(*k, **kw)
3549+ if verbose > 1:
3550+ if isinstance(k[0], Utils.WafError):
3551+ st = k[0].stack
3552+ else:
3553+ st = traceback.extract_stack()
3554+ if st:
3555+ st = st[:-1]
3556+ buf = []
3557+ for filename, lineno, name, line in st:
3558+ buf.append(' File "%s", line %d, in %s' % (filename, lineno, name))
3559+ if line:
3560+ buf.append(' %s' % line.strip())
3561+ if buf: logging.error("\n".join(buf))
3562+
3563+warn = logging.warn
3564+info = logging.info
3565+
3566+def init_log():
3567+ log = logging.getLogger()
3568+ log.handlers = []
3569+ log.filters = []
3570+ hdlr = logging.StreamHandler()
3571+ hdlr.setFormatter(formatter())
3572+ log.addHandler(hdlr)
3573+ log.addFilter(log_filter())
3574+ log.setLevel(logging.DEBUG)
3575+
3576+# may be initialized more than once
3577+init_log()
3578+
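The 'zone: message' convention parsed by log_filter above is what the rest of the code relies on (e.g. debug('build: ...') in Build.py). A small sketch, assuming wafadmin is importable:

    import Logs

    Logs.verbose = 1                 # debug() is a no-op unless verbose is set
    Logs.zones = ['build']           # only let the 'build' zone through the filter

    Logs.debug('build: rescanning %s', 'src')   # shown; zone prefix stripped by log_filter
    Logs.debug('task: posting %s', 'demo')      # suppressed; 'task' is not in zones
    Logs.warn('printed in yellow when colours are enabled')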
3579diff --git a/buildtools/wafadmin/Node.py b/buildtools/wafadmin/Node.py
3580new file mode 100644
3581index 0000000..236dd0d
3582--- /dev/null
3583+++ b/buildtools/wafadmin/Node.py
3584@@ -0,0 +1,695 @@
3585+#!/usr/bin/env python
3586+# encoding: utf-8
3587+# Thomas Nagy, 2005 (ita)
3588+
3589+"""
3590+Node: filesystem structure, contains lists of nodes
3591+
3592+IMPORTANT:
3593+1. Each file/folder is represented by exactly one node.
3594+
3595+2. Most would-be class properties are stored in Build: nodes to depend on, signature, flags, ..
3596+unused class members increase the .wafpickle file size noticeably with lots of objects.
3597+
3598+3. The build is launched from the top of the build dir (for example, in _build_/).
3599+
3600+4. Node should not be instantiated directly.
3601+Each instance of Build.BuildContext has a Node subclass.
3602+(aka: 'Nodu', see BuildContext initializer)
3603+The BuildContext is referenced here as self.__class__.bld
3604+Its Node class is referenced here as self.__class__
3605+
3606+The public and advertised apis are the following:
3607+${TGT} -> dir/to/file.ext
3608+${TGT[0].base()} -> dir/to/file
3609+${TGT[0].dir(env)} -> dir/to
3610+${TGT[0].file()} -> file.ext
3611+${TGT[0].file_base()} -> file
3612+${TGT[0].suffix()} -> .ext
3613+${TGT[0].abspath(env)} -> /path/to/dir/to/file.ext
3614+
3615+"""
3616+
3617+import os, sys, fnmatch, re, stat
3618+import Utils, Constants
3619+
3620+UNDEFINED = 0
3621+DIR = 1
3622+FILE = 2
3623+BUILD = 3
3624+
3625+type_to_string = {UNDEFINED: "unk", DIR: "dir", FILE: "src", BUILD: "bld"}
3626+
3627+# These fnmatch expressions are used by default to prune the directory tree
3628+# while doing the recursive traversal in the find_iter method of the Node class.
3629+prune_pats = '.git .bzr .hg .svn _MTN _darcs CVS SCCS'.split()
3630+
3631+# These fnmatch expressions are used by default to exclude files and dirs
3632+# while doing the recursive traversal in the find_iter method of the Node class.
3633+exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
3634+
3635+# These Utils.jar_regexp expressions are used by default to exclude files and dirs and also prune the directory tree
3636+# while doing the recursive traversal in the ant_glob method of the Node class.
3637+exclude_regs = '''
3638+**/*~
3639+**/#*#
3640+**/.#*
3641+**/%*%
3642+**/._*
3643+**/CVS
3644+**/CVS/**
3645+**/.cvsignore
3646+**/SCCS
3647+**/SCCS/**
3648+**/vssver.scc
3649+**/.svn
3650+**/.svn/**
3651+**/.git
3652+**/.git/**
3653+**/.gitignore
3654+**/.bzr
3655+**/.bzr/**
3656+**/.hg
3657+**/.hg/**
3658+**/_MTN
3659+**/_MTN/**
3660+**/_darcs
3661+**/_darcs/**
3662+**/.DS_Store'''
3663+
3664+class Node(object):
3665+ __slots__ = ("name", "parent", "id", "childs")
3666+ def __init__(self, name, parent, node_type = UNDEFINED):
3667+ self.name = name
3668+ self.parent = parent
3669+
3670+ # assumption: one build object at a time
3671+ self.__class__.bld.id_nodes += 4
3672+ self.id = self.__class__.bld.id_nodes + node_type
3673+
3674+ if node_type == DIR: self.childs = {}
3675+
3676+ # We do not want to add another type attribute (memory)
3677+ # use the id to find out: type = id & 3
3678+ # for setting: new type = type + x - type & 3
3679+
3680+ if parent and name in parent.childs:
3681+ raise Utils.WafError('node %s exists in the parent files %r already' % (name, parent))
3682+
3683+ if parent: parent.childs[name] = self
3684+
3685+ def __setstate__(self, data):
3686+ if len(data) == 4:
3687+ (self.parent, self.name, self.id, self.childs) = data
3688+ else:
3689+ (self.parent, self.name, self.id) = data
3690+
3691+ def __getstate__(self):
3692+ if getattr(self, 'childs', None) is None:
3693+ return (self.parent, self.name, self.id)
3694+ else:
3695+ return (self.parent, self.name, self.id, self.childs)
3696+
3697+ def __str__(self):
3698+ if not self.parent: return ''
3699+ return "%s://%s" % (type_to_string[self.id & 3], self.abspath())
3700+
3701+ def __repr__(self):
3702+ return self.__str__()
3703+
3704+ def __hash__(self):
3705+ "expensive, make certain it is not used"
3706+ raise Utils.WafError('nodes, you are doing it wrong')
3707+
3708+ def __copy__(self):
3709+ "nodes are not supposed to be copied"
3710+ raise Utils.WafError('nodes are not supposed to be cloned')
3711+
3712+ def get_type(self):
3713+ return self.id & 3
3714+
3715+ def set_type(self, t):
3716+ "dangerous, you are not supposed to use this"
3717+ self.id = self.id + t - self.id & 3
3718+
3719+ def dirs(self):
3720+ return [x for x in self.childs.values() if x.id & 3 == DIR]
3721+
3722+ def files(self):
3723+ return [x for x in self.childs.values() if x.id & 3 == FILE]
3724+
3725+ def get_dir(self, name, default=None):
3726+ node = self.childs.get(name, None)
3727+ if not node or node.id & 3 != DIR: return default
3728+ return node
3729+
3730+ def get_file(self, name, default=None):
3731+ node = self.childs.get(name, None)
3732+ if not node or node.id & 3 != FILE: return default
3733+ return node
3734+
3735+ def get_build(self, name, default=None):
3736+ node = self.childs.get(name, None)
3737+ if not node or node.id & 3 != BUILD: return default
3738+ return node
3739+
3740+ def find_resource(self, lst):
3741+ "Find an existing input file: either a build node declared previously or a source node"
3742+ if isinstance(lst, str):
3743+ lst = Utils.split_path(lst)
3744+
3745+ if len(lst) == 1:
3746+ parent = self
3747+ else:
3748+ parent = self.find_dir(lst[:-1])
3749+ if not parent: return None
3750+ self.__class__.bld.rescan(parent)
3751+
3752+ name = lst[-1]
3753+ node = parent.childs.get(name, None)
3754+ if node:
3755+ tp = node.id & 3
3756+ if tp == FILE or tp == BUILD:
3757+ return node
3758+ else:
3759+ return None
3760+
3761+ tree = self.__class__.bld
3762+ if not name in tree.cache_dir_contents[parent.id]:
3763+ return None
3764+
3765+ path = parent.abspath() + os.sep + name
3766+ try:
3767+ st = Utils.h_file(path)
3768+ except IOError:
3769+ return None
3770+
3771+ child = self.__class__(name, parent, FILE)
3772+ tree.node_sigs[0][child.id] = st
3773+ return child
3774+
3775+ def find_or_declare(self, lst):
3776+ "Used for declaring a build node representing a file being built"
3777+ if isinstance(lst, str):
3778+ lst = Utils.split_path(lst)
3779+
3780+ if len(lst) == 1:
3781+ parent = self
3782+ else:
3783+ parent = self.find_dir(lst[:-1])
3784+ if not parent: return None
3785+ self.__class__.bld.rescan(parent)
3786+
3787+ name = lst[-1]
3788+ node = parent.childs.get(name, None)
3789+ if node:
3790+ tp = node.id & 3
3791+ if tp != BUILD:
3792+ raise Utils.WafError('find_or_declare found a source file where a build file was expected %r' % '/'.join(lst))
3793+ return node
3794+ node = self.__class__(name, parent, BUILD)
3795+ return node
3796+
3797+ def find_dir(self, lst):
3798+ "search a folder in the filesystem"
3799+
3800+ if isinstance(lst, str):
3801+ lst = Utils.split_path(lst)
3802+
3803+ current = self
3804+ for name in lst:
3805+ self.__class__.bld.rescan(current)
3806+ prev = current
3807+
3808+ if not current.parent and name == current.name:
3809+ continue
3810+ elif not name:
3811+ continue
3812+ elif name == '.':
3813+ continue
3814+ elif name == '..':
3815+ current = current.parent or current
3816+ else:
3817+ current = prev.childs.get(name, None)
3818+ if current is None:
3819+ dir_cont = self.__class__.bld.cache_dir_contents
3820+ if prev.id in dir_cont and name in dir_cont[prev.id]:
3821+ if not prev.name:
3822+ if os.sep == '/':
3823+ # cygwin //machine/share
3824+ dirname = os.sep + name
3825+ else:
3826+ # windows c:
3827+ dirname = name
3828+ else:
3829+ # regular path
3830+ dirname = prev.abspath() + os.sep + name
3831+ if not os.path.isdir(dirname):
3832+ return None
3833+ current = self.__class__(name, prev, DIR)
3834+ elif (not prev.name and len(name) == 2 and name[1] == ':') or name.startswith('\\\\'):
3835+ # drive letter or \\ path for windows
3836+ current = self.__class__(name, prev, DIR)
3837+ else:
3838+ return None
3839+ else:
3840+ if current.id & 3 != DIR:
3841+ return None
3842+ return current
3843+
3844+ def ensure_dir_node_from_path(self, lst):
3845+ "used very rarely, forces the construction of a branch of node instances representing folders"
3846+
3847+ if isinstance(lst, str):
3848+ lst = Utils.split_path(lst)
3849+
3850+ current = self
3851+ for name in lst:
3852+ if not name:
3853+ continue
3854+ elif name == '.':
3855+ continue
3856+ elif name == '..':
3857+ current = current.parent or current
3858+ else:
3859+ prev = current
3860+ current = prev.childs.get(name, None)
3861+ if current is None:
3862+ current = self.__class__(name, prev, DIR)
3863+ return current
3864+
3865+ def exclusive_build_node(self, path):
3866+ """
3867+ create a hierarchy in the build dir (no source folders) for ill-behaving compilers;
3868+ the node is not hashed, so you must do it manually
3869+
3870+ after declaring such a node, find_dir and find_resource should work as expected
3871+ """
3872+ lst = Utils.split_path(path)
3873+ name = lst[-1]
3874+ if len(lst) > 1:
3875+ parent = None
3876+ try:
3877+ parent = self.find_dir(lst[:-1])
3878+ except OSError:
3879+ pass
3880+ if not parent:
3881+ parent = self.ensure_dir_node_from_path(lst[:-1])
3882+ self.__class__.bld.rescan(parent)
3883+ else:
3884+ try:
3885+ self.__class__.bld.rescan(parent)
3886+ except OSError:
3887+ pass
3888+ else:
3889+ parent = self
3890+
3891+ node = parent.childs.get(name, None)
3892+ if not node:
3893+ node = self.__class__(name, parent, BUILD)
3894+
3895+ return node
3896+
3897+ def path_to_parent(self, parent):
3898+ "path relative to a direct ancestor, as string"
3899+ lst = []
3900+ p = self
3901+ h1 = parent.height()
3902+ h2 = p.height()
3903+ while h2 > h1:
3904+ h2 -= 1
3905+ lst.append(p.name)
3906+ p = p.parent
3907+ if lst:
3908+ lst.reverse()
3909+ ret = os.path.join(*lst)
3910+ else:
3911+ ret = ''
3912+ return ret
3913+
3914+ def find_ancestor(self, node):
3915+ "find a common ancestor for two nodes - for the shortest path in hierarchy"
3916+ dist = self.height() - node.height()
3917+ if dist < 0: return node.find_ancestor(self)
3918+ # now the real code
3919+ cand = self
3920+ while dist > 0:
3921+ cand = cand.parent
3922+ dist -= 1
3923+ if cand == node: return cand
3924+ cursor = node
3925+ while cand.parent:
3926+ cand = cand.parent
3927+ cursor = cursor.parent
3928+ if cand == cursor: return cand
3929+
3930+ def relpath_gen(self, from_node):
3931+ "string representing a relative path between self and another node"
3932+
3933+ if self == from_node: return '.'
3934+ if from_node.parent == self: return '..'
3935+
3936+ # up_path is '../../../' and down_path is 'dir/subdir/subdir/file'
3937+ ancestor = self.find_ancestor(from_node)
3938+ lst = []
3939+ cand = self
3940+ while not cand.id == ancestor.id:
3941+ lst.append(cand.name)
3942+ cand = cand.parent
3943+ cand = from_node
3944+ while not cand.id == ancestor.id:
3945+ lst.append('..')
3946+ cand = cand.parent
3947+ lst.reverse()
3948+ return os.sep.join(lst)
3949+
3950+ def nice_path(self, env=None):
3951+ "printed in the console; makes it easy to open files from the launch directory"
3952+ tree = self.__class__.bld
3953+ ln = tree.launch_node()
3954+
3955+ if self.id & 3 == FILE: return self.relpath_gen(ln)
3956+ else: return os.path.join(tree.bldnode.relpath_gen(ln), env.variant(), self.relpath_gen(tree.srcnode))
3957+
3958+ def is_child_of(self, node):
3959+ "does this node belong to the subtree rooted at node"
3960+ p = self
3961+ diff = self.height() - node.height()
3962+ while diff > 0:
3963+ diff -= 1
3964+ p = p.parent
3965+ return p.id == node.id
3966+
3967+ def variant(self, env):
3968+ "variant, or output directory, for this node; a source file has variant 0"
3969+ if not env: return 0
3970+ elif self.id & 3 == FILE: return 0
3971+ else: return env.variant()
3972+
3973+ def height(self):
3974+ "number of parents"
3975+ # README a cache can be added here if necessary
3976+ d = self
3977+ val = -1
3978+ while d:
3979+ d = d.parent
3980+ val += 1
3981+ return val
3982+
3983+ # helpers for building things
3984+
3985+ def abspath(self, env=None):
3986+ """
3987+ absolute path
3988+ @param env [Environment]:
3989+ * obligatory for build nodes: build/variant/src/dir/bar.o
3990+ * optional for dirs: get either src/dir or build/variant/src/dir
3991+ * excluded for source nodes: src/dir/bar.c
3992+
3993+ Instead of computing the absolute path each time again,
3994+ store the already-computed absolute paths in one of (variants+1) dictionaries:
3995+ bld.cache_node_abspath[0] holds absolute paths for source nodes.
3996+ bld.cache_node_abspath[variant] holds the absolute path for the build nodes
3997+ which reside in the variant given by env.
3998+ """
3999+ ## absolute path - hot zone, so do not touch
4000+
4001+ # less expensive
4002+ variant = (env and (self.id & 3 != FILE) and env.variant()) or 0
4003+
4004+ ret = self.__class__.bld.cache_node_abspath[variant].get(self.id, None)
4005+ if ret: return ret
4006+
4007+ if not variant:
4008+ # source directory
4009+ if not self.parent:
4010+ val = os.sep == '/' and os.sep or ''
4011+ elif not self.parent.name: # root
4012+ val = (os.sep == '/' and os.sep or '') + self.name
4013+ else:
4014+ val = self.parent.abspath() + os.sep + self.name
4015+ else:
4016+ # build directory
4017+ val = os.sep.join((self.__class__.bld.bldnode.abspath(), variant, self.path_to_parent(self.__class__.bld.srcnode)))
4018+ self.__class__.bld.cache_node_abspath[variant][self.id] = val
4019+ return val
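[Editorial note: for orientation only, and assuming a purely hypothetical project layout with a variant named 'default', the cases described in the docstring above resolve roughly as follows:]

    # illustrative only; actual paths depend on the project layout and the variant
    # source file:           node.abspath()     -> /path/to/src/dir/bar.c
    # directory, no env:     node.abspath()     -> /path/to/src/dir
    # directory, with env:   node.abspath(env)  -> /path/to/_build_/default/dir
    # build file, with env:  node.abspath(env)  -> /path/to/_build_/default/dir/bar.o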
4020+
4021+ def change_ext(self, ext):
4022+ "node of the same path, but with a different extension - hot zone so do not touch"
4023+ name = self.name
4024+ k = name.rfind('.')
4025+ if k >= 0:
4026+ name = name[:k] + ext
4027+ else:
4028+ name = name + ext
4029+
4030+ return self.parent.find_or_declare([name])
4031+
4032+ def src_dir(self, env):
4033+ "src path without the file name"
4034+ return self.parent.srcpath(env)
4035+
4036+ def bld_dir(self, env):
4037+ "build path without the file name"
4038+ return self.parent.bldpath(env)
4039+
4040+ def bld_base(self, env):
4041+ "build path without the extension: src/dir/foo(.cpp)"
4042+ s = os.path.splitext(self.name)[0]
4043+ return os.path.join(self.bld_dir(env), s)
4044+
4045+ def bldpath(self, env=None):
4046+ "path seen from the build dir default/src/foo.cpp"
4047+ if self.id & 3 == FILE:
4048+ return self.relpath_gen(self.__class__.bld.bldnode)
4049+ p = self.path_to_parent(self.__class__.bld.srcnode)
4050+ if p is not '':
4051+ return env.variant() + os.sep + p
4052+ return env.variant()
4053+
4054+ def srcpath(self, env=None):
4055+ "path in the srcdir from the build dir ../src/foo.cpp"
4056+ if self.id & 3 == BUILD:
4057+ return self.bldpath(env)
4058+ return self.relpath_gen(self.__class__.bld.bldnode)
4059+
4060+ def read(self, env):
4061+ "get the contents of a file; it is not used anywhere for the moment"
4062+ return Utils.readf(self.abspath(env))
4063+
4064+ def dir(self, env):
4065+ "scons-like"
4066+ return self.parent.abspath(env)
4067+
4068+ def file(self):
4069+ "scons-like"
4070+ return self.name
4071+
4072+ def file_base(self):
4073+ "scons-like"
4074+ return os.path.splitext(self.name)[0]
4075+
4076+ def suffix(self):
4077+ "scons-like - hot zone so do not touch"
4078+ k = max(0, self.name.rfind('.'))
4079+ return self.name[k:]
4080+
4081+ def find_iter_impl(self, src=True, bld=True, dir=True, accept_name=None, is_prune=None, maxdepth=25):
4082+ """find nodes in the filesystem hierarchy, try to instantiate the nodes passively; same gotcha as ant_glob"""
4083+ bld_ctx = self.__class__.bld
4084+ bld_ctx.rescan(self)
4085+ for name in bld_ctx.cache_dir_contents[self.id]:
4086+ if accept_name(self, name):
4087+ node = self.find_resource(name)
4088+ if node:
4089+ if src and node.id & 3 == FILE:
4090+ yield node
4091+ else:
4092+ node = self.find_dir(name)
4093+ if node and node.id != bld_ctx.bldnode.id:
4094+ if dir:
4095+ yield node
4096+ if not is_prune(self, name):
4097+ if maxdepth:
4098+ for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
4099+ yield k
4100+ else:
4101+ if not is_prune(self, name):
4102+ node = self.find_resource(name)
4103+ if not node:
4104+ # not a file, it is a dir
4105+ node = self.find_dir(name)
4106+ if node and node.id != bld_ctx.bldnode.id:
4107+ if maxdepth:
4108+ for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
4109+ yield k
4110+
4111+ if bld:
4112+ for node in self.childs.values():
4113+ if node.id == bld_ctx.bldnode.id:
4114+ continue
4115+ if node.id & 3 == BUILD:
4116+ if accept_name(self, node.name):
4117+ yield node
4118+ raise StopIteration
4119+
4120+ def find_iter(self, in_pat=['*'], ex_pat=exclude_pats, prune_pat=prune_pats, src=True, bld=True, dir=False, maxdepth=25, flat=False):
4121+ """find nodes recursively, this returns everything but folders by default; same gotcha as ant_glob"""
4122+
4123+ if not (src or bld or dir):
4124+ raise StopIteration
4125+
4126+ if self.id & 3 != DIR:
4127+ raise StopIteration
4128+
4129+ in_pat = Utils.to_list(in_pat)
4130+ ex_pat = Utils.to_list(ex_pat)
4131+ prune_pat = Utils.to_list(prune_pat)
4132+
4133+ def accept_name(node, name):
4134+ for pat in ex_pat:
4135+ if fnmatch.fnmatchcase(name, pat):
4136+ return False
4137+ for pat in in_pat:
4138+ if fnmatch.fnmatchcase(name, pat):
4139+ return True
4140+ return False
4141+
4142+ def is_prune(node, name):
4143+ for pat in prune_pat:
4144+ if fnmatch.fnmatchcase(name, pat):
4145+ return True
4146+ return False
4147+
4148+ ret = self.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth)
4149+ if flat:
4150+ return " ".join([x.relpath_gen(self) for x in ret])
4151+
4152+ return ret
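[Editorial note: a minimal hedged example of the defaults above; the node name and patterns are illustrative only:]

    # hypothetical usage; 'some_dir_node' stands for any directory node
    for node in some_dir_node.find_iter(in_pat=['*.c', '*.h']):
        print(node.name)   # file nodes only, VCS/backup patterns excluded by default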
4153+
4154+ def ant_glob(self, *k, **kw):
4155+ """
4156+ known gotcha: will enumerate the files, but only if the folder exists in the source directory
4157+ """
4158+
4159+ src=kw.get('src', 1)
4160+ bld=kw.get('bld', 0)
4161+ dir=kw.get('dir', 0)
4162+ excl = kw.get('excl', exclude_regs)
4163+ incl = k and k[0] or kw.get('incl', '**')
4164+
4165+ def to_pat(s):
4166+ lst = Utils.to_list(s)
4167+ ret = []
4168+ for x in lst:
4169+ x = x.replace('//', '/')
4170+ if x.endswith('/'):
4171+ x += '**'
4172+ lst2 = x.split('/')
4173+ accu = []
4174+ for k in lst2:
4175+ if k == '**':
4176+ accu.append(k)
4177+ else:
4178+ k = k.replace('.', '[.]').replace('*', '.*').replace('?', '.')
4179+ k = '^%s$' % k
4180+ #print "pattern", k
4181+ accu.append(re.compile(k))
4182+ ret.append(accu)
4183+ return ret
4184+
4185+ def filtre(name, nn):
4186+ ret = []
4187+ for lst in nn:
4188+ if not lst:
4189+ pass
4190+ elif lst[0] == '**':
4191+ ret.append(lst)
4192+ if len(lst) > 1:
4193+ if lst[1].match(name):
4194+ ret.append(lst[2:])
4195+ else:
4196+ ret.append([])
4197+ elif lst[0].match(name):
4198+ ret.append(lst[1:])
4199+ return ret
4200+
4201+ def accept(name, pats):
4202+ nacc = filtre(name, pats[0])
4203+ nrej = filtre(name, pats[1])
4204+ if [] in nrej:
4205+ nacc = []
4206+ return [nacc, nrej]
4207+
4208+ def ant_iter(nodi, maxdepth=25, pats=[]):
4209+ nodi.__class__.bld.rescan(nodi)
4210+ tmp = list(nodi.__class__.bld.cache_dir_contents[nodi.id])
4211+ tmp.sort()
4212+ for name in tmp:
4213+ npats = accept(name, pats)
4214+ if npats and npats[0]:
4215+ accepted = [] in npats[0]
4216+ #print accepted, nodi, name
4217+
4218+ node = nodi.find_resource(name)
4219+ if node and accepted:
4220+ if src and node.id & 3 == FILE:
4221+ yield node
4222+ else:
4223+ node = nodi.find_dir(name)
4224+ if node and node.id != nodi.__class__.bld.bldnode.id:
4225+ if accepted and dir:
4226+ yield node
4227+ if maxdepth:
4228+ for k in ant_iter(node, maxdepth=maxdepth - 1, pats=npats):
4229+ yield k
4230+ if bld:
4231+ for node in nodi.childs.values():
4232+ if node.id == nodi.__class__.bld.bldnode.id:
4233+ continue
4234+ if node.id & 3 == BUILD:
4235+ npats = accept(node.name, pats)
4236+ if npats and npats[0] and [] in npats[0]:
4237+ yield node
4238+ raise StopIteration
4239+
4240+ ret = [x for x in ant_iter(self, pats=[to_pat(incl), to_pat(excl)])]
4241+
4242+ if kw.get('flat', True):
4243+ return " ".join([x.relpath_gen(self) for x in ret])
4244+
4245+ return ret
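[Editorial note: a hedged usage sketch of ant_glob, keeping in mind the gotcha above that only files present in the source directory are enumerated; the node name and pattern are illustrative. By default the result is a flat, space-separated string of paths relative to the node; flat=False returns the nodes themselves:]

    # hypothetical usage; 'srcdir_node' stands for any directory node
    flat  = srcdir_node.ant_glob('**/*.c')              # 'a.c sub/b.c ...' (flat string)
    nodes = srcdir_node.ant_glob('**/*.c', flat=False)  # list of source nodes instead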
4246+
4247+ def update_build_dir(self, env=None):
4248+
4249+ if not env:
4250+ for env in bld.all_envs:
4251+ self.update_build_dir(env)
4252+ return
4253+
4254+ path = self.abspath(env)
4255+
4256+ lst = Utils.listdir(path)
4257+ try:
4258+ self.__class__.bld.cache_dir_contents[self.id].update(lst)
4259+ except KeyError:
4260+ self.__class__.bld.cache_dir_contents[self.id] = set(lst)
4261+ self.__class__.bld.cache_scanned_folders[self.id] = True
4262+
4263+ for k in lst:
4264+ npath = path + os.sep + k
4265+ st = os.stat(npath)
4266+ if stat.S_ISREG(st[stat.ST_MODE]):
4267+ ick = self.find_or_declare(k)
4268+ if not (ick.id in self.__class__.bld.node_sigs[env.variant()]):
4269+ self.__class__.bld.node_sigs[env.variant()][ick.id] = Constants.SIG_NIL
4270+ elif stat.S_ISDIR(st[stat.ST_MODE]):
4271+ child = self.find_dir(k)
4272+ if not child:
4273+ child = self.ensure_dir_node_from_path(k)
4274+ child.update_build_dir(env)
4275+
4276+
4277+class Nodu(Node):
4278+ pass
4279+
4280diff --git a/buildtools/wafadmin/Options.py b/buildtools/wafadmin/Options.py
4281new file mode 100644
4282index 0000000..c9ddcfe
4283--- /dev/null
4284+++ b/buildtools/wafadmin/Options.py
4285@@ -0,0 +1,288 @@
4286+#!/usr/bin/env python
4287+# encoding: utf-8
4288+# Scott Newton, 2005 (scottn)
4289+# Thomas Nagy, 2006 (ita)
4290+
4291+"Custom command-line options"
4292+
4293+import os, sys, imp, types, tempfile, optparse
4294+import Logs, Utils
4295+from Constants import *
4296+
4297+cmds = 'distclean configure build install clean uninstall check dist distcheck'.split()
4298+
4299+# TODO remove in waf 1.6 the following two
4300+commands = {}
4301+is_install = False
4302+
4303+options = {}
4304+arg_line = []
4305+launch_dir = ''
4306+tooldir = ''
4307+lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
4308+try: cache_global = os.path.abspath(os.environ['WAFCACHE'])
4309+except KeyError: cache_global = ''
4310+platform = Utils.unversioned_sys_platform()
4311+conf_file = 'conf-runs-%s-%d.pickle' % (platform, ABI)
4312+
4313+remote_repo = ['http://waf.googlecode.com/svn/']
4314+"""remote directory for the plugins"""
4315+
4316+
4317+# Such a command-line should work: JOBS=4 PREFIX=/opt/ DESTDIR=/tmp/ahoj/ waf configure
4318+default_prefix = os.environ.get('PREFIX')
4319+if not default_prefix:
4320+ if platform == 'win32':
4321+ d = tempfile.gettempdir()
4322+ default_prefix = d[0].upper() + d[1:]
4323+ # win32 preserves the case, but gettempdir does not
4324+ else: default_prefix = '/usr/local/'
4325+
4326+default_jobs = os.environ.get('JOBS', -1)
4327+if default_jobs < 1:
4328+ try:
4329+ if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
4330+ default_jobs = os.sysconf('SC_NPROCESSORS_ONLN')
4331+ else:
4332+ default_jobs = int(Utils.cmd_output(['sysctl', '-n', 'hw.ncpu']))
4333+ except:
4334+ if os.name == 'java': # platform.system() == 'Java'
4335+ from java.lang import Runtime
4336+ default_jobs = Runtime.getRuntime().availableProcessors()
4337+ else:
4338+ # environment var defined on win32
4339+ default_jobs = int(os.environ.get('NUMBER_OF_PROCESSORS', 1))
4340+
4341+default_destdir = os.environ.get('DESTDIR', '')
4342+
4343+def get_usage(self):
4344+ cmds_str = []
4345+ module = Utils.g_module
4346+ if module:
4347+ # create the help messages for commands
4348+ tbl = module.__dict__
4349+ keys = list(tbl.keys())
4350+ keys.sort()
4351+
4352+ if 'build' in tbl:
4353+ if not module.build.__doc__:
4354+ module.build.__doc__ = 'builds the project'
4355+ if 'configure' in tbl:
4356+ if not module.configure.__doc__:
4357+ module.configure.__doc__ = 'configures the project'
4358+
4359+ ban = ['set_options', 'init', 'shutdown']
4360+
4361+ optlst = [x for x in keys if not x in ban
4362+ and type(tbl[x]) is type(parse_args_impl)
4363+ and tbl[x].__doc__
4364+ and not x.startswith('_')]
4365+
4366+ just = max([len(x) for x in optlst])
4367+
4368+ for x in optlst:
4369+ cmds_str.append(' %s: %s' % (x.ljust(just), tbl[x].__doc__))
4370+ ret = '\n'.join(cmds_str)
4371+ else:
4372+ ret = ' '.join(cmds)
4373+ return '''waf [command] [options]
4374+
4375+Main commands (example: ./waf build -j4)
4376+%s
4377+''' % ret
4378+
4379+
4380+setattr(optparse.OptionParser, 'get_usage', get_usage)
4381+
4382+def create_parser(module=None):
4383+ Logs.debug('options: create_parser is called')
4384+ parser = optparse.OptionParser(conflict_handler="resolve", version = 'waf %s (%s)' % (WAFVERSION, WAFREVISION))
4385+
4386+ parser.formatter.width = Utils.get_term_cols()
4387+ p = parser.add_option
4388+
4389+ p('-j', '--jobs',
4390+ type = 'int',
4391+ default = default_jobs,
4392+ help = 'amount of parallel jobs (%r)' % default_jobs,
4393+ dest = 'jobs')
4394+
4395+ p('-k', '--keep',
4396+ action = 'store_true',
4397+ default = False,
4398+ help = 'keep running happily on independent task groups',
4399+ dest = 'keep')
4400+
4401+ p('-v', '--verbose',
4402+ action = 'count',
4403+ default = 0,
4404+ help = 'verbosity level -v -vv or -vvv [default: 0]',
4405+ dest = 'verbose')
4406+
4407+ p('--nocache',
4408+ action = 'store_true',
4409+ default = False,
4410+ help = 'ignore the WAFCACHE (if set)',
4411+ dest = 'nocache')
4412+
4413+ p('--zones',
4414+ action = 'store',
4415+ default = '',
4416+ help = 'debugging zones (task_gen, deps, tasks, etc)',
4417+ dest = 'zones')
4418+
4419+ p('-p', '--progress',
4420+ action = 'count',
4421+ default = 0,
4422+ help = '-p: progress bar; -pp: ide output',
4423+ dest = 'progress_bar')
4424+
4425+ p('--targets',
4426+ action = 'store',
4427+ default = '',
4428+ help = 'build given task generators, e.g. "target1,target2"',
4429+ dest = 'compile_targets')
4430+
4431+ gr = optparse.OptionGroup(parser, 'configuration options')
4432+ parser.add_option_group(gr)
4433+ gr.add_option('-b', '--blddir',
4434+ action = 'store',
4435+ default = '',
4436+ help = 'out dir for the project (configuration)',
4437+ dest = 'blddir')
4438+ gr.add_option('-s', '--srcdir',
4439+ action = 'store',
4440+ default = '',
4441+ help = 'top dir for the project (configuration)',
4442+ dest = 'srcdir')
4443+ gr.add_option('--prefix',
4444+ help = 'installation prefix (configuration) [default: %r]' % default_prefix,
4445+ default = default_prefix,
4446+ dest = 'prefix')
4447+
4448+ gr.add_option('--download',
4449+ action = 'store_true',
4450+ default = False,
4451+ help = 'try to download the tools if missing',
4452+ dest = 'download')
4453+
4454+ gr = optparse.OptionGroup(parser, 'installation options')
4455+ parser.add_option_group(gr)
4456+ gr.add_option('--destdir',
4457+ help = 'installation root [default: %r]' % default_destdir,
4458+ default = default_destdir,
4459+ dest = 'destdir')
4460+ gr.add_option('-f', '--force',
4461+ action = 'store_true',
4462+ default = False,
4463+ help = 'force file installation',
4464+ dest = 'force')
4465+
4466+ return parser
4467+
4468+def parse_args_impl(parser, _args=None):
4469+ global options, commands, arg_line
4470+ (options, args) = parser.parse_args(args=_args)
4471+
4472+ arg_line = args
4473+ #arg_line = args[:] # copy
4474+
4475+ # By default, 'waf' is equivalent to 'waf build'
4476+ commands = {}
4477+ for var in cmds: commands[var] = 0
4478+ if not args:
4479+ commands['build'] = 1
4480+ args.append('build')
4481+
4482+ # Parse the command arguments
4483+ for arg in args:
4484+ commands[arg] = True
4485+
4486+ # the check thing depends on the build
4487+ if 'check' in args:
4488+ idx = args.index('check')
4489+ try:
4490+ bidx = args.index('build')
4491+ if bidx > idx:
4492+ raise ValueError('build before check')
4493+ except ValueError, e:
4494+ args.insert(idx, 'build')
4495+
4496+ if args[0] != 'init':
4497+ args.insert(0, 'init')
4498+
4499+ # TODO -k => -j0
4500+ if options.keep: options.jobs = 1
4501+ if options.jobs < 1: options.jobs = 1
4502+
4503+ if 'install' in sys.argv or 'uninstall' in sys.argv:
4504+ # absolute path only if set
4505+ options.destdir = options.destdir and os.path.abspath(os.path.expanduser(options.destdir))
4506+
4507+ Logs.verbose = options.verbose
4508+ Logs.init_log()
4509+
4510+ if options.zones:
4511+ Logs.zones = options.zones.split(',')
4512+ if not Logs.verbose: Logs.verbose = 1
4513+ elif Logs.verbose > 0:
4514+ Logs.zones = ['runner']
4515+ if Logs.verbose > 2:
4516+ Logs.zones = ['*']
4517+
4518+# TODO waf 1.6
4519+# 1. rename the class to OptionsContext
4520+# 2. instead of a class attribute, use a module (static 'parser')
4521+# 3. parse_args_impl was made in times when we did not know about binding new methods to classes
4522+
4523+class Handler(Utils.Context):
4524+ """loads wscript modules in folders for adding options
4525+ This class should be named 'OptionsContext'
4526+ A method named 'recurse' is bound when used by the module Scripting"""
4527+
4528+ parser = None
4529+ # make it possible to access the reference, like Build.bld
4530+
4531+ def __init__(self, module=None):
4532+ self.parser = create_parser(module)
4533+ self.cwd = os.getcwd()
4534+ Handler.parser = self
4535+
4536+ def add_option(self, *k, **kw):
4537+ self.parser.add_option(*k, **kw)
4538+
4539+ def add_option_group(self, *k, **kw):
4540+ return self.parser.add_option_group(*k, **kw)
4541+
4542+ def get_option_group(self, opt_str):
4543+ return self.parser.get_option_group(opt_str)
4544+
4545+ def sub_options(self, *k, **kw):
4546+ if not k: raise Utils.WscriptError('folder expected')
4547+ self.recurse(k[0], name='set_options')
4548+
4549+ def tool_options(self, *k, **kw):
4550+ Utils.python_24_guard()
4551+
4552+ if not k[0]:
4553+ raise Utils.WscriptError('invalid tool_options call %r %r' % (k, kw))
4554+ tools = Utils.to_list(k[0])
4555+
4556+ # TODO waf 1.6 remove the global variable tooldir
4557+ path = Utils.to_list(kw.get('tdir', kw.get('tooldir', tooldir)))
4558+
4559+ for tool in tools:
4560+ tool = tool.replace('++', 'xx')
4561+ if tool == 'java': tool = 'javaw'
4562+ if tool.lower() == 'unittest': tool = 'unittestw'
4563+ module = Utils.load_tool(tool, path)
4564+ try:
4565+ fun = module.set_options
4566+ except AttributeError:
4567+ pass
4568+ else:
4569+ fun(kw.get('option_group', self))
4570+
4571+ def parse_args(self, args=None):
4572+ parse_args_impl(self.parser, args)
4573+
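[Editorial note: for context, a hedged sketch of how the Handler above is typically driven from a project wscript; the '--with-foo' option is invented, and 'compiler_cc' is assumed to be one of the standard waf 1.5 tools:]

    # hypothetical wscript fragment
    def set_options(opt):
        opt.tool_options('compiler_cc')   # loads the tool and calls its set_options
        opt.add_option('--with-foo', action='store', default='',
                       dest='foo', help='illustrative option only')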
4574diff --git a/buildtools/wafadmin/Runner.py b/buildtools/wafadmin/Runner.py
4575new file mode 100644
4576index 0000000..94db0fb
4577--- /dev/null
4578+++ b/buildtools/wafadmin/Runner.py
4579@@ -0,0 +1,236 @@
4580+#!/usr/bin/env python
4581+# encoding: utf-8
4582+# Thomas Nagy, 2005-2008 (ita)
4583+
4584+"Execute the tasks"
4585+
4586+import os, sys, random, time, threading, traceback
4587+try: from Queue import Queue
4588+except ImportError: from queue import Queue
4589+import Build, Utils, Logs, Options
4590+from Logs import debug, error
4591+from Constants import *
4592+
4593+GAP = 15
4594+
4595+run_old = threading.Thread.run
4596+def run(*args, **kwargs):
4597+ try:
4598+ run_old(*args, **kwargs)
4599+ except (KeyboardInterrupt, SystemExit):
4600+ raise
4601+ except:
4602+ sys.excepthook(*sys.exc_info())
4603+threading.Thread.run = run
4604+
4605+def process_task(tsk):
4606+
4607+ m = tsk.master
4608+ if m.stop:
4609+ m.out.put(tsk)
4610+ return
4611+
4612+ try:
4613+ tsk.generator.bld.printout(tsk.display())
4614+ if tsk.__class__.stat: ret = tsk.__class__.stat(tsk)
4615+ # actual call to task's run() function
4616+ else: ret = tsk.call_run()
4617+ except Exception, e:
4618+ tsk.err_msg = Utils.ex_stack()
4619+ tsk.hasrun = EXCEPTION
4620+
4621+ # TODO cleanup
4622+ m.error_handler(tsk)
4623+ m.out.put(tsk)
4624+ return
4625+
4626+ if ret:
4627+ tsk.err_code = ret
4628+ tsk.hasrun = CRASHED
4629+ else:
4630+ try:
4631+ tsk.post_run()
4632+ except Utils.WafError:
4633+ pass
4634+ except Exception:
4635+ tsk.err_msg = Utils.ex_stack()
4636+ tsk.hasrun = EXCEPTION
4637+ else:
4638+ tsk.hasrun = SUCCESS
4639+ if tsk.hasrun != SUCCESS:
4640+ m.error_handler(tsk)
4641+
4642+ m.out.put(tsk)
4643+
4644+class TaskConsumer(threading.Thread):
4645+ ready = Queue(0)
4646+ consumers = []
4647+
4648+ def __init__(self):
4649+ threading.Thread.__init__(self)
4650+ self.setDaemon(1)
4651+ self.start()
4652+
4653+ def run(self):
4654+ try:
4655+ self.loop()
4656+ except:
4657+ pass
4658+
4659+ def loop(self):
4660+ while 1:
4661+ tsk = TaskConsumer.ready.get()
4662+ process_task(tsk)
4663+
4664+class Parallel(object):
4665+ """
4666+ keep the consumer threads busy, and avoid consuming cpu cycles
4667+ when no more tasks can be added (end of the build, etc)
4668+ """
4669+ def __init__(self, bld, j=2):
4670+
4671+ # number of consumers
4672+ self.numjobs = j
4673+
4674+ self.manager = bld.task_manager
4675+ self.manager.current_group = 0
4676+
4677+ self.total = self.manager.total()
4678+
4679+ # tasks waiting to be processed - IMPORTANT
4680+ self.outstanding = []
4681+ self.maxjobs = MAXJOBS
4682+
4683+ # tasks that are waiting for another task to complete
4684+ self.frozen = []
4685+
4686+ # tasks returned by the consumers
4687+ self.out = Queue(0)
4688+
4689+ self.count = 0 # tasks not in the producer area
4690+
4691+ self.processed = 1 # progress indicator
4692+
4693+ self.stop = False # error condition to stop the build
4694+ self.error = False # error flag
4695+
4696+ def get_next(self):
4697+ "override this method to schedule the tasks in a particular order"
4698+ if not self.outstanding:
4699+ return None
4700+ return self.outstanding.pop(0)
4701+
4702+ def postpone(self, tsk):
4703+ "override this method to schedule the tasks in a particular order"
4704+ # TODO consider using a deque instead
4705+ if random.randint(0, 1):
4706+ self.frozen.insert(0, tsk)
4707+ else:
4708+ self.frozen.append(tsk)
4709+
4710+ def refill_task_list(self):
4711+ "called to set the next group of tasks"
4712+
4713+ while self.count > self.numjobs + GAP or self.count >= self.maxjobs:
4714+ self.get_out()
4715+
4716+ while not self.outstanding:
4717+ if self.count:
4718+ self.get_out()
4719+
4720+ if self.frozen:
4721+ self.outstanding += self.frozen
4722+ self.frozen = []
4723+ elif not self.count:
4724+ (jobs, tmp) = self.manager.get_next_set()
4725+ if jobs != None: self.maxjobs = jobs
4726+ if tmp: self.outstanding += tmp
4727+ break
4728+
4729+ def get_out(self):
4730+ "the tasks that were handed out for execution are all collected back using get_out"
4731+ ret = self.out.get()
4732+ self.manager.add_finished(ret)
4733+ if not self.stop and getattr(ret, 'more_tasks', None):
4734+ self.outstanding += ret.more_tasks
4735+ self.total += len(ret.more_tasks)
4736+ self.count -= 1
4737+
4738+ def error_handler(self, tsk):
4739+ "by default, errors make the build stop (not thread safe so be careful)"
4740+ if not Options.options.keep:
4741+ self.stop = True
4742+ self.error = True
4743+
4744+ def start(self):
4745+ "execute the tasks"
4746+
4747+ if TaskConsumer.consumers:
4748+ # the worker pool is usually loaded lazily (see below)
4749+ # in case it is re-used with a different value of numjobs:
4750+ while len(TaskConsumer.consumers) < self.numjobs:
4751+ TaskConsumer.consumers.append(TaskConsumer())
4752+
4753+ while not self.stop:
4754+
4755+ self.refill_task_list()
4756+
4757+ # consider the next task
4758+ tsk = self.get_next()
4759+ if not tsk:
4760+ if self.count:
4761+ # tasks may add new ones after they are run
4762+ continue
4763+ else:
4764+ # no tasks to run, no tasks running, time to exit
4765+ break
4766+
4767+ if tsk.hasrun:
4768+ # if the task is marked as "run", just skip it
4769+ self.processed += 1
4770+ self.manager.add_finished(tsk)
4771+ continue
4772+
4773+ try:
4774+ st = tsk.runnable_status()
4775+ except Exception, e:
4776+ self.processed += 1
4777+ if self.stop and not Options.options.keep:
4778+ tsk.hasrun = SKIPPED
4779+ self.manager.add_finished(tsk)
4780+ continue
4781+ self.error_handler(tsk)
4782+ self.manager.add_finished(tsk)
4783+ tsk.hasrun = EXCEPTION
4784+ tsk.err_msg = Utils.ex_stack()
4785+ continue
4786+
4787+ if st == ASK_LATER:
4788+ self.postpone(tsk)
4789+ elif st == SKIP_ME:
4790+ self.processed += 1
4791+ tsk.hasrun = SKIPPED
4792+ self.manager.add_finished(tsk)
4793+ else:
4794+ # run me: put the task in ready queue
4795+ tsk.position = (self.processed, self.total)
4796+ self.count += 1
4797+ tsk.master = self
4798+ self.processed += 1
4799+
4800+ if self.numjobs == 1:
4801+ process_task(tsk)
4802+ else:
4803+ TaskConsumer.ready.put(tsk)
4804+ # create the consumer threads only if there is something to consume
4805+ if not TaskConsumer.consumers:
4806+ TaskConsumer.consumers = [TaskConsumer() for i in xrange(self.numjobs)]
4807+
4808+ # self.count represents the tasks that have been made available to the consumer threads
4809+ # collect all the tasks after an error, otherwise the message may be incomplete
4810+ while self.error and self.count:
4811+ self.get_out()
4812+
4813+ #print loop
4814+ assert (self.count == 0 or self.stop)
4815+
4816diff --git a/buildtools/wafadmin/Scripting.py b/buildtools/wafadmin/Scripting.py
4817new file mode 100644
4818index 0000000..d975bd9
4819--- /dev/null
4820+++ b/buildtools/wafadmin/Scripting.py
4821@@ -0,0 +1,586 @@
4822+#!/usr/bin/env python
4823+# encoding: utf-8
4824+# Thomas Nagy, 2005 (ita)
4825+
4826+"Module called for configuring, compiling and installing targets"
4827+
4828+import os, sys, shutil, traceback, datetime, inspect, errno
4829+
4830+import Utils, Configure, Build, Logs, Options, Environment, Task
4831+from Logs import error, warn, info
4832+from Constants import *
4833+
4834+g_gz = 'bz2'
4835+commands = []
4836+
4837+def prepare_impl(t, cwd, ver, wafdir):
4838+ Options.tooldir = [t]
4839+ Options.launch_dir = cwd
4840+
4841+ # some command-line options can be processed immediately
4842+ if '--version' in sys.argv:
4843+ opt_obj = Options.Handler()
4844+ opt_obj.curdir = cwd
4845+ opt_obj.parse_args()
4846+ sys.exit(0)
4847+
4848+ # now find the wscript file
4849+ msg1 = 'Waf: Please run waf from a directory containing a file named "%s" or run distclean' % WSCRIPT_FILE
4850+
4851+ # in theory projects can be configured in an autotool-like manner:
4852+ # mkdir build && cd build && ../waf configure && ../waf
4853+ build_dir_override = None
4854+ candidate = None
4855+
4856+ lst = os.listdir(cwd)
4857+
4858+ search_for_candidate = True
4859+ if WSCRIPT_FILE in lst:
4860+ candidate = cwd
4861+
4862+ elif 'configure' in sys.argv and not WSCRIPT_BUILD_FILE in lst:
4863+ # autotool-like configuration
4864+ calldir = os.path.abspath(os.path.dirname(sys.argv[0]))
4865+ if WSCRIPT_FILE in os.listdir(calldir):
4866+ candidate = calldir
4867+ search_for_candidate = False
4868+ else:
4869+ error('arg[0] directory does not contain a wscript file')
4870+ sys.exit(1)
4871+ build_dir_override = cwd
4872+
4873+ # climb up to find a script if it is not found
4874+ while search_for_candidate:
4875+ if len(cwd) <= 3:
4876+ break # stop at / or c:
4877+ dirlst = os.listdir(cwd)
4878+ if WSCRIPT_FILE in dirlst:
4879+ candidate = cwd
4880+ if 'configure' in sys.argv and candidate:
4881+ break
4882+ if Options.lockfile in dirlst:
4883+ env = Environment.Environment()
4884+ try:
4885+ env.load(os.path.join(cwd, Options.lockfile))
4886+ except:
4887+ error('could not load %r' % Options.lockfile)
4888+ try:
4889+ os.stat(env['cwd'])
4890+ except:
4891+ candidate = cwd
4892+ else:
4893+ candidate = env['cwd']
4894+ break
4895+ cwd = os.path.dirname(cwd) # climb up
4896+
4897+ if not candidate:
4898+ # check if the user only wanted to display the help
4899+ if '-h' in sys.argv or '--help' in sys.argv:
4900+ warn('No wscript file found: the help message may be incomplete')
4901+ opt_obj = Options.Handler()
4902+ opt_obj.curdir = cwd
4903+ opt_obj.parse_args()
4904+ else:
4905+ error(msg1)
4906+ sys.exit(0)
4907+
4908+ # We have found wscript, but there is no guarantee that it is valid
4909+ try:
4910+ os.chdir(candidate)
4911+ except OSError:
4912+ raise Utils.WafError("the folder %r is unreadable" % candidate)
4913+
4914+ # define the main module containing the functions init, shutdown, ..
4915+ Utils.set_main_module(os.path.join(candidate, WSCRIPT_FILE))
4916+
4917+ if build_dir_override:
4918+ d = getattr(Utils.g_module, BLDDIR, None)
4919+ if d:
4920+ # test if user has set the blddir in wscript.
4921+ msg = ' Overriding build directory %s with %s' % (d, build_dir_override)
4922+ warn(msg)
4923+ Utils.g_module.blddir = build_dir_override
4924+
4925+ # bind a few methods and classes by default
4926+
4927+ def set_def(obj, name=''):
4928+ n = name or obj.__name__
4929+ if not n in Utils.g_module.__dict__:
4930+ setattr(Utils.g_module, n, obj)
4931+
4932+ for k in [dist, distclean, distcheck, clean, install, uninstall]:
4933+ set_def(k)
4934+
4935+ set_def(Configure.ConfigurationContext, 'configure_context')
4936+
4937+ for k in ['build', 'clean', 'install', 'uninstall']:
4938+ set_def(Build.BuildContext, k + '_context')
4939+
4940+ # now parse the options from the user wscript file
4941+ opt_obj = Options.Handler(Utils.g_module)
4942+ opt_obj.curdir = candidate
4943+ try:
4944+ f = Utils.g_module.set_options
4945+ except AttributeError:
4946+ pass
4947+ else:
4948+ opt_obj.sub_options([''])
4949+ opt_obj.parse_args()
4950+
4951+ if not 'init' in Utils.g_module.__dict__:
4952+ Utils.g_module.init = Utils.nada
4953+ if not 'shutdown' in Utils.g_module.__dict__:
4954+ Utils.g_module.shutdown = Utils.nada
4955+
4956+ main()
4957+
4958+def prepare(t, cwd, ver, wafdir):
4959+ if WAFVERSION != ver:
4960+ msg = 'Version mismatch: waf %s <> wafadmin %s (wafdir %s)' % (ver, WAFVERSION, wafdir)
4961+ print('\033[91mError: %s\033[0m' % msg)
4962+ sys.exit(1)
4963+
4964+ #"""
4965+ try:
4966+ prepare_impl(t, cwd, ver, wafdir)
4967+ except Utils.WafError, e:
4968+ error(str(e))
4969+ sys.exit(1)
4970+ except KeyboardInterrupt:
4971+ Utils.pprint('RED', 'Interrupted')
4972+ sys.exit(68)
4973+ """
4974+ import cProfile, pstats
4975+ cProfile.runctx("import Scripting; Scripting.prepare_impl(t, cwd, ver, wafdir)", {},
4976+ {'t': t, 'cwd':cwd, 'ver':ver, 'wafdir':wafdir},
4977+ 'profi.txt')
4978+ p = pstats.Stats('profi.txt')
4979+ p.sort_stats('time').print_stats(45)
4980+ #"""
4981+
4982+def main():
4983+ global commands
4984+ commands = Options.arg_line[:]
4985+
4986+ while commands:
4987+ x = commands.pop(0)
4988+
4989+ ini = datetime.datetime.now()
4990+ if x == 'configure':
4991+ fun = configure
4992+ elif x == 'build':
4993+ fun = build
4994+ else:
4995+ fun = getattr(Utils.g_module, x, None)
4996+
4997+ if not fun:
4998+ raise Utils.WscriptError('No such command %r' % x)
4999+
5000+ ctx = getattr(Utils.g_module, x + '_context', Utils.Context)()
5001+
5002+ if x in ['init', 'shutdown', 'dist', 'distclean', 'distcheck']:
5003+ # compatibility TODO remove in waf 1.6
5004+ try:
5005+ fun(ctx)
5006+ except TypeError:
5007+ fun()
5008+ else:
5009+ fun(ctx)
5010+
5011+ ela = ''
5012+ if not Options.options.progress_bar:
5013+ ela = ' (%s)' % Utils.get_elapsed_time(ini)
5014+
5015+ if x != 'init' and x != 'shutdown':
5016+ info('%r finished successfully%s' % (x, ela))
5017+
5018+ if not commands and x != 'shutdown':
5019+ commands.append('shutdown')
5020+
5021+def configure(conf):
5022+
5023+ src = getattr(Options.options, SRCDIR, None)
5024+ if not src: src = getattr(Utils.g_module, SRCDIR, None)
5025+ if not src: src = getattr(Utils.g_module, 'top', None)
5026+ if not src:
5027+ src = '.'
5028+ incomplete_src = 1
5029+ src = os.path.abspath(src)
5030+
5031+ bld = getattr(Options.options, BLDDIR, None)
5032+ if not bld: bld = getattr(Utils.g_module, BLDDIR, None)
5033+ if not bld: bld = getattr(Utils.g_module, 'out', None)
5034+ if not bld:
5035+ bld = 'build'
5036+ incomplete_bld = 1
5037+ if bld == '.':
5038+ raise Utils.WafError('Setting blddir="." may cause distclean problems')
5039+ bld = os.path.abspath(bld)
5040+
5041+ try: os.makedirs(bld)
5042+ except OSError: pass
5043+
5044+ # It is not possible to compile specific targets in the configuration
5045+ # this may cause configuration errors if autoconfig is set
5046+ targets = Options.options.compile_targets
5047+ Options.options.compile_targets = None
5048+ Options.is_install = False
5049+
5050+ conf.srcdir = src
5051+ conf.blddir = bld
5052+ conf.post_init()
5053+
5054+ if 'incomplete_src' in vars():
5055+ conf.check_message_1('Setting srcdir to')
5056+ conf.check_message_2(src)
5057+ if 'incomplete_bld' in vars():
5058+ conf.check_message_1('Setting blddir to')
5059+ conf.check_message_2(bld)
5060+
5061+ # calling to main wscript's configure()
5062+ conf.sub_config([''])
5063+
5064+ conf.store()
5065+
5066+ # this will write a configure lock so that subsequent builds will
5067+ # consider the current path as the root directory (see prepare_impl).
5068+ # to remove: use 'waf distclean'
5069+ env = Environment.Environment()
5070+ env[BLDDIR] = bld
5071+ env[SRCDIR] = src
5072+ env['argv'] = sys.argv
5073+ env['commands'] = Options.commands
5074+ env['options'] = Options.options.__dict__
5075+
5076+ # conf.hash & conf.files hold wscript files paths and hash
5077+ # (used only by Configure.autoconfig)
5078+ env['hash'] = conf.hash
5079+ env['files'] = conf.files
5080+ env['environ'] = dict(conf.environ)
5081+ env['cwd'] = os.path.split(Utils.g_module.root_path)[0]
5082+
5083+ if Utils.g_module.root_path != src:
5084+ # in case the source dir is somewhere else
5085+ env.store(os.path.join(src, Options.lockfile))
5086+
5087+ env.store(Options.lockfile)
5088+
5089+ Options.options.compile_targets = targets
5090+
5091+def clean(bld):
5092+ '''removes the build files'''
5093+ try:
5094+ proj = Environment.Environment(Options.lockfile)
5095+ except IOError:
5096+ raise Utils.WafError('Nothing to clean (project not configured)')
5097+
5098+ bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
5099+ bld.load_envs()
5100+
5101+ bld.is_install = 0 # False
5102+
5103+ # read the scripts - and set the path to the wscript path (useful for srcdir='/foo/bar')
5104+ bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
5105+
5106+ try:
5107+ bld.clean()
5108+ finally:
5109+ bld.save()
5110+
5111+def check_configured(bld):
5112+ if not Configure.autoconfig:
5113+ return bld
5114+
5115+ conf_cls = getattr(Utils.g_module, 'configure_context', Utils.Context)
5116+ bld_cls = getattr(Utils.g_module, 'build_context', Utils.Context)
5117+
5118+ def reconf(proj):
5119+ back = (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose)
5120+
5121+ Options.commands = proj['commands']
5122+ Options.options.__dict__ = proj['options']
5123+ conf = conf_cls()
5124+ conf.environ = proj['environ']
5125+ configure(conf)
5126+
5127+ (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose) = back
5128+
5129+ try:
5130+ proj = Environment.Environment(Options.lockfile)
5131+ except IOError:
5132+ conf = conf_cls()
5133+ configure(conf)
5134+ else:
5135+ try:
5136+ bld = bld_cls()
5137+ bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
5138+ bld.load_envs()
5139+ except Utils.WafError:
5140+ reconf(proj)
5141+ return bld_cls()
5142+
5143+ try:
5144+ proj = Environment.Environment(Options.lockfile)
5145+ except IOError:
5146+ raise Utils.WafError('Auto-config: project does not configure (bug)')
5147+
5148+ h = 0
5149+ try:
5150+ for file in proj['files']:
5151+ if file.endswith('configure'):
5152+ h = hash((h, Utils.readf(file)))
5153+ else:
5154+ mod = Utils.load_module(file)
5155+ h = hash((h, mod.waf_hash_val))
5156+ except (OSError, IOError):
5157+ warn('Reconfiguring the project: a file is unavailable')
5158+ reconf(proj)
5159+ else:
5160+ if (h != proj['hash']):
5161+ warn('Reconfiguring the project: the configuration has changed')
5162+ reconf(proj)
5163+
5164+ return bld_cls()
5165+
5166+def install(bld):
5167+ '''installs the build files'''
5168+ bld = check_configured(bld)
5169+
5170+ Options.commands['install'] = True
5171+ Options.commands['uninstall'] = False
5172+ Options.is_install = True
5173+
5174+ bld.is_install = INSTALL
5175+
5176+ build_impl(bld)
5177+ bld.install()
5178+
5179+def uninstall(bld):
5180+ '''removes the installed files'''
5181+ Options.commands['install'] = False
5182+ Options.commands['uninstall'] = True
5183+ Options.is_install = True
5184+
5185+ bld.is_install = UNINSTALL
5186+
5187+ try:
5188+ def runnable_status(self):
5189+ return SKIP_ME
5190+ setattr(Task.Task, 'runnable_status_back', Task.Task.runnable_status)
5191+ setattr(Task.Task, 'runnable_status', runnable_status)
5192+
5193+ build_impl(bld)
5194+ bld.install()
5195+ finally:
5196+ setattr(Task.Task, 'runnable_status', Task.Task.runnable_status_back)
5197+
5198+def build(bld):
5199+ bld = check_configured(bld)
5200+
5201+ Options.commands['install'] = False
5202+ Options.commands['uninstall'] = False
5203+ Options.is_install = False
5204+
5205+ bld.is_install = 0 # False
5206+
5207+ return build_impl(bld)
5208+
5209+def build_impl(bld):
5210+ # compile the project and/or install the files
5211+ try:
5212+ proj = Environment.Environment(Options.lockfile)
5213+ except IOError:
5214+ raise Utils.WafError("Project not configured (run 'waf configure' first)")
5215+
5216+ bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
5217+ bld.load_envs()
5218+
5219+ info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
5220+ bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
5221+
5222+ # execute something immediately before the build starts
5223+ bld.pre_build()
5224+
5225+ try:
5226+ bld.compile()
5227+ finally:
5228+ if Options.options.progress_bar: print('')
5229+ info("Waf: Leaving directory `%s'" % bld.bldnode.abspath())
5230+
5231+ # execute something immediately after a successful build
5232+ bld.post_build()
5233+
5234+ bld.install()
5235+
5236+excludes = '.bzr .bzrignore .git .gitignore .svn CVS .cvsignore .arch-ids {arch} SCCS BitKeeper .hg _MTN _darcs Makefile Makefile.in config.log .gitattributes .hgignore .hgtags'.split()
5237+dist_exts = '~ .rej .orig .pyc .pyo .bak .tar.bz2 tar.gz .zip .swp'.split()
5238+def dont_dist(name, src, build_dir):
5239+ global excludes, dist_exts
5240+
5241+ if (name.startswith(',,')
5242+ or name.startswith('++')
5243+ or name.startswith('.waf')
5244+ or (src == '.' and name == Options.lockfile)
5245+ or name in excludes
5246+ or name == build_dir
5247+ ):
5248+ return True
5249+
5250+ for ext in dist_exts:
5251+ if name.endswith(ext):
5252+ return True
5253+
5254+ return False
5255+
5256+# like shutil.copytree, but excludes some files
5257+# and raises exceptions immediately
5258+def copytree(src, dst, build_dir):
5259+ names = os.listdir(src)
5260+ os.makedirs(dst)
5261+ for name in names:
5262+ srcname = os.path.join(src, name)
5263+ dstname = os.path.join(dst, name)
5264+
5265+ if dont_dist(name, src, build_dir):
5266+ continue
5267+
5268+ if os.path.isdir(srcname):
5269+ copytree(srcname, dstname, build_dir)
5270+ else:
5271+ shutil.copy2(srcname, dstname)
5272+
5273+# TODO in waf 1.6, change this method if "srcdir == blddir" is allowed
5274+def distclean(ctx=None):
5275+ '''removes the build directory'''
5276+ global commands
5277+ lst = os.listdir('.')
5278+ for f in lst:
5279+ if f == Options.lockfile:
5280+ try:
5281+ proj = Environment.Environment(f)
5282+ except:
5283+ Logs.warn('could not read %r' % f)
5284+ continue
5285+
5286+ try:
5287+ shutil.rmtree(proj[BLDDIR])
5288+ except IOError:
5289+ pass
5290+ except OSError, e:
5291+ if e.errno != errno.ENOENT:
5292+ Logs.warn('project %r cannot be removed' % proj[BLDDIR])
5293+
5294+ try:
5295+ os.remove(f)
5296+ except OSError, e:
5297+ if e.errno != errno.ENOENT:
5298+ Logs.warn('file %r cannot be removed' % f)
5299+
5300+ # remove the local waf cache
5301+ if not commands and f.startswith('.waf'):
5302+ shutil.rmtree(f, ignore_errors=True)
5303+
5304+# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
5305+def dist(appname='', version=''):
5306+ '''makes a tarball for redistributing the sources'''
5307+ # returns (distdirname, tarballname)
5308+ import tarfile
5309+
5310+ if not appname: appname = Utils.g_module.APPNAME
5311+ if not version: version = Utils.g_module.VERSION
5312+
5313+ tmp_folder = appname + '-' + version
5314+ if g_gz in ['gz', 'bz2']:
5315+ arch_name = tmp_folder + '.tar.' + g_gz
5316+ else:
5317+ arch_name = tmp_folder + '.' + 'zip'
5318+
5319+ # remove the previous dir
5320+ try:
5321+ shutil.rmtree(tmp_folder)
5322+ except (OSError, IOError):
5323+ pass
5324+
5325+ # remove the previous archive
5326+ try:
5327+ os.remove(arch_name)
5328+ except (OSError, IOError):
5329+ pass
5330+
5331+ # copy the files into the temporary folder
5332+ blddir = getattr(Utils.g_module, BLDDIR, None)
5333+ if not blddir:
5334+ blddir = getattr(Utils.g_module, 'out', None)
5335+ copytree('.', tmp_folder, blddir)
5336+
5337+ # undocumented hook for additional cleanup
5338+ dist_hook = getattr(Utils.g_module, 'dist_hook', None)
5339+ if dist_hook:
5340+ back = os.getcwd()
5341+ os.chdir(tmp_folder)
5342+ try:
5343+ dist_hook()
5344+ finally:
5345+ # go back to the root directory
5346+ os.chdir(back)
5347+
5348+ if g_gz in ['gz', 'bz2']:
5349+ tar = tarfile.open(arch_name, 'w:' + g_gz)
5350+ tar.add(tmp_folder)
5351+ tar.close()
5352+ else:
5353+ Utils.zip_folder(tmp_folder, arch_name, tmp_folder)
5354+
5355+ try: from hashlib import sha1 as sha
5356+ except ImportError: from sha import sha
5357+ try:
5358+ digest = " (sha=%r)" % sha(Utils.readf(arch_name)).hexdigest()
5359+ except:
5360+ digest = ''
5361+
5362+ info('New archive created: %s%s' % (arch_name, digest))
5363+
5364+ if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
5365+ return arch_name
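[Editorial note: a hedged example of the undocumented dist_hook mentioned above; the file name is illustrative. The hook takes no arguments and runs with the temporary dist folder as the current directory, so it can prune files before the archive is created:]

    # hypothetical wscript fragment
    import os
    def dist_hook():
        if os.path.exists('config.log'):
            os.remove('config.log')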
5366+
5367+# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
5368+def distcheck(appname='', version='', subdir=''):
5369+ '''checks if the sources compile (tarball from 'dist')'''
5370+ import tempfile, tarfile
5371+
5372+ if not appname: appname = Utils.g_module.APPNAME
5373+ if not version: version = Utils.g_module.VERSION
5374+
5375+ waf = os.path.abspath(sys.argv[0])
5376+ tarball = dist(appname, version)
5377+
5378+ path = appname + '-' + version
5379+
5380+ # remove any previous instance
5381+ if os.path.exists(path):
5382+ shutil.rmtree(path)
5383+
5384+ t = tarfile.open(tarball)
5385+ for x in t: t.extract(x)
5386+ t.close()
5387+
5388+ # build_path is the directory for the waf invocation
5389+ if subdir:
5390+ build_path = os.path.join(path, subdir)
5391+ else:
5392+ build_path = path
5393+
5394+ instdir = tempfile.mkdtemp('.inst', '%s-%s' % (appname, version))
5395+ ret = Utils.pproc.Popen([waf, 'configure', 'build', 'install', 'uninstall', '--destdir=' + instdir], cwd=build_path).wait()
5396+ if ret:
5397+ raise Utils.WafError('distcheck failed with code %i' % ret)
5398+
5399+ if os.path.exists(instdir):
5400+ raise Utils.WafError('distcheck succeeded, but files were left in %s' % instdir)
5401+
5402+ shutil.rmtree(path)
5403+
5404+# FIXME remove in Waf 1.6 (kept for compatibility)
5405+def add_subdir(dir, bld):
5406+ bld.recurse(dir, 'build')
5407+
5408diff --git a/buildtools/wafadmin/Task.py b/buildtools/wafadmin/Task.py
5409new file mode 100644
5410index 0000000..5cda2ec
5411--- /dev/null
5412+++ b/buildtools/wafadmin/Task.py
5413@@ -0,0 +1,1200 @@
5414+#!/usr/bin/env python
5415+# encoding: utf-8
5416+# Thomas Nagy, 2005-2008 (ita)
5417+
5418+"""
5419+Running tasks in parallel is a simple problem, but in practice it is more complicated:
5420+* dependencies discovered during the build (dynamic task creation)
5421+* dependencies discovered after files are compiled
5422+* the amount of tasks and dependencies (graph size) can be huge
5423+
5424+This is why the dependency management is split into three different levels:
5425+1. groups of tasks that all run after another group of tasks
5426+2. groups of tasks that can be run in parallel
5427+3. tasks that can run in parallel, but with possible unknown ad-hoc dependencies
5428+
5429+The point #1 represents a strict sequential order between groups of tasks, for example a compiler is produced
5430+and used to compile the rest, whereas #2 and #3 represent partial order constraints where #2 applies to the kind of task
5431+and #3 applies to the task instances.
5432+
5433+#1 is held by the task manager: ordered list of TaskGroups (see bld.add_group)
5434+#2 is held by the task groups and the task types: precedence after/before (topological sort),
5435+ and the constraints extracted from file extensions
5436+#3 is held by the tasks individually (attribute run_after),
5437+ and the scheduler (Runner.py) use Task::runnable_status to reorder the tasks
5438+
5439+--
5440+
5441+To try, use something like this in your code:
5442+import Constants, Task
5443+Task.algotype = Constants.MAXPARALLEL
5444+
5445+--
5446+
5447+There are two concepts with the tasks (individual units of change):
5448+* dependency (if 1 is recompiled, recompile 2)
5449+* order (run 2 after 1)
5450+
5451+example 1: if t1 depends on t2 and t2 depends on t3 it is not necessary to make t1 depend on t3 (dependency is transitive)
5452+example 2: if t1 depends on a node produced by t2, it is not immediately obvious that t1 must run after t2 (order is not obvious)
5453+
5454+The role of the Task Manager is to give the tasks in order (groups of tasks that may be run in parallel one after the other)
5455+
5456+"""
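[Editorial note: a hedged illustration of level #1 above; the rules and file names are invented. bld.add_group() closes the current TaskGroup, and task generators added afterwards only run once the previous group has finished:]

    # hypothetical wscript fragment
    def build(bld):
        bld(rule='python gen.py > ${TGT}', target='gen.c')                     # producer group
        bld.add_group()                                                        # strict barrier
        bld(rule='${CC} -c ${SRC} -o ${TGT}', source='gen.c', target='gen.o')  # consumer group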
5457+
5458+import os, shutil, sys, re, random, datetime, tempfile, shlex
5459+from Utils import md5
5460+import Build, Runner, Utils, Node, Logs, Options
5461+from Logs import debug, warn, error
5462+from Constants import *
5463+
5464+algotype = NORMAL
5465+#algotype = JOBCONTROL
5466+#algotype = MAXPARALLEL
5467+
5468+COMPILE_TEMPLATE_SHELL = '''
5469+def f(task):
5470+ env = task.env
5471+ wd = getattr(task, 'cwd', None)
5472+ p = env.get_flat
5473+ cmd = \'\'\' %s \'\'\' % s
5474+ return task.exec_command(cmd, cwd=wd)
5475+'''
5476+
5477+COMPILE_TEMPLATE_NOSHELL = '''
5478+def f(task):
5479+ env = task.env
5480+ wd = getattr(task, 'cwd', None)
5481+ def to_list(xx):
5482+ if isinstance(xx, str): return [xx]
5483+ return xx
5484+ lst = []
5485+ %s
5486+ lst = [x for x in lst if x]
5487+ return task.exec_command(lst, cwd=wd)
5488+'''
5489+
5490+
5491+"""
5492+Enable different kinds of dependency algorithms:
5493+1 make groups: first compile all cpps and then compile all links (NORMAL)
5494+2 parallelize all (each link task run after its dependencies) (MAXPARALLEL)
5495+3 like 1 but provide additional constraints for the parallelization (MAXJOBS)
5496+
5497+In theory scheme 1 will be faster than scheme 2 for waf itself, but it might be slower for some builds
5498+Scheme 2 does not allow running tasks one by one, so it can cause disk thrashing on huge builds
5499+"""
5500+
5501+file_deps = Utils.nada
5502+"""
5503+An additional dependency pre-check may be added by replacing the function file_deps,
5504+e.g. with extract_outputs or extract_deps below.
5505+"""
5506+
5507+class TaskManager(object):
5508+ """The manager is attached to the build object; it holds a list of TaskGroups"""
5509+ def __init__(self):
5510+ self.groups = []
5511+ self.tasks_done = []
5512+ self.current_group = 0
5513+ self.groups_names = {}
5514+
5515+ def group_name(self, g):
5516+ """name for the group g (utility)"""
5517+ if not isinstance(g, TaskGroup):
5518+ g = self.groups[g]
5519+ for x in self.groups_names:
5520+ if id(self.groups_names[x]) == id(g):
5521+ return x
5522+ return ''
5523+
5524+ def group_idx(self, tg):
5525+ """group the task generator tg is in"""
5526+ se = id(tg)
5527+ for i in range(len(self.groups)):
5528+ g = self.groups[i]
5529+ for t in g.tasks_gen:
5530+ if id(t) == se:
5531+ return i
5532+ return None
5533+
5534+ def get_next_set(self):
5535+ """return the next set of tasks to execute;
5536+ the first element of the returned tuple is the maximum amount of parallelization that may occur"""
5537+ ret = None
5538+ while not ret and self.current_group < len(self.groups):
5539+ ret = self.groups[self.current_group].get_next_set()
5540+ if ret: return ret
5541+ else:
5542+ self.groups[self.current_group].process_install()
5543+ self.current_group += 1
5544+ return (None, None)
5545+
5546+ def add_group(self, name=None, set=True):
5547+ #if self.groups and not self.groups[0].tasks:
5548+ # error('add_group: an empty group is already present')
5549+ g = TaskGroup()
5550+
5551+ if name and name in self.groups_names:
5552+ error('add_group: name %s already present' % name)
5553+ self.groups_names[name] = g
5554+ self.groups.append(g)
5555+ if set:
5556+ self.current_group = len(self.groups) - 1
5557+
5558+ def set_group(self, idx):
5559+ if isinstance(idx, str):
5560+ g = self.groups_names[idx]
5561+ for x in xrange(len(self.groups)):
5562+ if id(g) == id(self.groups[x]):
5563+ self.current_group = x
5564+ else:
5565+ self.current_group = idx
5566+
5567+ def add_task_gen(self, tgen):
5568+ if not self.groups: self.add_group()
5569+ self.groups[self.current_group].tasks_gen.append(tgen)
5570+
5571+ def add_task(self, task):
5572+ if not self.groups: self.add_group()
5573+ self.groups[self.current_group].tasks.append(task)
5574+
5575+ def total(self):
5576+ total = 0
5577+ if not self.groups: return 0
5578+ for group in self.groups:
5579+ total += len(group.tasks)
5580+ return total
5581+
5582+ def add_finished(self, tsk):
5583+ self.tasks_done.append(tsk)
5584+ bld = tsk.generator.bld
5585+ if bld.is_install:
5586+ f = None
5587+ if 'install' in tsk.__dict__:
5588+ f = tsk.__dict__['install']
5589+ # install=0 to prevent installation
5590+ if f: f(tsk)
5591+ else:
5592+ tsk.install()
5593+
5594+class TaskGroup(object):
5595+ "the compilation of one group does not begin until the previous group has finished (in the manager)"
5596+ def __init__(self):
5597+ self.tasks = [] # this list will be consumed
5598+ self.tasks_gen = []
5599+
5600+ self.cstr_groups = Utils.DefaultDict(list) # tasks having equivalent constraints
5601+ self.cstr_order = Utils.DefaultDict(set) # partial order between the cstr groups
5602+ self.temp_tasks = [] # tasks put on hold
5603+ self.ready = 0
5604+ self.post_funs = []
5605+
5606+ def reset(self):
5607+ "clears the state of the object (puts the tasks back into self.tasks)"
5608+ for x in self.cstr_groups:
5609+ self.tasks += self.cstr_groups[x]
5610+ self.tasks = self.temp_tasks + self.tasks
5611+ self.temp_tasks = []
5612+ self.cstr_groups = Utils.DefaultDict(list)
5613+ self.cstr_order = Utils.DefaultDict(set)
5614+ self.ready = 0
5615+
5616+ def process_install(self):
5617+ for (f, k, kw) in self.post_funs:
5618+ f(*k, **kw)
5619+
5620+ def prepare(self):
5621+ "prepare the scheduling"
5622+ self.ready = 1
5623+ file_deps(self.tasks)
5624+ self.make_cstr_groups()
5625+ self.extract_constraints()
5626+
5627+ def get_next_set(self):
5628+ "next list of tasks to execute using max job settings, returns (maxjobs, task_list)"
5629+ global algotype
5630+ if algotype == NORMAL:
5631+ tasks = self.tasks_in_parallel()
5632+ maxj = MAXJOBS
5633+ elif algotype == JOBCONTROL:
5634+ (maxj, tasks) = self.tasks_by_max_jobs()
5635+ elif algotype == MAXPARALLEL:
5636+ tasks = self.tasks_with_inner_constraints()
5637+ maxj = MAXJOBS
5638+ else:
5639+ raise Utils.WafError("unknown algorithm type %s" % (algotype))
5640+
5641+ if not tasks: return ()
5642+ return (maxj, tasks)
5643+
5644+ def make_cstr_groups(self):
5645+ "unite the tasks that have similar constraints"
5646+ self.cstr_groups = Utils.DefaultDict(list)
5647+ for x in self.tasks:
5648+ h = x.hash_constraints()
5649+ self.cstr_groups[h].append(x)
5650+
5651+ def set_order(self, a, b):
5652+ self.cstr_order[a].add(b)
5653+
5654+ def compare_exts(self, t1, t2):
5655+ "extension production"
5656+ x = "ext_in"
5657+ y = "ext_out"
5658+ in_ = t1.attr(x, ())
5659+ out_ = t2.attr(y, ())
5660+ for k in in_:
5661+ if k in out_:
5662+ return -1
5663+ in_ = t2.attr(x, ())
5664+ out_ = t1.attr(y, ())
5665+ for k in in_:
5666+ if k in out_:
5667+ return 1
5668+ return 0
5669+
5670+ def compare_partial(self, t1, t2):
5671+ "partial relations after/before"
5672+ m = "after"
5673+ n = "before"
5674+ name = t2.__class__.__name__
5675+ if name in Utils.to_list(t1.attr(m, ())): return -1
5676+ elif name in Utils.to_list(t1.attr(n, ())): return 1
5677+ name = t1.__class__.__name__
5678+ if name in Utils.to_list(t2.attr(m, ())): return 1
5679+ elif name in Utils.to_list(t2.attr(n, ())): return -1
5680+ return 0
5681+
5682+ def extract_constraints(self):
5683+ "extract the parallelization constraints from the tasks with different constraints"
5684+ keys = self.cstr_groups.keys()
5685+ max = len(keys)
5686+ # hopefully the length of this list is short
5687+ for i in xrange(max):
5688+ t1 = self.cstr_groups[keys[i]][0]
5689+ for j in xrange(i + 1, max):
5690+ t2 = self.cstr_groups[keys[j]][0]
5691+
5692+ # add the constraints based on the comparisons
5693+ val = (self.compare_exts(t1, t2)
5694+ or self.compare_partial(t1, t2)
5695+ )
5696+ if val > 0:
5697+ self.set_order(keys[i], keys[j])
5698+ elif val < 0:
5699+ self.set_order(keys[j], keys[i])
5700+
5701+ def tasks_in_parallel(self):
5702+ "(NORMAL) next list of tasks that may be executed in parallel"
5703+
5704+ if not self.ready: self.prepare()
5705+
5706+ keys = self.cstr_groups.keys()
5707+
5708+ unconnected = []
5709+ remainder = []
5710+
5711+ for u in keys:
5712+ for k in self.cstr_order.values():
5713+ if u in k:
5714+ remainder.append(u)
5715+ break
5716+ else:
5717+ unconnected.append(u)
5718+
5719+ toreturn = []
5720+ for y in unconnected:
5721+ toreturn.extend(self.cstr_groups[y])
5722+
5723+ # remove stuff only after
5724+ for y in unconnected:
5725+ try: self.cstr_order.__delitem__(y)
5726+ except KeyError: pass
5727+ self.cstr_groups.__delitem__(y)
5728+
5729+ if not toreturn and remainder:
5730+ raise Utils.WafError("circular order constraint detected %r" % remainder)
5731+
5732+ return toreturn
5733+
5734+ def tasks_by_max_jobs(self):
5735+ "(JOBCONTROL) returns the tasks that can run in parallel with the max amount of jobs"
5736+ if not self.ready: self.prepare()
5737+ if not self.temp_tasks: self.temp_tasks = self.tasks_in_parallel()
5738+ if not self.temp_tasks: return (None, None)
5739+
5740+ maxjobs = MAXJOBS
5741+ ret = []
5742+ remaining = []
5743+ for t in self.temp_tasks:
5744+ m = getattr(t, "maxjobs", getattr(self.__class__, "maxjobs", MAXJOBS))
5745+ if m > maxjobs:
5746+ remaining.append(t)
5747+ elif m < maxjobs:
5748+ remaining += ret
5749+ ret = [t]
5750+ maxjobs = m
5751+ else:
5752+ ret.append(t)
5753+ self.temp_tasks = remaining
5754+ return (maxjobs, ret)
5755+
5756+ def tasks_with_inner_constraints(self):
5757+ """(MAXPARALLEL) returns all tasks in this group, but adds the constraints to each task instance;
5758+ as an optimization, it might be desirable to discard the tasks which do not have to run"""
5759+ if not self.ready: self.prepare()
5760+
5761+ if getattr(self, "done", None): return None
5762+
5763+ for p in self.cstr_order:
5764+ for v in self.cstr_order[p]:
5765+ for m in self.cstr_groups[p]:
5766+ for n in self.cstr_groups[v]:
5767+ n.set_run_after(m)
5768+ self.cstr_order = Utils.DefaultDict(set)
5769+ self.cstr_groups = Utils.DefaultDict(list)
5770+ self.done = 1
5771+ return self.tasks[:] # make a copy
5772+
5773+class store_task_type(type):
5774+ "store the task types that have a name ending in _task into a map (remember the existing task types)"
5775+ def __init__(cls, name, bases, dict):
5776+ super(store_task_type, cls).__init__(name, bases, dict)
5777+ name = cls.__name__
5778+
5779+ if name.endswith('_task'):
5780+ name = name.replace('_task', '')
5781+ if name != 'TaskBase':
5782+ TaskBase.classes[name] = cls
5783+
5784+class TaskBase(object):
5785+ """Base class for all Waf tasks
5786+
5787+ The most important methods are (by usual order of call):
5788+ 1 runnable_status: ask the task if it should be run, skipped, or if we have to ask later
5789+ 2 __str__: string to display to the user
5790+ 3 run: execute the task
5791+ 4 post_run: after the task is run, update the cache about the task
5792+
5793+ This class should be seen as an interface; it provides the very minimum necessary for the scheduler,
5794+ so it does not do much.
5795+
5796+ For illustration purposes, TaskBase instances try to execute self.fun (if provided)
5797+ """
5798+
5799+ __metaclass__ = store_task_type
5800+
5801+ color = "GREEN"
5802+ maxjobs = MAXJOBS
5803+ classes = {}
5804+ stat = None
5805+
5806+ def __init__(self, *k, **kw):
5807+ self.hasrun = NOT_RUN
5808+
5809+ try:
5810+ self.generator = kw['generator']
5811+ except KeyError:
5812+ self.generator = self
5813+ self.bld = Build.bld
5814+
5815+ if kw.get('normal', 1):
5816+ self.generator.bld.task_manager.add_task(self)
5817+
5818+ def __repr__(self):
5819+ "used for debugging"
5820+ return '\n\t{task: %s %s}' % (self.__class__.__name__, str(getattr(self, "fun", "")))
5821+
5822+ def __str__(self):
5823+ "string to display to the user"
5824+ if hasattr(self, 'fun'):
5825+ return 'executing: %s\n' % self.fun.__name__
5826+ return self.__class__.__name__ + '\n'
5827+
5828+ def exec_command(self, *k, **kw):
5829+ "use this for executing commands from tasks"
5830+ # TODO in waf 1.6, eliminate bld.exec_command, and move the cwd processing to here
5831+ if self.env['env']:
5832+ kw['env'] = self.env['env']
5833+ return self.generator.bld.exec_command(*k, **kw)
5834+
5835+ def runnable_status(self):
5836+ "RUN_ME SKIP_ME or ASK_LATER"
5837+ return RUN_ME
5838+
5839+ def can_retrieve_cache(self):
5840+ return False
5841+
5842+ def call_run(self):
5843+ if self.can_retrieve_cache():
5844+ return 0
5845+ return self.run()
5846+
5847+ def run(self):
5848+ "called if the task must run"
5849+ if hasattr(self, 'fun'):
5850+ return self.fun(self)
5851+ return 0
5852+
5853+ def post_run(self):
5854+ "update the dependency tree (node stats)"
5855+ pass
5856+
5857+ def display(self):
5858+ "print either the description (using __str__) or the progress bar or the ide output"
5859+ col1 = Logs.colors(self.color)
5860+ col2 = Logs.colors.NORMAL
5861+
5862+ if Options.options.progress_bar == 1:
5863+ return self.generator.bld.progress_line(self.position[0], self.position[1], col1, col2)
5864+
5865+ if Options.options.progress_bar == 2:
5866+ ela = Utils.get_elapsed_time(self.generator.bld.ini)
5867+ try:
5868+ ins = ','.join([n.name for n in self.inputs])
5869+ except AttributeError:
5870+ ins = ''
5871+ try:
5872+ outs = ','.join([n.name for n in self.outputs])
5873+ except AttributeError:
5874+ outs = ''
5875+ return '|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n' % (self.position[1], self.position[0], ins, outs, ela)
5876+
5877+ total = self.position[1]
5878+ n = len(str(total))
5879+ fs = '[%%%dd/%%%dd] %%s%%s%%s' % (n, n)
5880+ return fs % (self.position[0], self.position[1], col1, str(self), col2)
5881+
5882+ def attr(self, att, default=None):
5883+ "retrieve an attribute from the instance or from the class (microoptimization here)"
5884+ ret = getattr(self, att, self)
5885+ if ret is self: return getattr(self.__class__, att, default)
5886+ return ret
5887+
5888+ def hash_constraints(self):
5889+ "identify a task type for all the constraints relevant for the scheduler: precedence, file production"
5890+ a = self.attr
5891+ sum = hash((self.__class__.__name__,
5892+ str(a('before', '')),
5893+ str(a('after', '')),
5894+ str(a('ext_in', '')),
5895+ str(a('ext_out', '')),
5896+ self.__class__.maxjobs))
5897+ return sum
5898+
5899+ def format_error(self):
5900+ "error message to display to the user (when a build fails)"
5901+ if getattr(self, "err_msg", None):
5902+ return self.err_msg
5903+ elif self.hasrun == CRASHED:
5904+ try:
5905+ return " -> task failed (err #%d): %r" % (self.err_code, self)
5906+ except AttributeError:
5907+ return " -> task failed: %r" % self
5908+ elif self.hasrun == MISSING:
5909+ return " -> missing files: %r" % self
5910+ else:
5911+ return ''
5912+
5913+ def install(self):
5914+ """
5915+ installation is performed by looking at the task attributes:
5916+ * install_path: installation path like "${PREFIX}/bin"
5917+ * filename: install the first node in the outputs as a file with a particular name, be certain to give os.sep
5918+ * chmod: permissions
5919+ """
5920+ bld = self.generator.bld
5921+ d = self.attr('install')
5922+
5923+ if self.attr('install_path'):
5924+ lst = [a.relpath_gen(bld.srcnode) for a in self.outputs]
5925+ perm = self.attr('chmod', O644)
5926+ if self.attr('src'):
5927+ # if src is given, install the sources too
5928+ lst += [a.relpath_gen(bld.srcnode) for a in self.inputs]
5929+ if self.attr('filename'):
5930+ dir = self.install_path.rstrip(os.sep) + os.sep + self.attr('filename')
5931+ bld.install_as(dir, lst[0], self.env, perm)
5932+ else:
5933+ bld.install_files(self.install_path, lst, self.env, perm)
5934+
5935+class Task(TaskBase):
5936+ """The parent class is quite limited; this subclass adds:
5937+ * file system interaction: input and output nodes
5938+ * persistence: do not re-execute tasks that have already run
5939+ * caching: same files can be saved and retrieved from a cache directory
5940+ * dependencies:
5941+ implicit, like .c files depending on .h files
5942+ explicit, like the input nodes or the dep_nodes
5943+ environment variables, like the CXXFLAGS in self.env
5944+ """
5945+ vars = []
5946+ def __init__(self, env, **kw):
5947+ TaskBase.__init__(self, **kw)
5948+ self.env = env
5949+
5950+ # inputs and outputs are nodes
5951+ # use setters when possible
5952+ self.inputs = []
5953+ self.outputs = []
5954+
5955+ self.dep_nodes = []
5956+ self.run_after = []
5957+
5958+ # Additionally, you may define the following
5959+ #self.dep_vars = 'PREFIX DATADIR'
5960+
5961+ def __str__(self):
5962+ "string to display to the user"
5963+ env = self.env
5964+ src_str = ' '.join([a.nice_path(env) for a in self.inputs])
5965+ tgt_str = ' '.join([a.nice_path(env) for a in self.outputs])
5966+ if self.outputs: sep = ' -> '
5967+ else: sep = ''
5968+ return '%s: %s%s%s\n' % (self.__class__.__name__.replace('_task', ''), src_str, sep, tgt_str)
5969+
5970+ def __repr__(self):
5971+ return "".join(['\n\t{task: ', self.__class__.__name__, " ", ",".join([x.name for x in self.inputs]), " -> ", ",".join([x.name for x in self.outputs]), '}'])
5972+
5973+ def unique_id(self):
5974+ "get a unique id: hash the node paths, the variant, the class, the function"
5975+ try:
5976+ return self.uid
5977+ except AttributeError:
5978+ "this is not a real hot zone, but we want to avoid surprises here"
5979+ m = md5()
5980+ up = m.update
5981+ up(self.__class__.__name__)
5982+ up(self.env.variant())
5983+ p = None
5984+ for x in self.inputs + self.outputs:
5985+ if p != x.parent.id:
5986+ p = x.parent.id
5987+ up(x.parent.abspath())
5988+ up(x.name)
5989+ self.uid = m.digest()
5990+ return self.uid
5991+
5992+ def set_inputs(self, inp):
5993+ if isinstance(inp, list): self.inputs += inp
5994+ else: self.inputs.append(inp)
5995+
5996+ def set_outputs(self, out):
5997+ if isinstance(out, list): self.outputs += out
5998+ else: self.outputs.append(out)
5999+
6000+ def set_run_after(self, task):
6001+ "set (scheduler) order on another task"
6002+ # TODO: handle list or object
6003+ assert isinstance(task, TaskBase)
6004+ self.run_after.append(task)
6005+
6006+ def add_file_dependency(self, filename):
6007+ "TODO user-provided file dependencies"
6008+ node = self.generator.bld.path.find_resource(filename)
6009+ self.dep_nodes.append(node)
6010+
6011+ def signature(self):
6012+ # compute the result one time, and assume the scan_signature will give the right result
6013+ try: return self.cache_sig[0]
6014+ except AttributeError: pass
6015+
6016+ self.m = md5()
6017+
6018+ # explicit deps
6019+ exp_sig = self.sig_explicit_deps()
6020+
6021+ # env vars
6022+ var_sig = self.sig_vars()
6023+
6024+ # implicit deps
6025+
6026+ imp_sig = SIG_NIL
6027+ if self.scan:
6028+ try:
6029+ imp_sig = self.sig_implicit_deps()
6030+ except ValueError:
6031+ return self.signature()
6032+
6033+ # we now have the signature (first element) and the details (for debugging)
6034+ ret = self.m.digest()
6035+ self.cache_sig = (ret, exp_sig, imp_sig, var_sig)
6036+ return ret
6037+
6038+ def runnable_status(self):
6039+ "SKIP_ME RUN_ME or ASK_LATER"
6040+ #return 0 # benchmarking
6041+
6042+ if self.inputs and (not self.outputs):
6043+ if not getattr(self.__class__, 'quiet', None):
6044+ warn("invalid task (no inputs OR outputs): override in a Task subclass or set the attribute 'quiet' %r" % self)
6045+
6046+ for t in self.run_after:
6047+ if not t.hasrun:
6048+ return ASK_LATER
6049+
6050+ env = self.env
6051+ bld = self.generator.bld
6052+
6053+ # first compute the signature
6054+ new_sig = self.signature()
6055+
6056+ # compare the signature to a signature computed previously
6057+ key = self.unique_id()
6058+ try:
6059+ prev_sig = bld.task_sigs[key][0]
6060+ except KeyError:
6061+ debug("task: task %r must run as it was never run before or the task code changed", self)
6062+ return RUN_ME
6063+
6064+ # compare the signatures of the outputs
6065+ for node in self.outputs:
6066+ variant = node.variant(env)
6067+ try:
6068+ if bld.node_sigs[variant][node.id] != new_sig:
6069+ return RUN_ME
6070+ except KeyError:
6071+ debug("task: task %r must run as the output nodes do not exist", self)
6072+ return RUN_ME
6073+
6074+ # debug if asked to
6075+ if Logs.verbose: self.debug_why(bld.task_sigs[key])
6076+
6077+ if new_sig != prev_sig:
6078+ return RUN_ME
6079+ return SKIP_ME
6080+
6081+ def post_run(self):
6082+ "called after a successful task run"
6083+ bld = self.generator.bld
6084+ env = self.env
6085+ sig = self.signature()
6086+ ssig = sig.encode('hex')
6087+
6088+ variant = env.variant()
6089+ for node in self.outputs:
6090+ # check if the node exists ..
6091+ try:
6092+ os.stat(node.abspath(env))
6093+ except OSError:
6094+ self.hasrun = MISSING
6095+ self.err_msg = '-> missing file: %r' % node.abspath(env)
6096+ raise Utils.WafError
6097+
6098+ # important, store the signature for the next run
6099+ bld.node_sigs[variant][node.id] = sig
6100+ bld.task_sigs[self.unique_id()] = self.cache_sig
6101+
6102+ # file caching, if possible
6103+ # try to avoid data corruption as much as possible
6104+ if not Options.cache_global or Options.options.nocache or not self.outputs:
6105+ return None
6106+
6107+ if getattr(self, 'cached', None):
6108+ return None
6109+
6110+ dname = os.path.join(Options.cache_global, ssig)
6111+ tmpdir = tempfile.mkdtemp(prefix=Options.cache_global + os.sep + 'waf')
6112+
6113+ try:
6114+ shutil.rmtree(dname)
6115+ except:
6116+ pass
6117+
6118+ try:
6119+ i = 0
6120+ for node in self.outputs:
6121+ variant = node.variant(env)
6122+ dest = os.path.join(tmpdir, str(i) + node.name)
6123+ shutil.copy2(node.abspath(env), dest)
6124+ i += 1
6125+ except (OSError, IOError):
6126+ try:
6127+ shutil.rmtree(tmpdir)
6128+ except:
6129+ pass
6130+ else:
6131+ try:
6132+ os.rename(tmpdir, dname)
6133+ except OSError:
6134+ try:
6135+ shutil.rmtree(tmpdir)
6136+ except:
6137+ pass
6138+ else:
6139+ try:
6140+ os.chmod(dname, O755)
6141+ except:
6142+ pass
6143+
6144+ def can_retrieve_cache(self):
6145+ """
6146+ Retrieve build nodes from the cache
6147+ update the file timestamps to help clean the least used entries from the cache
6148+ additionally, set an attribute 'cached' to avoid re-creating the same cache files
6149+
6150+ suppose there are files in cache/dir1/file1 and cache/dir2/file2
6151+ first, read the timestamp of dir1
6152+ then try to copy the files
6153+ then look at the timestamp again; if it has changed, the data may have been corrupted (cache updated by another process)
6154+ should an exception occur, ignore the data
6155+ """
6156+ if not Options.cache_global or Options.options.nocache or not self.outputs:
6157+ return None
6158+
6159+ env = self.env
6160+ sig = self.signature()
6161+ ssig = sig.encode('hex')
6162+
6163+ # first try to access the cache folder for the task
6164+ dname = os.path.join(Options.cache_global, ssig)
6165+ try:
6166+ t1 = os.stat(dname).st_mtime
6167+ except OSError:
6168+ return None
6169+
6170+ i = 0
6171+ for node in self.outputs:
6172+ variant = node.variant(env)
6173+
6174+ orig = os.path.join(dname, str(i) + node.name)
6175+ try:
6176+ shutil.copy2(orig, node.abspath(env))
6177+ # mark the cache file as used recently (modified)
6178+ os.utime(orig, None)
6179+ except (OSError, IOError):
6180+ debug('task: failed retrieving file')
6181+ return None
6182+ i += 1
6183+
6184+ # is it the same folder?
6185+ try:
6186+ t2 = os.stat(dname).st_mtime
6187+ except OSError:
6188+ return None
6189+
6190+ if t1 != t2:
6191+ return None
6192+
6193+ for node in self.outputs:
6194+ self.generator.bld.node_sigs[variant][node.id] = sig
6195+ if Options.options.progress_bar < 1:
6196+ self.generator.bld.printout('restoring from cache %r\n' % node.bldpath(env))
6197+
6198+ self.cached = True
6199+ return 1
6200+
6201+ def debug_why(self, old_sigs):
6202+ "explains why a task is run"
6203+
6204+ new_sigs = self.cache_sig
6205+ def v(x):
6206+ return x.encode('hex')
6207+
6208+ debug("Task %r", self)
6209+ msgs = ['Task must run', '* Source file or manual dependency', '* Implicit dependency', '* Environment variable']
6210+ tmp = 'task: -> %s: %s %s'
6211+ for x in xrange(len(msgs)):
6212+ if (new_sigs[x] != old_sigs[x]):
6213+ debug(tmp, msgs[x], v(old_sigs[x]), v(new_sigs[x]))
6214+
6215+ def sig_explicit_deps(self):
6216+ bld = self.generator.bld
6217+ up = self.m.update
6218+
6219+ # the inputs
6220+ for x in self.inputs + getattr(self, 'dep_nodes', []):
6221+ if not x.parent.id in bld.cache_scanned_folders:
6222+ bld.rescan(x.parent)
6223+
6224+ variant = x.variant(self.env)
6225+ try:
6226+ up(bld.node_sigs[variant][x.id])
6227+ except KeyError:
6228+ raise Utils.WafError('Missing node signature for %r (required by %r)' % (x, self))
6229+
6230+ # manual dependencies, they can slow down the builds
6231+ if bld.deps_man:
6232+ additional_deps = bld.deps_man
6233+ for x in self.inputs + self.outputs:
6234+ try:
6235+ d = additional_deps[x.id]
6236+ except KeyError:
6237+ continue
6238+
6239+ for v in d:
6240+ if isinstance(v, Node.Node):
6241+ bld.rescan(v.parent)
6242+ variant = v.variant(self.env)
6243+ try:
6244+ v = bld.node_sigs[variant][v.id]
6245+ except KeyError:
6246+ raise Utils.WafError('Missing node signature for %r (required by %r)' % (v, self))
6247+ elif hasattr(v, '__call__'):
6248+ v = v() # dependency is a function, call it
6249+ up(v)
6250+
6251+ for x in self.dep_nodes:
6252+ v = bld.node_sigs[x.variant(self.env)][x.id]
6253+ up(v)
6254+
6255+ return self.m.digest()
6256+
6257+ def sig_vars(self):
6258+ bld = self.generator.bld
6259+ env = self.env
6260+
6261+ # dependencies on the environment vars
6262+ act_sig = bld.hash_env_vars(env, self.__class__.vars)
6263+ self.m.update(act_sig)
6264+
6265+ # additional variable dependencies, if provided
6266+ dep_vars = getattr(self, 'dep_vars', None)
6267+ if dep_vars:
6268+ self.m.update(bld.hash_env_vars(env, dep_vars))
6269+
6270+ return self.m.digest()
6271+
6272+ #def scan(self, node):
6273+ # """this method returns a tuple containing:
6274+ # * a list of nodes corresponding to real files
6275+ # * a list of names for files not found in path_lst
6276+ # the input parameters may have more parameters than the ones used below
6277+ # """
6278+ # return ((), ())
6279+ scan = None
6280+
6281+ # compute the signature, recompute it if there is no match in the cache
6282+ def sig_implicit_deps(self):
6283+ "the signature obtained may not be the final one if the files have changed, so we do it in two steps"
6284+
6285+ bld = self.generator.bld
6286+
6287+ # get the task signatures from previous runs
6288+ key = self.unique_id()
6289+ prev_sigs = bld.task_sigs.get(key, ())
6290+ if prev_sigs:
6291+ try:
6292+ # for issue #379
6293+ if prev_sigs[2] == self.compute_sig_implicit_deps():
6294+ return prev_sigs[2]
6295+ except (KeyError, OSError):
6296+ pass
6297+ del bld.task_sigs[key]
6298+ raise ValueError('rescan')
6299+
6300+ # no previous run or the signature of the dependencies has changed, rescan the dependencies
6301+ (nodes, names) = self.scan()
6302+ if Logs.verbose:
6303+ debug('deps: scanner for %s returned %s %s', str(self), str(nodes), str(names))
6304+
6305+ # store the dependencies in the cache
6306+ bld.node_deps[key] = nodes
6307+ bld.raw_deps[key] = names
6308+
6309+ # recompute the signature and return it
6310+ try:
6311+ sig = self.compute_sig_implicit_deps()
6312+ except KeyError:
6313+ try:
6314+ nodes = []
6315+ for k in bld.node_deps.get(self.unique_id(), []):
6316+ if k.id & 3 == 2: # Node.FILE:
6317+ if not k.id in bld.node_sigs[0]:
6318+ nodes.append(k)
6319+ else:
6320+ if not k.id in bld.node_sigs[self.env.variant()]:
6321+ nodes.append(k)
6322+ except:
6323+ nodes = '?'
6324+ raise Utils.WafError('Missing node signature for %r (for implicit dependencies %r)' % (nodes, self))
6325+
6326+ return sig
6327+
6328+ def compute_sig_implicit_deps(self):
6329+ """it is intended for .cpp and inferred .h files
6330+ there is a single list (no tree traversal)
6331+ this is the hot spot so ... do not touch"""
6332+ upd = self.m.update
6333+
6334+ bld = self.generator.bld
6335+ tstamp = bld.node_sigs
6336+ env = self.env
6337+
6338+ for k in bld.node_deps.get(self.unique_id(), []):
6339+ # unlikely but necessary if it happens
6340+ if not k.parent.id in bld.cache_scanned_folders:
6341+ # if the parent folder is removed, an OSError may be thrown
6342+ bld.rescan(k.parent)
6343+
6344+ # if the parent folder is removed, a KeyError will be thrown
6345+ if k.id & 3 == 2: # Node.FILE:
6346+ upd(tstamp[0][k.id])
6347+ else:
6348+ upd(tstamp[env.variant()][k.id])
6349+
6350+ return self.m.digest()
6351+
6352+def funex(c):
6353+ dc = {}
6354+ exec(c, dc)
6355+ return dc['f']
6356+
6357+reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})", re.M)
6358+def compile_fun_shell(name, line):
6359+ """Compiles a string (once) into a function, eg:
6360+ simple_task_type('c++', '${CXX} -o ${TGT[0]} ${SRC} -I ${SRC[0].parent.bldpath()}')
6361+
6362+ The env variables (CXX, ..) on the task must not hold dicts (order)
6363+ The reserved keywords TGT and SRC represent the task input and output nodes
6364+
6365+ quick test:
6366+ bld(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"')
6367+ """
6368+
6369+ extr = []
6370+ def repl(match):
6371+ g = match.group
6372+ if g('dollar'): return "$"
6373+ elif g('backslash'): return '\\\\'
6374+ elif g('subst'): extr.append((g('var'), g('code'))); return "%s"
6375+ return None
6376+
6377+ line = reg_act.sub(repl, line) or line
6378+
6379+ parm = []
6380+ dvars = []
6381+ app = parm.append
6382+ for (var, meth) in extr:
6383+ if var == 'SRC':
6384+ if meth: app('task.inputs%s' % meth)
6385+ else: app('" ".join([a.srcpath(env) for a in task.inputs])')
6386+ elif var == 'TGT':
6387+ if meth: app('task.outputs%s' % meth)
6388+ else: app('" ".join([a.bldpath(env) for a in task.outputs])')
6389+ else:
6390+ if not var in dvars: dvars.append(var)
6391+ app("p('%s')" % var)
6392+ if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm))
6393+ else: parm = ''
6394+
6395+ c = COMPILE_TEMPLATE_SHELL % (line, parm)
6396+
6397+ debug('action: %s', c)
6398+ return (funex(c), dvars)
6399+
6400+def compile_fun_noshell(name, line):
6401+
6402+ extr = []
6403+ def repl(match):
6404+ g = match.group
6405+ if g('dollar'): return "$"
6406+ elif g('subst'): extr.append((g('var'), g('code'))); return "<<|@|>>"
6407+ return None
6408+
6409+ line2 = reg_act.sub(repl, line)
6410+ params = line2.split('<<|@|>>')
6411+
6412+ buf = []
6413+ dvars = []
6414+ app = buf.append
6415+ for x in xrange(len(extr)):
6416+ params[x] = params[x].strip()
6417+ if params[x]:
6418+ app("lst.extend(%r)" % params[x].split())
6419+ (var, meth) = extr[x]
6420+ if var == 'SRC':
6421+ if meth: app('lst.append(task.inputs%s)' % meth)
6422+ else: app("lst.extend([a.srcpath(env) for a in task.inputs])")
6423+ elif var == 'TGT':
6424+ if meth: app('lst.append(task.outputs%s)' % meth)
6425+ else: app("lst.extend([a.bldpath(env) for a in task.outputs])")
6426+ else:
6427+ app('lst.extend(to_list(env[%r]))' % var)
6428+ if not var in dvars: dvars.append(var)
6429+
6430+ if params[-1]:
6431+ app("lst.extend(%r)" % shlex.split(params[-1]))
6432+
6433+ fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
6434+ debug('action: %s', fun)
6435+ return (funex(fun), dvars)
6436+
6437+def compile_fun(name, line, shell=None):
6438+ "commands can be launched by the shell or not"
6439+ if line.find('<') > 0 or line.find('>') > 0 or line.find('&&') > 0:
6440+ shell = True
6441+ #else:
6442+ # shell = False
6443+
6444+ if shell is None:
6445+ if sys.platform == 'win32':
6446+ shell = False
6447+ else:
6448+ shell = True
6449+
6450+ if shell:
6451+ return compile_fun_shell(name, line)
6452+ else:
6453+ return compile_fun_noshell(name, line)
6454+
6455+def simple_task_type(name, line, color='GREEN', vars=[], ext_in=[], ext_out=[], before=[], after=[], shell=None):
6456+ """return a new Task subclass with the function run compiled from the line given"""
6457+ (fun, dvars) = compile_fun(name, line, shell)
6458+ fun.code = line
6459+ return task_type_from_func(name, fun, vars or dvars, color, ext_in, ext_out, before, after)
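+
+# Usage sketch (the task name and command are illustrative only), in the style of the
+# Tools below which call this through "import Task":
+#
+#   Task.simple_task_type('copy', 'cp ${SRC} ${TGT}', color='BLUE', shell=False)
+#
+# SRC and TGT are the reserved keywords documented in compile_fun_shell above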
6460+
6461+def task_type_from_func(name, func, vars=[], color='GREEN', ext_in=[], ext_out=[], before=[], after=[]):
6462+ """return a new Task subclass with the method 'run' set to the function given"""
6463+ params = {
6464+ 'run': func,
6465+ 'vars': vars,
6466+ 'color': color,
6467+ 'name': name,
6468+ 'ext_in': Utils.to_list(ext_in),
6469+ 'ext_out': Utils.to_list(ext_out),
6470+ 'before': Utils.to_list(before),
6471+ 'after': Utils.to_list(after),
6472+ }
6473+
6474+ cls = type(Task)(name, (Task,), params)
6475+ TaskBase.classes[name] = cls
6476+ return cls
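+
+# Similarly (sketch, hypothetical function and task names): a task type can be built
+# from a plain python function instead of a command string:
+#
+#   def run_touch(task):
+#       open(task.outputs[0].abspath(task.env), 'w').close()
+#       return 0
+#   Task.task_type_from_func('touch', run_touch, vars=[])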
6477+
6478+def always_run(cls):
6479+ """Set all task instances of this class to be executed whenever a build is started.
6480+ The task signature is calculated, but the result of the comparison between
6481+ task signatures is bypassed
6482+ """
6483+ old = cls.runnable_status
6484+ def always(self):
6485+ ret = old(self)
6486+ if ret == SKIP_ME:
6487+ return RUN_ME
6488+ return ret
6489+ cls.runnable_status = always
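+
+# For illustration (the task type name 'mytask' is hypothetical): an existing task
+# class can be forced to run on every build with
+#
+#   Task.always_run(Task.TaskBase.classes['mytask'])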
6490+
6491+def update_outputs(cls):
6492+ """When a command is always run, it is possible that the output only changes
6493+ sometimes. By default the build nodes have as a hash the signature of the task,
6494+ which may not change. With this, the output nodes (produced) are hashed,
6495+ and the hashes are set on the build nodes
6496+
6497+ This may avoid unnecessary recompilations, but it uses more resources
6498+ (hashing the output files), so it is not used by default
6499+ """
6500+ old_post_run = cls.post_run
6501+ def post_run(self):
6502+ old_post_run(self)
6503+ bld = self.generator.bld
6504+ for output in self.outputs:
6505+ bld.node_sigs[self.env.variant()][output.id] = Utils.h_file(output.abspath(self.env))
6506+ bld.task_sigs[output.id] = self.unique_id()
6507+ cls.post_run = post_run
6508+
6509+ old_runnable_status = cls.runnable_status
6510+ def runnable_status(self):
6511+ status = old_runnable_status(self)
6512+ if status != RUN_ME:
6513+ return status
6514+
6515+ uid = self.unique_id()
6516+ try:
6517+ bld = self.outputs[0].__class__.bld
6518+ new_sig = self.signature()
6519+ prev_sig = bld.task_sigs[uid][0]
6520+ if prev_sig == new_sig:
6521+ for x in self.outputs:
6522+ if not x.id in bld.node_sigs[self.env.variant()]:
6523+ return RUN_ME
6524+ if bld.task_sigs[x.id] != uid: # ensure the outputs are associated with *this* task
6525+ return RUN_ME
6526+ return SKIP_ME
6527+ except KeyError:
6528+ pass
6529+ except IndexError:
6530+ pass
6531+ return RUN_ME
6532+ cls.runnable_status = runnable_status
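+
+# Sketch: always_run and update_outputs can be combined on the same class (here "cls"
+# stands for any Task subclass, e.g. one returned by simple_task_type) so that the task
+# re-runs every build while downstream tasks rebuild only when the outputs really change:
+#
+#   Task.always_run(cls)
+#   Task.update_outputs(cls)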
6533+
6534+def extract_outputs(tasks):
6535+ """file_deps: Infer additional dependencies from task input and output nodes
6536+ """
6537+ v = {}
6538+ for x in tasks:
6539+ try:
6540+ (ins, outs) = v[x.env.variant()]
6541+ except KeyError:
6542+ ins = {}
6543+ outs = {}
6544+ v[x.env.variant()] = (ins, outs)
6545+
6546+ for a in getattr(x, 'inputs', []):
6547+ try: ins[a.id].append(x)
6548+ except KeyError: ins[a.id] = [x]
6549+ for a in getattr(x, 'outputs', []):
6550+ try: outs[a.id].append(x)
6551+ except KeyError: outs[a.id] = [x]
6552+
6553+ for (ins, outs) in v.values():
6554+ links = set(ins.iterkeys()).intersection(outs.iterkeys())
6555+ for k in links:
6556+ for a in ins[k]:
6557+ for b in outs[k]:
6558+ a.set_run_after(b)
6559+
6560+def extract_deps(tasks):
6561+ """file_deps: Infer additional dependencies from task input and output nodes and from implicit dependencies
6562+ returned by the scanners - that will only work if all tasks are created
6563+
6564+ this is aimed at people who have pathological builds and who do not care enough
6565+ to implement the build dependencies properly
6566+
6567+ with two loops over the list of tasks, do not expect this to be really fast
6568+ """
6569+
6570+ # first reuse the function above
6571+ extract_outputs(tasks)
6572+
6573+ # map the output nodes to the tasks producing them
6574+ out_to_task = {}
6575+ for x in tasks:
6576+ v = x.env.variant()
6577+ try:
6578+ lst = x.outputs
6579+ except AttributeError:
6580+ pass
6581+ else:
6582+ for node in lst:
6583+ out_to_task[(v, node.id)] = x
6584+
6585+ # map the dependencies found to the tasks compiled
6586+ dep_to_task = {}
6587+ for x in tasks:
6588+ try:
6589+ x.signature()
6590+ except: # this is on purpose
6591+ pass
6592+
6593+ v = x.env.variant()
6594+ key = x.unique_id()
6595+ for k in x.generator.bld.node_deps.get(x.unique_id(), []):
6596+ try: dep_to_task[(v, k.id)].append(x)
6597+ except KeyError: dep_to_task[(v, k.id)] = [x]
6598+
6599+ # now get the intersection
6600+ deps = set(dep_to_task.keys()).intersection(set(out_to_task.keys()))
6601+
6602+ # and add the dependencies from task to task
6603+ for idx in deps:
6604+ for k in dep_to_task[idx]:
6605+ k.set_run_after(out_to_task[idx])
6606+
6607+ # cleanup, remove the signatures
6608+ for x in tasks:
6609+ try:
6610+ delattr(x, 'cache_sig')
6611+ except AttributeError:
6612+ pass
6613+
6614diff --git a/buildtools/wafadmin/TaskGen.py b/buildtools/wafadmin/TaskGen.py
6615new file mode 100644
6616index 0000000..ae1834a
6617--- /dev/null
6618+++ b/buildtools/wafadmin/TaskGen.py
6619@@ -0,0 +1,612 @@
6620+#!/usr/bin/env python
6621+# encoding: utf-8
6622+# Thomas Nagy, 2005-2008 (ita)
6623+
6624+"""
6625+The class task_gen encapsulates the creation of task objects (low-level code)
6626+The instances can have various parameters, but the creation of task nodes (Task.py)
6627+is delayed. To achieve this, various methods are called from the method "apply"
6628+
6629+The class task_gen contains lots of methods, and a configuration table:
6630+* the methods to call (self.meths) can be specified dynamically (removing, adding, ..)
6631+* the order of the methods (self.prec or by default task_gen.prec) is configurable
6632+* new methods can be inserted dynamically without pasting old code
6633+
6634+Additionally, task_gen provides the method apply_core
6635+* file extensions are mapped to methods: def meth(self, name_or_node)
6636+* if a mapping is not found in self.mappings, it is searched in task_gen.mappings
6637+* when called, the functions may modify self.allnodes to re-add source to process
6638+* the mappings can map an extension or a filename (see the code below)
6639+
6640+WARNING: subclasses must reimplement the clone method
6641+"""
6642+
6643+import os, traceback, copy
6644+import Build, Task, Utils, Logs, Options
6645+from Logs import debug, error, warn
6646+from Constants import *
6647+
6648+typos = {
6649+'sources':'source',
6650+'targets':'target',
6651+'include':'includes',
6652+'define':'defines',
6653+'importpath':'importpaths',
6654+'install_var':'install_path',
6655+'install_subdir':'install_path',
6656+'inst_var':'install_path',
6657+'inst_dir':'install_path',
6658+'feature':'features',
6659+}
6660+
6661+class register_obj(type):
6662+ """no decorators for classes, so we use a metaclass
6663+ we store into task_gen.classes the classes that inherit task_gen
6664+ and whose names end in '_taskgen'
6665+ """
6666+ def __init__(cls, name, bases, dict):
6667+ super(register_obj, cls).__init__(name, bases, dict)
6668+ name = cls.__name__
6669+ suffix = '_taskgen'
6670+ if name.endswith(suffix):
6671+ task_gen.classes[name.replace(suffix, '')] = cls
6672+
6673+class task_gen(object):
6674+ """
6675+ Most methods are of the form 'def meth(self):' without any parameters;
6676+ there are many of them, and they do many different things:
6677+ * task creation
6678+ * task results installation
6679+ * environment modification
6680+ * attribute addition/removal
6681+
6682+ The inheritance approach is complicated
6683+ * mixing several languages at once
6684+ * subclassing is needed even for small changes
6685+ * inserting new methods is complicated
6686+
6687+ This new class uses a configuration table:
6688+ * adding new methods easily
6689+ * obtaining the order in which to call the methods
6690+ * postponing the method calls (post() -> apply)
6691+
6692+ Additionally, a 'traits' static attribute is provided:
6693+ * this list contains methods
6694+ * the methods can remove or add methods from self.meths
6695+ Example 1: the attribute 'staticlib' is set on an instance;
6696+ a method set in the list of traits is executed when the
6697+ instance is posted, finds that flag and adds another method for execution
6698+ Example 2: a method set in the list of traits finds the msvc
6699+ compiler (from self.env['MSVC']==1); more methods are added to self.meths
6700+ """
6701+
6702+ __metaclass__ = register_obj
6703+ mappings = {}
6704+ mapped = {}
6705+ prec = Utils.DefaultDict(list)
6706+ traits = Utils.DefaultDict(set)
6707+ classes = {}
6708+
6709+ def __init__(self, *kw, **kwargs):
6710+ self.prec = Utils.DefaultDict(list)
6711+ "map precedence of function names to call"
6712+ # so we will have to play with directed acyclic graphs
6713+ # detect cycles, etc
6714+
6715+ self.source = ''
6716+ self.target = ''
6717+
6718+ # list of methods to execute - do not touch it by hand unless you know what you are doing
6719+ self.meths = []
6720+
6721+ # list of mappings extension -> function
6722+ self.mappings = {}
6723+
6724+ # list of features (see the documentation on traits)
6725+ self.features = list(kw)
6726+
6727+ # not always a good idea
6728+ self.tasks = []
6729+
6730+ self.default_chmod = O644
6731+ self.default_install_path = None
6732+
6733+ # kind of private, beware of what you put in it, also, the contents are consumed
6734+ self.allnodes = []
6735+
6736+ self.bld = kwargs.get('bld', Build.bld)
6737+ self.env = self.bld.env.copy()
6738+
6739+ self.path = self.bld.path # emulate chdir when reading scripts
6740+ self.name = '' # give a name to the target (static+shlib with the same targetname ambiguity)
6741+
6742+ # provide a unique id
6743+ self.idx = self.bld.idx[self.path.id] = self.bld.idx.get(self.path.id, 0) + 1
6744+
6745+ for key, val in kwargs.iteritems():
6746+ setattr(self, key, val)
6747+
6748+ self.bld.task_manager.add_task_gen(self)
6749+ self.bld.all_task_gen.append(self)
6750+
6751+ def __str__(self):
6752+ return ("<task_gen '%s' of type %s defined in %s>"
6753+ % (self.name or self.target, self.__class__.__name__, str(self.path)))
6754+
6755+ def __setattr__(self, name, attr):
6756+ real = typos.get(name, name)
6757+ if real != name:
6758+ warn('typo %s -> %s' % (name, real))
6759+ if Logs.verbose > 0:
6760+ traceback.print_stack()
6761+ object.__setattr__(self, real, attr)
6762+
6763+ def to_list(self, value):
6764+ "helper: returns a list"
6765+ if isinstance(value, str): return value.split()
6766+ else: return value
6767+
6768+ def apply(self):
6769+ "order the methods to execute using self.prec or task_gen.prec"
6770+ keys = set(self.meths)
6771+
6772+ # add the methods listed in the features
6773+ self.features = Utils.to_list(self.features)
6774+ for x in self.features + ['*']:
6775+ st = task_gen.traits[x]
6776+ if not st:
6777+ warn('feature %r does not exist - bind at least one method to it' % x)
6778+ keys.update(st)
6779+
6780+ # copy the precedence table
6781+ prec = {}
6782+ prec_tbl = self.prec or task_gen.prec
6783+ for x in prec_tbl:
6784+ if x in keys:
6785+ prec[x] = prec_tbl[x]
6786+
6787+ # elements disconnected
6788+ tmp = []
6789+ for a in keys:
6790+ for x in prec.values():
6791+ if a in x: break
6792+ else:
6793+ tmp.append(a)
6794+
6795+ # topological sort
6796+ out = []
6797+ while tmp:
6798+ e = tmp.pop()
6799+ if e in keys: out.append(e)
6800+ try:
6801+ nlst = prec[e]
6802+ except KeyError:
6803+ pass
6804+ else:
6805+ del prec[e]
6806+ for x in nlst:
6807+ for y in prec:
6808+ if x in prec[y]:
6809+ break
6810+ else:
6811+ tmp.append(x)
6812+
6813+ if prec: raise Utils.WafError("graph has a cycle %s" % str(prec))
6814+ out.reverse()
6815+ self.meths = out
6816+
6817+ # then we run the methods in order
6818+ debug('task_gen: posting %s %d', self, id(self))
6819+ for x in out:
6820+ try:
6821+ v = getattr(self, x)
6822+ except AttributeError:
6823+ raise Utils.WafError("tried to retrieve %s which is not a valid method" % x)
6824+ debug('task_gen: -> %s (%d)', x, id(self))
6825+ v()
6826+
6827+ def post(self):
6828+ "runs the code to create the tasks, do not subclass"
6829+ if not self.name:
6830+ if isinstance(self.target, list):
6831+ self.name = ' '.join(self.target)
6832+ else:
6833+ self.name = self.target
6834+
6835+ if getattr(self, 'posted', None):
6836+ #error("OBJECT ALREADY POSTED" + str( self))
6837+ return
6838+
6839+ self.apply()
6840+ self.posted = True
6841+ debug('task_gen: posted %s', self.name)
6842+
6843+ def get_hook(self, ext):
6844+ try: return self.mappings[ext]
6845+ except KeyError:
6846+ try: return task_gen.mappings[ext]
6847+ except KeyError: return None
6848+
6849+ # TODO waf 1.6: always set the environment
6850+ # TODO waf 1.6: create_task(self, name, inputs, outputs)
6851+ def create_task(self, name, src=None, tgt=None, env=None):
6852+ env = env or self.env
6853+ task = Task.TaskBase.classes[name](env.copy(), generator=self)
6854+ if src:
6855+ task.set_inputs(src)
6856+ if tgt:
6857+ task.set_outputs(tgt)
6858+ self.tasks.append(task)
6859+ return task
6860+
6861+ def name_to_obj(self, name):
6862+ return self.bld.name_to_obj(name, self.env)
6863+
6864+ def find_sources_in_dirs(self, dirnames, excludes=[], exts=[]):
6865+ """
6866+ The attributes "excludes" and "exts" must be lists to avoid the confusion
6867+ find_sources_in_dirs('a', 'b', 'c') <-> find_sources_in_dirs('a b c')
6868+
6869+ do not use absolute paths
6870+ do not use paths outside of the source tree
6871+ the files or folders beginning with . are not returned
6872+
6873+ # TODO: remove in Waf 1.6
6874+ """
6875+
6876+ err_msg = "'%s' attribute must be a list"
6877+ if not isinstance(excludes, list):
6878+ raise Utils.WscriptError(err_msg % 'excludes')
6879+ if not isinstance(exts, list):
6880+ raise Utils.WscriptError(err_msg % 'exts')
6881+
6882+ lst = []
6883+
6884+ # make sure dirnames is a list; this helps with dirnames containing spaces
6885+ dirnames = self.to_list(dirnames)
6886+
6887+ ext_lst = exts or list(self.mappings.keys()) + list(task_gen.mappings.keys())
6888+
6889+ for name in dirnames:
6890+ anode = self.path.find_dir(name)
6891+
6892+ if not anode or not anode.is_child_of(self.bld.srcnode):
6893+ raise Utils.WscriptError("Unable to use '%s' - either because it's not a relative path" \
6894+ ", or it's not child of '%s'." % (name, self.bld.srcnode))
6895+
6896+ self.bld.rescan(anode)
6897+ for name in self.bld.cache_dir_contents[anode.id]:
6898+
6899+ # ignore hidden files
6900+ if name.startswith('.'):
6901+ continue
6902+
6903+ (base, ext) = os.path.splitext(name)
6904+ if ext in ext_lst and not name in lst and not name in excludes:
6905+ lst.append((anode.relpath_gen(self.path) or '.') + os.path.sep + name)
6906+
6907+ lst.sort()
6908+ self.source = self.to_list(self.source)
6909+ if not self.source: self.source = lst
6910+ else: self.source += lst
6911+
6912+ def clone(self, env):
6913+ """when creating a clone in a task generator method,
6914+ make sure to set posted=False on the clone
6915+ else the other task generator will not create its tasks"""
6916+ newobj = task_gen(bld=self.bld)
6917+ for x in self.__dict__:
6918+ if x in ['env', 'bld']:
6919+ continue
6920+ elif x in ["path", "features"]:
6921+ setattr(newobj, x, getattr(self, x))
6922+ else:
6923+ setattr(newobj, x, copy.copy(getattr(self, x)))
6924+
6925+ newobj.__class__ = self.__class__
6926+ if isinstance(env, str):
6927+ newobj.env = self.bld.all_envs[env].copy()
6928+ else:
6929+ newobj.env = env.copy()
6930+
6931+ return newobj
6932+
6933+ def get_inst_path(self):
6934+ return getattr(self, '_install_path', getattr(self, 'default_install_path', ''))
6935+
6936+ def set_inst_path(self, val):
6937+ self._install_path = val
6938+
6939+ install_path = property(get_inst_path, set_inst_path)
6940+
6941+
6942+ def get_chmod(self):
6943+ return getattr(self, '_chmod', getattr(self, 'default_chmod', O644))
6944+
6945+ def set_chmod(self, val):
6946+ self._chmod = val
6947+
6948+ chmod = property(get_chmod, set_chmod)
6949+
6950+def declare_extension(var, func):
6951+ try:
6952+ for x in Utils.to_list(var):
6953+ task_gen.mappings[x] = func
6954+ except:
6955+ raise Utils.WscriptError('declare_extension takes either a list or a string %r' % var)
6956+ task_gen.mapped[func.__name__] = func
6957+
6958+def declare_order(*k):
6959+ assert(len(k) > 1)
6960+ n = len(k) - 1
6961+ for i in xrange(n):
6962+ f1 = k[i]
6963+ f2 = k[i+1]
6964+ if not f1 in task_gen.prec[f2]:
6965+ task_gen.prec[f2].append(f1)
6966+
6967+def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=True, color='BLUE',
6968+ install=0, before=[], after=[], decider=None, rule=None, scan=None):
6969+ """
6970+ see Tools/flex.py for an example
6971+ see Tools/flex.py for an example;
6972+ while I do not like such wrappers, some people really do
6973+
6974+ action = action or rule
6975+ if isinstance(action, str):
6976+ act = Task.simple_task_type(name, action, color=color)
6977+ else:
6978+ act = Task.task_type_from_func(name, action, color=color)
6979+ act.ext_in = tuple(Utils.to_list(ext_in))
6980+ act.ext_out = tuple(Utils.to_list(ext_out))
6981+ act.before = Utils.to_list(before)
6982+ act.after = Utils.to_list(after)
6983+ act.scan = scan
6984+
6985+ def x_file(self, node):
6986+ if decider:
6987+ ext = decider(self, node)
6988+ else:
6989+ ext = ext_out
6990+
6991+ if isinstance(ext, str):
6992+ out_source = node.change_ext(ext)
6993+ if reentrant:
6994+ self.allnodes.append(out_source)
6995+ elif isinstance(ext, list):
6996+ out_source = [node.change_ext(x) for x in ext]
6997+ if reentrant:
6998+ for i in xrange((reentrant is True) and len(out_source) or reentrant):
6999+ self.allnodes.append(out_source[i])
7000+ else:
7001+ # XXX: useless: it will fail on Utils.to_list above...
7002+ raise Utils.WafError("do not know how to process %s" % str(ext))
7003+
7004+ tsk = self.create_task(name, node, out_source)
7005+
7006+ if node.__class__.bld.is_install:
7007+ tsk.install = install
7008+
7009+ declare_extension(act.ext_in, x_file)
7010+ return x_file
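+
+# Minimal usage sketch (the chain name, rule and extensions are made up): a .in -> .c
+# chain whose generated .c files are fed back into the build (reentrant=True):
+#
+#   TaskGen.declare_chain(name='gen_c', rule='cp ${SRC} ${TGT}',
+#       ext_in='.in', ext_out='.c', reentrant=True)
+#
+# see Tools/flex.py for a real example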
7011+
7012+def bind_feature(name, methods):
7013+ lst = Utils.to_list(methods)
7014+ task_gen.traits[name].update(lst)
7015+
7016+"""
7017+All the following decorators are registration decorators, i.e. they add an attribute to the current class
7018+ (task_gen and its derivatives), with the same name as func, which points to func itself.
7019+For example:
7020+ @taskgen
7021+ def sayHi(self):
7022+ print("hi")
7023+Now sayHi() may be called on any task_gen instance
7024+
7025+If python were really smart, it could infer the order of methods on its own by looking at the
7026+attributes. A prerequisite for execution is to have the attribute set before.
7027+Intelligent compilers binding aspect-oriented programming and parallelization, what a nice topic for studies.
7028+"""
7029+def taskgen(func):
7030+ """
7031+ register a method as a task generator method
7032+ """
7033+ setattr(task_gen, func.__name__, func)
7034+ return func
7035+
7036+def feature(*k):
7037+ """
7038+ declare a task generator method that will be executed when the
7039+ object attribute 'feature' contains the corresponding key(s)
7040+ """
7041+ def deco(func):
7042+ setattr(task_gen, func.__name__, func)
7043+ for name in k:
7044+ task_gen.traits[name].update([func.__name__])
7045+ return func
7046+ return deco
7047+
7048+def before(*k):
7049+ """
7050+ declare a task generator method which will be executed
7051+ before the functions of given name(s)
7052+ """
7053+ def deco(func):
7054+ setattr(task_gen, func.__name__, func)
7055+ for fun_name in k:
7056+ if not func.__name__ in task_gen.prec[fun_name]:
7057+ task_gen.prec[fun_name].append(func.__name__)
7058+ return func
7059+ return deco
7060+
7061+def after(*k):
7062+ """
7063+ declare a task generator method which will be executed
7064+ after the functions of given name(s)
7065+ """
7066+ def deco(func):
7067+ setattr(task_gen, func.__name__, func)
7068+ for fun_name in k:
7069+ if not fun_name in task_gen.prec[func.__name__]:
7070+ task_gen.prec[func.__name__].append(fun_name)
7071+ return func
7072+ return deco
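+
+# Combined sketch (feature and method names are illustrative): the decorators can be
+# stacked on one method, as the Tools below do, e.g.
+#
+#   @feature('myfeat')
+#   @before('apply_core')
+#   def scan_myfeat_sources(self):
+#       pass
+#
+# so that bld(features='myfeat') runs scan_myfeat_sources before apply_core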
7073+
7074+def extension(var):
7075+ """
7076+ declare a task generator method which will be invoked during
7077+ the processing of source files for the extension given
7078+ """
7079+ def deco(func):
7080+ setattr(task_gen, func.__name__, func)
7081+ try:
7082+ for x in Utils.to_list(var):
7083+ task_gen.mappings[x] = func
7084+ except:
7085+ raise Utils.WafError('extension takes either a list or a string %r' % var)
7086+ task_gen.mapped[func.__name__] = func
7087+ return func
7088+ return deco
7089+
7090+# TODO make certain the decorators may be used here
7091+
7092+def apply_core(self):
7093+ """Process the attribute source
7094+ transform the names into file nodes
7095+ try to process the files by name first, later by extension"""
7096+ # get the list of folders to use by the scanners
7097+ # all our objects share the same include paths anyway
7098+ find_resource = self.path.find_resource
7099+
7100+ for filename in self.to_list(self.source):
7101+ # if self.mappings or task_gen.mappings contains a file of the same name
7102+ x = self.get_hook(filename)
7103+ if x:
7104+ x(self, filename)
7105+ else:
7106+ node = find_resource(filename)
7107+ if not node: raise Utils.WafError("source not found: '%s' in '%s'" % (filename, str(self.path)))
7108+ self.allnodes.append(node)
7109+
7110+ for node in self.allnodes:
7111+ # self.mappings or task_gen.mappings map the file extension to a function
7112+ x = self.get_hook(node.suffix())
7113+
7114+ if not x:
7115+ raise Utils.WafError("Cannot guess how to process %s (got mappings %r in %r) -> try conf.check_tool(..)?" % \
7116+ (str(node), self.__class__.mappings.keys(), self.__class__))
7117+ x(self, node)
7118+feature('*')(apply_core)
7119+
7120+def exec_rule(self):
7121+ """Process the attribute rule; when provided, the method apply_core will be disabled
7122+ """
7123+ if not getattr(self, 'rule', None):
7124+ return
7125+
7126+ # someone may have removed it already
7127+ try:
7128+ self.meths.remove('apply_core')
7129+ except ValueError:
7130+ pass
7131+
7132+ # get the function and the variables
7133+ func = self.rule
7134+
7135+ vars2 = []
7136+ if isinstance(func, str):
7137+ # use the shell by default for user-defined commands
7138+ (func, vars2) = Task.compile_fun('', self.rule, shell=getattr(self, 'shell', True))
7139+ func.code = self.rule
7140+
7141+ # create the task class
7142+ name = getattr(self, 'name', None) or self.target or self.rule
7143+ if not isinstance(name, str):
7144+ name = str(self.idx)
7145+ cls = Task.task_type_from_func(name, func, getattr(self, 'vars', vars2))
7146+ cls.color = getattr(self, 'color', 'BLUE')
7147+
7148+ # now create one instance
7149+ tsk = self.create_task(name)
7150+
7151+ dep_vars = getattr(self, 'dep_vars', ['ruledeps'])
7152+ if dep_vars:
7153+ tsk.dep_vars = dep_vars
7154+ if isinstance(self.rule, str):
7155+ tsk.env.ruledeps = self.rule
7156+ else:
7157+ # only works if the function is in a global module such as a waf tool
7158+ tsk.env.ruledeps = Utils.h_fun(self.rule)
7159+
7160+ # we assume that the user knows that without inputs or outputs
7161+ #if not getattr(self, 'target', None) and not getattr(self, 'source', None):
7162+ # cls.quiet = True
7163+
7164+ if getattr(self, 'target', None):
7165+ cls.quiet = True
7166+ tsk.outputs = [self.path.find_or_declare(x) for x in self.to_list(self.target)]
7167+
7168+ if getattr(self, 'source', None):
7169+ cls.quiet = True
7170+ tsk.inputs = []
7171+ for x in self.to_list(self.source):
7172+ y = self.path.find_resource(x)
7173+ if not y:
7174+ raise Utils.WafError('input file %r could not be found (%r)' % (x, self.path.abspath()))
7175+ tsk.inputs.append(y)
7176+
7177+ if self.allnodes:
7178+ tsk.inputs.extend(self.allnodes)
7179+
7180+ if getattr(self, 'scan', None):
7181+ cls.scan = self.scan
7182+
7183+ if getattr(self, 'install_path', None):
7184+ tsk.install_path = self.install_path
7185+
7186+ if getattr(self, 'cwd', None):
7187+ tsk.cwd = self.cwd
7188+
7189+ if getattr(self, 'on_results', None):
7190+ Task.update_outputs(cls)
7191+
7192+ if getattr(self, 'always', None):
7193+ Task.always_run(cls)
7194+
7195+ for x in ['after', 'before', 'ext_in', 'ext_out']:
7196+ setattr(cls, x, getattr(self, x, []))
7197+feature('*')(exec_rule)
7198+before('apply_core')(exec_rule)
7199+
7200+def sequence_order(self):
7201+ """
7202+ add a strict sequential constraint between the tasks generated by task generators
7203+ it uses the fact that task generators are posted in order
7204+ it will not post objects which belong to other folders
7205+ there is also an awesome trick for executing the method in last position
7206+
7207+ to use:
7208+ bld(features='javac seq')
7209+ bld(features='jar seq')
7210+
7211+ to start a new sequence, set the attribute seq_start, for example:
7212+ obj.seq_start = True
7213+ """
7214+ if self.meths and self.meths[-1] != 'sequence_order':
7215+ self.meths.append('sequence_order')
7216+ return
7217+
7218+ if getattr(self, 'seq_start', None):
7219+ return
7220+
7221+ # all the tasks previously declared must be run before these
7222+ if getattr(self.bld, 'prev', None):
7223+ self.bld.prev.post()
7224+ for x in self.bld.prev.tasks:
7225+ for y in self.tasks:
7226+ y.set_run_after(x)
7227+
7228+ self.bld.prev = self
7229+
7230+feature('seq')(sequence_order)
7231+
7232diff --git a/buildtools/wafadmin/Tools/__init__.py b/buildtools/wafadmin/Tools/__init__.py
7233new file mode 100644
7234index 0000000..bc6ca23
7235--- /dev/null
7236+++ b/buildtools/wafadmin/Tools/__init__.py
7237@@ -0,0 +1,4 @@
7238+#!/usr/bin/env python
7239+# encoding: utf-8
7240+# Thomas Nagy, 2006 (ita)
7241+
7242diff --git a/buildtools/wafadmin/Tools/ar.py b/buildtools/wafadmin/Tools/ar.py
7243new file mode 100644
7244index 0000000..af9b17f
7245--- /dev/null
7246+++ b/buildtools/wafadmin/Tools/ar.py
7247@@ -0,0 +1,36 @@
7248+#!/usr/bin/env python
7249+# encoding: utf-8
7250+# Thomas Nagy, 2006-2008 (ita)
7251+# Ralf Habacker, 2006 (rh)
7252+
7253+"ar and ranlib"
7254+
7255+import os, sys
7256+import Task, Utils
7257+from Configure import conftest
7258+
7259+ar_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
7260+cls = Task.simple_task_type('static_link', ar_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
7261+cls.maxjobs = 1
7262+cls.install = Utils.nada
7263+
7264+# remove the output in case it already exists
7265+old = cls.run
7266+def wrap(self):
7267+ try: os.remove(self.outputs[0].abspath(self.env))
7268+ except OSError: pass
7269+ return old(self)
7270+setattr(cls, 'run', wrap)
7271+
7272+def detect(conf):
7273+ conf.find_program('ar', var='AR')
7274+ conf.find_program('ranlib', var='RANLIB')
7275+ conf.env.ARFLAGS = 'rcs'
7276+
7277+@conftest
7278+def find_ar(conf):
7279+ v = conf.env
7280+ conf.check_tool('ar')
7281+ if not v['AR']: conf.fatal('ar is required for static libraries - not found')
7282+
7283+
7284diff --git a/buildtools/wafadmin/Tools/bison.py b/buildtools/wafadmin/Tools/bison.py
7285new file mode 100644
7286index 0000000..49c6051
7287--- /dev/null
7288+++ b/buildtools/wafadmin/Tools/bison.py
7289@@ -0,0 +1,38 @@
7290+#!/usr/bin/env python
7291+# encoding: utf-8
7292+# John O'Meara, 2006
7293+# Thomas Nagy 2009
7294+
7295+"Bison processing"
7296+
7297+import Task
7298+from TaskGen import extension
7299+
7300+bison = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
7301+cls = Task.simple_task_type('bison', bison, 'GREEN', ext_in='.yc .y .yy', ext_out='.c .cxx .h .l', shell=False)
7302+
7303+@extension(['.y', '.yc', '.yy'])
7304+def big_bison(self, node):
7305+ """when it becomes complicated (unlike flex), the old recipes work better (cwd)"""
7306+ has_h = '-d' in self.env['BISONFLAGS']
7307+
7308+ outs = []
7309+ if node.name.endswith('.yc'):
7310+ outs.append(node.change_ext('.tab.cc'))
7311+ if has_h:
7312+ outs.append(node.change_ext('.tab.hh'))
7313+ else:
7314+ outs.append(node.change_ext('.tab.c'))
7315+ if has_h:
7316+ outs.append(node.change_ext('.tab.h'))
7317+
7318+ tsk = self.create_task('bison', node, outs)
7319+ tsk.cwd = node.bld_dir(tsk.env)
7320+
7321+ # and the c/cxx file must be compiled too
7322+ self.allnodes.append(outs[0])
7323+
7324+def detect(conf):
7325+ bison = conf.find_program('bison', var='BISON', mandatory=True)
7326+ conf.env['BISONFLAGS'] = '-d'
7327+
7328diff --git a/buildtools/wafadmin/Tools/cc.py b/buildtools/wafadmin/Tools/cc.py
7329new file mode 100644
7330index 0000000..903a1c5
7331--- /dev/null
7332+++ b/buildtools/wafadmin/Tools/cc.py
7333@@ -0,0 +1,100 @@
7334+#!/usr/bin/env python
7335+# encoding: utf-8
7336+# Thomas Nagy, 2006 (ita)
7337+
7338+"Base for c programs/libraries"
7339+
7340+import os
7341+import TaskGen, Build, Utils, Task
7342+from Logs import debug
7343+import ccroot
7344+from TaskGen import feature, before, extension, after
7345+
7346+g_cc_flag_vars = [
7347+'CCDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
7348+'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
7349+'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CCDEFINES']
7350+
7351+EXT_CC = ['.c']
7352+
7353+g_cc_type_vars = ['CCFLAGS', 'LINKFLAGS']
7354+
7355+# TODO remove in waf 1.6
7356+class cc_taskgen(ccroot.ccroot_abstract):
7357+ pass
7358+
7359+@feature('cc')
7360+@before('apply_type_vars')
7361+@after('default_cc')
7362+def init_cc(self):
7363+ self.p_flag_vars = set(self.p_flag_vars).union(g_cc_flag_vars)
7364+ self.p_type_vars = set(self.p_type_vars).union(g_cc_type_vars)
7365+
7366+ if not self.env['CC_NAME']:
7367+ raise Utils.WafError("At least one compiler (gcc, ..) must be selected")
7368+
7369+@feature('cc')
7370+@after('apply_incpaths')
7371+def apply_obj_vars_cc(self):
7372+ """after apply_incpaths for INC_PATHS"""
7373+ env = self.env
7374+ app = env.append_unique
7375+ cpppath_st = env['CPPPATH_ST']
7376+
7377+ # local flags come first
7378+ # set the user-defined includes paths
7379+ for i in env['INC_PATHS']:
7380+ app('_CCINCFLAGS', cpppath_st % i.bldpath(env))
7381+ app('_CCINCFLAGS', cpppath_st % i.srcpath(env))
7382+
7383+ # set the library include paths
7384+ for i in env['CPPPATH']:
7385+ app('_CCINCFLAGS', cpppath_st % i)
7386+
7387+@feature('cc')
7388+@after('apply_lib_vars')
7389+def apply_defines_cc(self):
7390+ """after uselib is set for CCDEFINES"""
7391+ self.defines = getattr(self, 'defines', [])
7392+ lst = self.to_list(self.defines) + self.to_list(self.env['CCDEFINES'])
7393+ milst = []
7394+
7395+ # now process the local defines
7396+ for defi in lst:
7397+ if not defi in milst:
7398+ milst.append(defi)
7399+
7400+ # CCDEFINES_
7401+ libs = self.to_list(self.uselib)
7402+ for l in libs:
7403+ val = self.env['CCDEFINES_'+l]
7404+ if val: milst += val
7405+ self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
7406+ y = self.env['CCDEFINES_ST']
7407+ self.env.append_unique('_CCDEFFLAGS', [y%x for x in milst])
7408+
7409+@extension(EXT_CC)
7410+def c_hook(self, node):
7411+ # create the compilation task: cpp or cc
7412+ if getattr(self, 'obj_ext', None):
7413+ obj_ext = self.obj_ext
7414+ else:
7415+ obj_ext = '_%d.o' % self.idx
7416+
7417+ task = self.create_task('cc', node, node.change_ext(obj_ext))
7418+ try:
7419+ self.compiled_tasks.append(task)
7420+ except AttributeError:
7421+ raise Utils.WafError('Have you forgotten to set the feature "cc" on %s?' % str(self))
7422+ return task
7423+
7424+cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
7425+cls = Task.simple_task_type('cc', cc_str, 'GREEN', ext_out='.o', ext_in='.c', shell=False)
7426+cls.scan = ccroot.scan
7427+cls.vars.append('CCDEPS')
7428+
7429+link_str = '${LINK_CC} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
7430+cls = Task.simple_task_type('cc_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
7431+cls.maxjobs = 1
7432+cls.install = Utils.nada
7433+
7434diff --git a/buildtools/wafadmin/Tools/ccroot.py b/buildtools/wafadmin/Tools/ccroot.py
7435new file mode 100644
7436index 0000000..f54c82f
7437--- /dev/null
7438+++ b/buildtools/wafadmin/Tools/ccroot.py
7439@@ -0,0 +1,629 @@
7440+#!/usr/bin/env python
7441+# encoding: utf-8
7442+# Thomas Nagy, 2005-2008 (ita)
7443+
7444+"base for all c/c++ programs and libraries"
7445+
7446+import os, sys, re
7447+import TaskGen, Task, Utils, preproc, Logs, Build, Options
7448+from Logs import error, debug, warn
7449+from Utils import md5
7450+from TaskGen import taskgen, after, before, feature
7451+from Constants import *
7452+from Configure import conftest
7453+try:
7454+ from cStringIO import StringIO
7455+except ImportError:
7456+ from io import StringIO
7457+
7458+import config_c # <- necessary for the configuration, do not touch
7459+
7460+USE_TOP_LEVEL = False
7461+
7462+def get_cc_version(conf, cc, gcc=False, icc=False):
7463+
7464+ cmd = cc + ['-dM', '-E', '-']
7465+ try:
7466+ p = Utils.pproc.Popen(cmd, stdin=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
7467+ p.stdin.write('\n')
7468+ out = p.communicate()[0]
7469+ except:
7470+ conf.fatal('could not determine the compiler version %r' % cmd)
7471+
7472+ # PY3K: do not touch
7473+ out = str(out)
7474+
7475+ if gcc:
7476+ if out.find('__INTEL_COMPILER') >= 0:
7477+ conf.fatal('The intel compiler pretends to be gcc')
7478+ if out.find('__GNUC__') < 0:
7479+ conf.fatal('Could not determine the compiler type')
7480+
7481+ if icc and out.find('__INTEL_COMPILER') < 0:
7482+ conf.fatal('Not icc/icpc')
7483+
7484+ k = {}
7485+ if icc or gcc:
7486+ out = out.split('\n')
7487+ import shlex
7488+
7489+ for line in out:
7490+ lst = shlex.split(line)
7491+ if len(lst)>2:
7492+ key = lst[1]
7493+ val = lst[2]
7494+ k[key] = val
7495+
7496+ def isD(var):
7497+ return var in k
7498+
7499+ def isT(var):
7500+ return var in k and k[var] != '0'
7501+
7502+ # Some documentation is available at http://predef.sourceforge.net
7503+ # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
7504+ mp1 = {
7505+ '__linux__' : 'linux',
7506+ '__GNU__' : 'gnu',
7507+ '__FreeBSD__' : 'freebsd',
7508+ '__NetBSD__' : 'netbsd',
7509+ '__OpenBSD__' : 'openbsd',
7510+ '__sun' : 'sunos',
7511+ '__hpux' : 'hpux',
7512+ '__sgi' : 'irix',
7513+ '_AIX' : 'aix',
7514+ '__CYGWIN__' : 'cygwin',
7515+ '__MSYS__' : 'msys',
7516+ '_UWIN' : 'uwin',
7517+ '_WIN64' : 'win32',
7518+ '_WIN32' : 'win32',
7519+ '__POWERPC__' : 'powerpc',
7520+ }
7521+
7522+ for i in mp1:
7523+ if isD(i):
7524+ conf.env.DEST_OS = mp1[i]
7525+ break
7526+ else:
7527+ if isD('__APPLE__') and isD('__MACH__'):
7528+ conf.env.DEST_OS = 'darwin'
7529+ elif isD('__unix__'): # unix must be tested last as it's a generic fallback
7530+ conf.env.DEST_OS = 'generic'
7531+
7532+ if isD('__ELF__'):
7533+ conf.env.DEST_BINFMT = 'elf'
7534+ elif isD('__WINNT__') or isD('__CYGWIN__'):
7535+ conf.env.DEST_BINFMT = 'pe'
7536+ elif isD('__APPLE__'):
7537+ conf.env.DEST_BINFMT = 'mac-o'
7538+
7539+ mp2 = {
7540+ '__x86_64__' : 'x86_64',
7541+ '__i386__' : 'x86',
7542+ '__ia64__' : 'ia',
7543+ '__mips__' : 'mips',
7544+ '__sparc__' : 'sparc',
7545+ '__alpha__' : 'alpha',
7546+ '__arm__' : 'arm',
7547+ '__hppa__' : 'hppa',
7548+ '__powerpc__' : 'powerpc',
7549+ }
7550+ for i in mp2:
7551+ if isD(i):
7552+ conf.env.DEST_CPU = mp2[i]
7553+ break
7554+
7555+ debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
7556+ conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
7557+ return k
7558+
7559+class DEBUG_LEVELS:
7560+ """Will disappear in waf 1.6"""
7561+ ULTRADEBUG = "ultradebug"
7562+ DEBUG = "debug"
7563+ RELEASE = "release"
7564+ OPTIMIZED = "optimized"
7565+ CUSTOM = "custom"
7566+
7567+ ALL = [ULTRADEBUG, DEBUG, RELEASE, OPTIMIZED, CUSTOM]
7568+
7569+def scan(self):
7570+	"look for the .h files that the .c/.cpp files need"
7571+ debug('ccroot: _scan_preprocessor(self, node, env, path_lst)')
7572+
7573+ # TODO waf 1.6 - assume the default input has exactly one file
7574+
7575+ if len(self.inputs) == 1:
7576+ node = self.inputs[0]
7577+ (nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
7578+ if Logs.verbose:
7579+ debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
7580+ return (nodes, names)
7581+
7582+ all_nodes = []
7583+ all_names = []
7584+ seen = set()
7585+ for node in self.inputs:
7586+ (nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
7587+ if Logs.verbose:
7588+ debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
7589+ for x in nodes:
7590+ if id(x) in seen: continue
7591+ seen.add(id(x))
7592+ all_nodes.append(x)
7593+ for x in names:
7594+ if not x in all_names:
7595+ all_names.append(x)
7596+ return (all_nodes, all_names)
7597+
7598+class ccroot_abstract(TaskGen.task_gen):
7599+ "Parent class for programs and libraries in languages c, c++ and moc (Qt)"
7600+ def __init__(self, *k, **kw):
7601+ # COMPAT remove in waf 1.6 TODO
7602+ if len(k) > 1:
7603+ k = list(k)
7604+ if k[1][0] != 'c':
7605+ k[1] = 'c' + k[1]
7606+ TaskGen.task_gen.__init__(self, *k, **kw)
7607+
7608+def get_target_name(self):
7609+ tp = 'program'
7610+ for x in self.features:
7611+ if x in ['cshlib', 'cstaticlib']:
7612+ tp = x.lstrip('c')
7613+
7614+ pattern = self.env[tp + '_PATTERN']
7615+ if not pattern: pattern = '%s'
7616+
7617+ dir, name = os.path.split(self.target)
7618+
7619+ if self.env.DEST_BINFMT == 'pe' and getattr(self, 'vnum', None) and 'cshlib' in self.features:
7620+ # include the version in the dll file name,
7621+		# the import lib file name stays unversioned.
7622+ name = name + '-' + self.vnum.split('.')[0]
7623+
7624+ return os.path.join(dir, pattern % name)
7625+
7626+@feature('cc', 'cxx')
7627+@before('apply_core')
7628+def default_cc(self):
7629+ """compiled_tasks attribute must be set before the '.c->.o' tasks can be created"""
7630+ Utils.def_attrs(self,
7631+ includes = '',
7632+ defines= '',
7633+ rpaths = '',
7634+ uselib = '',
7635+ uselib_local = '',
7636+ add_objects = '',
7637+ p_flag_vars = [],
7638+ p_type_vars = [],
7639+ compiled_tasks = [],
7640+ link_task = None)
7641+
7642+ # The only thing we need for cross-compilation is DEST_BINFMT.
7643+ # At some point, we may reach a case where DEST_BINFMT is not enough, but for now it's sufficient.
7644+ # Currently, cross-compilation is auto-detected only for the gnu and intel compilers.
7645+ if not self.env.DEST_BINFMT:
7646+ # Infer the binary format from the os name.
7647+ self.env.DEST_BINFMT = Utils.unversioned_sys_platform_to_binary_format(
7648+ self.env.DEST_OS or Utils.unversioned_sys_platform())
7649+
7650+ if not self.env.BINDIR: self.env.BINDIR = Utils.subst_vars('${PREFIX}/bin', self.env)
7651+ if not self.env.LIBDIR: self.env.LIBDIR = Utils.subst_vars('${PREFIX}/lib${LIB_EXT}', self.env)
7652+
7653+@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
7654+def apply_verif(self):
7655+ """no particular order, used for diagnostic"""
7656+ if not (self.source or getattr(self, 'add_objects', None) or getattr(self, 'uselib_local', None) or getattr(self, 'obj_files', None)):
7657+ raise Utils.WafError('no source files specified for %s' % self)
7658+ if not self.target:
7659+ raise Utils.WafError('no target for %s' % self)
7660+
7661+# TODO reference the d programs, shlibs in d.py, not here
7662+
7663+@feature('cprogram', 'dprogram')
7664+@after('default_cc')
7665+@before('apply_core')
7666+def vars_target_cprogram(self):
7667+ self.default_install_path = self.env.BINDIR
7668+ self.default_chmod = O755
7669+
7670+@after('default_cc')
7671+@feature('cshlib', 'dshlib')
7672+@before('apply_core')
7673+def vars_target_cshlib(self):
7674+ if self.env.DEST_BINFMT == 'pe':
7675+ # set execute bit on libs to avoid 'permission denied' (issue 283)
7676+ self.default_chmod = O755
7677+ self.default_install_path = self.env.BINDIR
7678+ else:
7679+ self.default_install_path = self.env.LIBDIR
7680+
7681+@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
7682+@after('apply_link', 'vars_target_cprogram', 'vars_target_cshlib')
7683+def default_link_install(self):
7684+ """you may kill this method to inject your own installation for the first element
7685+ any other install should only process its own nodes and not those from the others"""
7686+ if self.install_path:
7687+ self.bld.install_files(self.install_path, self.link_task.outputs[0], env=self.env, chmod=self.chmod)
7688+
7689+@feature('cc', 'cxx')
7690+@after('apply_type_vars', 'apply_lib_vars', 'apply_core')
7691+def apply_incpaths(self):
7692+ """used by the scanner
7693+ after processing the uselib for CPPPATH
7694+ after apply_core because some processing may add include paths
7695+ """
7696+ lst = []
7697+ # TODO move the uselib processing out of here
7698+ for lib in self.to_list(self.uselib):
7699+ for path in self.env['CPPPATH_' + lib]:
7700+ if not path in lst:
7701+ lst.append(path)
7702+ if preproc.go_absolute:
7703+ for path in preproc.standard_includes:
7704+ if not path in lst:
7705+ lst.append(path)
7706+
7707+ for path in self.to_list(self.includes):
7708+ if not path in lst:
7709+ if preproc.go_absolute or not os.path.isabs(path):
7710+ lst.append(path)
7711+ else:
7712+ self.env.prepend_value('CPPPATH', path)
7713+
7714+ for path in lst:
7715+ node = None
7716+ if os.path.isabs(path):
7717+ if preproc.go_absolute:
7718+ node = self.bld.root.find_dir(path)
7719+ elif path[0] == '#':
7720+ node = self.bld.srcnode
7721+ if len(path) > 1:
7722+ node = node.find_dir(path[1:])
7723+ else:
7724+ node = self.path.find_dir(path)
7725+
7726+ if node:
7727+ self.env.append_value('INC_PATHS', node)
7728+
7729+ # TODO WAF 1.6
7730+ if USE_TOP_LEVEL:
7731+ self.env.append_value('INC_PATHS', self.bld.srcnode)
7732+
7733+@feature('cc', 'cxx')
7734+@after('init_cc', 'init_cxx')
7735+@before('apply_lib_vars')
7736+def apply_type_vars(self):
7737+ """before apply_lib_vars because we modify uselib
7738+	after init_cc and init_cxx because we need p_type_vars
7739+ """
7740+ for x in self.features:
7741+ if not x in ['cprogram', 'cstaticlib', 'cshlib']:
7742+ continue
7743+ x = x.lstrip('c')
7744+
7745+ # if the type defines uselib to add, add them
7746+ st = self.env[x + '_USELIB']
7747+ if st: self.uselib = self.uselib + ' ' + st
7748+
7749+ # each compiler defines variables like 'shlib_CXXFLAGS', 'shlib_LINKFLAGS', etc
7750+ # so when we make a task generator of the type shlib, CXXFLAGS are modified accordingly
7751+ for var in self.p_type_vars:
7752+ compvar = '%s_%s' % (x, var)
7753+ #print compvar
7754+ value = self.env[compvar]
7755+ if value: self.env.append_value(var, value)
7756+
7757+@feature('cprogram', 'cshlib', 'cstaticlib')
7758+@after('apply_core')
7759+def apply_link(self):
7760+ """executes after apply_core for collecting 'compiled_tasks'
7761+ use a custom linker if specified (self.link='name-of-custom-link-task')"""
7762+ link = getattr(self, 'link', None)
7763+ if not link:
7764+ if 'cstaticlib' in self.features: link = 'static_link'
7765+ elif 'cxx' in self.features: link = 'cxx_link'
7766+ else: link = 'cc_link'
7767+
7768+ tsk = self.create_task(link)
7769+ outputs = [t.outputs[0] for t in self.compiled_tasks]
7770+ tsk.set_inputs(outputs)
7771+ tsk.set_outputs(self.path.find_or_declare(get_target_name(self)))
7772+
7773+ self.link_task = tsk
7774+
7775+@feature('cc', 'cxx')
7776+@after('apply_link', 'init_cc', 'init_cxx', 'apply_core')
7777+def apply_lib_vars(self):
7778+ """after apply_link because of 'link_task'
7779+ after default_cc because of the attribute 'uselib'"""
7780+
7781+	# after 'apply_core' in the case of 'cc' when there is no link task
7782+
7783+ env = self.env
7784+
7785+ # 1. the case of the libs defined in the project (visit ancestors first)
7786+ # the ancestors external libraries (uselib) will be prepended
7787+ self.uselib = self.to_list(self.uselib)
7788+ names = self.to_list(self.uselib_local)
7789+
7790+ seen = set([])
7791+ tmp = Utils.deque(names) # consume a copy of the list of names
7792+ while tmp:
7793+ lib_name = tmp.popleft()
7794+ # visit dependencies only once
7795+ if lib_name in seen:
7796+ continue
7797+
7798+ y = self.name_to_obj(lib_name)
7799+ if not y:
7800+ raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
7801+ y.post()
7802+ seen.add(lib_name)
7803+
7804+ # object has ancestors to process (shared libraries): add them to the end of the list
7805+ if getattr(y, 'uselib_local', None):
7806+ lst = y.to_list(y.uselib_local)
7807+ if 'cshlib' in y.features or 'cprogram' in y.features:
7808+ lst = [x for x in lst if not 'cstaticlib' in self.name_to_obj(x).features]
7809+ tmp.extend(lst)
7810+
7811+ # link task and flags
7812+ if getattr(y, 'link_task', None):
7813+
7814+ link_name = y.target[y.target.rfind(os.sep) + 1:]
7815+ if 'cstaticlib' in y.features:
7816+ env.append_value('STATICLIB', link_name)
7817+ elif 'cshlib' in y.features or 'cprogram' in y.features:
7818+ # WARNING some linkers can link against programs
7819+ env.append_value('LIB', link_name)
7820+
7821+ # the order
7822+ self.link_task.set_run_after(y.link_task)
7823+
7824+ # for the recompilation
7825+ dep_nodes = getattr(self.link_task, 'dep_nodes', [])
7826+ self.link_task.dep_nodes = dep_nodes + y.link_task.outputs
7827+
7828+ # add the link path too
7829+ tmp_path = y.link_task.outputs[0].parent.bldpath(self.env)
7830+ if not tmp_path in env['LIBPATH']: env.prepend_value('LIBPATH', tmp_path)
7831+
7832+ # add ancestors uselib too - but only propagate those that have no staticlib
7833+ for v in self.to_list(y.uselib):
7834+ if not env['STATICLIB_' + v]:
7835+ if not v in self.uselib:
7836+ self.uselib.insert(0, v)
7837+
7838+ # if the library task generator provides 'export_incdirs', add to the include path
7839+ # the export_incdirs must be a list of paths relative to the other library
7840+ if getattr(y, 'export_incdirs', None):
7841+ for x in self.to_list(y.export_incdirs):
7842+ node = y.path.find_dir(x)
7843+ if not node:
7844+ raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x))
7845+ self.env.append_unique('INC_PATHS', node)
7846+
7847+ # 2. the case of the libs defined outside
7848+ for x in self.uselib:
7849+ for v in self.p_flag_vars:
7850+ val = self.env[v + '_' + x]
7851+ if val: self.env.append_value(v, val)
7852+
7853+@feature('cprogram', 'cstaticlib', 'cshlib')
7854+@after('init_cc', 'init_cxx', 'apply_link')
7855+def apply_objdeps(self):
7856+	"add the .o files produced by other task generators, in the same manner as uselib_local"
7857+ if not getattr(self, 'add_objects', None): return
7858+
7859+ seen = []
7860+ names = self.to_list(self.add_objects)
7861+ while names:
7862+ x = names[0]
7863+
7864+ # visit dependencies only once
7865+ if x in seen:
7866+ names = names[1:]
7867+ continue
7868+
7869+ # object does not exist ?
7870+ y = self.name_to_obj(x)
7871+ if not y:
7872+ raise Utils.WafError('object %r was not found in uselib_local (required by add_objects %r)' % (x, self.name))
7873+
7874+ # object has ancestors to process first ? update the list of names
7875+ if getattr(y, 'add_objects', None):
7876+ added = 0
7877+ lst = y.to_list(y.add_objects)
7878+ lst.reverse()
7879+ for u in lst:
7880+ if u in seen: continue
7881+ added = 1
7882+ names = [u]+names
7883+ if added: continue # list of names modified, loop
7884+
7885+ # safe to process the current object
7886+ y.post()
7887+ seen.append(x)
7888+
7889+ for t in y.compiled_tasks:
7890+ self.link_task.inputs.extend(t.outputs)
7891+
7892+@feature('cprogram', 'cshlib', 'cstaticlib')
7893+@after('apply_lib_vars')
7894+def apply_obj_vars(self):
7895+ """after apply_lib_vars for uselib"""
7896+ v = self.env
7897+ lib_st = v['LIB_ST']
7898+ staticlib_st = v['STATICLIB_ST']
7899+ libpath_st = v['LIBPATH_ST']
7900+ staticlibpath_st = v['STATICLIBPATH_ST']
7901+ rpath_st = v['RPATH_ST']
7902+
7903+ app = v.append_unique
7904+
7905+ if v['FULLSTATIC']:
7906+ v.append_value('LINKFLAGS', v['FULLSTATIC_MARKER'])
7907+
7908+ for i in v['RPATH']:
7909+ if i and rpath_st:
7910+ app('LINKFLAGS', rpath_st % i)
7911+
7912+ for i in v['LIBPATH']:
7913+ app('LINKFLAGS', libpath_st % i)
7914+ app('LINKFLAGS', staticlibpath_st % i)
7915+
7916+ if v['STATICLIB']:
7917+ v.append_value('LINKFLAGS', v['STATICLIB_MARKER'])
7918+ k = [(staticlib_st % i) for i in v['STATICLIB']]
7919+ app('LINKFLAGS', k)
7920+
7921+ # fully static binaries ?
7922+ if not v['FULLSTATIC']:
7923+ if v['STATICLIB'] or v['LIB']:
7924+ v.append_value('LINKFLAGS', v['SHLIB_MARKER'])
7925+
7926+ app('LINKFLAGS', [lib_st % i for i in v['LIB']])
7927+
7928+@after('apply_link')
7929+def process_obj_files(self):
7930+ if not hasattr(self, 'obj_files'): return
7931+ for x in self.obj_files:
7932+ node = self.path.find_resource(x)
7933+ self.link_task.inputs.append(node)
7934+
7935+@taskgen
7936+def add_obj_file(self, file):
7937+ """Small example on how to link object files as if they were source
7938+ obj = bld.create_obj('cc')
7939+ obj.add_obj_file('foo.o')"""
7940+ if not hasattr(self, 'obj_files'): self.obj_files = []
7941+ if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files')
7942+ self.obj_files.append(file)
7943+
7944+c_attrs = {
7945+'cxxflag' : 'CXXFLAGS',
7946+'cflag' : 'CCFLAGS',
7947+'ccflag' : 'CCFLAGS',
7948+'linkflag' : 'LINKFLAGS',
7949+'ldflag' : 'LINKFLAGS',
7950+'lib' : 'LIB',
7951+'libpath' : 'LIBPATH',
7952+'staticlib': 'STATICLIB',
7953+'staticlibpath': 'STATICLIBPATH',
7954+'rpath' : 'RPATH',
7955+'framework' : 'FRAMEWORK',
7956+'frameworkpath' : 'FRAMEWORKPATH'
7957+}
7958+
7959+@feature('cc', 'cxx')
7960+@before('init_cxx', 'init_cc')
7961+@before('apply_lib_vars', 'apply_obj_vars', 'apply_incpaths', 'init_cc')
7962+def add_extra_flags(self):
7963+ """case and plural insensitive
7964+ before apply_obj_vars for processing the library attributes
7965+ """
7966+ for x in self.__dict__.keys():
7967+ y = x.lower()
7968+ if y[-1] == 's':
7969+ y = y[:-1]
7970+ if c_attrs.get(y, None):
7971+ self.env.append_unique(c_attrs[y], getattr(self, x))
7972+
7973+# ============ the code above must not know anything about import libs ==========
7974+
7975+@feature('cshlib')
7976+@after('apply_link', 'default_cc')
7977+@before('apply_lib_vars', 'apply_objdeps', 'default_link_install')
7978+def apply_implib(self):
7979+ """On mswindows, handle dlls and their import libs
7980+ the .dll.a is the import lib and it is required for linking so it is installed too
7981+ """
7982+ if not self.env.DEST_BINFMT == 'pe':
7983+ return
7984+
7985+ self.meths.remove('default_link_install')
7986+
7987+ bindir = self.install_path
7988+ if not bindir: return
7989+
7990+ # install the dll in the bin dir
7991+ dll = self.link_task.outputs[0]
7992+ self.bld.install_files(bindir, dll, self.env, self.chmod)
7993+
7994+ # add linker flags to generate the import lib
7995+ implib = self.env['implib_PATTERN'] % os.path.split(self.target)[1]
7996+
7997+ implib = dll.parent.find_or_declare(implib)
7998+ self.link_task.outputs.append(implib)
7999+ self.bld.install_as('${LIBDIR}/%s' % implib.name, implib, self.env)
8000+
8001+ self.env.append_value('LINKFLAGS', (self.env['IMPLIB_ST'] % implib.bldpath(self.env)).split())
8002+
8003+# ============ the code above must not know anything about vnum processing on unix platforms =========
8004+
8005+@feature('cshlib')
8006+@after('apply_link')
8007+@before('apply_lib_vars', 'default_link_install')
8008+def apply_vnum(self):
8009+ """
8010+ libfoo.so is installed as libfoo.so.1.2.3
8011+ """
8012+ if not getattr(self, 'vnum', '') or not 'cshlib' in self.features or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
8013+ return
8014+
8015+ self.meths.remove('default_link_install')
8016+
8017+ link = self.link_task
8018+ nums = self.vnum.split('.')
8019+ node = link.outputs[0]
8020+
8021+ libname = node.name
8022+ if libname.endswith('.dylib'):
8023+ name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
8024+ name2 = libname.replace('.dylib', '.%s.dylib' % nums[0])
8025+ else:
8026+ name3 = libname + '.' + self.vnum
8027+ name2 = libname + '.' + nums[0]
8028+
8029+ if self.env.SONAME_ST:
8030+ v = self.env.SONAME_ST % name2
8031+ self.env.append_value('LINKFLAGS', v.split())
8032+
8033+ bld = self.bld
8034+ nums = self.vnum.split('.')
8035+
8036+ path = self.install_path
8037+ if not path: return
8038+
8039+ bld.install_as(path + os.sep + name3, node, env=self.env)
8040+ bld.symlink_as(path + os.sep + name2, name3)
8041+ bld.symlink_as(path + os.sep + libname, name3)
8042+
8043+ # the following task is just to enable execution from the build dir :-/
8044+ self.create_task('vnum', node, [node.parent.find_or_declare(name2), node.parent.find_or_declare(name3)])
8045+
8046+def exec_vnum_link(self):
8047+ for x in self.outputs:
8048+ path = x.abspath(self.env)
8049+ try:
8050+ os.remove(path)
8051+ except OSError:
8052+ pass
8053+
8054+ try:
8055+ os.symlink(self.inputs[0].name, path)
8056+ except OSError:
8057+ return 1
8058+
8059+cls = Task.task_type_from_func('vnum', func=exec_vnum_link, ext_in='.bin', color='CYAN')
8060+cls.quiet = 1
8061+
8062+# ============ the --as-needed flag should be added during the configuration, not at runtime =========
8063+
8064+@conftest
8065+def add_as_needed(conf):
8066+ if conf.env.DEST_BINFMT == 'elf' and 'gcc' in (conf.env.CXX_NAME, conf.env.CC_NAME):
8067+ conf.env.append_unique('LINKFLAGS', '--as-needed')
8068+
8069diff --git a/buildtools/wafadmin/Tools/compiler_cc.py b/buildtools/wafadmin/Tools/compiler_cc.py
8070new file mode 100644
8071index 0000000..0421503
8072--- /dev/null
8073+++ b/buildtools/wafadmin/Tools/compiler_cc.py
8074@@ -0,0 +1,67 @@
8075+#!/usr/bin/env python
8076+# encoding: utf-8
8077+# Matthias Jahn jahn dôt matthias ât freenet dôt de, 2007 (pmarat)
8078+
8079+import os, sys, imp, types, ccroot
8080+import optparse
8081+import Utils, Configure, Options
8082+from Logs import debug
8083+
8084+c_compiler = {
8085+ 'win32': ['msvc', 'gcc'],
8086+ 'cygwin': ['gcc'],
8087+ 'darwin': ['gcc'],
8088+ 'aix': ['xlc', 'gcc'],
8089+ 'linux': ['gcc', 'icc', 'suncc'],
8090+ 'sunos': ['gcc', 'suncc'],
8091+ 'irix': ['gcc'],
8092+ 'hpux': ['gcc'],
8093+ 'gnu': ['gcc'],
8094+ 'default': ['gcc']
8095+}
8096+
8097+def __list_possible_compiler(platform):
8098+ try:
8099+ return c_compiler[platform]
8100+ except KeyError:
8101+ return c_compiler["default"]
8102+
8103+def detect(conf):
8104+ """
8105+ for each compiler for the platform, try to configure the compiler
8106+ in theory the tools should raise a configuration error if the compiler
8107+ pretends to be something it is not (setting CC=icc and trying to configure gcc)
8108+ """
8109+ try: test_for_compiler = Options.options.check_c_compiler
8110+ except AttributeError: conf.fatal("Add set_options(opt): opt.tool_options('compiler_cc')")
8111+ orig = conf.env
8112+ for compiler in test_for_compiler.split():
8113+ conf.env = orig.copy()
8114+ try:
8115+ conf.check_tool(compiler)
8116+ except Configure.ConfigurationError, e:
8117+ debug('compiler_cc: %r' % e)
8118+ else:
8119+ if conf.env['CC']:
8120+ orig.table = conf.env.get_merged_dict()
8121+ conf.env = orig
8122+ conf.check_message(compiler, '', True)
8123+ conf.env['COMPILER_CC'] = compiler
8124+ break
8125+ conf.check_message(compiler, '', False)
8126+ break
8127+ else:
8128+ conf.fatal('could not configure a c compiler!')
8129+
8130+def set_options(opt):
8131+ build_platform = Utils.unversioned_sys_platform()
8132+ possible_compiler_list = __list_possible_compiler(build_platform)
8133+ test_for_compiler = ' '.join(possible_compiler_list)
8134+ cc_compiler_opts = opt.add_option_group("C Compiler Options")
8135+ cc_compiler_opts.add_option('--check-c-compiler', default="%s" % test_for_compiler,
8136+ help='On this platform (%s) the following C-Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
8137+ dest="check_c_compiler")
8138+
8139+ for c_compiler in test_for_compiler.split():
8140+ opt.tool_options('%s' % c_compiler, option_group=cc_compiler_opts)
8141+
8142diff --git a/buildtools/wafadmin/Tools/compiler_cxx.py b/buildtools/wafadmin/Tools/compiler_cxx.py
8143new file mode 100644
8144index 0000000..5308ea9
8145--- /dev/null
8146+++ b/buildtools/wafadmin/Tools/compiler_cxx.py
8147@@ -0,0 +1,62 @@
8148+#!/usr/bin/env python
8149+# encoding: utf-8
8150+# Matthias Jahn jahn dôt matthias ât freenet dôt de 2007 (pmarat)
8151+
8152+import os, sys, imp, types, ccroot
8153+import optparse
8154+import Utils, Configure, Options
8155+from Logs import debug
8156+
8157+cxx_compiler = {
8158+'win32': ['msvc', 'g++'],
8159+'cygwin': ['g++'],
8160+'darwin': ['g++'],
8161+'aix': ['xlc++', 'g++'],
8162+'linux': ['g++', 'icpc', 'sunc++'],
8163+'sunos': ['g++', 'sunc++'],
8164+'irix': ['g++'],
8165+'hpux': ['g++'],
8166+'gnu': ['g++'],
8167+'default': ['g++']
8168+}
8169+
8170+def __list_possible_compiler(platform):
8171+ try:
8172+ return cxx_compiler[platform]
8173+ except KeyError:
8174+ return cxx_compiler["default"]
8175+
8176+def detect(conf):
8177+ try: test_for_compiler = Options.options.check_cxx_compiler
8178+ except AttributeError: raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cxx')")
8179+ orig = conf.env
8180+ for compiler in test_for_compiler.split():
8181+ try:
8182+ conf.env = orig.copy()
8183+ conf.check_tool(compiler)
8184+ except Configure.ConfigurationError, e:
8185+ debug('compiler_cxx: %r' % e)
8186+ else:
8187+ if conf.env['CXX']:
8188+ orig.table = conf.env.get_merged_dict()
8189+ conf.env = orig
8190+ conf.check_message(compiler, '', True)
8191+ conf.env['COMPILER_CXX'] = compiler
8192+ break
8193+ conf.check_message(compiler, '', False)
8194+ break
8195+ else:
8196+ conf.fatal('could not configure a cxx compiler!')
8197+
8198+def set_options(opt):
8199+ build_platform = Utils.unversioned_sys_platform()
8200+ possible_compiler_list = __list_possible_compiler(build_platform)
8201+ test_for_compiler = ' '.join(possible_compiler_list)
8202+ cxx_compiler_opts = opt.add_option_group('C++ Compiler Options')
8203+ cxx_compiler_opts.add_option('--check-cxx-compiler', default="%s" % test_for_compiler,
8204+ help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
8205+ dest="check_cxx_compiler")
8206+
8207+ for cxx_compiler in test_for_compiler.split():
8208+ opt.tool_options('%s' % cxx_compiler, option_group=cxx_compiler_opts)
8209+
8210diff --git a/buildtools/wafadmin/Tools/compiler_d.py b/buildtools/wafadmin/Tools/compiler_d.py
8211new file mode 100644
8212index 0000000..1ea5efa
8213--- /dev/null
8214+++ b/buildtools/wafadmin/Tools/compiler_d.py
8215@@ -0,0 +1,33 @@
8216+#!/usr/bin/env python
8217+# encoding: utf-8
8218+# Carlos Rafael Giani, 2007 (dv)
8219+
8220+import os, sys, imp, types
8221+import Utils, Configure, Options
8222+
8223+def detect(conf):
8224+ if getattr(Options.options, 'check_dmd_first', None):
8225+ test_for_compiler = ['dmd', 'gdc']
8226+ else:
8227+ test_for_compiler = ['gdc', 'dmd']
8228+
8229+ for d_compiler in test_for_compiler:
8230+ try:
8231+ conf.check_tool(d_compiler)
8232+ except:
8233+ pass
8234+ else:
8235+ break
8236+ else:
8237+ conf.fatal('no suitable d compiler was found')
8238+
8239+def set_options(opt):
8240+ d_compiler_opts = opt.add_option_group('D Compiler Options')
8241+ d_compiler_opts.add_option('--check-dmd-first', action='store_true',
8242+ help='checks for the gdc compiler before dmd (default is the other way round)',
8243+ dest='check_dmd_first',
8244+ default=False)
8245+
8246+ for d_compiler in ['gdc', 'dmd']:
8247+ opt.tool_options('%s' % d_compiler, option_group=d_compiler_opts)
8248+
8249diff --git a/buildtools/wafadmin/Tools/config_c.py b/buildtools/wafadmin/Tools/config_c.py
8250new file mode 100644
8251index 0000000..a32d8aa
8252--- /dev/null
8253+++ b/buildtools/wafadmin/Tools/config_c.py
8254@@ -0,0 +1,736 @@
8255+#!/usr/bin/env python
8256+# encoding: utf-8
8257+# Thomas Nagy, 2005-2008 (ita)
8258+
8259+"""
8260+c/c++ configuration routines
8261+"""
8262+
8263+import os, imp, sys, shlex, shutil
8264+from Utils import md5
8265+import Build, Utils, Configure, Task, Options, Logs, TaskGen
8266+from Constants import *
8267+from Configure import conf, conftest
8268+
8269+cfg_ver = {
8270+ 'atleast-version': '>=',
8271+ 'exact-version': '==',
8272+ 'max-version': '<=',
8273+}
8274+
8275+SNIP1 = '''
8276+ int main() {
8277+ void *p;
8278+ p=(void*)(%s);
8279+ return 0;
8280+}
8281+'''
8282+
8283+SNIP2 = '''
8284+int main() {
8285+ if ((%(type_name)s *) 0) return 0;
8286+ if (sizeof (%(type_name)s)) return 0;
8287+}
8288+'''
8289+
8290+SNIP3 = '''
8291+int main() {
8292+ return 0;
8293+}
8294+'''
8295+
8296+def parse_flags(line, uselib, env):
8297+	"""pkg-config still has bugs on some platforms, and there are many -config programs, so parsing flags is necessary :-/"""
8298+
8299+ lst = shlex.split(line)
8300+ while lst:
8301+ x = lst.pop(0)
8302+ st = x[:2]
8303+ ot = x[2:]
8304+ app = env.append_value
8305+ if st == '-I' or st == '/I':
8306+ if not ot: ot = lst.pop(0)
8307+ app('CPPPATH_' + uselib, ot)
8308+ elif st == '-D':
8309+ if not ot: ot = lst.pop(0)
8310+ app('CXXDEFINES_' + uselib, ot)
8311+ app('CCDEFINES_' + uselib, ot)
8312+ elif st == '-l':
8313+ if not ot: ot = lst.pop(0)
8314+ app('LIB_' + uselib, ot)
8315+ elif st == '-L':
8316+ if not ot: ot = lst.pop(0)
8317+ app('LIBPATH_' + uselib, ot)
8318+ elif x == '-pthread' or x.startswith('+'):
8319+ app('CCFLAGS_' + uselib, x)
8320+ app('CXXFLAGS_' + uselib, x)
8321+ app('LINKFLAGS_' + uselib, x)
8322+ elif x == '-framework':
8323+ app('FRAMEWORK_' + uselib, lst.pop(0))
8324+ elif x.startswith('-F'):
8325+ app('FRAMEWORKPATH_' + uselib, x[2:])
8326+ elif x.startswith('-std'):
8327+ app('CCFLAGS_' + uselib, x)
8328+ app('CXXFLAGS_' + uselib, x)
8329+ app('LINKFLAGS_' + uselib, x)
8330+ elif x.startswith('-Wl'):
8331+ app('LINKFLAGS_' + uselib, x)
8332+ elif x.startswith('-m') or x.startswith('-f'):
8333+ app('CCFLAGS_' + uselib, x)
8334+ app('CXXFLAGS_' + uselib, x)
8335+
8336+@conf
8337+def ret_msg(self, f, kw):
8338+ """execute a function, when provided"""
8339+ if isinstance(f, str):
8340+ return f
8341+ return f(kw)
8342+
8343+@conf
8344+def validate_cfg(self, kw):
8345+ if not 'path' in kw:
8346+ kw['path'] = 'pkg-config --errors-to-stdout --print-errors'
8347+
8348+ # pkg-config version
8349+ if 'atleast_pkgconfig_version' in kw:
8350+ if not 'msg' in kw:
8351+ kw['msg'] = 'Checking for pkg-config version >= %s' % kw['atleast_pkgconfig_version']
8352+ return
8353+
8354+ # pkg-config --modversion
8355+ if 'modversion' in kw:
8356+ return
8357+
8358+ if 'variables' in kw:
8359+ if not 'msg' in kw:
8360+ kw['msg'] = 'Checking for %s variables' % kw['package']
8361+ return
8362+
8363+ # checking for the version of a module, for the moment, one thing at a time
8364+ for x in cfg_ver.keys():
8365+ y = x.replace('-', '_')
8366+ if y in kw:
8367+ if not 'package' in kw:
8368+ raise ValueError('%s requires a package' % x)
8369+
8370+ if not 'msg' in kw:
8371+ kw['msg'] = 'Checking for %s %s %s' % (kw['package'], cfg_ver[x], kw[y])
8372+ return
8373+
8374+ if not 'msg' in kw:
8375+ kw['msg'] = 'Checking for %s' % (kw['package'] or kw['path'])
8376+ if not 'okmsg' in kw:
8377+ kw['okmsg'] = 'yes'
8378+ if not 'errmsg' in kw:
8379+ kw['errmsg'] = 'not found'
8380+
8381+@conf
8382+def cmd_and_log(self, cmd, kw):
8383+ Logs.debug('runner: %s\n' % cmd)
8384+ if self.log:
8385+ self.log.write('%s\n' % cmd)
8386+
8387+ try:
8388+ p = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE, shell=True)
8389+ (out, err) = p.communicate()
8390+ except OSError, e:
8391+ self.log.write('error %r' % e)
8392+ self.fatal(str(e))
8393+
8394+ # placeholder, don't touch
8395+ out = str(out)
8396+ err = str(err)
8397+
8398+ if self.log:
8399+ self.log.write(out)
8400+ self.log.write(err)
8401+
8402+ if p.returncode:
8403+ if not kw.get('errmsg', ''):
8404+ if kw.get('mandatory', False):
8405+ kw['errmsg'] = out.strip()
8406+ else:
8407+ kw['errmsg'] = 'no'
8408+ self.fatal('fail')
8409+ return out
8410+
8411+@conf
8412+def exec_cfg(self, kw):
8413+
8414+ # pkg-config version
8415+ if 'atleast_pkgconfig_version' in kw:
8416+ cmd = '%s --atleast-pkgconfig-version=%s' % (kw['path'], kw['atleast_pkgconfig_version'])
8417+ self.cmd_and_log(cmd, kw)
8418+ if not 'okmsg' in kw:
8419+ kw['okmsg'] = 'yes'
8420+ return
8421+
8422+ # checking for the version of a module
8423+ for x in cfg_ver:
8424+ y = x.replace('-', '_')
8425+ if y in kw:
8426+ self.cmd_and_log('%s --%s=%s %s' % (kw['path'], x, kw[y], kw['package']), kw)
8427+ if not 'okmsg' in kw:
8428+ kw['okmsg'] = 'yes'
8429+ self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
8430+ break
8431+
8432+ # retrieving the version of a module
8433+ if 'modversion' in kw:
8434+ version = self.cmd_and_log('%s --modversion %s' % (kw['path'], kw['modversion']), kw).strip()
8435+ self.define('%s_VERSION' % Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])), version)
8436+ return version
8437+
8438+ # retrieving variables of a module
8439+ if 'variables' in kw:
8440+ env = kw.get('env', self.env)
8441+ uselib = kw.get('uselib_store', kw['package'].upper())
8442+ vars = Utils.to_list(kw['variables'])
8443+ for v in vars:
8444+ val = self.cmd_and_log('%s --variable=%s %s' % (kw['path'], v, kw['package']), kw).strip()
8445+ var = '%s_%s' % (uselib, v)
8446+ env[var] = val
8447+ if not 'okmsg' in kw:
8448+ kw['okmsg'] = 'yes'
8449+ return
8450+
8451+ lst = [kw['path']]
8452+
8453+
8454+ defi = kw.get('define_variable', None)
8455+ if not defi:
8456+ defi = self.env.PKG_CONFIG_DEFINES or {}
8457+ for key, val in defi.iteritems():
8458+ lst.append('--define-variable=%s=%s' % (key, val))
8459+
8460+ lst.append(kw.get('args', ''))
8461+ lst.append(kw['package'])
8462+
8463+ # so we assume the command-line will output flags to be parsed afterwards
8464+ cmd = ' '.join(lst)
8465+ ret = self.cmd_and_log(cmd, kw)
8466+ if not 'okmsg' in kw:
8467+ kw['okmsg'] = 'yes'
8468+
8469+ self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
8470+ parse_flags(ret, kw.get('uselib_store', kw['package'].upper()), kw.get('env', self.env))
8471+ return ret
8472+
8473+@conf
8474+def check_cfg(self, *k, **kw):
8475+ """
8476+ for pkg-config mostly, but also all the -config tools
8477+ conf.check_cfg(path='mpicc', args='--showme:compile --showme:link', package='', uselib_store='OPEN_MPI')
8478+ conf.check_cfg(package='dbus-1', variables='system_bus_default_address session_bus_services_dir')
8479+ """
8480+
8481+ self.validate_cfg(kw)
8482+ if 'msg' in kw:
8483+ self.check_message_1(kw['msg'])
8484+ ret = None
8485+ try:
8486+ ret = self.exec_cfg(kw)
8487+ except Configure.ConfigurationError, e:
8488+ if 'errmsg' in kw:
8489+ self.check_message_2(kw['errmsg'], 'YELLOW')
8490+ if 'mandatory' in kw and kw['mandatory']:
8491+ if Logs.verbose > 1:
8492+ raise
8493+ else:
8494+ self.fatal('the configuration failed (see %r)' % self.log.name)
8495+ else:
8496+ kw['success'] = ret
8497+ if 'okmsg' in kw:
8498+ self.check_message_2(self.ret_msg(kw['okmsg'], kw))
8499+
8500+ return ret
8501+
8502+# the idea is the following: now that we are certain
8503+# that all the code here is only for c or c++, it is
8504+# easy to put all the logic in one function
8505+#
8506+# this should prevent code duplication (ita)
8507+
8508+# env: an optional environment (modified -> provide a copy)
8509+# compiler: cc or cxx - it tries to guess what is best
8510+# type: cprogram, cshlib, cstaticlib
8511+# code: a c code to execute
8512+# uselib_store: where to add the variables
8513+# uselib: parameters to use for building
8514+# define: define to set, like FOO in #define FOO, if not set, add /* #undef FOO */
8515+# execute: True or False - will return the result of the execution
8516+
8517+@conf
8518+def validate_c(self, kw):
8519+ """validate the parameters for the test method"""
8520+
8521+ if not 'env' in kw:
8522+ kw['env'] = self.env.copy()
8523+
8524+ env = kw['env']
8525+ if not 'compiler' in kw:
8526+ kw['compiler'] = 'cc'
8527+ if env['CXX_NAME'] and Task.TaskBase.classes.get('cxx', None):
8528+ kw['compiler'] = 'cxx'
8529+ if not self.env['CXX']:
8530+ self.fatal('a c++ compiler is required')
8531+ else:
8532+ if not self.env['CC']:
8533+ self.fatal('a c compiler is required')
8534+
8535+ if not 'type' in kw:
8536+ kw['type'] = 'cprogram'
8537+
8538+ assert not(kw['type'] != 'cprogram' and kw.get('execute', 0)), 'can only execute programs'
8539+
8540+
8541+ #if kw['type'] != 'program' and kw.get('execute', 0):
8542+ # raise ValueError, 'can only execute programs'
8543+
8544+ def to_header(dct):
8545+ if 'header_name' in dct:
8546+ dct = Utils.to_list(dct['header_name'])
8547+ return ''.join(['#include <%s>\n' % x for x in dct])
8548+ return ''
8549+
8550+ # set the file name
8551+ if not 'compile_mode' in kw:
8552+ kw['compile_mode'] = (kw['compiler'] == 'cxx') and 'cxx' or 'cc'
8553+
8554+ if not 'compile_filename' in kw:
8555+ kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '')
8556+
8557+ #OSX
8558+ if 'framework_name' in kw:
8559+ try: TaskGen.task_gen.create_task_macapp
8560+ except AttributeError: self.fatal('frameworks require the osx tool')
8561+
8562+ fwkname = kw['framework_name']
8563+ if not 'uselib_store' in kw:
8564+ kw['uselib_store'] = fwkname.upper()
8565+
8566+ if not kw.get('no_header', False):
8567+ if not 'header_name' in kw:
8568+ kw['header_name'] = []
8569+ fwk = '%s/%s.h' % (fwkname, fwkname)
8570+ if kw.get('remove_dot_h', None):
8571+ fwk = fwk[:-2]
8572+ kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk]
8573+
8574+ kw['msg'] = 'Checking for framework %s' % fwkname
8575+ kw['framework'] = fwkname
8576+ #kw['frameworkpath'] = set it yourself
8577+
8578+ if 'function_name' in kw:
8579+ fu = kw['function_name']
8580+ if not 'msg' in kw:
8581+ kw['msg'] = 'Checking for function %s' % fu
8582+ kw['code'] = to_header(kw) + SNIP1 % fu
8583+ if not 'uselib_store' in kw:
8584+ kw['uselib_store'] = fu.upper()
8585+ if not 'define_name' in kw:
8586+ kw['define_name'] = self.have_define(fu)
8587+
8588+ elif 'type_name' in kw:
8589+ tu = kw['type_name']
8590+ if not 'msg' in kw:
8591+ kw['msg'] = 'Checking for type %s' % tu
8592+ if not 'header_name' in kw:
8593+ kw['header_name'] = 'stdint.h'
8594+ kw['code'] = to_header(kw) + SNIP2 % {'type_name' : tu}
8595+ if not 'define_name' in kw:
8596+ kw['define_name'] = self.have_define(tu.upper())
8597+
8598+ elif 'header_name' in kw:
8599+ if not 'msg' in kw:
8600+ kw['msg'] = 'Checking for header %s' % kw['header_name']
8601+
8602+ l = Utils.to_list(kw['header_name'])
8603+ assert len(l)>0, 'list of headers in header_name is empty'
8604+
8605+ kw['code'] = to_header(kw) + SNIP3
8606+
8607+ if not 'uselib_store' in kw:
8608+ kw['uselib_store'] = l[0].upper()
8609+
8610+ if not 'define_name' in kw:
8611+ kw['define_name'] = self.have_define(l[0])
8612+
8613+ if 'lib' in kw:
8614+ if not 'msg' in kw:
8615+ kw['msg'] = 'Checking for library %s' % kw['lib']
8616+ if not 'uselib_store' in kw:
8617+ kw['uselib_store'] = kw['lib'].upper()
8618+
8619+ if 'staticlib' in kw:
8620+ if not 'msg' in kw:
8621+ kw['msg'] = 'Checking for static library %s' % kw['staticlib']
8622+ if not 'uselib_store' in kw:
8623+ kw['uselib_store'] = kw['staticlib'].upper()
8624+
8625+ if 'fragment' in kw:
8626+ # an additional code fragment may be provided to replace the predefined code
8627+ # in custom headers
8628+ kw['code'] = kw['fragment']
8629+ if not 'msg' in kw:
8630+ kw['msg'] = 'Checking for custom code'
8631+ if not 'errmsg' in kw:
8632+ kw['errmsg'] = 'no'
8633+
8634+ for (flagsname,flagstype) in [('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')]:
8635+ if flagsname in kw:
8636+ if not 'msg' in kw:
8637+ kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname])
8638+ if not 'errmsg' in kw:
8639+ kw['errmsg'] = 'no'
8640+
8641+ if not 'execute' in kw:
8642+ kw['execute'] = False
8643+
8644+ if not 'errmsg' in kw:
8645+ kw['errmsg'] = 'not found'
8646+
8647+ if not 'okmsg' in kw:
8648+ kw['okmsg'] = 'yes'
8649+
8650+ if not 'code' in kw:
8651+ kw['code'] = SNIP3
8652+
8653+ if not kw.get('success'): kw['success'] = None
8654+
8655+ assert 'msg' in kw, 'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c'
8656+
8657+@conf
8658+def post_check(self, *k, **kw):
8659+ "set the variables after a test was run successfully"
8660+
8661+ is_success = False
8662+ if kw['execute']:
8663+ if kw['success'] is not None:
8664+ is_success = True
8665+ else:
8666+ is_success = (kw['success'] == 0)
8667+
8668+ if 'define_name' in kw:
8669+ if 'header_name' in kw or 'function_name' in kw or 'type_name' in kw or 'fragment' in kw:
8670+ if kw['execute']:
8671+ key = kw['success']
8672+ if isinstance(key, str):
8673+ if key:
8674+ self.define(kw['define_name'], key, quote=kw.get('quote', 1))
8675+ else:
8676+ self.define_cond(kw['define_name'], True)
8677+ else:
8678+ self.define_cond(kw['define_name'], False)
8679+ else:
8680+ self.define_cond(kw['define_name'], is_success)
8681+
8682+ if is_success and 'uselib_store' in kw:
8683+ import cc, cxx
8684+ for k in set(cc.g_cc_flag_vars).union(cxx.g_cxx_flag_vars):
8685+ lk = k.lower()
8686+ # inconsistency: includes -> CPPPATH
8687+ if k == 'CPPPATH': lk = 'includes'
8688+ if k == 'CXXDEFINES': lk = 'defines'
8689+ if k == 'CCDEFINES': lk = 'defines'
8690+ if lk in kw:
8691+ val = kw[lk]
8692+ # remove trailing slash
8693+ if isinstance(val, str):
8694+ val = val.rstrip(os.path.sep)
8695+ self.env.append_unique(k + '_' + kw['uselib_store'], val)
8696+
8697+@conf
8698+def check(self, *k, **kw):
8699+ # so this will be the generic function
8700+ # it will be safer to use check_cxx or check_cc
8701+ self.validate_c(kw)
8702+ self.check_message_1(kw['msg'])
8703+ ret = None
8704+ try:
8705+ ret = self.run_c_code(*k, **kw)
8706+ except Configure.ConfigurationError, e:
8707+ self.check_message_2(kw['errmsg'], 'YELLOW')
8708+ if 'mandatory' in kw and kw['mandatory']:
8709+ if Logs.verbose > 1:
8710+ raise
8711+ else:
8712+ self.fatal('the configuration failed (see %r)' % self.log.name)
8713+ else:
8714+ kw['success'] = ret
8715+ self.check_message_2(self.ret_msg(kw['okmsg'], kw))
8716+
8717+ self.post_check(*k, **kw)
8718+ if not kw.get('execute', False):
8719+ return ret == 0
8720+ return ret
8721+
8722+@conf
8723+def run_c_code(self, *k, **kw):
8724+ test_f_name = kw['compile_filename']
8725+
8726+ k = 0
8727+ while k < 10000:
8728+ # make certain to use a fresh folder - necessary for win32
8729+ dir = os.path.join(self.blddir, '.conf_check_%d' % k)
8730+
8731+ # if the folder already exists, remove it
8732+ try:
8733+ shutil.rmtree(dir)
8734+ except OSError:
8735+ pass
8736+
8737+ try:
8738+ os.stat(dir)
8739+ except OSError:
8740+ break
8741+
8742+ k += 1
8743+
8744+ try:
8745+ os.makedirs(dir)
8746+ except:
8747+ self.fatal('cannot create a configuration test folder %r' % dir)
8748+
8749+ try:
8750+ os.stat(dir)
8751+ except:
8752+ self.fatal('cannot use the configuration test folder %r' % dir)
8753+
8754+ bdir = os.path.join(dir, 'testbuild')
8755+
8756+ if not os.path.exists(bdir):
8757+ os.makedirs(bdir)
8758+
8759+ env = kw['env']
8760+
8761+ dest = open(os.path.join(dir, test_f_name), 'w')
8762+ dest.write(kw['code'])
8763+ dest.close()
8764+
8765+ back = os.path.abspath('.')
8766+
8767+ bld = Build.BuildContext()
8768+ bld.log = self.log
8769+ bld.all_envs.update(self.all_envs)
8770+ bld.all_envs['default'] = env
8771+ bld.lst_variants = bld.all_envs.keys()
8772+ bld.load_dirs(dir, bdir)
8773+
8774+ os.chdir(dir)
8775+
8776+ bld.rescan(bld.srcnode)
8777+
8778+ if not 'features' in kw:
8779+ # conf.check(features='cc cprogram pyext', ...)
8780+ kw['features'] = [kw['compile_mode'], kw['type']] # "cprogram cc"
8781+
8782+ o = bld(features=kw['features'], source=test_f_name, target='testprog')
8783+
8784+ for k, v in kw.iteritems():
8785+ setattr(o, k, v)
8786+
8787+ self.log.write("==>\n%s\n<==\n" % kw['code'])
8788+
8789+ # compile the program
8790+ try:
8791+ bld.compile()
8792+ except Utils.WafError:
8793+ ret = Utils.ex_stack()
8794+ else:
8795+ ret = 0
8796+
8797+ # chdir before returning
8798+ os.chdir(back)
8799+
8800+ if ret:
8801+ self.log.write('command returned %r' % ret)
8802+ self.fatal(str(ret))
8803+
8804+ # if we need to run the program, try to get its result
8805+ # keep the name of the program to execute
8806+ if kw['execute']:
8807+ lastprog = o.link_task.outputs[0].abspath(env)
8808+
8809+ args = Utils.to_list(kw.get('exec_args', []))
8810+ proc = Utils.pproc.Popen([lastprog] + args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
8811+ (out, err) = proc.communicate()
8812+ w = self.log.write
8813+ w(str(out))
8814+ w('\n')
8815+ w(str(err))
8816+ w('\n')
8817+ w('returncode %r' % proc.returncode)
8818+ w('\n')
8819+ if proc.returncode:
8820+ self.fatal(Utils.ex_stack())
8821+ ret = out
8822+
8823+ return ret
8824+
8825+@conf
8826+def check_cxx(self, *k, **kw):
8827+ kw['compiler'] = 'cxx'
8828+ return self.check(*k, **kw)
8829+
8830+@conf
8831+def check_cc(self, *k, **kw):
8832+ kw['compiler'] = 'cc'
8833+ return self.check(*k, **kw)
8834+
8835+@conf
8836+def define(self, define, value, quote=1):
8837+ """store a single define and its state into an internal list for later
8838+ writing to a config header file. Value can only be
8839+ a string or int; other types not supported. String
8840+ values will appear properly quoted in the generated
8841+ header file."""
8842+ assert define and isinstance(define, str)
8843+
8844+ # ordered_dict is for writing the configuration header in order
8845+ tbl = self.env[DEFINES] or Utils.ordered_dict()
8846+
8847+ # the user forgot to tell if the value is quoted or not
8848+ if isinstance(value, str):
8849+ if quote:
8850+ tbl[define] = '"%s"' % repr('"'+value)[2:-1].replace('"', '\\"')
8851+ else:
8852+ tbl[define] = value
8853+ elif isinstance(value, int):
8854+ tbl[define] = value
8855+ else:
8856+ raise TypeError('define %r -> %r must be a string or an int' % (define, value))
8857+
8858+ # add later to make reconfiguring faster
8859+ self.env[DEFINES] = tbl
8860+ self.env[define] = value # <- not certain this is necessary
8861+
8862+@conf
8863+def undefine(self, define):
8864+ """store a single define and its state into an internal list
8865+ for later writing to a config header file"""
8866+ assert define and isinstance(define, str)
8867+
8868+ tbl = self.env[DEFINES] or Utils.ordered_dict()
8869+
8870+ value = UNDEFINED
8871+ tbl[define] = value
8872+
8873+ # add later to make reconfiguring faster
8874+ self.env[DEFINES] = tbl
8875+ self.env[define] = value
8876+
8877+@conf
8878+def define_cond(self, name, value):
8879+ """Conditionally define a name.
8880+ Formally equivalent to: if value: define(name, 1) else: undefine(name)"""
8881+ if value:
8882+ self.define(name, 1)
8883+ else:
8884+ self.undefine(name)
8885+
8886+@conf
8887+def is_defined(self, key):
8888+ defines = self.env[DEFINES]
8889+ if not defines:
8890+ return False
8891+ try:
8892+ value = defines[key]
8893+ except KeyError:
8894+ return False
8895+ else:
8896+ return value != UNDEFINED
8897+
8898+@conf
8899+def get_define(self, define):
8900+ "get the value of a previously stored define"
8901+ try: return self.env[DEFINES][define]
8902+ except KeyError: return None
8903+
8904+@conf
8905+def have_define(self, name):
8906+ "prefix the define with 'HAVE_' and make sure it has valid characters."
8907+ return self.__dict__.get('HAVE_PAT', 'HAVE_%s') % Utils.quote_define_name(name)
8908+
8909+@conf
8910+def write_config_header(self, configfile='', env='', guard='', top=False):
8911+ "save the defines into a file"
8912+ if not configfile: configfile = WAF_CONFIG_H
8913+ waf_guard = guard or '_%s_WAF' % Utils.quote_define_name(configfile)
8914+
8915+ # configfile -> absolute path
8916+ # there is a good reason to concatenate first and to split afterwards
8917+ if not env: env = self.env
8918+ if top:
8919+ diff = ''
8920+ else:
8921+ diff = Utils.diff_path(self.srcdir, self.curdir)
8922+ full = os.sep.join([self.blddir, env.variant(), diff, configfile])
8923+ full = os.path.normpath(full)
8924+ (dir, base) = os.path.split(full)
8925+
8926+ try: os.makedirs(dir)
8927+ except: pass
8928+
8929+ dest = open(full, 'w')
8930+ dest.write('/* Configuration header created by Waf - do not edit */\n')
8931+ dest.write('#ifndef %s\n#define %s\n\n' % (waf_guard, waf_guard))
8932+
8933+ dest.write(self.get_config_header())
8934+
8935+ # config files are not removed on "waf clean"
8936+ env.append_unique(CFG_FILES, os.path.join(diff, configfile))
8937+
8938+ dest.write('\n#endif /* %s */\n' % waf_guard)
8939+ dest.close()
8940+
8941+@conf
8942+def get_config_header(self):
8943+ """Fill-in the contents of the config header. Override when you need to write your own config header."""
8944+ config_header = []
8945+
8946+ tbl = self.env[DEFINES] or Utils.ordered_dict()
8947+ for key in tbl.allkeys:
8948+ value = tbl[key]
8949+ if value is None:
8950+ config_header.append('#define %s' % key)
8951+ elif value is UNDEFINED:
8952+ config_header.append('/* #undef %s */' % key)
8953+ else:
8954+ config_header.append('#define %s %s' % (key, value))
8955+ return "\n".join(config_header)
8956+
8957+@conftest
8958+def find_cpp(conf):
8959+ v = conf.env
8960+ cpp = []
8961+ if v['CPP']: cpp = v['CPP']
8962+ elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
8963+ if not cpp: cpp = conf.find_program('cpp', var='CPP')
8964+ #if not cpp: cpp = v['CC']
8965+ #if not cpp: cpp = v['CXX']
8966+ v['CPP'] = cpp
8967+
8968+@conftest
8969+def cc_add_flags(conf):
8970+ conf.add_os_flags('CFLAGS', 'CCFLAGS')
8971+ conf.add_os_flags('CPPFLAGS')
8972+
8973+@conftest
8974+def cxx_add_flags(conf):
8975+ conf.add_os_flags('CXXFLAGS')
8976+ conf.add_os_flags('CPPFLAGS')
8977+
8978+@conftest
8979+def link_add_flags(conf):
8980+ conf.add_os_flags('LINKFLAGS')
8981+ conf.add_os_flags('LDFLAGS', 'LINKFLAGS')
8982+
8983+@conftest
8984+def cc_load_tools(conf):
8985+ conf.check_tool('cc')
8986+
8987+@conftest
8988+def cxx_load_tools(conf):
8989+ conf.check_tool('cxx')
8990+
8991diff --git a/buildtools/wafadmin/Tools/cs.py b/buildtools/wafadmin/Tools/cs.py
8992new file mode 100644
8993index 0000000..4354485
8994--- /dev/null
8995+++ b/buildtools/wafadmin/Tools/cs.py
8996@@ -0,0 +1,68 @@
8997+#!/usr/bin/env python
8998+# encoding: utf-8
8999+# Thomas Nagy, 2006 (ita)
9000+
9001+"C# support"
9002+
9003+import TaskGen, Utils, Task, Options
9004+from Logs import error
9005+from TaskGen import before, after, taskgen, feature
9006+
9007+flag_vars= ['FLAGS', 'ASSEMBLIES']
9008+
9009+@feature('cs')
9010+def init_cs(self):
9011+ Utils.def_attrs(self,
9012+ flags = '',
9013+ assemblies = '',
9014+ resources = '',
9015+ uselib = '')
9016+
9017+@feature('cs')
9018+@after('init_cs')
9019+def apply_uselib_cs(self):
9020+ if not self.uselib:
9021+ return
9022+ global flag_vars
9023+ for var in self.to_list(self.uselib):
 9024+		for v in flag_vars:
9025+ val = self.env[v+'_'+var]
9026+ if val: self.env.append_value(v, val)
9027+
9028+@feature('cs')
9029+@after('apply_uselib_cs')
9030+@before('apply_core')
9031+def apply_cs(self):
9032+ try: self.meths.remove('apply_core')
9033+ except ValueError: pass
9034+
9035+ # process the flags for the assemblies
9036+ for i in self.to_list(self.assemblies) + self.env['ASSEMBLIES']:
9037+ self.env.append_unique('_ASSEMBLIES', '/r:'+i)
9038+
9039+ # process the flags for the resources
9040+ for i in self.to_list(self.resources):
9041+ self.env.append_unique('_RESOURCES', '/resource:'+i)
9042+
9043+ # what kind of assembly are we generating?
9044+ self.env['_TYPE'] = getattr(self, 'type', 'exe')
9045+
9046+ # additional flags
9047+ self.env.append_unique('_FLAGS', self.to_list(self.flags))
9048+ self.env.append_unique('_FLAGS', self.env.FLAGS)
9049+
9050+ # process the sources
9051+ nodes = [self.path.find_resource(i) for i in self.to_list(self.source)]
9052+ self.create_task('mcs', nodes, self.path.find_or_declare(self.target))
9053+
9054+Task.simple_task_type('mcs', '${MCS} ${SRC} /target:${_TYPE} /out:${TGT} ${_FLAGS} ${_ASSEMBLIES} ${_RESOURCES}', color='YELLOW')
9055+
9056+def detect(conf):
9057+ csc = getattr(Options.options, 'cscbinary', None)
9058+ if csc:
9059+ conf.env.MCS = csc
9060+ conf.find_program(['gmcs', 'mcs'], var='MCS')
9061+
9062+def set_options(opt):
9063+ opt.add_option('--with-csc-binary', type='string', dest='cscbinary')
9064+
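
A sketch of how the 'cs' feature above might be used from a wscript; the source files, target name, assembly and flag values are illustrative, and new_task_gen is the usual waf 1.5 way of instantiating a task generator.

	def configure(conf):
		conf.check_tool('cs')

	def build(bld):
		bld.new_task_gen(features='cs',
			source='hello.cs util.cs',
			target='hello.exe',
			type='exe',               # passed to /target: (could also be 'library')
			assemblies='System.Xml',  # becomes /r:System.Xml
			flags='/unsafe')          # appended to _FLAGS
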
9065diff --git a/buildtools/wafadmin/Tools/cxx.py b/buildtools/wafadmin/Tools/cxx.py
9066new file mode 100644
9067index 0000000..719b821
9068--- /dev/null
9069+++ b/buildtools/wafadmin/Tools/cxx.py
9070@@ -0,0 +1,104 @@
9071+#!/usr/bin/env python
9072+# encoding: utf-8
9073+# Thomas Nagy, 2005 (ita)
9074+
9075+"Base for c++ programs and libraries"
9076+
9077+import TaskGen, Task, Utils
9078+from Logs import debug
9079+import ccroot # <- do not remove
9080+from TaskGen import feature, before, extension, after
9081+
9082+g_cxx_flag_vars = [
9083+'CXXDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
9084+'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
9085+'CXXFLAGS', 'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CXXDEFINES']
9086+"main cpp variables"
9087+
9088+EXT_CXX = ['.cpp', '.cc', '.cxx', '.C', '.c++']
9089+
9090+g_cxx_type_vars=['CXXFLAGS', 'LINKFLAGS']
9091+
9092+# TODO remove in waf 1.6
9093+class cxx_taskgen(ccroot.ccroot_abstract):
9094+ pass
9095+
9096+@feature('cxx')
9097+@before('apply_type_vars')
9098+@after('default_cc')
9099+def init_cxx(self):
9100+ if not 'cc' in self.features:
9101+ self.mappings['.c'] = TaskGen.task_gen.mappings['.cxx']
9102+
9103+ self.p_flag_vars = set(self.p_flag_vars).union(g_cxx_flag_vars)
9104+ self.p_type_vars = set(self.p_type_vars).union(g_cxx_type_vars)
9105+
9106+ if not self.env['CXX_NAME']:
9107+ raise Utils.WafError("At least one compiler (g++, ..) must be selected")
9108+
9109+@feature('cxx')
9110+@after('apply_incpaths')
9111+def apply_obj_vars_cxx(self):
9112+ """after apply_incpaths for INC_PATHS"""
9113+ env = self.env
9114+ app = env.append_unique
9115+ cxxpath_st = env['CPPPATH_ST']
9116+
9117+ # local flags come first
9118+ # set the user-defined includes paths
9119+ for i in env['INC_PATHS']:
9120+ app('_CXXINCFLAGS', cxxpath_st % i.bldpath(env))
9121+ app('_CXXINCFLAGS', cxxpath_st % i.srcpath(env))
9122+
9123+ # set the library include paths
9124+ for i in env['CPPPATH']:
9125+ app('_CXXINCFLAGS', cxxpath_st % i)
9126+
9127+@feature('cxx')
9128+@after('apply_lib_vars')
9129+def apply_defines_cxx(self):
9130+ """after uselib is set for CXXDEFINES"""
9131+ self.defines = getattr(self, 'defines', [])
9132+ lst = self.to_list(self.defines) + self.to_list(self.env['CXXDEFINES'])
9133+ milst = []
9134+
9135+ # now process the local defines
9136+ for defi in lst:
9137+ if not defi in milst:
9138+ milst.append(defi)
9139+
9140+ # CXXDEFINES_USELIB
9141+ libs = self.to_list(self.uselib)
9142+ for l in libs:
9143+ val = self.env['CXXDEFINES_'+l]
9144+ if val: milst += self.to_list(val)
9145+
9146+ self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
9147+ y = self.env['CXXDEFINES_ST']
9148+ self.env.append_unique('_CXXDEFFLAGS', [y%x for x in milst])
9149+
9150+@extension(EXT_CXX)
9151+def cxx_hook(self, node):
9152+ # create the compilation task: cpp or cc
9153+ if getattr(self, 'obj_ext', None):
9154+ obj_ext = self.obj_ext
9155+ else:
9156+ obj_ext = '_%d.o' % self.idx
9157+
9158+ task = self.create_task('cxx', node, node.change_ext(obj_ext))
9159+ try:
9160+ self.compiled_tasks.append(task)
9161+ except AttributeError:
9162+ raise Utils.WafError('Have you forgotten to set the feature "cxx" on %s?' % str(self))
9163+ return task
9164+
9165+cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}'
9166+cls = Task.simple_task_type('cxx', cxx_str, color='GREEN', ext_out='.o', ext_in='.cxx', shell=False)
9167+cls.scan = ccroot.scan
9168+cls.vars.append('CXXDEPS')
9169+
9170+link_str = '${LINK_CXX} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
9171+cls = Task.simple_task_type('cxx_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
9172+cls.maxjobs = 1
9173+cls.install = Utils.nada
9174+
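
A sketch of a wscript building a C++ program with the task types declared above; the 'cprogram' feature comes from ccroot (not part of this hunk) and the file names are illustrative.

	def configure(conf):
		conf.check_tool('gxx')   # loads this 'cxx' tool through cxx_load_tools()

	def build(bld):
		bld.new_task_gen(features='cxx cprogram',
			source='main.cpp widget.cpp',
			target='app')
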
9175diff --git a/buildtools/wafadmin/Tools/d.py b/buildtools/wafadmin/Tools/d.py
9176new file mode 100644
9177index 0000000..1a22821
9178--- /dev/null
9179+++ b/buildtools/wafadmin/Tools/d.py
9180@@ -0,0 +1,535 @@
9181+#!/usr/bin/env python
9182+# encoding: utf-8
9183+# Carlos Rafael Giani, 2007 (dv)
9184+# Thomas Nagy, 2007-2008 (ita)
9185+
9186+import os, sys, re, optparse
9187+import ccroot # <- leave this
9188+import TaskGen, Utils, Task, Configure, Logs, Build
9189+from Logs import debug, error
9190+from TaskGen import taskgen, feature, after, before, extension
9191+from Configure import conftest
9192+
9193+EXT_D = ['.d', '.di', '.D']
9194+D_METHS = ['apply_core', 'apply_vnum', 'apply_objdeps'] # additional d methods
9195+
9196+DLIB = """
9197+version(D_Version2) {
9198+ import std.stdio;
9199+ int main() {
9200+ writefln("phobos2");
9201+ return 0;
9202+ }
9203+} else {
9204+ version(Tango) {
9205+ import tango.stdc.stdio;
9206+ int main() {
9207+ printf("tango");
9208+ return 0;
9209+ }
9210+ } else {
9211+ import std.stdio;
9212+ int main() {
9213+ writefln("phobos1");
9214+ return 0;
9215+ }
9216+ }
9217+}
9218+"""
9219+
9220+def filter_comments(filename):
9221+ txt = Utils.readf(filename)
9222+ i = 0
9223+ buf = []
9224+ max = len(txt)
9225+ begin = 0
9226+ while i < max:
9227+ c = txt[i]
9228+ if c == '"' or c == "'": # skip a string or character literal
9229+ buf.append(txt[begin:i])
9230+ delim = c
9231+ i += 1
9232+ while i < max:
9233+ c = txt[i]
9234+ if c == delim: break
9235+ elif c == '\\': # skip the character following backslash
9236+ i += 1
9237+ i += 1
9238+ i += 1
9239+ begin = i
9240+ elif c == '/': # try to replace a comment with whitespace
9241+ buf.append(txt[begin:i])
9242+ i += 1
9243+ if i == max: break
9244+ c = txt[i]
9245+ if c == '+': # eat nesting /+ +/ comment
9246+ i += 1
9247+ nesting = 1
9248+ c = None
9249+ while i < max:
9250+ prev = c
9251+ c = txt[i]
9252+ if prev == '/' and c == '+':
9253+ nesting += 1
9254+ c = None
9255+ elif prev == '+' and c == '/':
9256+ nesting -= 1
9257+ if nesting == 0: break
9258+ c = None
9259+ i += 1
9260+ elif c == '*': # eat /* */ comment
9261+ i += 1
9262+ c = None
9263+ while i < max:
9264+ prev = c
9265+ c = txt[i]
9266+ if prev == '*' and c == '/': break
9267+ i += 1
9268+ elif c == '/': # eat // comment
9269+ i += 1
9270+ while i < max and txt[i] != '\n':
9271+ i += 1
9272+ else: # no comment
9273+ begin = i - 1
9274+ continue
9275+ i += 1
9276+ begin = i
9277+ buf.append(' ')
9278+ else:
9279+ i += 1
9280+ buf.append(txt[begin:])
9281+ return buf
9282+
9283+class d_parser(object):
9284+ def __init__(self, env, incpaths):
9285+ #self.code = ''
9286+ #self.module = ''
9287+ #self.imports = []
9288+
9289+ self.allnames = []
9290+
9291+ self.re_module = re.compile("module\s+([^;]+)")
9292+ self.re_import = re.compile("import\s+([^;]+)")
9293+ self.re_import_bindings = re.compile("([^:]+):(.*)")
9294+ self.re_import_alias = re.compile("[^=]+=(.+)")
9295+
9296+ self.env = env
9297+
9298+ self.nodes = []
9299+ self.names = []
9300+
9301+ self.incpaths = incpaths
9302+
9303+ def tryfind(self, filename):
9304+ found = 0
9305+ for n in self.incpaths:
9306+ found = n.find_resource(filename.replace('.', '/') + '.d')
9307+ if found:
9308+ self.nodes.append(found)
9309+ self.waiting.append(found)
9310+ break
9311+ if not found:
9312+ if not filename in self.names:
9313+ self.names.append(filename)
9314+
9315+ def get_strings(self, code):
9316+ #self.imports = []
9317+ self.module = ''
9318+ lst = []
9319+
9320+ # get the module name (if present)
9321+
9322+ mod_name = self.re_module.search(code)
9323+ if mod_name:
9324+ self.module = re.sub('\s+', '', mod_name.group(1)) # strip all whitespaces
9325+
9326+ # go through the code, have a look at all import occurrences
9327+
 9328+	# first, let's look at anything beginning with "import" and ending with ";"
9329+ import_iterator = self.re_import.finditer(code)
9330+ if import_iterator:
9331+ for import_match in import_iterator:
9332+ import_match_str = re.sub('\s+', '', import_match.group(1)) # strip all whitespaces
9333+
9334+ # does this end with an import bindings declaration?
9335+ # (import bindings always terminate the list of imports)
9336+ bindings_match = self.re_import_bindings.match(import_match_str)
9337+ if bindings_match:
9338+ import_match_str = bindings_match.group(1)
9339+ # if so, extract the part before the ":" (since the module declaration(s) is/are located there)
9340+
9341+ # split the matching string into a bunch of strings, separated by a comma
9342+ matches = import_match_str.split(',')
9343+
9344+ for match in matches:
9345+ alias_match = self.re_import_alias.match(match)
9346+ if alias_match:
9347+ # is this an alias declaration? (alias = module name) if so, extract the module name
9348+ match = alias_match.group(1)
9349+
9350+ lst.append(match)
9351+ return lst
9352+
9353+ def start(self, node):
9354+ self.waiting = [node]
9355+ # while the stack is not empty, add the dependencies
9356+ while self.waiting:
9357+ nd = self.waiting.pop(0)
9358+ self.iter(nd)
9359+
9360+ def iter(self, node):
9361+ path = node.abspath(self.env) # obtain the absolute path
9362+ code = "".join(filter_comments(path)) # read the file and filter the comments
9363+ names = self.get_strings(code) # obtain the import strings
9364+ for x in names:
9365+ # optimization
9366+ if x in self.allnames: continue
9367+ self.allnames.append(x)
9368+
9369+ # for each name, see if it is like a node or not
9370+ self.tryfind(x)
9371+
9372+def scan(self):
 9373+	"look for the .d/.di files the .d source needs"
9374+ env = self.env
9375+ gruik = d_parser(env, env['INC_PATHS'])
9376+ gruik.start(self.inputs[0])
9377+
9378+ if Logs.verbose:
9379+ debug('deps: nodes found for %s: %s %s' % (str(self.inputs[0]), str(gruik.nodes), str(gruik.names)))
9380+ #debug("deps found for %s: %s" % (str(node), str(gruik.deps)), 'deps')
9381+ return (gruik.nodes, gruik.names)
9382+
9383+def get_target_name(self):
9384+ "for d programs and libs"
9385+ v = self.env
9386+ tp = 'program'
9387+ for x in self.features:
9388+ if x in ['dshlib', 'dstaticlib']:
9389+ tp = x.lstrip('d')
9390+ return v['D_%s_PATTERN' % tp] % self.target
9391+
9392+d_params = {
9393+'dflags': '',
9394+'importpaths':'',
9395+'libs':'',
9396+'libpaths':'',
9397+'generate_headers':False,
9398+}
9399+
9400+@feature('d')
9401+@before('apply_type_vars')
9402+def init_d(self):
9403+ for x in d_params:
9404+ setattr(self, x, getattr(self, x, d_params[x]))
9405+
9406+class d_taskgen(TaskGen.task_gen):
9407+ def __init__(self, *k, **kw):
9408+ TaskGen.task_gen.__init__(self, *k, **kw)
9409+
9410+ # COMPAT
9411+ if len(k) > 1:
9412+ self.features.append('d' + k[1])
9413+
9414+# okay, we borrow a few methods from ccroot
9415+TaskGen.bind_feature('d', D_METHS)
9416+
9417+@feature('d')
9418+@before('apply_d_libs')
9419+def init_d(self):
9420+ Utils.def_attrs(self,
9421+ dflags='',
9422+ importpaths='',
9423+ libs='',
9424+ libpaths='',
9425+ uselib='',
9426+ uselib_local='',
9427+ generate_headers=False, # set to true if you want .di files as well as .o
9428+ compiled_tasks=[],
9429+ add_objects=[],
9430+ link_task=None)
9431+
9432+@feature('d')
9433+@after('apply_d_link', 'init_d')
9434+@before('apply_vnum', 'apply_d_vars')
9435+def apply_d_libs(self):
9436+ """after apply_link because of 'link_task'
9437+ after default_cc because of the attribute 'uselib'"""
9438+ env = self.env
9439+
9440+ # 1. the case of the libs defined in the project (visit ancestors first)
9441+ # the ancestors external libraries (uselib) will be prepended
9442+ self.uselib = self.to_list(self.uselib)
9443+ names = self.to_list(self.uselib_local)
9444+
9445+ seen = set([])
9446+ tmp = Utils.deque(names) # consume a copy of the list of names
9447+ while tmp:
9448+ lib_name = tmp.popleft()
9449+ # visit dependencies only once
9450+ if lib_name in seen:
9451+ continue
9452+
9453+ y = self.name_to_obj(lib_name)
9454+ if not y:
9455+ raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
9456+ y.post()
9457+ seen.add(lib_name)
9458+
9459+ # object has ancestors to process (shared libraries): add them to the end of the list
9460+ if getattr(y, 'uselib_local', None):
9461+ lst = y.to_list(y.uselib_local)
9462+ if 'dshlib' in y.features or 'dprogram' in y.features:
9463+ lst = [x for x in lst if not 'dstaticlib' in self.name_to_obj(x).features]
9464+ tmp.extend(lst)
9465+
9466+ # link task and flags
9467+ if getattr(y, 'link_task', None):
9468+
9469+ link_name = y.target[y.target.rfind(os.sep) + 1:]
9470+ if 'dstaticlib' in y.features or 'dshlib' in y.features:
9471+ env.append_unique('DLINKFLAGS', env.DLIB_ST % link_name)
9472+ env.append_unique('DLINKFLAGS', env.DLIBPATH_ST % y.link_task.outputs[0].parent.bldpath(env))
9473+
9474+ # the order
9475+ self.link_task.set_run_after(y.link_task)
9476+
9477+ # for the recompilation
9478+ dep_nodes = getattr(self.link_task, 'dep_nodes', [])
9479+ self.link_task.dep_nodes = dep_nodes + y.link_task.outputs
9480+
9481+ # add ancestors uselib too - but only propagate those that have no staticlib
9482+ for v in self.to_list(y.uselib):
9483+ if not v in self.uselib:
9484+ self.uselib.insert(0, v)
9485+
9486+ # if the library task generator provides 'export_incdirs', add to the include path
9487+ # the export_incdirs must be a list of paths relative to the other library
9488+ if getattr(y, 'export_incdirs', None):
9489+ for x in self.to_list(y.export_incdirs):
9490+ node = y.path.find_dir(x)
9491+ if not node:
9492+ raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x))
9493+ self.env.append_unique('INC_PATHS', node)
9494+
9495+@feature('dprogram', 'dshlib', 'dstaticlib')
9496+@after('apply_core')
9497+def apply_d_link(self):
9498+ link = getattr(self, 'link', None)
9499+ if not link:
9500+ if 'dstaticlib' in self.features: link = 'static_link'
9501+ else: link = 'd_link'
9502+
9503+ outputs = [t.outputs[0] for t in self.compiled_tasks]
9504+ self.link_task = self.create_task(link, outputs, self.path.find_or_declare(get_target_name(self)))
9505+
9506+@feature('d')
9507+@after('apply_core')
9508+def apply_d_vars(self):
9509+ env = self.env
9510+ dpath_st = env['DPATH_ST']
9511+ lib_st = env['DLIB_ST']
9512+ libpath_st = env['DLIBPATH_ST']
9513+
9514+ importpaths = self.to_list(self.importpaths)
9515+ libpaths = []
9516+ libs = []
9517+ uselib = self.to_list(self.uselib)
9518+
9519+ for i in uselib:
9520+ if env['DFLAGS_' + i]:
9521+ env.append_unique('DFLAGS', env['DFLAGS_' + i])
9522+
9523+ for x in self.features:
9524+ if not x in ['dprogram', 'dstaticlib', 'dshlib']:
9525+ continue
 9526+		x = x.lstrip('d')
9527+ d_shlib_dflags = env['D_' + x + '_DFLAGS']
9528+ if d_shlib_dflags:
9529+ env.append_unique('DFLAGS', d_shlib_dflags)
9530+
9531+ # add import paths
9532+ for i in uselib:
9533+ if env['DPATH_' + i]:
9534+ for entry in self.to_list(env['DPATH_' + i]):
9535+ if not entry in importpaths:
9536+ importpaths.append(entry)
9537+
9538+ # now process the import paths
9539+ for path in importpaths:
9540+ if os.path.isabs(path):
9541+ env.append_unique('_DIMPORTFLAGS', dpath_st % path)
9542+ else:
9543+ node = self.path.find_dir(path)
9544+ self.env.append_unique('INC_PATHS', node)
9545+ env.append_unique('_DIMPORTFLAGS', dpath_st % node.srcpath(env))
9546+ env.append_unique('_DIMPORTFLAGS', dpath_st % node.bldpath(env))
9547+
9548+ # add library paths
9549+ for i in uselib:
9550+ if env['LIBPATH_' + i]:
9551+ for entry in self.to_list(env['LIBPATH_' + i]):
9552+ if not entry in libpaths:
9553+ libpaths.append(entry)
9554+ libpaths = self.to_list(self.libpaths) + libpaths
9555+
9556+ # now process the library paths
9557+ # apply same path manipulation as used with import paths
9558+ for path in libpaths:
9559+ if not os.path.isabs(path):
9560+ node = self.path.find_resource(path)
9561+ if not node:
9562+ raise Utils.WafError('could not find libpath %r from %r' % (path, self))
9563+ path = node.abspath(self.env)
9564+
9565+ env.append_unique('DLINKFLAGS', libpath_st % path)
9566+
9567+ # add libraries
9568+ for i in uselib:
9569+ if env['LIB_' + i]:
9570+ for entry in self.to_list(env['LIB_' + i]):
9571+ if not entry in libs:
9572+ libs.append(entry)
9573+ libs.extend(self.to_list(self.libs))
9574+
9575+ # process user flags
9576+ for flag in self.to_list(self.dflags):
9577+ env.append_unique('DFLAGS', flag)
9578+
9579+ # now process the libraries
9580+ for lib in libs:
9581+ env.append_unique('DLINKFLAGS', lib_st % lib)
9582+
9583+ # add linker flags
9584+ for i in uselib:
9585+ dlinkflags = env['DLINKFLAGS_' + i]
9586+ if dlinkflags:
9587+ for linkflag in dlinkflags:
9588+ env.append_unique('DLINKFLAGS', linkflag)
9589+
9590+@feature('dshlib')
9591+@after('apply_d_vars')
9592+def add_shlib_d_flags(self):
9593+ for linkflag in self.env['D_shlib_LINKFLAGS']:
9594+ self.env.append_unique('DLINKFLAGS', linkflag)
9595+
9596+@extension(EXT_D)
9597+def d_hook(self, node):
9598+ # create the compilation task: cpp or cc
9599+ task = self.create_task(self.generate_headers and 'd_with_header' or 'd')
9600+ try: obj_ext = self.obj_ext
9601+ except AttributeError: obj_ext = '_%d.o' % self.idx
9602+
9603+ task.inputs = [node]
9604+ task.outputs = [node.change_ext(obj_ext)]
9605+ self.compiled_tasks.append(task)
9606+
9607+ if self.generate_headers:
9608+ header_node = node.change_ext(self.env['DHEADER_ext'])
9609+ task.outputs += [header_node]
9610+
9611+d_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} ${D_SRC_F}${SRC} ${D_TGT_F}${TGT}'
9612+d_with_header_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} \
9613+${D_HDR_F}${TGT[1].bldpath(env)} \
9614+${D_SRC_F}${SRC} \
9615+${D_TGT_F}${TGT[0].bldpath(env)}'
9616+link_str = '${D_LINKER} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F}${TGT} ${DLINKFLAGS}'
9617+
9618+def override_exec(cls):
9619+ """stupid dmd wants -of stuck to the file name"""
9620+ old_exec = cls.exec_command
9621+ def exec_command(self, *k, **kw):
9622+ if isinstance(k[0], list):
9623+ lst = k[0]
9624+ for i in xrange(len(lst)):
9625+ if lst[i] == '-of':
9626+ del lst[i]
9627+ lst[i] = '-of' + lst[i]
9628+ break
9629+ return old_exec(self, *k, **kw)
9630+ cls.exec_command = exec_command
9631+
9632+cls = Task.simple_task_type('d', d_str, 'GREEN', before='static_link d_link', shell=False)
9633+cls.scan = scan
9634+override_exec(cls)
9635+
9636+cls = Task.simple_task_type('d_with_header', d_with_header_str, 'GREEN', before='static_link d_link', shell=False)
9637+override_exec(cls)
9638+
9639+cls = Task.simple_task_type('d_link', link_str, color='YELLOW', shell=False)
9640+override_exec(cls)
9641+
9642+# for feature request #104
9643+@taskgen
9644+def generate_header(self, filename, install_path):
9645+ if not hasattr(self, 'header_lst'): self.header_lst = []
9646+ self.meths.append('process_header')
9647+ self.header_lst.append([filename, install_path])
9648+
9649+@before('apply_core')
9650+def process_header(self):
9651+ env = self.env
9652+ for i in getattr(self, 'header_lst', []):
9653+ node = self.path.find_resource(i[0])
9654+
9655+ if not node:
9656+ raise Utils.WafError('file not found on d obj '+i[0])
9657+
9658+ task = self.create_task('d_header')
9659+ task.set_inputs(node)
9660+ task.set_outputs(node.change_ext('.di'))
9661+
9662+d_header_str = '${D_COMPILER} ${D_HEADER} ${SRC}'
9663+Task.simple_task_type('d_header', d_header_str, color='BLUE', shell=False)
9664+
9665+@conftest
9666+def d_platform_flags(conf):
9667+ v = conf.env
9668+ binfmt = v.DEST_BINFMT or Utils.unversioned_sys_platform_to_binary_format(
9669+ v.DEST_OS or Utils.unversioned_sys_platform())
9670+ if binfmt == 'pe':
9671+ v['D_program_PATTERN'] = '%s.exe'
9672+ v['D_shlib_PATTERN'] = 'lib%s.dll'
9673+ v['D_staticlib_PATTERN'] = 'lib%s.a'
9674+ else:
9675+ v['D_program_PATTERN'] = '%s'
9676+ v['D_shlib_PATTERN'] = 'lib%s.so'
9677+ v['D_staticlib_PATTERN'] = 'lib%s.a'
9678+
9679+@conftest
9680+def check_dlibrary(conf):
9681+ ret = conf.check_cc(features='d dprogram', fragment=DLIB, mandatory=True, compile_filename='test.d', execute=True)
9682+ conf.env.DLIBRARY = ret.strip()
9683+
9684+# quick test #
9685+if __name__ == "__main__":
9686+ #Logs.verbose = 2
9687+
9688+ try: arg = sys.argv[1]
9689+ except IndexError: arg = "file.d"
9690+
9691+ print("".join(filter_comments(arg)))
9692+ # TODO
9693+ paths = ['.']
9694+
9695+ #gruik = filter()
9696+ #gruik.start(arg)
9697+
9698+ #code = "".join(gruik.buf)
9699+
9700+ #print "we have found the following code"
9701+ #print code
9702+
9703+ #print "now parsing"
9704+ #print "-------------------------------------------"
9705+ """
9706+ parser_ = d_parser()
9707+ parser_.start(arg)
9708+
9709+ print "module: %s" % parser_.module
9710+ print "imports: ",
9711+ for imp in parser_.imports:
9712+ print imp + " ",
9713+ print
9714+"""
9715+
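
A sketch of a wscript using the 'd' and 'dprogram' features defined above, relying on the dmd tool later in this patch for compiler detection; file and target names are illustrative.

	def configure(conf):
		conf.check_tool('dmd')   # find_dmd + common flags, then check_tool('d')

	def build(bld):
		bld.new_task_gen(features='d dprogram',
			source='main.d parser.d',
			target='demo',
			importpaths='.')      # picked up by apply_d_vars
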
9716diff --git a/buildtools/wafadmin/Tools/dbus.py b/buildtools/wafadmin/Tools/dbus.py
9717new file mode 100644
9718index 0000000..3179999
9719--- /dev/null
9720+++ b/buildtools/wafadmin/Tools/dbus.py
9721@@ -0,0 +1,34 @@
9722+#!/usr/bin/env python
9723+# encoding: utf-8
9724+# Ali Sabil, 2007
9725+
9726+import Task, Utils
9727+from TaskGen import taskgen, before, after, feature
9728+
9729+@taskgen
9730+def add_dbus_file(self, filename, prefix, mode):
9731+ if not hasattr(self, 'dbus_lst'):
9732+ self.dbus_lst = []
9733+ self.meths.append('process_dbus')
9734+ self.dbus_lst.append([filename, prefix, mode])
9735+
9736+@before('apply_core')
9737+def process_dbus(self):
9738+ for filename, prefix, mode in getattr(self, 'dbus_lst', []):
9739+ node = self.path.find_resource(filename)
9740+
9741+ if not node:
9742+ raise Utils.WafError('file not found ' + filename)
9743+
9744+ tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
9745+
9746+ tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix
9747+ tsk.env.DBUS_BINDING_TOOL_MODE = mode
9748+
9749+Task.simple_task_type('dbus_binding_tool',
9750+ '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}',
9751+ color='BLUE', before='cc')
9752+
9753+def detect(conf):
9754+ dbus_binding_tool = conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
9755+
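
A sketch of how add_dbus_file() above is meant to be called from a wscript; the interface file, prefix and the 'glib-server' mode are illustrative values for dbus-binding-tool, and 'cprogram' comes from ccroot.

	def configure(conf):
		conf.check_tool('gcc dbus')

	def build(bld):
		obj = bld.new_task_gen(features='cc cprogram', source='main.c', target='app')
		# generates my-service.h from the introspection XML before the C sources are built
		obj.add_dbus_file('my-service.xml', 'my_service_prefix', 'glib-server')
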
9756diff --git a/buildtools/wafadmin/Tools/dmd.py b/buildtools/wafadmin/Tools/dmd.py
9757new file mode 100644
9758index 0000000..9c74908
9759--- /dev/null
9760+++ b/buildtools/wafadmin/Tools/dmd.py
9761@@ -0,0 +1,64 @@
9762+#!/usr/bin/env python
9763+# encoding: utf-8
9764+# Carlos Rafael Giani, 2007 (dv)
9765+# Thomas Nagy, 2008 (ita)
9766+
9767+import sys
9768+import Utils, ar
9769+from Configure import conftest
9770+
9771+@conftest
9772+def find_dmd(conf):
9773+ conf.find_program(['dmd', 'ldc'], var='D_COMPILER', mandatory=True)
9774+
9775+@conftest
9776+def common_flags_ldc(conf):
9777+ v = conf.env
9778+ v['DFLAGS'] = ['-d-version=Posix']
9779+ v['DLINKFLAGS'] = []
9780+ v['D_shlib_DFLAGS'] = ['-relocation-model=pic']
9781+
9782+@conftest
9783+def common_flags_dmd(conf):
9784+ v = conf.env
9785+
9786+ # _DFLAGS _DIMPORTFLAGS
9787+
 9788+	# The compiler is dmd, so the 'gdc' part will be ignored; just
 9789+	# ensure the key is there so the wscript can append flags to it
9790+ v['DFLAGS'] = ['-version=Posix']
9791+
9792+ v['D_SRC_F'] = ''
9793+ v['D_TGT_F'] = ['-c', '-of']
9794+ v['DPATH_ST'] = '-I%s' # template for adding import paths
9795+
9796+ # linker
9797+ v['D_LINKER'] = v['D_COMPILER']
9798+ v['DLNK_SRC_F'] = ''
9799+ v['DLNK_TGT_F'] = '-of'
9800+
9801+ v['DLIB_ST'] = '-L-l%s' # template for adding libs
9802+ v['DLIBPATH_ST'] = '-L-L%s' # template for adding libpaths
9803+
9804+ # linker debug levels
9805+ v['DFLAGS_OPTIMIZED'] = ['-O']
9806+ v['DFLAGS_DEBUG'] = ['-g', '-debug']
9807+ v['DFLAGS_ULTRADEBUG'] = ['-g', '-debug']
9808+ v['DLINKFLAGS'] = ['-quiet']
9809+
9810+ v['D_shlib_DFLAGS'] = ['-fPIC']
9811+ v['D_shlib_LINKFLAGS'] = ['-L-shared']
9812+
9813+ v['DHEADER_ext'] = '.di'
9814+ v['D_HDR_F'] = ['-H', '-Hf']
9815+
9816+def detect(conf):
9817+ conf.find_dmd()
9818+ conf.check_tool('ar')
9819+ conf.check_tool('d')
9820+ conf.common_flags_dmd()
9821+ conf.d_platform_flags()
9822+
9823+ if conf.env.D_COMPILER.find('ldc') > -1:
9824+ conf.common_flags_ldc()
9825+
9826diff --git a/buildtools/wafadmin/Tools/flex.py b/buildtools/wafadmin/Tools/flex.py
9827new file mode 100644
9828index 0000000..5ce9f22
9829--- /dev/null
9830+++ b/buildtools/wafadmin/Tools/flex.py
9831@@ -0,0 +1,25 @@
9832+#!/usr/bin/env python
9833+# encoding: utf-8
9834+# John O'Meara, 2006
9835+# Thomas Nagy, 2006-2008
9836+
9837+"Flex processing"
9838+
9839+import TaskGen
9840+
9841+def decide_ext(self, node):
9842+ if 'cxx' in self.features: return '.lex.cc'
9843+ else: return '.lex.c'
9844+
9845+TaskGen.declare_chain(
9846+ name = 'flex',
9847+ rule = '${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}',
9848+ ext_in = '.l',
9849+ ext_out = '.c .cxx',
9850+ decider = decide_ext
9851+)
9852+
9853+def detect(conf):
9854+ conf.find_program('flex', var='FLEX', mandatory=True)
9855+ conf.env['FLEXFLAGS'] = ''
9856+
9857diff --git a/buildtools/wafadmin/Tools/gas.py b/buildtools/wafadmin/Tools/gas.py
9858new file mode 100644
9859index 0000000..c983b0a
9860--- /dev/null
9861+++ b/buildtools/wafadmin/Tools/gas.py
9862@@ -0,0 +1,38 @@
9863+#!/usr/bin/env python
9864+# encoding: utf-8
9865+# Thomas Nagy, 2008 (ita)
9866+
9867+"as and gas"
9868+
9869+import os, sys
9870+import Task
9871+from TaskGen import extension, taskgen, after, before
9872+
9873+EXT_ASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']
9874+
9875+as_str = '${AS} ${ASFLAGS} ${_ASINCFLAGS} ${SRC} -o ${TGT}'
9876+Task.simple_task_type('asm', as_str, 'PINK', ext_out='.o', shell=False)
9877+
9878+@extension(EXT_ASM)
9879+def asm_hook(self, node):
9880+ # create the compilation task: cpp or cc
9881+ try: obj_ext = self.obj_ext
9882+ except AttributeError: obj_ext = '_%d.o' % self.idx
9883+
9884+ task = self.create_task('asm', node, node.change_ext(obj_ext))
9885+ self.compiled_tasks.append(task)
9886+ self.meths.append('asm_incflags')
9887+
9888+@after('apply_obj_vars_cc')
9889+@after('apply_obj_vars_cxx')
9890+@before('apply_link')
9891+def asm_incflags(self):
9892+ self.env.append_value('_ASINCFLAGS', self.env.ASINCFLAGS)
9893+ var = ('cxx' in self.features) and 'CXX' or 'CC'
9894+ self.env.append_value('_ASINCFLAGS', self.env['_%sINCFLAGS' % var])
9895+
9896+def detect(conf):
9897+ conf.find_program(['gas', 'as'], var='AS')
9898+ if not conf.env.AS: conf.env.AS = conf.env.CC
 9899+	#conf.env.ASFLAGS = ['-c'] <- may be necessary for .S files
9900+
9901diff --git a/buildtools/wafadmin/Tools/gcc.py b/buildtools/wafadmin/Tools/gcc.py
9902new file mode 100644
9903index 0000000..420b44f
9904--- /dev/null
9905+++ b/buildtools/wafadmin/Tools/gcc.py
9906@@ -0,0 +1,135 @@
9907+#!/usr/bin/env python
9908+# encoding: utf-8
9909+# Thomas Nagy, 2006-2008 (ita)
9910+# Ralf Habacker, 2006 (rh)
9911+# Yinon Ehrlich, 2009
9912+
9913+import os, sys
9914+import Configure, Options, Utils
9915+import ccroot, ar
9916+from Configure import conftest
9917+
9918+@conftest
9919+def find_gcc(conf):
9920+ cc = conf.find_program(['gcc', 'cc'], var='CC', mandatory=True)
9921+ cc = conf.cmd_to_list(cc)
9922+ ccroot.get_cc_version(conf, cc, gcc=True)
9923+ conf.env.CC_NAME = 'gcc'
9924+ conf.env.CC = cc
9925+
9926+@conftest
9927+def gcc_common_flags(conf):
9928+ v = conf.env
9929+
9930+ # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
9931+
9932+ v['CCFLAGS_DEBUG'] = ['-g']
9933+
9934+ v['CCFLAGS_RELEASE'] = ['-O2']
9935+
9936+ v['CC_SRC_F'] = ''
9937+ v['CC_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
9938+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
9939+
9940+ # linker
9941+ if not v['LINK_CC']: v['LINK_CC'] = v['CC']
9942+ v['CCLNK_SRC_F'] = ''
9943+ v['CCLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
9944+
9945+ v['LIB_ST'] = '-l%s' # template for adding libs
9946+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
9947+ v['STATICLIB_ST'] = '-l%s'
9948+ v['STATICLIBPATH_ST'] = '-L%s'
9949+ v['RPATH_ST'] = '-Wl,-rpath,%s'
9950+ v['CCDEFINES_ST'] = '-D%s'
9951+
9952+ v['SONAME_ST'] = '-Wl,-h,%s'
9953+ v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
9954+ v['STATICLIB_MARKER'] = '-Wl,-Bstatic'
9955+ v['FULLSTATIC_MARKER'] = '-static'
9956+
9957+ # program
9958+ v['program_PATTERN'] = '%s'
9959+
9960+ # shared library
 9961+	v['shlib_CCFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
9962+ v['shlib_LINKFLAGS'] = ['-shared']
9963+ v['shlib_PATTERN'] = 'lib%s.so'
9964+
9965+ # static lib
9966+ v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
9967+ v['staticlib_PATTERN'] = 'lib%s.a'
9968+
9969+ # osx stuff
9970+ v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
9971+ v['CCFLAGS_MACBUNDLE'] = ['-fPIC']
9972+ v['macbundle_PATTERN'] = '%s.bundle'
9973+
9974+@conftest
9975+def gcc_modifier_win32(conf):
9976+ v = conf.env
9977+ v['program_PATTERN'] = '%s.exe'
9978+
9979+ v['shlib_PATTERN'] = '%s.dll'
9980+ v['implib_PATTERN'] = 'lib%s.dll.a'
9981+ v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
9982+
9983+ dest_arch = v['DEST_CPU']
9984+ v['shlib_CCFLAGS'] = ['-DPIC']
9985+
9986+ v.append_value('shlib_CCFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea
9987+
9988+ # Auto-import is enabled by default even without this option,
9989+ # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
9990+ # that the linker emits otherwise.
9991+ v.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
9992+
9993+@conftest
9994+def gcc_modifier_cygwin(conf):
9995+ gcc_modifier_win32(conf)
9996+ v = conf.env
9997+ v['shlib_PATTERN'] = 'cyg%s.dll'
9998+ v.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
9999+
10000+@conftest
10001+def gcc_modifier_darwin(conf):
10002+ v = conf.env
10003+ v['shlib_CCFLAGS'] = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
10004+ v['shlib_LINKFLAGS'] = ['-dynamiclib']
10005+ v['shlib_PATTERN'] = 'lib%s.dylib'
10006+
10007+ v['staticlib_LINKFLAGS'] = []
10008+
10009+ v['SHLIB_MARKER'] = ''
10010+ v['STATICLIB_MARKER'] = ''
10011+ v['SONAME_ST'] = ''
10012+
10013+@conftest
10014+def gcc_modifier_aix(conf):
10015+ v = conf.env
10016+ v['program_LINKFLAGS'] = ['-Wl,-brtl']
10017+
10018+ v['shlib_LINKFLAGS'] = ['-shared','-Wl,-brtl,-bexpfull']
10019+
10020+ v['SHLIB_MARKER'] = ''
10021+
10022+@conftest
10023+def gcc_modifier_platform(conf):
10024+ # * set configurations specific for a platform.
10025+ # * the destination platform is detected automatically by looking at the macros the compiler predefines,
 10026+	# and if it's not recognised, it falls back to sys.platform.
10027+ dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
10028+ gcc_modifier_func = globals().get('gcc_modifier_' + dest_os)
10029+ if gcc_modifier_func:
10030+ gcc_modifier_func(conf)
10031+
10032+def detect(conf):
10033+ conf.find_gcc()
10034+ conf.find_cpp()
10035+ conf.find_ar()
10036+ conf.gcc_common_flags()
10037+ conf.gcc_modifier_platform()
10038+ conf.cc_load_tools()
10039+ conf.cc_add_flags()
10040+ conf.link_add_flags()
10041+
10042diff --git a/buildtools/wafadmin/Tools/gdc.py b/buildtools/wafadmin/Tools/gdc.py
10043new file mode 100644
10044index 0000000..4d2a321
10045--- /dev/null
10046+++ b/buildtools/wafadmin/Tools/gdc.py
10047@@ -0,0 +1,52 @@
10048+#!/usr/bin/env python
10049+# encoding: utf-8
10050+# Carlos Rafael Giani, 2007 (dv)
10051+
10052+import sys
10053+import Utils, ar
10054+from Configure import conftest
10055+
10056+@conftest
10057+def find_gdc(conf):
10058+ conf.find_program('gdc', var='D_COMPILER', mandatory=True)
10059+
10060+@conftest
10061+def common_flags_gdc(conf):
10062+ v = conf.env
10063+
10064+ # _DFLAGS _DIMPORTFLAGS
10065+
 10066+	# for more info about the meaning of this dict see dmd.py
10067+ v['DFLAGS'] = []
10068+
10069+ v['D_SRC_F'] = ''
10070+ v['D_TGT_F'] = ['-c', '-o', '']
10071+ v['DPATH_ST'] = '-I%s' # template for adding import paths
10072+
10073+ # linker
10074+ v['D_LINKER'] = v['D_COMPILER']
10075+ v['DLNK_SRC_F'] = ''
10076+ v['DLNK_TGT_F'] = ['-o', '']
10077+
10078+ v['DLIB_ST'] = '-l%s' # template for adding libs
10079+ v['DLIBPATH_ST'] = '-L%s' # template for adding libpaths
10080+
10081+ # debug levels
10082+ v['DLINKFLAGS'] = []
10083+ v['DFLAGS_OPTIMIZED'] = ['-O3']
10084+ v['DFLAGS_DEBUG'] = ['-O0']
10085+ v['DFLAGS_ULTRADEBUG'] = ['-O0']
10086+
10087+ v['D_shlib_DFLAGS'] = []
10088+ v['D_shlib_LINKFLAGS'] = ['-shared']
10089+
10090+ v['DHEADER_ext'] = '.di'
10091+ v['D_HDR_F'] = '-fintfc -fintfc-file='
10092+
10093+def detect(conf):
10094+ conf.find_gdc()
10095+ conf.check_tool('ar')
10096+ conf.check_tool('d')
10097+ conf.common_flags_gdc()
10098+ conf.d_platform_flags()
10099+
10100diff --git a/buildtools/wafadmin/Tools/glib2.py b/buildtools/wafadmin/Tools/glib2.py
10101new file mode 100644
10102index 0000000..042d612
10103--- /dev/null
10104+++ b/buildtools/wafadmin/Tools/glib2.py
10105@@ -0,0 +1,164 @@
10106+#! /usr/bin/env python
10107+# encoding: utf-8
10108+# Thomas Nagy, 2006-2008 (ita)
10109+
10110+"GLib2 support"
10111+
10112+import Task, Utils
10113+from TaskGen import taskgen, before, after, feature
10114+
10115+#
10116+# glib-genmarshal
10117+#
10118+
10119+@taskgen
10120+def add_marshal_file(self, filename, prefix):
10121+ if not hasattr(self, 'marshal_list'):
10122+ self.marshal_list = []
10123+ self.meths.append('process_marshal')
10124+ self.marshal_list.append((filename, prefix))
10125+
10126+@before('apply_core')
10127+def process_marshal(self):
10128+ for f, prefix in getattr(self, 'marshal_list', []):
10129+ node = self.path.find_resource(f)
10130+
10131+ if not node:
10132+ raise Utils.WafError('file not found %r' % f)
10133+
10134+ h_node = node.change_ext('.h')
10135+ c_node = node.change_ext('.c')
10136+
10137+ task = self.create_task('glib_genmarshal', node, [h_node, c_node])
10138+ task.env.GLIB_GENMARSHAL_PREFIX = prefix
10139+ self.allnodes.append(c_node)
10140+
10141+def genmarshal_func(self):
10142+
10143+ bld = self.inputs[0].__class__.bld
10144+
10145+ get = self.env.get_flat
10146+ cmd1 = "%s %s --prefix=%s --header > %s" % (
10147+ get('GLIB_GENMARSHAL'),
10148+ self.inputs[0].srcpath(self.env),
10149+ get('GLIB_GENMARSHAL_PREFIX'),
10150+ self.outputs[0].abspath(self.env)
10151+ )
10152+
10153+ ret = bld.exec_command(cmd1)
10154+ if ret: return ret
10155+
10156+ #print self.outputs[1].abspath(self.env)
10157+ f = open(self.outputs[1].abspath(self.env), 'wb')
10158+ c = '''#include "%s"\n''' % self.outputs[0].name
10159+ f.write(c)
10160+ f.close()
10161+
10162+ cmd2 = "%s %s --prefix=%s --body >> %s" % (
10163+ get('GLIB_GENMARSHAL'),
10164+ self.inputs[0].srcpath(self.env),
10165+ get('GLIB_GENMARSHAL_PREFIX'),
10166+ self.outputs[1].abspath(self.env)
10167+ )
10168+ ret = Utils.exec_command(cmd2)
10169+ if ret: return ret
10170+
10171+#
10172+# glib-mkenums
10173+#
10174+
10175+@taskgen
10176+def add_enums_from_template(self, source='', target='', template='', comments=''):
10177+ if not hasattr(self, 'enums_list'):
10178+ self.enums_list = []
10179+ self.meths.append('process_enums')
10180+ self.enums_list.append({'source': source,
10181+ 'target': target,
10182+ 'template': template,
10183+ 'file-head': '',
10184+ 'file-prod': '',
10185+ 'file-tail': '',
10186+ 'enum-prod': '',
10187+ 'value-head': '',
10188+ 'value-prod': '',
10189+ 'value-tail': '',
10190+ 'comments': comments})
10191+
10192+@taskgen
10193+def add_enums(self, source='', target='',
10194+ file_head='', file_prod='', file_tail='', enum_prod='',
10195+ value_head='', value_prod='', value_tail='', comments=''):
10196+ if not hasattr(self, 'enums_list'):
10197+ self.enums_list = []
10198+ self.meths.append('process_enums')
10199+ self.enums_list.append({'source': source,
10200+ 'template': '',
10201+ 'target': target,
10202+ 'file-head': file_head,
10203+ 'file-prod': file_prod,
10204+ 'file-tail': file_tail,
10205+ 'enum-prod': enum_prod,
10206+ 'value-head': value_head,
10207+ 'value-prod': value_prod,
10208+ 'value-tail': value_tail,
10209+ 'comments': comments})
10210+
10211+@before('apply_core')
10212+def process_enums(self):
10213+ for enum in getattr(self, 'enums_list', []):
10214+ task = self.create_task('glib_mkenums')
10215+ env = task.env
10216+
10217+ inputs = []
10218+
10219+ # process the source
10220+ source_list = self.to_list(enum['source'])
10221+ if not source_list:
10222+ raise Utils.WafError('missing source ' + str(enum))
10223+ source_list = [self.path.find_resource(k) for k in source_list]
10224+ inputs += source_list
10225+ env['GLIB_MKENUMS_SOURCE'] = [k.srcpath(env) for k in source_list]
10226+
10227+ # find the target
10228+ if not enum['target']:
10229+ raise Utils.WafError('missing target ' + str(enum))
10230+ tgt_node = self.path.find_or_declare(enum['target'])
10231+ if tgt_node.name.endswith('.c'):
10232+ self.allnodes.append(tgt_node)
10233+ env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath(env)
10234+
10235+
10236+ options = []
10237+
10238+ if enum['template']: # template, if provided
10239+ template_node = self.path.find_resource(enum['template'])
10240+ options.append('--template %s' % (template_node.abspath(env)))
10241+ inputs.append(template_node)
10242+ params = {'file-head' : '--fhead',
10243+ 'file-prod' : '--fprod',
10244+ 'file-tail' : '--ftail',
10245+ 'enum-prod' : '--eprod',
10246+ 'value-head' : '--vhead',
10247+ 'value-prod' : '--vprod',
10248+ 'value-tail' : '--vtail',
10249+ 'comments': '--comments'}
10250+ for param, option in params.iteritems():
10251+ if enum[param]:
10252+ options.append('%s %r' % (option, enum[param]))
10253+
10254+ env['GLIB_MKENUMS_OPTIONS'] = ' '.join(options)
10255+
10256+ # update the task instance
10257+ task.set_inputs(inputs)
10258+ task.set_outputs(tgt_node)
10259+
10260+Task.task_type_from_func('glib_genmarshal', func=genmarshal_func, vars=['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL'],
10261+ color='BLUE', before='cc cxx')
10262+Task.simple_task_type('glib_mkenums',
10263+ '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}',
10264+ color='PINK', before='cc cxx')
10265+
10266+def detect(conf):
10267+ glib_genmarshal = conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
10268+ mk_enums_tool = conf.find_program('glib-mkenums', var='GLIB_MKENUMS')
10269+
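
A sketch of the two taskgen helpers above as they would be used from a wscript; file names and the enum options are illustrative, and the 'cshlib' feature comes from ccroot.

	def configure(conf):
		conf.check_tool('gcc glib2')

	def build(bld):
		obj = bld.new_task_gen(features='cc cshlib', source='foo.c', target='foo')
		# foo-marshal.list -> foo-marshal.h/.c; the generated .c is compiled into the target
		obj.add_marshal_file('foo-marshal.list', 'foo_marshal')
		# enum boilerplate generated with glib-mkenums; the .c target is compiled as well
		obj.add_enums('foo-enums.h', 'foo-enum-types.c',
			file_head='#include "foo-enum-types.h"')
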
10270diff --git a/buildtools/wafadmin/Tools/gnome.py b/buildtools/wafadmin/Tools/gnome.py
10271new file mode 100644
10272index 0000000..c098a41
10273--- /dev/null
10274+++ b/buildtools/wafadmin/Tools/gnome.py
10275@@ -0,0 +1,223 @@
10276+#!/usr/bin/env python
10277+# encoding: utf-8
10278+# Thomas Nagy, 2006-2008 (ita)
10279+
10280+"Gnome support"
10281+
10282+import os, re
10283+import TaskGen, Utils, Runner, Task, Build, Options, Logs
10284+import cc
10285+from Logs import error
10286+from TaskGen import taskgen, before, after, feature
10287+
10288+n1_regexp = re.compile('<refentrytitle>(.*)</refentrytitle>', re.M)
10289+n2_regexp = re.compile('<manvolnum>(.*)</manvolnum>', re.M)
10290+
10291+def postinstall_schemas(prog_name):
10292+ if Build.bld.is_install:
10293+ dir = Build.bld.get_install_path('${PREFIX}/etc/gconf/schemas/%s.schemas' % prog_name)
10294+ if not Options.options.destdir:
10295+ # add the gconf schema
10296+ Utils.pprint('YELLOW', 'Installing GConf schema')
10297+ command = 'gconftool-2 --install-schema-file=%s 1> /dev/null' % dir
10298+ ret = Utils.exec_command(command)
10299+ else:
10300+ Utils.pprint('YELLOW', 'GConf schema not installed. After install, run this:')
10301+ Utils.pprint('YELLOW', 'gconftool-2 --install-schema-file=%s' % dir)
10302+
10303+def postinstall_icons():
10304+ dir = Build.bld.get_install_path('${DATADIR}/icons/hicolor')
10305+ if Build.bld.is_install:
10306+ if not Options.options.destdir:
10307+ # update the pixmap cache directory
10308+ Utils.pprint('YELLOW', "Updating Gtk icon cache.")
10309+ command = 'gtk-update-icon-cache -q -f -t %s' % dir
10310+ ret = Utils.exec_command(command)
10311+ else:
10312+ Utils.pprint('YELLOW', 'Icon cache not updated. After install, run this:')
10313+ Utils.pprint('YELLOW', 'gtk-update-icon-cache -q -f -t %s' % dir)
10314+
10315+def postinstall_scrollkeeper(prog_name):
10316+ if Build.bld.is_install:
10317+ # now the scrollkeeper update if we can write to the log file
10318+ if os.access('/var/log/scrollkeeper.log', os.W_OK):
10319+ dir1 = Build.bld.get_install_path('${PREFIX}/var/scrollkeeper')
10320+ dir2 = Build.bld.get_install_path('${DATADIR}/omf/%s' % prog_name)
10321+ command = 'scrollkeeper-update -q -p %s -o %s' % (dir1, dir2)
10322+ ret = Utils.exec_command(command)
10323+
10324+def postinstall(prog_name='myapp', schemas=1, icons=1, scrollkeeper=1):
10325+ if schemas: postinstall_schemas(prog_name)
10326+ if icons: postinstall_icons()
10327+ if scrollkeeper: postinstall_scrollkeeper(prog_name)
10328+
10329+# OBSOLETE
10330+class gnome_doc_taskgen(TaskGen.task_gen):
10331+ def __init__(self, *k, **kw):
10332+ TaskGen.task_gen.__init__(self, *k, **kw)
10333+
10334+@feature('gnome_doc')
10335+def init_gnome_doc(self):
10336+ self.default_install_path = '${PREFIX}/share'
10337+
10338+@feature('gnome_doc')
10339+@after('init_gnome_doc')
10340+def apply_gnome_doc(self):
10341+ self.env['APPNAME'] = self.doc_module
10342+ lst = self.to_list(self.doc_linguas)
10343+ bld = self.bld
10344+ lst.append('C')
10345+
10346+ for x in lst:
10347+ if not x == 'C':
10348+ tsk = self.create_task('xml2po')
10349+ node = self.path.find_resource(x+'/'+x+'.po')
10350+ src = self.path.find_resource('C/%s.xml' % self.doc_module)
10351+ out = self.path.find_or_declare('%s/%s.xml' % (x, self.doc_module))
10352+ tsk.set_inputs([node, src])
10353+ tsk.set_outputs(out)
10354+ else:
10355+ out = self.path.find_resource('%s/%s.xml' % (x, self.doc_module))
10356+
10357+ tsk2 = self.create_task('xsltproc2po')
10358+ out2 = self.path.find_or_declare('%s/%s-%s.omf' % (x, self.doc_module, x))
10359+ tsk2.set_outputs(out2)
10360+ node = self.path.find_resource(self.doc_module+".omf.in")
10361+ tsk2.inputs = [node, out]
10362+
10363+ tsk2.run_after.append(tsk)
10364+
10365+ if bld.is_install:
10366+ path = self.install_path + '/gnome/help/%s/%s' % (self.doc_module, x)
10367+ bld.install_files(self.install_path + '/omf', out2, env=self.env)
10368+ for y in self.to_list(self.doc_figures):
10369+ try:
10370+ os.stat(self.path.abspath() + '/' + x + '/' + y)
10371+ bld.install_as(path + '/' + y, self.path.abspath() + '/' + x + '/' + y)
10372+ except:
10373+ bld.install_as(path + '/' + y, self.path.abspath() + '/C/' + y)
10374+ bld.install_as(path + '/%s.xml' % self.doc_module, out.abspath(self.env))
10375+ if x == 'C':
10376+ xmls = self.to_list(self.doc_includes)
10377+ xmls.append(self.doc_entities)
10378+ for z in xmls:
10379+ out = self.path.find_resource('%s/%s' % (x, z))
10380+ bld.install_as(path + '/%s' % z, out.abspath(self.env))
10381+
10382+# OBSOLETE
10383+class xml_to_taskgen(TaskGen.task_gen):
10384+ def __init__(self, *k, **kw):
10385+ TaskGen.task_gen.__init__(self, *k, **kw)
10386+
10387+@feature('xml_to')
10388+def init_xml_to(self):
10389+ Utils.def_attrs(self,
10390+ source = 'xmlfile',
10391+ xslt = 'xlsltfile',
10392+ target = 'hey',
10393+ default_install_path = '${PREFIX}',
10394+ task_created = None)
10395+
10396+@feature('xml_to')
10397+@after('init_xml_to')
10398+def apply_xml_to(self):
10399+ xmlfile = self.path.find_resource(self.source)
10400+ xsltfile = self.path.find_resource(self.xslt)
10401+ tsk = self.create_task('xmlto', [xmlfile, xsltfile], xmlfile.change_ext('html'))
10402+ tsk.install_path = self.install_path
10403+
10404+def sgml_scan(self):
10405+ node = self.inputs[0]
10406+
10407+ env = self.env
10408+ variant = node.variant(env)
10409+
10410+ fi = open(node.abspath(env), 'r')
10411+ content = fi.read()
10412+ fi.close()
10413+
10414+ # we should use a sgml parser :-/
10415+ name = n1_regexp.findall(content)[0]
10416+ num = n2_regexp.findall(content)[0]
10417+
10418+ doc_name = name+'.'+num
10419+
10420+ if not self.outputs:
10421+ self.outputs = [self.generator.path.find_or_declare(doc_name)]
10422+
10423+ return ([], [doc_name])
10424+
10425+class gnome_sgml2man_taskgen(TaskGen.task_gen):
10426+ def __init__(self, *k, **kw):
10427+ TaskGen.task_gen.__init__(self, *k, **kw)
10428+
10429+@feature('gnome_sgml2man')
10430+def apply_gnome_sgml2man(self):
10431+ """
10432+ we could make it more complicated, but for now we just scan the document each time
10433+ """
10434+ assert(getattr(self, 'appname', None))
10435+
10436+ def install_result(task):
10437+ out = task.outputs[0]
10438+ name = out.name
10439+ ext = name[-1]
10440+ env = task.env
10441+ self.bld.install_files('${DATADIR}/man/man%s/' % ext, out, env)
10442+
10443+ self.bld.rescan(self.path)
10444+ for name in self.bld.cache_dir_contents[self.path.id]:
10445+ base, ext = os.path.splitext(name)
10446+ if ext != '.sgml': continue
10447+
10448+ task = self.create_task('sgml2man')
10449+ task.set_inputs(self.path.find_resource(name))
10450+ task.task_generator = self
10451+ if self.bld.is_install: task.install = install_result
10452+ # no outputs, the scanner does it
10453+ # no caching for now, this is not a time-critical feature
10454+ # in the future the scanner can be used to do more things (find dependencies, etc)
10455+ task.scan()
10456+
10457+cls = Task.simple_task_type('sgml2man', '${SGML2MAN} -o ${TGT[0].bld_dir(env)} ${SRC} > /dev/null', color='BLUE')
10458+cls.scan = sgml_scan
10459+cls.quiet = 1
10460+
10461+Task.simple_task_type('xmlto', '${XMLTO} html -m ${SRC[1].abspath(env)} ${SRC[0].abspath(env)}')
10462+
10463+Task.simple_task_type('xml2po', '${XML2PO} ${XML2POFLAGS} ${SRC} > ${TGT}', color='BLUE')
10464+
10465+# how do you expect someone to understand this?!
10466+xslt_magic = """${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \
10467+--stringparam db2omf.basename ${APPNAME} \
10468+--stringparam db2omf.format docbook \
10469+--stringparam db2omf.lang ${TGT[0].abspath(env)[:-4].split('-')[-1]} \
10470+--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
10471+--stringparam db2omf.omf_dir ${PREFIX}/share/omf \
10472+--stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \
10473+--stringparam db2omf.omf_in ${SRC[0].abspath(env)} \
10474+--stringparam db2omf.scrollkeeper_cl ${SCROLLKEEPER_DATADIR}/Templates/C/scrollkeeper_cl.xml \
10475+${DB2OMF} ${SRC[1].abspath(env)}"""
10476+
10477+#--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
10478+Task.simple_task_type('xsltproc2po', xslt_magic, color='BLUE')
10479+
10480+def detect(conf):
10481+ conf.check_tool('gnu_dirs glib2 dbus')
10482+ sgml2man = conf.find_program('docbook2man', var='SGML2MAN')
10483+
10484+ def getstr(varname):
10485+ return getattr(Options.options, varname, '')
10486+
 10487+	# conf.define also sets the variable in the env
10488+ conf.define('GNOMELOCALEDIR', os.path.join(conf.env['DATADIR'], 'locale'))
10489+
10490+ xml2po = conf.find_program('xml2po', var='XML2PO')
10491+ xsltproc2po = conf.find_program('xsltproc', var='XSLTPROC2PO')
10492+ conf.env['XML2POFLAGS'] = '-e -p'
10493+ conf.env['SCROLLKEEPER_DATADIR'] = Utils.cmd_output("scrollkeeper-config --pkgdatadir", silent=1).strip()
10494+ conf.env['DB2OMF'] = Utils.cmd_output("/usr/bin/pkg-config --variable db2omf gnome-doc-utils", silent=1).strip()
10495+
10496+def set_options(opt):
10497+ opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
10498+
10499diff --git a/buildtools/wafadmin/Tools/gnu_dirs.py b/buildtools/wafadmin/Tools/gnu_dirs.py
10500new file mode 100644
10501index 0000000..856e4a7
10502--- /dev/null
10503+++ b/buildtools/wafadmin/Tools/gnu_dirs.py
10504@@ -0,0 +1,111 @@
10505+#!/usr/bin/env python
10506+# encoding: utf-8
10507+# Ali Sabil, 2007
10508+
10509+"""
10510+To use this module do not forget to call
10511+opt.tool_options('gnu_dirs')
10512+AND
10513+conf.check_tool('gnu_dirs')
10514+
10515+Add options for the standard GNU directories, this tool will add the options
10516+found in autotools, and will update the environment with the following
10517+installation variables:
10518+
10519+ * PREFIX : architecture-independent files [/usr/local]
10520+ * EXEC_PREFIX : architecture-dependent files [PREFIX]
10521+ * BINDIR : user executables [EXEC_PREFIX/bin]
10522+ * SBINDIR : user executables [EXEC_PREFIX/sbin]
10523+ * LIBEXECDIR : program executables [EXEC_PREFIX/libexec]
10524+ * SYSCONFDIR : read-only single-machine data [PREFIX/etc]
10525+ * SHAREDSTATEDIR : modifiable architecture-independent data [PREFIX/com]
10526+ * LOCALSTATEDIR : modifiable single-machine data [PREFIX/var]
10527+ * LIBDIR : object code libraries [EXEC_PREFIX/lib]
10528+ * INCLUDEDIR : C header files [PREFIX/include]
10529+ * OLDINCLUDEDIR : C header files for non-gcc [/usr/include]
10530+ * DATAROOTDIR : read-only arch.-independent data root [PREFIX/share]
10531+ * DATADIR : read-only architecture-independent data [DATAROOTDIR]
10532+ * INFODIR : info documentation [DATAROOTDIR/info]
10533+ * LOCALEDIR : locale-dependent data [DATAROOTDIR/locale]
10534+ * MANDIR : man documentation [DATAROOTDIR/man]
10535+ * DOCDIR : documentation root [DATAROOTDIR/doc/telepathy-glib]
10536+ * HTMLDIR : html documentation [DOCDIR]
10537+ * DVIDIR : dvi documentation [DOCDIR]
10538+ * PDFDIR : pdf documentation [DOCDIR]
10539+ * PSDIR : ps documentation [DOCDIR]
10540+"""
10541+
10542+import Utils, Options
10543+
10544+_options = [x.split(', ') for x in '''
10545+bindir, user executables, ${EXEC_PREFIX}/bin
10546+sbindir, system admin executables, ${EXEC_PREFIX}/sbin
10547+libexecdir, program executables, ${EXEC_PREFIX}/libexec
10548+sysconfdir, read-only single-machine data, ${PREFIX}/etc
10549+sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
10550+localstatedir, modifiable single-machine data, ${PREFIX}/var
10551+libdir, object code libraries, ${EXEC_PREFIX}/lib
10552+includedir, C header files, ${PREFIX}/include
10553+oldincludedir, C header files for non-gcc, /usr/include
10554+datarootdir, read-only arch.-independent data root, ${PREFIX}/share
10555+datadir, read-only architecture-independent data, ${DATAROOTDIR}
10556+infodir, info documentation, ${DATAROOTDIR}/info
10557+localedir, locale-dependent data, ${DATAROOTDIR}/locale
10558+mandir, man documentation, ${DATAROOTDIR}/man
10559+docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
10560+htmldir, html documentation, ${DOCDIR}
10561+dvidir, dvi documentation, ${DOCDIR}
10562+pdfdir, pdf documentation, ${DOCDIR}
10563+psdir, ps documentation, ${DOCDIR}
10564+'''.split('\n') if x]
10565+
10566+def detect(conf):
10567+ def get_param(varname, default):
10568+ return getattr(Options.options, varname, '') or default
10569+
10570+ env = conf.env
10571+ env['EXEC_PREFIX'] = get_param('EXEC_PREFIX', env['PREFIX'])
10572+ env['PACKAGE'] = Utils.g_module.APPNAME
10573+
10574+ complete = False
10575+ iter = 0
10576+ while not complete and iter < len(_options) + 1:
10577+ iter += 1
10578+ complete = True
10579+ for name, help, default in _options:
10580+ name = name.upper()
10581+ if not env[name]:
10582+ try:
10583+ env[name] = Utils.subst_vars(get_param(name, default), env)
10584+ except TypeError:
10585+ complete = False
10586+ if not complete:
10587+ lst = [name for name, _, _ in _options if not env[name.upper()]]
10588+ raise Utils.WafError('Variable substitution failure %r' % lst)
10589+
10590+def set_options(opt):
10591+
10592+ inst_dir = opt.add_option_group('Installation directories',
10593+'By default, "waf install" will put the files in\
10594+ "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
10595+ than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
10596+
10597+ for k in ('--prefix', '--destdir'):
10598+ option = opt.parser.get_option(k)
10599+ if option:
10600+ opt.parser.remove_option(k)
10601+ inst_dir.add_option(option)
10602+
10603+ inst_dir.add_option('--exec-prefix',
10604+ help = 'installation prefix [Default: ${PREFIX}]',
10605+ default = '',
10606+ dest = 'EXEC_PREFIX')
10607+
10608+ dirs_options = opt.add_option_group('Pre-defined installation directories', '')
10609+
10610+ for name, help, default in _options:
10611+ option_name = '--' + name
10612+ str_default = default
10613+ str_help = '%s [Default: %s]' % (help, str_default)
10614+ dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
10615+
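
A sketch of the usage described in the module docstring above; APPNAME is required because detect() reads Utils.g_module.APPNAME to fill in ${PACKAGE}, the rest of the names are illustrative.

	APPNAME = 'demo'   # needed: detect() sets env['PACKAGE'] from Utils.g_module.APPNAME

	def set_options(opt):
		opt.tool_options('gnu_dirs')

	def configure(conf):
		conf.check_tool('gcc gnu_dirs')
		# conf.env['BINDIR'], conf.env['MANDIR'], conf.env['DOCDIR'], ... are now resolved
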
10616diff --git a/buildtools/wafadmin/Tools/gob2.py b/buildtools/wafadmin/Tools/gob2.py
10617new file mode 100644
10618index 0000000..00aaa32
10619--- /dev/null
10620+++ b/buildtools/wafadmin/Tools/gob2.py
10621@@ -0,0 +1,18 @@
10622+#!/usr/bin/env python
10623+# encoding: utf-8
10624+# Ali Sabil, 2007
10625+
10626+import TaskGen
10627+
10628+TaskGen.declare_chain(
10629+ name = 'gob2',
10630+ rule = '${GOB2} -o ${TGT[0].bld_dir(env)} ${GOB2FLAGS} ${SRC}',
10631+ ext_in = '.gob',
10632+ ext_out = '.c'
10633+)
10634+
10635+def detect(conf):
10636+ gob2 = conf.find_program('gob2', var='GOB2', mandatory=True)
10637+ conf.env['GOB2'] = gob2
10638+ conf.env['GOB2FLAGS'] = ''
10639+
10640diff --git a/buildtools/wafadmin/Tools/gxx.py b/buildtools/wafadmin/Tools/gxx.py
10641new file mode 100644
10642index 0000000..8f4a0bf
10643--- /dev/null
10644+++ b/buildtools/wafadmin/Tools/gxx.py
10645@@ -0,0 +1,133 @@
10646+#!/usr/bin/env python
10647+# encoding: utf-8
10648+# Thomas Nagy, 2006 (ita)
10649+# Ralf Habacker, 2006 (rh)
10650+# Yinon Ehrlich, 2009
10651+
10652+import os, sys
10653+import Configure, Options, Utils
10654+import ccroot, ar
10655+from Configure import conftest
10656+
10657+@conftest
10658+def find_gxx(conf):
10659+ cxx = conf.find_program(['g++', 'c++'], var='CXX', mandatory=True)
10660+ cxx = conf.cmd_to_list(cxx)
10661+ ccroot.get_cc_version(conf, cxx, gcc=True)
10662+ conf.env.CXX_NAME = 'gcc'
10663+ conf.env.CXX = cxx
10664+
10665+@conftest
10666+def gxx_common_flags(conf):
10667+ v = conf.env
10668+
10669+ # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
10670+ v['CXXFLAGS_DEBUG'] = ['-g']
10671+ v['CXXFLAGS_RELEASE'] = ['-O2']
10672+
10673+ v['CXX_SRC_F'] = ''
10674+ v['CXX_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
10675+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
10676+
10677+ # linker
10678+ if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
10679+ v['CXXLNK_SRC_F'] = ''
10680+ v['CXXLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
10681+
10682+ v['LIB_ST'] = '-l%s' # template for adding libs
10683+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
10684+ v['STATICLIB_ST'] = '-l%s'
10685+ v['STATICLIBPATH_ST'] = '-L%s'
10686+ v['RPATH_ST'] = '-Wl,-rpath,%s'
10687+ v['CXXDEFINES_ST'] = '-D%s'
10688+
10689+ v['SONAME_ST'] = '-Wl,-h,%s'
10690+ v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
10691+ v['STATICLIB_MARKER'] = '-Wl,-Bstatic'
10692+ v['FULLSTATIC_MARKER'] = '-static'
10693+
10694+ # program
10695+ v['program_PATTERN'] = '%s'
10696+
10697+ # shared library
 10698+	v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
10699+ v['shlib_LINKFLAGS'] = ['-shared']
10700+ v['shlib_PATTERN'] = 'lib%s.so'
10701+
10702+ # static lib
10703+ v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
10704+ v['staticlib_PATTERN'] = 'lib%s.a'
10705+
10706+ # osx stuff
10707+ v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
10708+ v['CCFLAGS_MACBUNDLE'] = ['-fPIC']
10709+ v['macbundle_PATTERN'] = '%s.bundle'
10710+
10711+@conftest
10712+def gxx_modifier_win32(conf):
10713+ v = conf.env
10714+ v['program_PATTERN'] = '%s.exe'
10715+
10716+ v['shlib_PATTERN'] = '%s.dll'
10717+ v['implib_PATTERN'] = 'lib%s.dll.a'
10718+ v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
10719+
10720+ dest_arch = v['DEST_CPU']
10721+ v['shlib_CXXFLAGS'] = []
10722+
10723+ v.append_value('shlib_CXXFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea
10724+
10725+ # Auto-import is enabled by default even without this option,
10726+ # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
10727+ # that the linker emits otherwise.
10728+ v.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
10729+
10730+@conftest
10731+def gxx_modifier_cygwin(conf):
10732+ gxx_modifier_win32(conf)
10733+ v = conf.env
10734+ v['shlib_PATTERN'] = 'cyg%s.dll'
10735+ v.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
10736+
10737+@conftest
10738+def gxx_modifier_darwin(conf):
10739+ v = conf.env
10740+ v['shlib_CXXFLAGS'] = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
10741+ v['shlib_LINKFLAGS'] = ['-dynamiclib']
10742+ v['shlib_PATTERN'] = 'lib%s.dylib'
10743+
10744+ v['staticlib_LINKFLAGS'] = []
10745+
10746+ v['SHLIB_MARKER'] = ''
10747+ v['STATICLIB_MARKER'] = ''
10748+ v['SONAME_ST'] = ''
10749+
10750+@conftest
10751+def gxx_modifier_aix(conf):
10752+ v = conf.env
10753+ v['program_LINKFLAGS'] = ['-Wl,-brtl']
10754+
10755+ v['shlib_LINKFLAGS'] = ['-shared', '-Wl,-brtl,-bexpfull']
10756+
10757+ v['SHLIB_MARKER'] = ''
10758+
10759+@conftest
10760+def gxx_modifier_platform(conf):
10761+ # * set configurations specific to a platform.
10762+ # * the destination platform is detected automatically by looking at the macros the compiler predefines,
10763+ #   and if it is not recognised, it falls back to sys.platform.
10764+ dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
10765+ gxx_modifier_func = globals().get('gxx_modifier_' + dest_os)
10766+ if gxx_modifier_func:
10767+ gxx_modifier_func(conf)
10768+
10769+def detect(conf):
10770+ conf.find_gxx()
10771+ conf.find_cpp()
10772+ conf.find_ar()
10773+ conf.gxx_common_flags()
10774+ conf.gxx_modifier_platform()
10775+ conf.cxx_load_tools()
10776+ conf.cxx_add_flags()
10777+ conf.link_add_flags()
10778+
10779diff --git a/buildtools/wafadmin/Tools/icc.py b/buildtools/wafadmin/Tools/icc.py
10780new file mode 100644
10781index 0000000..9c9a926
10782--- /dev/null
10783+++ b/buildtools/wafadmin/Tools/icc.py
10784@@ -0,0 +1,37 @@
10785+#!/usr/bin/env python
10786+# encoding: utf-8
10787+# Stian Selnes, 2008
10788+# Thomas Nagy 2009
10789+
10790+import os, sys
10791+import Configure, Options, Utils
10792+import ccroot, ar, gcc
10793+from Configure import conftest
10794+
10795+@conftest
10796+def find_icc(conf):
10797+ if sys.platform == 'cygwin':
10798+ conf.fatal('The Intel compiler does not work on Cygwin')
10799+
10800+ v = conf.env
10801+ cc = None
10802+ if v['CC']: cc = v['CC']
10803+ elif 'CC' in conf.environ: cc = conf.environ['CC']
10804+ if not cc: cc = conf.find_program('icc', var='CC')
10805+ if not cc: cc = conf.find_program('ICL', var='CC')
10806+ if not cc: conf.fatal('Intel C Compiler (icc) was not found')
10807+ cc = conf.cmd_to_list(cc)
10808+
10809+ ccroot.get_cc_version(conf, cc, icc=True)
10810+ v['CC'] = cc
10811+ v['CC_NAME'] = 'icc'
10812+
10813+detect = '''
10814+find_icc
10815+find_ar
10816+gcc_common_flags
10817+gcc_modifier_platform
10818+cc_load_tools
10819+cc_add_flags
10820+link_add_flags
10821+'''
10822diff --git a/buildtools/wafadmin/Tools/icpc.py b/buildtools/wafadmin/Tools/icpc.py
10823new file mode 100644
10824index 0000000..7d79c57
10825--- /dev/null
10826+++ b/buildtools/wafadmin/Tools/icpc.py
10827@@ -0,0 +1,35 @@
10828+#!/usr/bin/env python
10829+# encoding: utf-8
10830+# Thomas Nagy 2009
10831+
10832+import os, sys
10833+import Configure, Options, Utils
10834+import ccroot, ar, gxx
10835+from Configure import conftest
10836+
10837+@conftest
10838+def find_icpc(conf):
10839+ if sys.platform == 'cygwin':
10840+ conf.fatal('The Intel compiler does not work on Cygwin')
10841+
10842+ v = conf.env
10843+ cxx = None
10844+ if v['CXX']: cxx = v['CXX']
10845+ elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
10846+ if not cxx: cxx = conf.find_program('icpc', var='CXX')
10847+ if not cxx: conf.fatal('Intel C++ Compiler (icpc) was not found')
10848+ cxx = conf.cmd_to_list(cxx)
10849+
10850+ ccroot.get_cc_version(conf, cxx, icc=True)
10851+ v['CXX'] = cxx
10852+ v['CXX_NAME'] = 'icc'
10853+
10854+detect = '''
10855+find_icpc
10856+find_ar
10857+gxx_common_flags
10858+gxx_modifier_platform
10859+cxx_load_tools
10860+cxx_add_flags
10861+link_add_flags
10862+'''
10863diff --git a/buildtools/wafadmin/Tools/intltool.py b/buildtools/wafadmin/Tools/intltool.py
10864new file mode 100644
10865index 0000000..deb8f4a
10866--- /dev/null
10867+++ b/buildtools/wafadmin/Tools/intltool.py
10868@@ -0,0 +1,139 @@
10869+#!/usr/bin/env python
10870+# encoding: utf-8
10871+# Thomas Nagy, 2006 (ita)
10872+
10873+"intltool support"
10874+
10875+import os, re
10876+import Configure, TaskGen, Task, Utils, Runner, Options, Build, config_c
10877+from TaskGen import feature, before, taskgen
10878+from Logs import error
10879+
10880+"""
10881+Usage:
10882+
10883+bld(features='intltool_in', source='a.po b.po', podir='po', intlcache='.intlcache', flags='')
10884+
10885+"""
10886+
10887+class intltool_in_taskgen(TaskGen.task_gen):
10888+ """deprecated"""
10889+ def __init__(self, *k, **kw):
10890+ TaskGen.task_gen.__init__(self, *k, **kw)
10891+
10892+@before('apply_core')
10893+@feature('intltool_in')
10894+def iapply_intltool_in_f(self):
10895+ try: self.meths.remove('apply_core')
10896+ except ValueError: pass
10897+
10898+ for i in self.to_list(self.source):
10899+ node = self.path.find_resource(i)
10900+
10901+ podir = getattr(self, 'podir', 'po')
10902+ podirnode = self.path.find_dir(podir)
10903+ if not podirnode:
10904+ error("could not find the podir %r" % podir)
10905+ continue
10906+
10907+ cache = getattr(self, 'intlcache', '.intlcache')
10908+ self.env['INTLCACHE'] = os.path.join(self.path.bldpath(self.env), podir, cache)
10909+ self.env['INTLPODIR'] = podirnode.srcpath(self.env)
10910+ self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c'])
10911+
10912+ task = self.create_task('intltool', node, node.change_ext(''))
10913+ task.install_path = self.install_path
10914+
10915+class intltool_po_taskgen(TaskGen.task_gen):
10916+ """deprecated"""
10917+ def __init__(self, *k, **kw):
10918+ TaskGen.task_gen.__init__(self, *k, **kw)
10919+
10920+
10921+@feature('intltool_po')
10922+def apply_intltool_po(self):
10923+ try: self.meths.remove('apply_core')
10924+ except ValueError: pass
10925+
10926+ self.default_install_path = '${LOCALEDIR}'
10927+ appname = getattr(self, 'appname', 'set_your_app_name')
10928+ podir = getattr(self, 'podir', '')
10929+
10930+ def install_translation(task):
10931+ out = task.outputs[0]
10932+ filename = out.name
10933+ (langname, ext) = os.path.splitext(filename)
10934+ inst_file = langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
10935+ self.bld.install_as(os.path.join(self.install_path, inst_file), out, self.env, self.chmod)
10936+
10937+ linguas = self.path.find_resource(os.path.join(podir, 'LINGUAS'))
10938+ if linguas:
10939+ # scan LINGUAS file for locales to process
10940+ file = open(linguas.abspath())
10941+ langs = []
10942+ for line in file.readlines():
10943+ # ignore lines containing comments
10944+ if not line.startswith('#'):
10945+ langs += line.split()
10946+ file.close()
10947+ re_linguas = re.compile('[-a-zA-Z_@.]+')
10948+ for lang in langs:
10949+ # Make sure that we only process lines which contain locales
10950+ if re_linguas.match(lang):
10951+ node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po'))
10952+ task = self.create_task('po')
10953+ task.set_inputs(node)
10954+ task.set_outputs(node.change_ext('.mo'))
10955+ if self.bld.is_install: task.install = install_translation
10956+ else:
10957+ Utils.pprint('RED', "Error: no LINGUAS file found in the po directory")
10958+
10959+Task.simple_task_type('po', '${POCOM} -o ${TGT} ${SRC}', color='BLUE', shell=False)
10960+Task.simple_task_type('intltool',
10961+ '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}',
10962+ color='BLUE', after="cc_link cxx_link", shell=False)
10963+
10964+def detect(conf):
10965+ pocom = conf.find_program('msgfmt')
10966+ if not pocom:
10967+ # if msgfmt should not be mandatory, catch the thrown exception in your wscript
10968+ conf.fatal('The program msgfmt (gettext) is mandatory!')
10969+ conf.env['POCOM'] = pocom
10970+
10971+ # NOTE: it is possible to set INTLTOOL in the environment, but it must not have spaces in it
10972+
10973+ intltool = conf.find_program('intltool-merge', var='INTLTOOL')
10974+ if not intltool:
10975+ # if intltool-merge should not be mandatory, catch the thrown exception in your wscript
10976+ if Options.platform == 'win32':
10977+ perl = conf.find_program('perl', var='PERL')
10978+ if not perl:
10979+ conf.fatal('The program perl (required by intltool) could not be found')
10980+
10981+ intltooldir = Configure.find_file('intltool-merge', os.environ['PATH'].split(os.pathsep))
10982+ if not intltooldir:
10983+ conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
10984+
10985+ conf.env['INTLTOOL'] = Utils.to_list(conf.env['PERL']) + [intltooldir + os.sep + 'intltool-merge']
10986+ conf.check_message('intltool', '', True, ' '.join(conf.env['INTLTOOL']))
10987+ else:
10988+ conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
10989+
10990+ def getstr(varname):
10991+ return getattr(Options.options, varname, '')
10992+
10993+ prefix = conf.env['PREFIX']
10994+ datadir = getstr('datadir')
10995+ if not datadir: datadir = os.path.join(prefix,'share')
10996+
10997+ conf.define('LOCALEDIR', os.path.join(datadir, 'locale'))
10998+ conf.define('DATADIR', datadir)
10999+
11000+ if conf.env['CC'] or conf.env['CXX']:
11001+ # Define to 1 if <locale.h> is present
11002+ conf.check(header_name='locale.h')
11003+
11004+def set_options(opt):
11005+ opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
11006+ opt.add_option('--datadir', type='string', default='', dest='datadir', help='read-only application data')
11007+
11008diff --git a/buildtools/wafadmin/Tools/javaw.py b/buildtools/wafadmin/Tools/javaw.py
11009new file mode 100644
11010index 0000000..301ebc4
11011--- /dev/null
11012+++ b/buildtools/wafadmin/Tools/javaw.py
11013@@ -0,0 +1,255 @@
11014+#!/usr/bin/env python
11015+# encoding: utf-8
11016+# Thomas Nagy, 2006-2008 (ita)
11017+
11018+"""
11019+Java support
11020+
11021+Javac is one of the few compilers that behaves very badly:
11022+* it outputs files where it wants to (-d is only for the package root)
11023+* it recompiles files silently behind your back
11024+* it outputs an undefined amount of files (inner classes)
11025+
11026+Fortunately, the convention makes it possible to use the build dir without
11027+too many problems for the moment
11028+
11029+Inner classes must be located and cleaned up when a problem arises;
11030+for the moment waf does not track the production of inner classes.
11031+
11032+Adding all the files to a single task and re-executing it whenever any of the
11033+input files changes would only hurt compilation times.
11034+
11035+Compilation can be run using Jython[1] rather than regular Python. Instead of
11036+running one of the following commands:
11037+ ./waf configure
11038+ python waf configure
11039+You would have to run:
11040+ java -jar /path/to/jython.jar waf configure
11041+
11042+[1] http://www.jython.org/
11043+"""
11044+
11045+import os, re
11046+from Configure import conf
11047+import TaskGen, Task, Utils, Options, Build
11048+from TaskGen import feature, before, taskgen
11049+
11050+class_check_source = '''
11051+public class Test {
11052+ public static void main(String[] argv) {
11053+ Class lib;
11054+ if (argv.length < 1) {
11055+ System.err.println("Missing argument");
11056+ System.exit(77);
11057+ }
11058+ try {
11059+ lib = Class.forName(argv[0]);
11060+ } catch (ClassNotFoundException e) {
11061+ System.err.println("ClassNotFoundException");
11062+ System.exit(1);
11063+ }
11064+ lib = null;
11065+ System.exit(0);
11066+ }
11067+}
11068+'''
11069+
11070+@feature('jar')
11071+@before('apply_core')
11072+def jar_files(self):
11073+ basedir = getattr(self, 'basedir', '.')
11074+ destfile = getattr(self, 'destfile', 'test.jar')
11075+ jaropts = getattr(self, 'jaropts', [])
11076+ jarcreate = getattr(self, 'jarcreate', 'cf')
11077+
11078+ dir = self.path.find_dir(basedir)
11079+ if not dir: raise Utils.WafError('could not find the basedir %r' % basedir)
11080+
11081+ jaropts.append('-C')
11082+ jaropts.append(dir.abspath(self.env))
11083+ jaropts.append('.')
11084+
11085+ out = self.path.find_or_declare(destfile)
11086+
11087+ tsk = self.create_task('jar_create')
11088+ tsk.set_outputs(out)
11089+ tsk.inputs = [x for x in dir.find_iter(src=0, bld=1) if x.id != out.id]
11090+ tsk.env['JAROPTS'] = jaropts
11091+ tsk.env['JARCREATE'] = jarcreate
11092+
11093+@feature('javac')
11094+@before('apply_core')
11095+def apply_java(self):
11096+ Utils.def_attrs(self, jarname='', jaropts='', classpath='',
11097+ sourcepath='.', srcdir='.', source_re='**/*.java',
11098+ jar_mf_attributes={}, jar_mf_classpath=[])
11099+
11100+ if getattr(self, 'source_root', None):
11101+ # backwards compatibility: 'source_root' is the old name for 'srcdir'
11102+ self.srcdir = self.source_root
11103+
11104+
11105+ nodes_lst = []
11106+
11107+ if not self.classpath:
11108+ if not self.env['CLASSPATH']:
11109+ self.env['CLASSPATH'] = '..' + os.pathsep + '.'
11110+ else:
11111+ self.env['CLASSPATH'] = self.classpath
11112+
11113+ srcdir_node = self.path.find_dir(self.srcdir)
11114+ if not srcdir_node:
11115+ raise Utils.WafError('could not find srcdir %r' % self.srcdir)
11116+
11117+ src_nodes = [x for x in srcdir_node.ant_glob(self.source_re, flat=False)]
11118+ bld_nodes = [x.change_ext('.class') for x in src_nodes]
11119+
11120+ self.env['OUTDIR'] = [srcdir_node.bldpath(self.env)]
11121+
11122+ tsk = self.create_task('javac')
11123+ tsk.set_inputs(src_nodes)
11124+ tsk.set_outputs(bld_nodes)
11125+
11126+ if getattr(self, 'compat', None):
11127+ tsk.env.append_value('JAVACFLAGS', ['-source', self.compat])
11128+
11129+ if hasattr(self, 'sourcepath'):
11130+ fold = [self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
11131+ names = os.pathsep.join([x.srcpath() for x in fold])
11132+ else:
11133+ names = srcdir_node.srcpath()
11134+
11135+ if names:
11136+ tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
11137+
11138+ if self.jarname:
11139+ jtsk = self.create_task('jar_create', bld_nodes, self.path.find_or_declare(self.jarname))
11140+ jtsk.set_run_after(tsk)
11141+
11142+ if not self.env.JAROPTS:
11143+ if self.jaropts:
11144+ self.env.JAROPTS = self.jaropts
11145+ else:
11146+ dirs = '.'
11147+ self.env.JAROPTS = ['-C', ''.join(self.env['OUTDIR']), dirs]
11148+
11149+Task.simple_task_type('jar_create', '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}', color='GREEN', shell=False)
11150+cls = Task.simple_task_type('javac', '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}', shell=False)
11151+cls.color = 'BLUE'
11152+def post_run_javac(self):
11153+ """this is for cleaning the folder
11154+ javac creates single files for inner classes
11155+ but it is not possible to know which inner classes in advance"""
11156+
11157+ par = {}
11158+ for x in self.inputs:
11159+ par[x.parent.id] = x.parent
11160+
11161+ inner = {}
11162+ for k in par.values():
11163+ path = k.abspath(self.env)
11164+ lst = os.listdir(path)
11165+
11166+ for u in lst:
11167+ if u.find('$') >= 0:
11168+ inner_class_node = k.find_or_declare(u)
11169+ inner[inner_class_node.id] = inner_class_node
11170+
11171+ to_add = set(inner.keys()) - set([x.id for x in self.outputs])
11172+ for x in to_add:
11173+ self.outputs.append(inner[x])
11174+
11175+ self.cached = True # disable the cache here - inner classes are a problem
11176+ return Task.Task.post_run(self)
11177+cls.post_run = post_run_javac
11178+
11179+def detect(conf):
11180+ # If JAVA_HOME is set, we prepend its bin/ directory to the search path
11181+ java_path = conf.environ['PATH'].split(os.pathsep)
11182+ v = conf.env
11183+
11184+ if 'JAVA_HOME' in conf.environ:
11185+ java_path = [os.path.join(conf.environ['JAVA_HOME'], 'bin')] + java_path
11186+ conf.env['JAVA_HOME'] = [conf.environ['JAVA_HOME']]
11187+
11188+ for x in 'javac java jar'.split():
11189+ conf.find_program(x, var=x.upper(), path_list=java_path)
11190+ conf.env[x.upper()] = conf.cmd_to_list(conf.env[x.upper()])
11191+ v['JAVA_EXT'] = ['.java']
11192+
11193+ if 'CLASSPATH' in conf.environ:
11194+ v['CLASSPATH'] = conf.environ['CLASSPATH']
11195+
11196+ if not v['JAR']: conf.fatal('jar is required for making java packages')
11197+ if not v['JAVAC']: conf.fatal('javac is required for compiling java classes')
11198+ v['JARCREATE'] = 'cf' # can use cvf
11199+
11200+@conf
11201+def check_java_class(self, classname, with_classpath=None):
11202+ """Check if the specified java class is installed"""
11203+
11204+ import shutil
11205+
11206+ javatestdir = '.waf-javatest'
11207+
11208+ classpath = javatestdir
11209+ if self.env['CLASSPATH']:
11210+ classpath += os.pathsep + self.env['CLASSPATH']
11211+ if isinstance(with_classpath, str):
11212+ classpath += os.pathsep + with_classpath
11213+
11214+ shutil.rmtree(javatestdir, True)
11215+ os.mkdir(javatestdir)
11216+
11217+ java_file = open(os.path.join(javatestdir, 'Test.java'), 'w')
11218+ java_file.write(class_check_source)
11219+ java_file.close()
11220+
11221+ # Compile the source
11222+ Utils.exec_command(self.env['JAVAC'] + [os.path.join(javatestdir, 'Test.java')], shell=False)
11223+
11224+ # Try to run the app
11225+ cmd = self.env['JAVA'] + ['-cp', classpath, 'Test', classname]
11226+ self.log.write("%s\n" % str(cmd))
11227+ found = Utils.exec_command(cmd, shell=False, log=self.log)
11228+
11229+ self.check_message('Java class %s' % classname, "", not found)
11230+
11231+ shutil.rmtree(javatestdir, True)
11232+
11233+ return found
11234+
11235+@conf
11236+def check_jni_headers(conf):
11237+ """
11238+ Check for jni headers and libraries
11239+
11240+ On success the environment variable xxx_JAVA is added for uselib
11241+ """
11242+
11243+ if not conf.env.CC_NAME and not conf.env.CXX_NAME:
11244+ conf.fatal('load a compiler first (gcc, g++, ..)')
11245+
11246+ if not conf.env.JAVA_HOME:
11247+ conf.fatal('set JAVA_HOME in the system environment')
11248+
11249+ # jni requires the jvm
11250+ javaHome = conf.env['JAVA_HOME'][0]
11251+
11252+ b = Build.BuildContext()
11253+ b.load_dirs(conf.srcdir, conf.blddir)
11254+ dir = b.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
11255+ f = dir.ant_glob('**/(jni|jni_md).h', flat=False)
11256+ incDirs = [x.parent.abspath() for x in f]
11257+
11258+ dir = b.root.find_dir(conf.env.JAVA_HOME[0])
11259+ f = dir.ant_glob('**/*jvm.(so|dll)', flat=False)
11260+ libDirs = [x.parent.abspath() for x in f] or [javaHome]
11261+
11262+ for i, d in enumerate(libDirs):
11263+ if conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
11264+ libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA'):
11265+ break
11266+ else:
11267+ conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
11268+
11269diff --git a/buildtools/wafadmin/Tools/kde4.py b/buildtools/wafadmin/Tools/kde4.py
11270new file mode 100644
11271index 0000000..f480929
11272--- /dev/null
11273+++ b/buildtools/wafadmin/Tools/kde4.py
11274@@ -0,0 +1,74 @@
11275+#!/usr/bin/env python
11276+# encoding: utf-8
11277+# Thomas Nagy, 2006 (ita)
11278+
11279+import os, sys, re
11280+import Options, TaskGen, Task, Utils
11281+from TaskGen import taskgen, feature, after
11282+
11283+class msgfmt_taskgen(TaskGen.task_gen):
11284+ def __init__(self, *k, **kw):
11285+ TaskGen.task_gen.__init__(self, *k, **kw)
11286+
11287+@feature('msgfmt')
11288+def init_msgfmt(self):
11289+ #langs = '' # for example "foo/fr foo/br"
11290+ self.default_install_path = '${KDE4_LOCALE_INSTALL_DIR}'
11291+
11292+@feature('msgfmt')
11293+@after('init_msgfmt')
11294+def apply_msgfmt(self):
11295+ for lang in self.to_list(self.langs):
11296+ node = self.path.find_resource(lang+'.po')
11297+ task = self.create_task('msgfmt', node, node.change_ext('.mo'))
11298+
11299+ if not self.bld.is_install: continue
11300+ langname = lang.split('/')
11301+ langname = langname[-1]
11302+ task.install_path = self.install_path + os.sep + langname + os.sep + 'LC_MESSAGES'
11303+ task.filename = getattr(self, 'appname', 'set_your_appname') + '.mo'
11304+ task.chmod = self.chmod
11305+
11306+def detect(conf):
11307+ kdeconfig = conf.find_program('kde4-config')
11308+ if not kdeconfig:
11309+ conf.fatal('we need kde4-config')
11310+ prefix = Utils.cmd_output('%s --prefix' % kdeconfig, silent=True).strip()
11311+ file = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
11312+ try: os.stat(file)
11313+ except OSError:
11314+ file = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
11315+ try: os.stat(file)
11316+ except OSError: conf.fatal('could not open %s' % file)
11317+
11318+ try:
11319+ txt = Utils.readf(file)
11320+ except (OSError, IOError):
11321+ conf.fatal('could not read %s' % file)
11322+
11323+ txt = txt.replace('\\\n', '\n')
11324+ fu = re.compile('#(.*)\n')
11325+ txt = fu.sub('', txt)
11326+
11327+ setregexp = re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
11328+ found = setregexp.findall(txt)
11329+
11330+ for (_, key, val) in found:
11331+ #print key, val
11332+ conf.env[key] = val
11333+
11334+ # well well, i could just write an interpreter for cmake files
11335+ conf.env['LIB_KDECORE']='kdecore'
11336+ conf.env['LIB_KDEUI'] ='kdeui'
11337+ conf.env['LIB_KIO'] ='kio'
11338+ conf.env['LIB_KHTML'] ='khtml'
11339+ conf.env['LIB_KPARTS'] ='kparts'
11340+
11341+ conf.env['LIBPATH_KDECORE'] = conf.env['KDE4_LIB_INSTALL_DIR']
11342+ conf.env['CPPPATH_KDECORE'] = conf.env['KDE4_INCLUDE_INSTALL_DIR']
11343+ conf.env.append_value('CPPPATH_KDECORE', conf.env['KDE4_INCLUDE_INSTALL_DIR']+"/KDE")
11344+
11345+ conf.env['MSGFMT'] = conf.find_program('msgfmt')
11346+
11347+Task.simple_task_type('msgfmt', '${MSGFMT} ${SRC} -o ${TGT}', color='BLUE', shell=False)
11348+
11349diff --git a/buildtools/wafadmin/Tools/libtool.py b/buildtools/wafadmin/Tools/libtool.py
11350new file mode 100644
11351index 0000000..47fa906
11352--- /dev/null
11353+++ b/buildtools/wafadmin/Tools/libtool.py
11354@@ -0,0 +1,330 @@
11355+#!/usr/bin/env python
11356+# encoding: utf-8
11357+# Matthias Jahn, 2008, jahn matthias ath freenet punto de
11358+# Thomas Nagy, 2008 (ita)
11359+
11360+import sys, re, os, optparse
11361+
11362+import TaskGen, Task, Utils, preproc
11363+from Logs import error, debug, warn
11364+from TaskGen import taskgen, after, before, feature
11365+
11366+REVISION="0.1.3"
11367+
11368+"""
11369+if you want to use the code here, you must use something like this:
11370+obj = obj.create(...)
11371+obj.features.append("libtool")
11372+obj.vnum = "1.2.3" # optional, but versioned libraries are common
11373+"""
11374+
11375+# fake libtool files
11376+fakelibtool_vardeps = ['CXX', 'PREFIX']
11377+def fakelibtool_build(task):
11378+ # Writes a .la file, used by libtool
11379+ env = task.env
11380+ dest = open(task.outputs[0].abspath(env), 'w')
11381+ sname = task.inputs[0].name
11382+ fu = dest.write
11383+ fu("# Generated by ltmain.sh - GNU libtool 1.5.18 - (pwn3d by BKsys II code name WAF)\n")
11384+ if env['vnum']:
11385+ nums = env['vnum'].split('.')
11386+ libname = task.inputs[0].name
11387+ name3 = libname+'.'+env['vnum']
11388+ name2 = libname+'.'+nums[0]
11389+ name1 = libname
11390+ fu("dlname='%s'\n" % name2)
11391+ strn = " ".join([name3, name2, name1])
11392+ fu("library_names='%s'\n" % (strn) )
11393+ else:
11394+ fu("dlname='%s'\n" % sname)
11395+ fu("library_names='%s %s %s'\n" % (sname, sname, sname) )
11396+ fu("old_library=''\n")
11397+ vars = ' '.join(env['libtoolvars']+env['LINKFLAGS'])
11398+ fu("dependency_libs='%s'\n" % vars)
11399+ fu("current=0\n")
11400+ fu("age=0\nrevision=0\ninstalled=yes\nshouldnotlink=no\n")
11401+ fu("dlopen=''\ndlpreopen=''\n")
11402+ fu("libdir='%s/lib'\n" % env['PREFIX'])
11403+ dest.close()
11404+ return 0
11405+
11406+def read_la_file(path):
11407+ sp = re.compile(r'^([^=]+)=\'(.*)\'$')
11408+ dc={}
11409+ file = open(path, "r")
11410+ for line in file.readlines():
11411+ try:
11412+ #print sp.split(line.strip())
11413+ _, left, right, _ = sp.split(line.strip())
11414+ dc[left]=right
11415+ except ValueError:
11416+ pass
11417+ file.close()
11418+ return dc
11419+
11420+@feature("libtool")
11421+@after('apply_link')
11422+def apply_link_libtool(self):
11423+ if self.type != 'program':
11424+ linktask = self.link_task
11425+ self.latask = self.create_task('fakelibtool', linktask.outputs, linktask.outputs[0].change_ext('.la'))
11426+
11427+ if self.bld.is_install:
11428+ self.bld.install_files('${PREFIX}/lib', linktask.outputs[0], self.env)
11429+
11430+@feature("libtool")
11431+@before('apply_core')
11432+def apply_libtool(self):
11433+ self.env['vnum']=self.vnum
11434+
11435+ paths=[]
11436+ libs=[]
11437+ libtool_files=[]
11438+ libtool_vars=[]
11439+
11440+ for l in self.env['LINKFLAGS']:
11441+ if l[:2]=='-L':
11442+ paths.append(l[2:])
11443+ elif l[:2]=='-l':
11444+ libs.append(l[2:])
11445+
11446+ for l in libs:
11447+ for p in paths:
11448+ dict = read_la_file(p+'/lib'+l+'.la')
11449+ linkflags2 = dict.get('dependency_libs', '')
11450+ for v in linkflags2.split():
11451+ if v.endswith('.la'):
11452+ libtool_files.append(v)
11453+ libtool_vars.append(v)
11454+ continue
11455+ self.env.append_unique('LINKFLAGS', v)
11456+ break
11457+
11458+ self.env['libtoolvars']=libtool_vars
11459+
11460+ while libtool_files:
11461+ file = libtool_files.pop()
11462+ dict = read_la_file(file)
11463+ for v in dict['dependency_libs'].split():
11464+ if v[-3:] == '.la':
11465+ libtool_files.append(v)
11466+ continue
11467+ self.env.append_unique('LINKFLAGS', v)
11468+
11469+Task.task_type_from_func('fakelibtool', vars=fakelibtool_vardeps, func=fakelibtool_build, color='BLUE', after="cc_link cxx_link static_link")
11470+
11471+class libtool_la_file:
11472+ def __init__ (self, la_filename):
11473+ self.__la_filename = la_filename
11474+ #remove path and .la suffix
11475+ self.linkname = str(os.path.split(la_filename)[-1])[:-3]
11476+ if self.linkname.startswith("lib"):
11477+ self.linkname = self.linkname[3:]
11478+ # The name that we can dlopen(3).
11479+ self.dlname = None
11480+ # Names of this library
11481+ self.library_names = None
11482+ # The name of the static archive.
11483+ self.old_library = None
11484+ # Libraries that this one depends upon.
11485+ self.dependency_libs = None
11486+ # Version information for libIlmImf.
11487+ self.current = None
11488+ self.age = None
11489+ self.revision = None
11490+ # Is this an already installed library?
11491+ self.installed = None
11492+ # Should we warn about portability when linking against -modules?
11493+ self.shouldnotlink = None
11494+ # Files to dlopen/dlpreopen
11495+ self.dlopen = None
11496+ self.dlpreopen = None
11497+ # Directory that this library needs to be installed in:
11498+ self.libdir = '/usr/lib'
11499+ if not self.__parse():
11500+ raise ValueError("file %s not found!!" %(la_filename))
11501+
11502+ def __parse(self):
11503+ "Retrieve the variables from a file"
11504+ if not os.path.isfile(self.__la_filename): return 0
11505+ la_file=open(self.__la_filename, 'r')
11506+ for line in la_file:
11507+ ln = line.strip()
11508+ if not ln: continue
11509+ if ln[0]=='#': continue
11510+ (key, value) = str(ln).split('=', 1)
11511+ key = key.strip()
11512+ value = value.strip()
11513+ if value == "no": value = False
11514+ elif value == "yes": value = True
11515+ else:
11516+ try: value = int(value)
11517+ except ValueError: value = value.strip("'")
11518+ setattr(self, key, value)
11519+ la_file.close()
11520+ return 1
11521+
11522+ def get_libs(self):
11523+ """return linkflags for this lib"""
11524+ libs = []
11525+ if self.dependency_libs:
11526+ libs = str(self.dependency_libs).strip().split()
11527+ if libs == None:
11528+ libs = []
11529+ # add la lib and libdir
11530+ libs.insert(0, "-l%s" % self.linkname.strip())
11531+ libs.insert(0, "-L%s" % self.libdir.strip())
11532+ return libs
11533+
11534+ def __str__(self):
11535+ return '''\
11536+dlname = "%(dlname)s"
11537+library_names = "%(library_names)s"
11538+old_library = "%(old_library)s"
11539+dependency_libs = "%(dependency_libs)s"
11540+version = %(current)s.%(age)s.%(revision)s
11541+installed = "%(installed)s"
11542+shouldnotlink = "%(shouldnotlink)s"
11543+dlopen = "%(dlopen)s"
11544+dlpreopen = "%(dlpreopen)s"
11545+libdir = "%(libdir)s"''' % self.__dict__
11546+
11547+class libtool_config:
11548+ def __init__ (self, la_filename):
11549+ self.__libtool_la_file = libtool_la_file(la_filename)
11550+ tmp = self.__libtool_la_file
11551+ self.__version = [int(tmp.current), int(tmp.age), int(tmp.revision)]
11552+ self.__sub_la_files = []
11553+ self.__sub_la_files.append(la_filename)
11554+ self.__libs = None
11555+
11556+ def __cmp__(self, other):
11557+ """make it comparable with X.Y.Z versions (Y and Z are optional)"""
11558+ if not other:
11559+ return 1
11560+ othervers = [int(s) for s in str(other).split(".")]
11561+ selfvers = self.__version
11562+ return cmp(selfvers, othervers)
11563+
11564+ def __str__(self):
11565+ return "\n".join([
11566+ str(self.__libtool_la_file),
11567+ ' '.join(self.__libtool_la_file.get_libs()),
11568+ '* New getlibs:',
11569+ ' '.join(self.get_libs())
11570+ ])
11571+
11572+ def __get_la_libs(self, la_filename):
11573+ return libtool_la_file(la_filename).get_libs()
11574+
11575+ def get_libs(self):
11576+ """return the complete unique linkflags that do not
11577+ contain .la files anymore"""
11578+ libs_list = list(self.__libtool_la_file.get_libs())
11579+ libs_map = {}
11580+ while len(libs_list) > 0:
11581+ entry = libs_list.pop(0)
11582+ if entry:
11583+ if str(entry).endswith(".la"):
11584+ ## prevents duplicate .la checks
11585+ if entry not in self.__sub_la_files:
11586+ self.__sub_la_files.append(entry)
11587+ libs_list.extend(self.__get_la_libs(entry))
11588+ else:
11589+ libs_map[entry]=1
11590+ self.__libs = libs_map.keys()
11591+ return self.__libs
11592+
11593+ def get_libs_only_L(self):
11594+ if not self.__libs: self.get_libs()
11595+ libs = self.__libs
11596+ libs = [s for s in libs if str(s).startswith('-L')]
11597+ return libs
11598+
11599+ def get_libs_only_l(self):
11600+ if not self.__libs: self.get_libs()
11601+ libs = self.__libs
11602+ libs = [s for s in libs if str(s).startswith('-l')]
11603+ return libs
11604+
11605+ def get_libs_only_other(self):
11606+ if not self.__libs: self.get_libs()
11607+ libs = self.__libs
11608+ libs = [s for s in libs if not(str(s).startswith('-L')or str(s).startswith('-l'))]
11609+ return libs
11610+
11611+def useCmdLine():
11612+ """parse cmdline args and control build"""
11613+ usage = '''Usage: %prog [options] PathToFile.la
11614+example: %prog --atleast-version=2.0.0 /usr/lib/libIlmImf.la
11615+or: %prog --libs /usr/lib/libamarok.la'''
11616+ parser = optparse.OptionParser(usage)
11617+ a = parser.add_option
11618+ a("--version", dest = "versionNumber",
11619+ action = "store_true", default = False,
11620+ help = "output version of libtool-config"
11621+ )
11622+ a("--debug", dest = "debug",
11623+ action = "store_true", default = False,
11624+ help = "enable debug"
11625+ )
11626+ a("--libs", dest = "libs",
11627+ action = "store_true", default = False,
11628+ help = "output all linker flags"
11629+ )
11630+ a("--libs-only-l", dest = "libs_only_l",
11631+ action = "store_true", default = False,
11632+ help = "output -l flags"
11633+ )
11634+ a("--libs-only-L", dest = "libs_only_L",
11635+ action = "store_true", default = False,
11636+ help = "output -L flags"
11637+ )
11638+ a("--libs-only-other", dest = "libs_only_other",
11639+ action = "store_true", default = False,
11640+ help = "output other libs (e.g. -pthread)"
11641+ )
11642+ a("--atleast-version", dest = "atleast_version",
11643+ default=None,
11644+ help = "return 0 if the module is at least version ATLEAST_VERSION"
11645+ )
11646+ a("--exact-version", dest = "exact_version",
11647+ default=None,
11648+ help = "return 0 if the module is exactly version EXACT_VERSION"
11649+ )
11650+ a("--max-version", dest = "max_version",
11651+ default=None,
11652+ help = "return 0 if the module is no newer than version MAX_VERSION"
11653+ )
11654+
11655+ (options, args) = parser.parse_args()
11656+ if len(args) != 1 and not options.versionNumber:
11657+ parser.error("incorrect number of arguments")
11658+ if options.versionNumber:
11659+ print("libtool-config version %s" % REVISION)
11660+ return 0
11661+ ltf = libtool_config(args[0])
11662+ if options.debug:
11663+ print(ltf)
11664+ if options.atleast_version:
11665+ if ltf >= options.atleast_version: return 0
11666+ sys.exit(1)
11667+ if options.exact_version:
11668+ if ltf == options.exact_version: return 0
11669+ sys.exit(1)
11670+ if options.max_version:
11671+ if ltf <= options.max_version: return 0
11672+ sys.exit(1)
11673+
11674+ def p(x):
11675+ print(" ".join(x))
11676+ if options.libs: p(ltf.get_libs())
11677+ elif options.libs_only_l: p(ltf.get_libs_only_l())
11678+ elif options.libs_only_L: p(ltf.get_libs_only_L())
11679+ elif options.libs_only_other: p(ltf.get_libs_only_other())
11680+ return 0
11681+
11682+if __name__ == '__main__':
11683+ useCmdLine()
11684+
11685diff --git a/buildtools/wafadmin/Tools/lua.py b/buildtools/wafadmin/Tools/lua.py
11686new file mode 100644
11687index 0000000..5b181e1
11688--- /dev/null
11689+++ b/buildtools/wafadmin/Tools/lua.py
11690@@ -0,0 +1,25 @@
11691+#!/usr/bin/env python
11692+# encoding: utf-8
11693+# Sebastian Schlingmann, 2008
11694+# Thomas Nagy, 2008 (ita)
11695+
11696+import TaskGen
11697+from TaskGen import taskgen, feature
11698+from Constants import *
11699+
11700+TaskGen.declare_chain(
11701+ name = 'luac',
11702+ rule = '${LUAC} -s -o ${TGT} ${SRC}',
11703+ ext_in = '.lua',
11704+ ext_out = '.luac',
11705+ reentrant = False,
11706+ install = 'LUADIR', # env variable
11707+)
11708+
11709+@feature('lua')
11710+def init_lua(self):
11711+ self.default_chmod = O755
11712+
11713+def detect(conf):
11714+ conf.find_program('luac', var='LUAC', mandatory = True)
11715+
11716diff --git a/buildtools/wafadmin/Tools/misc.py b/buildtools/wafadmin/Tools/misc.py
11717new file mode 100644
11718index 0000000..9903ee4
11719--- /dev/null
11720+++ b/buildtools/wafadmin/Tools/misc.py
11721@@ -0,0 +1,430 @@
11722+#!/usr/bin/env python
11723+# encoding: utf-8
11724+# Thomas Nagy, 2006 (ita)
11725+
11726+"""
11727+Custom objects:
11728+ - execute a function every time
11729+ - copy a file somewhere else
11730+"""
11731+
11732+import shutil, re, os
11733+import TaskGen, Node, Task, Utils, Build, Constants
11734+from TaskGen import feature, taskgen, after, before
11735+from Logs import debug
11736+
11737+def copy_func(tsk):
11738+ "Make a file copy. This might be used to perform other kinds of file processing (even calling a compiler is possible)"
11739+ env = tsk.env
11740+ infile = tsk.inputs[0].abspath(env)
11741+ outfile = tsk.outputs[0].abspath(env)
11742+ try:
11743+ shutil.copy2(infile, outfile)
11744+ except (OSError, IOError):
11745+ return 1
11746+ else:
11747+ if tsk.chmod: os.chmod(outfile, tsk.chmod)
11748+ return 0
11749+
11750+def action_process_file_func(tsk):
11751+ "Ask the function attached to the task to process it"
11752+ if not tsk.fun: raise Utils.WafError('task must have a function attached to it for copy_func to work!')
11753+ return tsk.fun(tsk)
11754+
11755+class cmd_taskgen(TaskGen.task_gen):
11756+ def __init__(self, *k, **kw):
11757+ TaskGen.task_gen.__init__(self, *k, **kw)
11758+
11759+@feature('cmd')
11760+def apply_cmd(self):
11761+ "call a command every time"
11762+ if not self.fun: raise Utils.WafError('cmdobj needs a function!')
11763+ tsk = Task.TaskBase()
11764+ tsk.fun = self.fun
11765+ tsk.env = self.env
11766+ self.tasks.append(tsk)
11767+ tsk.install_path = self.install_path
11768+
11769+class copy_taskgen(TaskGen.task_gen):
11770+ "By default, make a file copy, if fun is provided, fun will make the copy (or call a compiler, etc)"
11771+ def __init__(self, *k, **kw):
11772+ TaskGen.task_gen.__init__(self, *k, **kw)
11773+
11774+@feature('copy')
11775+@before('apply_core')
11776+def apply_copy(self):
11777+ Utils.def_attrs(self, fun=copy_func)
11778+ self.default_install_path = 0
11779+
11780+ lst = self.to_list(self.source)
11781+ self.meths.remove('apply_core')
11782+
11783+ for filename in lst:
11784+ node = self.path.find_resource(filename)
11785+ if not node: raise Utils.WafError('cannot find input file %s for processing' % filename)
11786+
11787+ target = self.target
11788+ if not target or len(lst)>1: target = node.name
11789+
11790+ # TODO the file path may be incorrect
11791+ newnode = self.path.find_or_declare(target)
11792+
11793+ tsk = self.create_task('copy', node, newnode)
11794+ tsk.fun = self.fun
11795+ tsk.chmod = self.chmod
11796+ tsk.install_path = self.install_path
11797+
11798+ if not tsk.env:
11799+ tsk.debug()
11800+ raise Utils.WafError('task without an environment')
11801+
11802+def subst_func(tsk):
11803+ "Substitutes variables in a .in file"
11804+
11805+ m4_re = re.compile('@(\w+)@', re.M)
11806+
11807+ env = tsk.env
11808+ infile = tsk.inputs[0].abspath(env)
11809+ outfile = tsk.outputs[0].abspath(env)
11810+
11811+ code = Utils.readf(infile)
11812+
11813+ # replace all % by %% to prevent errors by % signs in the input file while string formatting
11814+ code = code.replace('%', '%%')
11815+
11816+ s = m4_re.sub(r'%(\1)s', code)
11817+
11818+ di = tsk.dict or {}
11819+ if not di:
11820+ names = m4_re.findall(code)
11821+ for i in names:
11822+ di[i] = env.get_flat(i) or env.get_flat(i.upper())
11823+
11824+ file = open(outfile, 'w')
11825+ file.write(s % di)
11826+ file.close()
11827+ if tsk.chmod: os.chmod(outfile, tsk.chmod)
11828+
11829+class subst_taskgen(TaskGen.task_gen):
11830+ def __init__(self, *k, **kw):
11831+ TaskGen.task_gen.__init__(self, *k, **kw)
11832+
11833+@feature('subst')
11834+@before('apply_core')
11835+def apply_subst(self):
11836+ Utils.def_attrs(self, fun=subst_func)
11837+ self.default_install_path = 0
11838+ lst = self.to_list(self.source)
11839+ self.meths.remove('apply_core')
11840+
11841+ self.dict = getattr(self, 'dict', {})
11842+
11843+ for filename in lst:
11844+ node = self.path.find_resource(filename)
11845+ if not node: raise Utils.WafError('cannot find input file %s for processing' % filename)
11846+
11847+ if self.target:
11848+ newnode = self.path.find_or_declare(self.target)
11849+ else:
11850+ newnode = node.change_ext('')
11851+
11852+ try:
11853+ self.dict = self.dict.get_merged_dict()
11854+ except AttributeError:
11855+ pass
11856+
11857+ if self.dict and not self.env['DICT_HASH']:
11858+ self.env = self.env.copy()
11859+ keys = list(self.dict.keys())
11860+ keys.sort()
11861+ lst = [self.dict[x] for x in keys]
11862+ self.env['DICT_HASH'] = str(Utils.h_list(lst))
11863+
11864+ tsk = self.create_task('copy', node, newnode)
11865+ tsk.fun = self.fun
11866+ tsk.dict = self.dict
11867+ tsk.dep_vars = ['DICT_HASH']
11868+ tsk.install_path = self.install_path
11869+ tsk.chmod = self.chmod
11870+
11871+ if not tsk.env:
11872+ tsk.debug()
11873+ raise Utils.WafError('task without an environment')
11874+
11875+####################
11876+## command-output ####
11877+####################
11878+
11879+class cmd_arg(object):
11880+ """command-output arguments for representing files or folders"""
11881+ def __init__(self, name, template='%s'):
11882+ self.name = name
11883+ self.template = template
11884+ self.node = None
11885+
11886+class input_file(cmd_arg):
11887+ def find_node(self, base_path):
11888+ assert isinstance(base_path, Node.Node)
11889+ self.node = base_path.find_resource(self.name)
11890+ if self.node is None:
11891+ raise Utils.WafError("Input file %s not found in %s" % (self.name, base_path))
11892+
11893+ def get_path(self, env, absolute):
11894+ if absolute:
11895+ return self.template % self.node.abspath(env)
11896+ else:
11897+ return self.template % self.node.srcpath(env)
11898+
11899+class output_file(cmd_arg):
11900+ def find_node(self, base_path):
11901+ assert isinstance(base_path, Node.Node)
11902+ self.node = base_path.find_or_declare(self.name)
11903+ if self.node is None:
11904+ raise Utils.WafError("Output file %s not found in %s" % (self.name, base_path))
11905+
11906+ def get_path(self, env, absolute):
11907+ if absolute:
11908+ return self.template % self.node.abspath(env)
11909+ else:
11910+ return self.template % self.node.bldpath(env)
11911+
11912+class cmd_dir_arg(cmd_arg):
11913+ def find_node(self, base_path):
11914+ assert isinstance(base_path, Node.Node)
11915+ self.node = base_path.find_dir(self.name)
11916+ if self.node is None:
11917+ raise Utils.WafError("Directory %s not found in %s" % (self.name, base_path))
11918+
11919+class input_dir(cmd_dir_arg):
11920+ def get_path(self, dummy_env, dummy_absolute):
11921+ return self.template % self.node.abspath()
11922+
11923+class output_dir(cmd_dir_arg):
11924+ def get_path(self, env, dummy_absolute):
11925+ return self.template % self.node.abspath(env)
11926+
11927+
11928+class command_output(Task.Task):
11929+ color = "BLUE"
11930+ def __init__(self, env, command, command_node, command_args, stdin, stdout, cwd, os_env, stderr):
11931+ Task.Task.__init__(self, env, normal=1)
11932+ assert isinstance(command, (str, Node.Node))
11933+ self.command = command
11934+ self.command_args = command_args
11935+ self.stdin = stdin
11936+ self.stdout = stdout
11937+ self.cwd = cwd
11938+ self.os_env = os_env
11939+ self.stderr = stderr
11940+
11941+ if command_node is not None: self.dep_nodes = [command_node]
11942+ self.dep_vars = [] # additional environment variables to track
11943+
11944+ def run(self):
11945+ task = self
11946+ #assert len(task.inputs) > 0
11947+
11948+ def input_path(node, template):
11949+ if task.cwd is None:
11950+ return template % node.bldpath(task.env)
11951+ else:
11952+ return template % node.abspath()
11953+ def output_path(node, template):
11954+ fun = node.abspath
11955+ if task.cwd is None: fun = node.bldpath
11956+ return template % fun(task.env)
11957+
11958+ if isinstance(task.command, Node.Node):
11959+ argv = [input_path(task.command, '%s')]
11960+ else:
11961+ argv = [task.command]
11962+
11963+ for arg in task.command_args:
11964+ if isinstance(arg, str):
11965+ argv.append(arg)
11966+ else:
11967+ assert isinstance(arg, cmd_arg)
11968+ argv.append(arg.get_path(task.env, (task.cwd is not None)))
11969+
11970+ if task.stdin:
11971+ stdin = open(input_path(task.stdin, '%s'))
11972+ else:
11973+ stdin = None
11974+
11975+ if task.stdout:
11976+ stdout = open(output_path(task.stdout, '%s'), "w")
11977+ else:
11978+ stdout = None
11979+
11980+ if task.stderr:
11981+ stderr = open(output_path(task.stderr, '%s'), "w")
11982+ else:
11983+ stderr = None
11984+
11985+ if task.cwd is None:
11986+ cwd = ('None (actually %r)' % os.getcwd())
11987+ else:
11988+ cwd = repr(task.cwd)
11989+ debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" %
11990+ (cwd, stdin, stdout, argv))
11991+
11992+ if task.os_env is None:
11993+ os_env = os.environ
11994+ else:
11995+ os_env = task.os_env
11996+ command = Utils.pproc.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env)
11997+ return command.wait()
11998+
11999+class cmd_output_taskgen(TaskGen.task_gen):
12000+ def __init__(self, *k, **kw):
12001+ TaskGen.task_gen.__init__(self, *k, **kw)
12002+
12003+@feature('command-output')
12004+def init_cmd_output(self):
12005+ Utils.def_attrs(self,
12006+ stdin = None,
12007+ stdout = None,
12008+ stderr = None,
12009+ # the command to execute
12010+ command = None,
12011+
12012+ # whether it is an external command; otherwise it is assumed
12013+ # to be an executable binary or script that lives in the
12014+ # source or build tree.
12015+ command_is_external = False,
12016+
12017+ # extra parameters (argv) to pass to the command (excluding
12018+ # the command itself)
12019+ argv = [],
12020+
12021+ # dependencies to other objects -> this is probably not what you want (ita)
12022+ # values must be 'task_gen' instances (not names!)
12023+ dependencies = [],
12024+
12025+ # dependencies on env variable contents
12026+ dep_vars = [],
12027+
12028+ # input files that are implicit, i.e. they are not
12029+ # stdin, nor are they mentioned explicitly in argv
12030+ hidden_inputs = [],
12031+
12032+ # output files that are implicit, i.e. they are not
12033+ # stdout, nor are they mentioned explicitly in argv
12034+ hidden_outputs = [],
12035+
12036+ # change the subprocess to this cwd (must use obj.input_dir() or output_dir() here)
12037+ cwd = None,
12038+
12039+ # OS environment variables to pass to the subprocess
12040+ # if None, use the default environment variables unchanged
12041+ os_env = None)
12042+
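+# A sketch of driving the 'command-output' feature, with placeholder file names and
+# assuming the input_file/output_file helpers above are imported into the wscript;
+# 'process.py' stands for a script found in the source tree:
+#
+#   def build(bld):
+#       bld(features='command-output',
+#           command='process.py',
+#           argv=[input_file('raw.dat'), output_file('cooked.dat')])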
12043+@feature('command-output')
12044+@after('init_cmd_output')
12045+def apply_cmd_output(self):
12046+ if self.command is None:
12047+ raise Utils.WafError("command-output missing command")
12048+ if self.command_is_external:
12049+ cmd = self.command
12050+ cmd_node = None
12051+ else:
12052+ cmd_node = self.path.find_resource(self.command)
12053+ assert cmd_node is not None, ('''Could not find command '%s' in source tree.
12054+Hint: if this is an external command,
12055+use command_is_external=True''') % (self.command,)
12056+ cmd = cmd_node
12057+
12058+ cwd = self.cwd
12059+ if cwd is not None:
12060+ # the working directory must be a cmd_dir_arg, resolved against the current path
12061+ assert isinstance(cwd, cmd_dir_arg)
12062+ cwd.find_node(self.path)
12063+
12064+ args = []
12065+ inputs = []
12066+ outputs = []
12067+
12068+ for arg in self.argv:
12069+ if isinstance(arg, cmd_arg):
12070+ arg.find_node(self.path)
12071+ if isinstance(arg, input_file):
12072+ inputs.append(arg.node)
12073+ if isinstance(arg, output_file):
12074+ outputs.append(arg.node)
12075+
12076+ if self.stdout is None:
12077+ stdout = None
12078+ else:
12079+ assert isinstance(self.stdout, str)
12080+ stdout = self.path.find_or_declare(self.stdout)
12081+ if stdout is None:
12082+ raise Utils.WafError("File %s not found" % (self.stdout,))
12083+ outputs.append(stdout)
12084+
12085+ if self.stderr is None:
12086+ stderr = None
12087+ else:
12088+ assert isinstance(self.stderr, str)
12089+ stderr = self.path.find_or_declare(self.stderr)
12090+ if stderr is None:
12091+ raise Utils.WafError("File %s not found" % (self.stderr,))
12092+ outputs.append(stderr)
12093+
12094+ if self.stdin is None:
12095+ stdin = None
12096+ else:
12097+ assert isinstance(self.stdin, str)
12098+ stdin = self.path.find_resource(self.stdin)
12099+ if stdin is None:
12100+ raise Utils.WafError("File %s not found" % (self.stdin,))
12101+ inputs.append(stdin)
12102+
12103+ for hidden_input in self.to_list(self.hidden_inputs):
12104+ node = self.path.find_resource(hidden_input)
12105+ if node is None:
12106+ raise Utils.WafError("File %s not found in dir %s" % (hidden_input, self.path))
12107+ inputs.append(node)
12108+
12109+ for hidden_output in self.to_list(self.hidden_outputs):
12110+ node = self.path.find_or_declare(hidden_output)
12111+ if node is None:
12112+ raise Utils.WafError("File %s not found in dir %s" % (hidden_output, self.path))
12113+ outputs.append(node)
12114+
12115+ if not (inputs or getattr(self, 'no_inputs', None)):
12116+ raise Utils.WafError('command-output objects must have at least one input file or give self.no_inputs')
12117+ if not (outputs or getattr(self, 'no_outputs', None)):
12118+ raise Utils.WafError('command-output objects must have at least one output file or give self.no_outputs')
12119+
12120+ task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr)
12121+ Utils.copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
12122+ self.tasks.append(task)
12123+
12124+ task.inputs = inputs
12125+ task.outputs = outputs
12126+ task.dep_vars = self.to_list(self.dep_vars)
12127+
12128+ for dep in self.dependencies:
12129+ assert dep is not self
12130+ dep.post()
12131+ for dep_task in dep.tasks:
12132+ task.set_run_after(dep_task)
12133+
12134+ if not task.inputs:
12135+ # the case for svnversion, always run, and update the output nodes
12136+ task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run
12137+ task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__)
12138+
12139+ # TODO the case with no outputs?
12140+
12141+def post_run(self):
12142+ for x in self.outputs:
12143+ h = Utils.h_file(x.abspath(self.env))
12144+ self.generator.bld.node_sigs[self.env.variant()][x.id] = h
12145+
12146+def runnable_status(self):
12147+ return Constants.RUN_ME
12148+
12149+Task.task_type_from_func('copy', vars=[], func=action_process_file_func)
12150+TaskGen.task_gen.classes['command-output'] = cmd_output_taskgen
12151+
12152diff --git a/buildtools/wafadmin/Tools/msvc.py b/buildtools/wafadmin/Tools/msvc.py
12153new file mode 100644
12154index 0000000..4fde8b1
12155--- /dev/null
12156+++ b/buildtools/wafadmin/Tools/msvc.py
12157@@ -0,0 +1,797 @@
12158+#!/usr/bin/env python
12159+# encoding: utf-8
12160+# Carlos Rafael Giani, 2006 (dv)
12161+# Tamas Pal, 2007 (folti)
12162+# Nicolas Mercier, 2009
12163+# Microsoft Visual C++/Intel C++ compiler support - beta, needs more testing
12164+
12165+# usage:
12166+#
12167+# conf.env['MSVC_VERSIONS'] = ['msvc 9.0', 'msvc 8.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0']
12168+# conf.env['MSVC_TARGETS'] = ['x64']
12169+# conf.check_tool('msvc')
12170+# OR conf.check_tool('msvc', funs='no_autodetect')
12171+# conf.check_lib_msvc('gdi32')
12172+# conf.check_libs_msvc('kernel32 user32', mandatory=True)
12173+# ...
12174+# obj.uselib = 'KERNEL32 USER32 GDI32'
12175+#
12176+# platforms and targets will be tested in the order they appear;
12177+# the first good configuration will be used
12178+# supported platforms :
12179+# ia64, x64, x86, x86_amd64, x86_ia64
12180+
12181+# compilers supported :
12182+# msvc => Visual Studio, versions 7.1 (2003), 8.0 (2005), 9.0 (2008)
12183+# wsdk => Windows SDK, versions 6.0, 6.1, 7.0
12184+# icl => Intel compiler, versions 9,10,11
12185+# Smartphone => Compiler/SDK for Smartphone devices (armv4/v4i)
12186+# PocketPC => Compiler/SDK for PocketPC devices (armv4/v4i)
12187+
12188+
12189+import os, sys, re, string, optparse
12190+import Utils, TaskGen, Runner, Configure, Task, Options
12191+from Logs import debug, info, warn, error
12192+from TaskGen import after, before, feature
12193+
12194+from Configure import conftest, conf
12195+import ccroot, cc, cxx, ar, winres
12196+from libtool import read_la_file
12197+
12198+try:
12199+ import _winreg
12200+except:
12201+ import winreg as _winreg
12202+
12203+pproc = Utils.pproc
12204+
12205+# importlibs provided by MSVC/Platform SDK. Do NOT search them....
12206+g_msvc_systemlibs = """
12207+aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
12208+cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
12209+credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
12210+ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
12211+faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid
12212+gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
12213+kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
12214+mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
12215+msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
12216+netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
12217+odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32
12218+osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
12219+ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
12220+rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
12221+shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
12222+traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
12223+version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm
12224+wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
12225+""".split()
12226+
12227+
12228+all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'), ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64') ]
12229+all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ]
12230+all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
12231+
12232+def setup_msvc(conf, versions):
12233+ platforms = Utils.to_list(conf.env['MSVC_TARGETS']) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
12234+ desired_versions = conf.env['MSVC_VERSIONS'] or [v for v,_ in versions][::-1]
12235+ versiondict = dict(versions)
12236+
12237+ for version in desired_versions:
12238+ try:
12239+ targets = dict(versiondict [version])
12240+ for target in platforms:
12241+ try:
12242+ arch,(p1,p2,p3) = targets[target]
12243+ compiler,revision = version.split()
12244+ return compiler,revision,p1,p2,p3
12245+ except KeyError: continue
12246+ except KeyError: continue
12247+ conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
12248+
12249+@conf
12250+def get_msvc_version(conf, compiler, version, target, vcvars):
12251+ debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
12252+ batfile = os.path.join(conf.blddir, 'waf-print-msvc.bat')
12253+ f = open(batfile, 'w')
12254+ f.write("""@echo off
12255+set INCLUDE=
12256+set LIB=
12257+call "%s" %s
12258+echo PATH=%%PATH%%
12259+echo INCLUDE=%%INCLUDE%%
12260+echo LIB=%%LIB%%
12261+""" % (vcvars,target))
12262+ f.close()
12263+ sout = Utils.cmd_output(['cmd', '/E:on', '/V:on', '/C', batfile])
12264+ lines = sout.splitlines()
12265+
12266+ for x in ('Setting environment', 'Setting SDK environment', 'Intel(R) C++ Compiler'):
12267+ if lines[0].find(x) != -1:
12268+ break
12269+ else:
12270+ debug('msvc: get_msvc_version: %r %r %r -> not found', compiler, version, target)
12271+ conf.fatal('msvc: Impossible to find a valid architecture for building (in get_msvc_version)')
12272+
12273+ for line in lines[1:]:
12274+ if line.startswith('PATH='):
12275+ path = line[5:]
12276+ MSVC_PATH = path.split(';')
12277+ elif line.startswith('INCLUDE='):
12278+ MSVC_INCDIR = [i for i in line[8:].split(';') if i]
12279+ elif line.startswith('LIB='):
12280+ MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
12281+
12282+ # Check if the compiler is usable at all.
12283+ # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
12284+ env = {}
12285+ env.update(os.environ)
12286+ env.update(PATH = path)
12287+ compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
12288+ cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
12289+ # delete CL if it exists, because it could contain parameters which can change cl's behaviour rather catastrophically.
12290+ if env.has_key('CL'):
12291+ del(env['CL'])
12292+
12293+ try:
12294+ p = pproc.Popen([cxx, '/help'], env=env, stdout=pproc.PIPE, stderr=pproc.PIPE)
12295+ out, err = p.communicate()
12296+ if p.returncode != 0:
12297+ raise Exception('return code: %r: %r' % (p.returncode, err))
12298+ except Exception, e:
12299+ debug('msvc: get_msvc_version: %r %r %r -> failure', compiler, version, target)
12300+ debug(str(e))
12301+ conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
12302+ else:
12303+ debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
12304+
12305+ return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
12306+
12307+@conf
12308+def gather_wsdk_versions(conf, versions):
12309+ version_pattern = re.compile('^v..?.?\...?.?')
12310+ try:
12311+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
12312+ except WindowsError:
12313+ try:
12314+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
12315+ except WindowsError:
12316+ return
12317+ index = 0
12318+ while 1:
12319+ try:
12320+ version = _winreg.EnumKey(all_versions, index)
12321+ except WindowsError:
12322+ break
12323+ index = index + 1
12324+ if not version_pattern.match(version):
12325+ continue
12326+ try:
12327+ msvc_version = _winreg.OpenKey(all_versions, version)
12328+ path,type = _winreg.QueryValueEx(msvc_version,'InstallationFolder')
12329+ except WindowsError:
12330+ continue
12331+ if os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
12332+ targets = []
12333+ for target,arch in all_msvc_platforms:
12334+ try:
12335+ targets.append((target, (arch, conf.get_msvc_version('wsdk', version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd')))))
12336+ except Configure.ConfigurationError:
12337+ pass
12338+ versions.append(('wsdk ' + version[1:], targets))
12339+
12340+@conf
12341+def gather_msvc_versions(conf, versions):
12342+ # checks SmartPhones SDKs
12343+ try:
12344+ ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
12345+ except WindowsError:
12346+ try:
12347+ ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
12348+ except WindowsError:
12349+ ce_sdk = ''
12350+ if ce_sdk:
12351+ supported_wince_platforms = []
12352+ ce_index = 0
12353+ while 1:
12354+ try:
12355+ sdk_device = _winreg.EnumKey(ce_sdk, ce_index)
12356+ except WindowsError:
12357+ break
12358+ ce_index = ce_index + 1
12359+ sdk = _winreg.OpenKey(ce_sdk, sdk_device)
12360+ path,type = _winreg.QueryValueEx(sdk, 'SDKRootDir')
12361+ path=str(path)
12362+ path,device = os.path.split(path)
12363+ if not device:
12364+ path,device = os.path.split(path)
12365+ for arch,compiler in all_wince_platforms:
12366+ platforms = []
12367+ if os.path.isdir(os.path.join(path, device, 'Lib', arch)):
12368+ platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch)))
12369+ if platforms:
12370+ supported_wince_platforms.append((device, platforms))
12371+ # checks MSVC
12372+ version_pattern = re.compile('^..?\...?')
12373+ for vcver,vcvar in [('VCExpress','exp'), ('VisualStudio','')]:
12374+ try:
12375+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\'+vcver)
12376+ except WindowsError:
12377+ try:
12378+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\'+vcver)
12379+ except WindowsError:
12380+ continue
12381+ index = 0
12382+ while 1:
12383+ try:
12384+ version = _winreg.EnumKey(all_versions, index)
12385+ except WindowsError:
12386+ break
12387+ index = index + 1
12388+ if not version_pattern.match(version):
12389+ continue
12390+ try:
12391+ msvc_version = _winreg.OpenKey(all_versions, version + "\\Setup\\VS")
12392+ path,type = _winreg.QueryValueEx(msvc_version, 'ProductDir')
12393+ path=str(path)
12394+ targets = []
12395+ if ce_sdk:
12396+ for device,platforms in supported_wince_platforms:
12397+ cetargets = []
12398+ for platform,compiler,include,lib in platforms:
12399+ winCEpath = os.path.join(path, 'VC', 'ce')
12400+ if os.path.isdir(winCEpath):
12401+ common_bindirs,_1,_2 = conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat'))
12402+ if os.path.isdir(os.path.join(winCEpath, 'lib', platform)):
12403+ bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)] + common_bindirs
12404+ incdirs = [include, os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include')]
12405+ libdirs = [lib, os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform)]
12406+ cetargets.append((platform, (platform, (bindirs,incdirs,libdirs))))
12407+ versions.append((device+' '+version, cetargets))
12408+ if os.path.isfile(os.path.join(path, 'VC', 'vcvarsall.bat')):
12409+ for target,realtarget in all_msvc_platforms[::-1]:
12410+ try:
12411+ targets.append((target, (realtarget, conf.get_msvc_version('msvc', version, target, os.path.join(path, 'VC', 'vcvarsall.bat')))))
12412+ except:
12413+ pass
12414+ elif os.path.isfile(os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')):
12415+ try:
12416+ targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')))))
12417+ except Configure.ConfigurationError:
12418+ pass
12419+ versions.append(('msvc '+version, targets))
12420+
12421+ except WindowsError:
12422+ continue
12423+
12424+@conf
12425+def gather_icl_versions(conf, versions):
12426+ version_pattern = re.compile('^...?.?\....?.?')
12427+ try:
12428+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
12429+ except WindowsError:
12430+ try:
12431+ all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
12432+ except WindowsError:
12433+ return
12434+ index = 0
12435+ while 1:
12436+ try:
12437+ version = _winreg.EnumKey(all_versions, index)
12438+ except WindowsError:
12439+ break
12440+ index = index + 1
12441+ if not version_pattern.match(version):
12442+ continue
12443+ targets = []
12444+ for target,arch in all_icl_platforms:
12445+ try:
12446+ icl_version = _winreg.OpenKey(all_versions, version+'\\'+target)
12447+ path,type = _winreg.QueryValueEx(icl_version,'ProductDir')
12448+ if os.path.isfile(os.path.join(path, 'bin', 'iclvars.bat')):
12449+ try:
12450+ targets.append((target, (arch, conf.get_msvc_version('intel', version, target, os.path.join(path, 'bin', 'iclvars.bat')))))
12451+ except Configure.ConfigurationError:
12452+ pass
12453+ except WindowsError:
12454+ continue
12455+ major = version[0:2]
12456+ versions.append(('intel ' + major, targets))
12457+
12458+@conf
12459+def get_msvc_versions(conf):
12460+ if not conf.env.MSVC_INSTALLED_VERSIONS:
12461+ lst = []
12462+ conf.gather_msvc_versions(lst)
12463+ conf.gather_wsdk_versions(lst)
12464+ conf.gather_icl_versions(lst)
12465+ conf.env.MSVC_INSTALLED_VERSIONS = lst
12466+ return conf.env.MSVC_INSTALLED_VERSIONS
12467+
12468+@conf
12469+def print_all_msvc_detected(conf):
12470+ for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']:
12471+ info(version)
12472+ for target,l in targets:
12473+ info("\t"+target)
12474+
12475+def detect_msvc(conf):
12476+ versions = get_msvc_versions(conf)
12477+ return setup_msvc(conf, versions)
12478+
12479+@conf
12480+def find_lt_names_msvc(self, libname, is_static=False):
12481+ """
12482+ Win32/MSVC specific code to glean out information from libtool la files.
12483+ this function is not attached to the task_gen class
12484+ """
12485+ lt_names=[
12486+ 'lib%s.la' % libname,
12487+ '%s.la' % libname,
12488+ ]
12489+
12490+ for path in self.env['LIBPATH']:
12491+ for la in lt_names:
12492+ laf=os.path.join(path,la)
12493+ dll=None
12494+ if os.path.exists(laf):
12495+ ltdict=read_la_file(laf)
12496+ lt_libdir=None
12497+ if ltdict.get('libdir', ''):
12498+ lt_libdir = ltdict['libdir']
12499+ if not is_static and ltdict.get('library_names', ''):
12500+ dllnames=ltdict['library_names'].split()
12501+ dll=dllnames[0].lower()
12502+ dll=re.sub('\.dll$', '', dll)
12503+ return (lt_libdir, dll, False)
12504+ elif ltdict.get('old_library', ''):
12505+ olib=ltdict['old_library']
12506+ if os.path.exists(os.path.join(path,olib)):
12507+ return (path, olib, True)
12508+ elif lt_libdir != '' and os.path.exists(os.path.join(lt_libdir,olib)):
12509+ return (lt_libdir, olib, True)
12510+ else:
12511+ return (None, olib, True)
12512+ else:
12513+ raise Utils.WafError('invalid libtool object file: %s' % laf)
12514+ return (None, None, None)
12515+
12516+@conf
12517+def libname_msvc(self, libname, is_static=False, mandatory=False):
12518+ lib = libname.lower()
12519+ lib = re.sub('\.lib$','',lib)
12520+
12521+ if lib in g_msvc_systemlibs:
12522+ return lib
12523+
12524+ lib=re.sub('^lib','',lib)
12525+
12526+ if lib == 'm':
12527+ return None
12528+
12529+ (lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static)
12530+
12531+ if lt_path != None and lt_libname != None:
12532+ if lt_static == True:
12533+ # file existence check has been made by find_lt_names
12534+ return os.path.join(lt_path,lt_libname)
12535+
12536+ if lt_path != None:
12537+ _libpaths=[lt_path] + self.env['LIBPATH']
12538+ else:
12539+ _libpaths=self.env['LIBPATH']
12540+
12541+ static_libs=[
12542+ 'lib%ss.lib' % lib,
12543+ 'lib%s.lib' % lib,
12544+ '%ss.lib' % lib,
12545+ '%s.lib' %lib,
12546+ ]
12547+
12548+ dynamic_libs=[
12549+ 'lib%s.dll.lib' % lib,
12550+ 'lib%s.dll.a' % lib,
12551+ '%s.dll.lib' % lib,
12552+ '%s.dll.a' % lib,
12553+ 'lib%s_d.lib' % lib,
12554+ '%s_d.lib' % lib,
12555+ '%s.lib' %lib,
12556+ ]
12557+
12558+ libnames=static_libs
12559+ if not is_static:
12560+ libnames=dynamic_libs + static_libs
12561+
12562+ for path in _libpaths:
12563+ for libn in libnames:
12564+ if os.path.exists(os.path.join(path, libn)):
12565+ debug('msvc: lib found: %s', os.path.join(path,libn))
12566+ return re.sub('\.lib$', '',libn)
12567+
12568+ #if no lib can be found, just return the libname as msvc expects it
12569+ if mandatory:
12570+ self.fatal("The library %r could not be found" % libname)
12571+ return re.sub('\.lib$', '', libname)
12572+
12573+@conf
12574+def check_lib_msvc(self, libname, is_static=False, uselib_store=None, mandatory=False):
12575+ "This is the api to use"
12576+ libn = self.libname_msvc(libname, is_static, mandatory)
12577+
12578+ if not uselib_store:
12579+ uselib_store = libname.upper()
12580+
12581+ # Note: ideally we should be able to place the lib in the right env var, either STATICLIB or LIB,
12582+ # but we don't distinguish static libs from shared libs.
12583+ # This is ok since msvc doesn't have any special linker flag to select static libs (no env['STATICLIB_MARKER'])
12584+ if False and is_static: # disabled
12585+ self.env['STATICLIB_' + uselib_store] = [libn]
12586+ else:
12587+ self.env['LIB_' + uselib_store] = [libn]
12588+
12589+@conf
12590+def check_libs_msvc(self, libnames, is_static=False, mandatory=False):
12591+ for libname in Utils.to_list(libnames):
12592+ self.check_lib_msvc(libname, is_static, mandatory=mandatory)
12593+
12594+@conftest
12595+def no_autodetect(conf):
12596+ conf.eval_rules(detect.replace('autodetect', ''))
12597+
12598+
12599+detect = '''
12600+autodetect
12601+find_msvc
12602+msvc_common_flags
12603+cc_load_tools
12604+cxx_load_tools
12605+cc_add_flags
12606+cxx_add_flags
12607+link_add_flags
12608+'''
12609+
12610+@conftest
12611+def autodetect(conf):
12612+ v = conf.env
12613+ compiler, version, path, includes, libdirs = detect_msvc(conf)
12614+ v['PATH'] = path
12615+ v['CPPPATH'] = includes
12616+ v['LIBPATH'] = libdirs
12617+ v['MSVC_COMPILER'] = compiler
12618+
12619+def _get_prog_names(conf, compiler):
12620+ if compiler=='intel':
12621+ compiler_name = 'ICL'
12622+ linker_name = 'XILINK'
12623+ lib_name = 'XILIB'
12624+ else:
12625+ # assumes CL.exe
12626+ compiler_name = 'CL'
12627+ linker_name = 'LINK'
12628+ lib_name = 'LIB'
12629+ return compiler_name, linker_name, lib_name
12630+
12631+@conftest
12632+def find_msvc(conf):
12633+ # due to path format limitations, limit operation only to native Win32. Yeah it sucks.
12634+ if sys.platform != 'win32':
12635+ conf.fatal('MSVC module only works under native Win32 Python! cygwin is not supported yet')
12636+
12637+ v = conf.env
12638+
12639+ compiler, version, path, includes, libdirs = detect_msvc(conf)
12640+
12641+ compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
12642+ has_msvc_manifest = (compiler == 'msvc' and float(version) >= 8) or (compiler == 'wsdk' and float(version) >= 6) or (compiler == 'intel' and float(version) >= 11)
12643+
12644+ # compiler
12645+ cxx = None
12646+ if v.CXX: cxx = v.CXX
12647+ elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
12648+ if not cxx: cxx = conf.find_program(compiler_name, var='CXX', path_list=path, mandatory=True)
12649+ cxx = conf.cmd_to_list(cxx)
12650+
12651+ # before setting anything, check if the compiler is really msvc
12652+ env = dict(conf.environ)
12653+ env.update(PATH = ';'.join(path))
12654+ if not Utils.cmd_output([cxx, '/nologo', '/?'], silent=True, env=env):
12655+ conf.fatal('the msvc compiler could not be identified')
12656+
12657+ link = v.LINK_CXX
12658+ if not link:
12659+ link = conf.find_program(linker_name, path_list=path, mandatory=True)
12660+ ar = v.AR
12661+ if not ar:
12662+ ar = conf.find_program(lib_name, path_list=path, mandatory=True)
12663+
12664+ # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
12665+ mt = v.MT
12666+ if has_msvc_manifest:
12667+ mt = conf.find_program('MT', path_list=path, mandatory=True)
12668+
12669+ # no more possibility of failure means the data state will be consistent
12670+ # we may store the data safely now
12671+
12672+ v.MSVC_MANIFEST = has_msvc_manifest
12673+ v.PATH = path
12674+ v.CPPPATH = includes
12675+ v.LIBPATH = libdirs
12676+
12677+ # c/c++ compiler
12678+ v.CC = v.CXX = cxx
12679+ v.CC_NAME = v.CXX_NAME = 'msvc'
12680+
12681+ v.LINK = v.LINK_CXX = link
12682+ if not v.LINK_CC:
12683+ v.LINK_CC = v.LINK_CXX
12684+
12685+ v.AR = ar
12686+ v.MT = mt
12687+ v.MTFLAGS = v.ARFLAGS = ['/NOLOGO']
12688+
12689+
12690+ conf.check_tool('winres')
12691+
12692+ if not conf.env.WINRC:
12693+ warn('Resource compiler not found. Compiling resource file is disabled')
12694+
12695+ # environment flags
12696+ try: v.prepend_value('CPPPATH', conf.environ['INCLUDE'])
12697+ except KeyError: pass
12698+ try: v.prepend_value('LIBPATH', conf.environ['LIB'])
12699+ except KeyError: pass
12700+
12701+@conftest
12702+def msvc_common_flags(conf):
12703+ v = conf.env
12704+
12705+ v['CPPFLAGS'] = ['/W3', '/nologo']
12706+
12707+ v['CCDEFINES_ST'] = '/D%s'
12708+ v['CXXDEFINES_ST'] = '/D%s'
12709+
12710+ # TODO just use _WIN32, which defined by the compiler itself!
12711+ v['CCDEFINES'] = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 macro anyway
12712+ v['CXXDEFINES'] = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 macro anyway
12713+
12714+ v['_CCINCFLAGS'] = []
12715+ v['_CCDEFFLAGS'] = []
12716+ v['_CXXINCFLAGS'] = []
12717+ v['_CXXDEFFLAGS'] = []
12718+
12719+ v['CC_SRC_F'] = ''
12720+ v['CC_TGT_F'] = ['/c', '/Fo']
12721+ v['CXX_SRC_F'] = ''
12722+ v['CXX_TGT_F'] = ['/c', '/Fo']
12723+
12724+ v['CPPPATH_ST'] = '/I%s' # template for adding include paths
12725+
12726+ v['AR_TGT_F'] = v['CCLNK_TGT_F'] = v['CXXLNK_TGT_F'] = '/OUT:'
12727+
12728+ # Subsystem specific flags
12729+ v['CPPFLAGS_CONSOLE'] = ['/SUBSYSTEM:CONSOLE']
12730+ v['CPPFLAGS_NATIVE'] = ['/SUBSYSTEM:NATIVE']
12731+ v['CPPFLAGS_POSIX'] = ['/SUBSYSTEM:POSIX']
12732+ v['CPPFLAGS_WINDOWS'] = ['/SUBSYSTEM:WINDOWS']
12733+ v['CPPFLAGS_WINDOWSCE'] = ['/SUBSYSTEM:WINDOWSCE']
12734+
12735+ # CRT specific flags
12736+ v['CPPFLAGS_CRT_MULTITHREADED'] = ['/MT']
12737+ v['CPPFLAGS_CRT_MULTITHREADED_DLL'] = ['/MD']
12738+
12739+ # TODO these are defined by the compiler itself!
12740+ v['CPPDEFINES_CRT_MULTITHREADED'] = ['_MT'] # this is defined by the compiler itself!
12741+ v['CPPDEFINES_CRT_MULTITHREADED_DLL'] = ['_MT', '_DLL'] # these are defined by the compiler itself!
12742+
12743+ v['CPPFLAGS_CRT_MULTITHREADED_DBG'] = ['/MTd']
12744+ v['CPPFLAGS_CRT_MULTITHREADED_DLL_DBG'] = ['/MDd']
12745+
12746+ # TODO these are defined by the compiler itself!
12747+ v['CPPDEFINES_CRT_MULTITHREADED_DBG'] = ['_DEBUG', '_MT'] # these are defined by the compiler itself!
12748+ v['CPPDEFINES_CRT_MULTITHREADED_DLL_DBG'] = ['_DEBUG', '_MT', '_DLL'] # these are defined by the compiler itself!
12749+
12750+ # compiler debug levels
12751+ v['CCFLAGS'] = ['/TC']
12752+ v['CCFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG']
12753+ v['CCFLAGS_RELEASE'] = ['/O2', '/DNDEBUG']
12754+ v['CCFLAGS_DEBUG'] = ['/Od', '/RTC1', '/ZI']
12755+ v['CCFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI']
12756+
12757+ v['CXXFLAGS'] = ['/TP', '/EHsc']
12758+ v['CXXFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG']
12759+ v['CXXFLAGS_RELEASE'] = ['/O2', '/DNDEBUG']
12760+
12761+ v['CXXFLAGS_DEBUG'] = ['/Od', '/RTC1', '/ZI']
12762+ v['CXXFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI']
12763+
12764+ # linker
12765+ v['LIB'] = []
12766+
12767+ v['LIB_ST'] = '%s.lib' # template for adding libs
12768+ v['LIBPATH_ST'] = '/LIBPATH:%s' # template for adding libpaths
12769+ v['STATICLIB_ST'] = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good practice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'
12770+ v['STATICLIBPATH_ST'] = '/LIBPATH:%s'
12771+
12772+ v['LINKFLAGS'] = ['/NOLOGO']
12773+ if v['MSVC_MANIFEST']:
12774+ v.append_value('LINKFLAGS', '/MANIFEST')
12775+ v['LINKFLAGS_DEBUG'] = ['/DEBUG']
12776+ v['LINKFLAGS_ULTRADEBUG'] = ['/DEBUG']
12777+
12778+ # shared library
12779+ v['shlib_CCFLAGS'] = ['']
12780+ v['shlib_CXXFLAGS'] = ['']
12781+ v['shlib_LINKFLAGS']= ['/DLL']
12782+ v['shlib_PATTERN'] = '%s.dll'
12783+ v['implib_PATTERN'] = '%s.lib'
12784+ v['IMPLIB_ST'] = '/IMPLIB:%s'
12785+
12786+ # static library
12787+ v['staticlib_LINKFLAGS'] = ['']
12788+ v['staticlib_PATTERN'] = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good practice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'
12789+
12790+ # program
12791+ v['program_PATTERN'] = '%s.exe'
12792+
12793+
12794+#######################################################################################################
12795+##### conf above, build below
12796+
12797+@after('apply_link')
12798+@feature('cc', 'cxx')
12799+def apply_flags_msvc(self):
12800+ if self.env.CC_NAME != 'msvc' or not self.link_task:
12801+ return
12802+
12803+ subsystem = getattr(self, 'subsystem', '')
12804+ if subsystem:
12805+ subsystem = '/subsystem:%s' % subsystem
12806+ flags = 'cstaticlib' in self.features and 'ARFLAGS' or 'LINKFLAGS'
12807+ self.env.append_value(flags, subsystem)
12808+
12809+ if getattr(self, 'link_task', None) and not 'cstaticlib' in self.features:
12810+ for f in self.env.LINKFLAGS:
12811+ d = f.lower()
12812+ if d[1:] == 'debug':
12813+ pdbnode = self.link_task.outputs[0].change_ext('.pdb')
12814+ pdbfile = pdbnode.bldpath(self.env)
12815+ self.link_task.outputs.append(pdbnode)
12816+ self.bld.install_files(self.install_path, [pdbnode], env=self.env)
12817+ break
12818+
12819+@feature('cprogram', 'cshlib', 'cstaticlib')
12820+@after('apply_lib_vars')
12821+@before('apply_obj_vars')
12822+def apply_obj_vars_msvc(self):
12823+ if self.env['CC_NAME'] != 'msvc':
12824+ return
12825+
12826+ try:
12827+ self.meths.remove('apply_obj_vars')
12828+ except ValueError:
12829+ pass
12830+
12831+ libpaths = getattr(self, 'libpaths', [])
12832+ if not libpaths: self.libpaths = libpaths
12833+
12834+ env = self.env
12835+ app = env.append_unique
12836+
12837+ cpppath_st = env['CPPPATH_ST']
12838+ lib_st = env['LIB_ST']
12839+ staticlib_st = env['STATICLIB_ST']
12840+ libpath_st = env['LIBPATH_ST']
12841+ staticlibpath_st = env['STATICLIBPATH_ST']
12842+
12843+ for i in env['LIBPATH']:
12844+ app('LINKFLAGS', libpath_st % i)
12845+ if not libpaths.count(i):
12846+ libpaths.append(i)
12847+
12848+ for i in env['LIBPATH']:
12849+ app('LINKFLAGS', staticlibpath_st % i)
12850+ if not libpaths.count(i):
12851+ libpaths.append(i)
12852+
12853+ # i doubt that anyone will make a fully static binary anyway
12854+ if not env['FULLSTATIC']:
12855+ if env['STATICLIB'] or env['LIB']:
12856+ app('LINKFLAGS', env['SHLIB_MARKER']) # TODO does SHLIB_MARKER work?
12857+
12858+ for i in env['STATICLIB']:
12859+ app('LINKFLAGS', staticlib_st % i)
12860+
12861+ for i in env['LIB']:
12862+ app('LINKFLAGS', lib_st % i)
12863+
12864+# split the manifest file processing from the link task, like for the rc processing
12865+
12866+@feature('cprogram', 'cshlib')
12867+@after('apply_link')
12868+def apply_manifest(self):
12869+ """Special linker for MSVC with support for embedding manifests into DLL's
12870+ and executables compiled by Visual Studio 2005 or probably later. Without
12871+ the manifest file, the binaries are unusable.
12872+ See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx"""
12873+
12874+ if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST:
12875+ out_node = self.link_task.outputs[0]
12876+ man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
12877+ self.link_task.outputs.append(man_node)
12878+ self.link_task.do_manifest = True
12879+
12880+def exec_mf(self):
12881+ env = self.env
12882+ mtool = env['MT']
12883+ if not mtool:
12884+ return 0
12885+
12886+ self.do_manifest = False
12887+
12888+ outfile = self.outputs[0].bldpath(env)
12889+
12890+ manifest = None
12891+ for out_node in self.outputs:
12892+ if out_node.name.endswith('.manifest'):
12893+ manifest = out_node.bldpath(env)
12894+ break
12895+ if manifest is None:
12896+ # Should never get here. If we do, it means the manifest file was
12897+ # never added to the outputs list, thus we don't have a manifest file
12898+ # to embed, so we just return.
12899+ return 0
12900+
12901+ # embedding mode. Different for EXE's and DLL's.
12902+ # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
12903+ mode = ''
12904+ if 'cprogram' in self.generator.features:
12905+ mode = '1'
12906+ elif 'cshlib' in self.generator.features:
12907+ mode = '2'
12908+
12909+ debug('msvc: embedding manifest')
12910+ #flags = ' '.join(env['MTFLAGS'] or [])
12911+
12912+ lst = []
12913+ lst.extend([env['MT']])
12914+ lst.extend(Utils.to_list(env['MTFLAGS']))
12915+ lst.extend(Utils.to_list("-manifest"))
12916+ lst.extend(Utils.to_list(manifest))
12917+ lst.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))
12918+
12919+ #cmd='%s %s -manifest "%s" -outputresource:"%s";#%s' % (mtool, flags,
12920+ # manifest, outfile, mode)
12921+ lst = [lst]
12922+ return self.exec_command(*lst)
12923+
12924+########## stupid evil command modification: concatenate the tokens /Fx, /doc, and /x: with the next token
12925+
12926+def exec_command_msvc(self, *k, **kw):
12927+ "instead of quoting all the paths and keeping the shell, we can just join the options msvc is interested in"
12928+ if self.env['CC_NAME'] == 'msvc':
12929+ if isinstance(k[0], list):
12930+ lst = []
12931+ carry = ''
12932+ for a in k[0]:
12933+ if len(a) == 3 and a.startswith('/F') or a == '/doc' or a[-1] == ':':
12934+ carry = a
12935+ else:
12936+ lst.append(carry + a)
12937+ carry = ''
12938+ k = [lst]
12939+
12940+ env = dict(os.environ)
12941+ env.update(PATH = ';'.join(self.env['PATH']))
12942+ kw['env'] = env
12943+
12944+ ret = self.generator.bld.exec_command(*k, **kw)
12945+ if ret: return ret
12946+ if getattr(self, 'do_manifest', None):
12947+ ret = exec_mf(self)
12948+ return ret
12949+
12950+for k in 'cc cxx winrc cc_link cxx_link static_link qxx'.split():
12951+ cls = Task.TaskBase.classes.get(k, None)
12952+ if cls:
12953+ cls.exec_command = exec_command_msvc
12954+
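
For orientation, a minimal waf 1.5 wscript driving the msvc tool above might look like the sketch below; the version, target and library names are illustrative assumptions, not part of the patch, and this only applies on native Win32 (see find_msvc):

# hypothetical wscript fragment (waf 1.5 API); values are only examples
def configure(conf):
    conf.env['MSVC_VERSIONS'] = ['msvc 9.0', 'wsdk 6.1']  # optional: restrict the toolchains considered by setup_msvc
    conf.env['MSVC_TARGETS'] = ['x86']                    # optional: restrict the architectures
    conf.check_tool('msvc')                               # runs autodetect/find_msvc/msvc_common_flags above
    conf.check_libs_msvc('kernel32 user32')               # defines LIB_KERNEL32 and LIB_USER32

def build(bld):
    bld.new_task_gen(features='cc cprogram',
                     source='main.c',
                     target='hello',
                     subsystem='console',                 # consumed by apply_flags_msvc -> /subsystem:console
                     uselib='KERNEL32 USER32')
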
12955diff --git a/buildtools/wafadmin/Tools/nasm.py b/buildtools/wafadmin/Tools/nasm.py
12956new file mode 100644
12957index 0000000..b99c3c7
12958--- /dev/null
12959+++ b/buildtools/wafadmin/Tools/nasm.py
12960@@ -0,0 +1,49 @@
12961+#!/usr/bin/env python
12962+# encoding: utf-8
12963+# Thomas Nagy, 2008
12964+
12965+"""
12966+Nasm processing
12967+"""
12968+
12969+import os
12970+import TaskGen, Task, Utils
12971+from TaskGen import taskgen, before, extension
12972+
12973+nasm_str = '${NASM} ${NASM_FLAGS} ${NASM_INCLUDES} ${SRC} -o ${TGT}'
12974+
12975+EXT_NASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']
12976+
12977+@before('apply_link')
12978+def apply_nasm_vars(self):
12979+
12980+ # flags
12981+ if hasattr(self, 'nasm_flags'):
12982+ for flag in self.to_list(self.nasm_flags):
12983+ self.env.append_value('NASM_FLAGS', flag)
12984+
12985+ # includes - well, if we suppose it works with c processing
12986+ if hasattr(self, 'includes'):
12987+ for inc in self.to_list(self.includes):
12988+ node = self.path.find_dir(inc)
12989+ if not node:
12990+ raise Utils.WafError('cannot find the dir ' + inc)
12991+ self.env.append_value('NASM_INCLUDES', '-I%s' % node.srcpath(self.env))
12992+ self.env.append_value('NASM_INCLUDES', '-I%s' % node.bldpath(self.env))
12993+
12994+@extension(EXT_NASM)
12995+def nasm_file(self, node):
12996+ try: obj_ext = self.obj_ext
12997+ except AttributeError: obj_ext = '_%d.o' % self.idx
12998+
12999+ task = self.create_task('nasm', node, node.change_ext(obj_ext))
13000+ self.compiled_tasks.append(task)
13001+
13002+ self.meths.append('apply_nasm_vars')
13003+
13004+# create our action here
13005+Task.simple_task_type('nasm', nasm_str, color='BLUE', ext_out='.o', shell=False)
13006+
13007+def detect(conf):
13008+ nasm = conf.find_program(['nasm', 'yasm'], var='NASM', mandatory=True)
13009+
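As a sketch of how the nasm tool above is used from a wscript (waf 1.5 conventions; the file names, the include directory and the '-f elf32' flag are assumptions for illustration):

def configure(conf):
    conf.check_tool('compiler_cc')
    conf.check_tool('nasm')          # detect() above locates nasm or yasm as ${NASM}

def build(bld):
    bld.new_task_gen(features='cc cprogram',
                     source='main.c lowlevel.asm',   # .asm sources go through nasm_file() above
                     target='demo',
                     nasm_flags='-f elf32',          # appended to NASM_FLAGS by apply_nasm_vars
                     includes='include')             # an existing directory; also feeds NASM_INCLUDES
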
13010diff --git a/buildtools/wafadmin/Tools/ocaml.py b/buildtools/wafadmin/Tools/ocaml.py
13011new file mode 100644
13012index 0000000..20c9269
13013--- /dev/null
13014+++ b/buildtools/wafadmin/Tools/ocaml.py
13015@@ -0,0 +1,298 @@
13016+#!/usr/bin/env python
13017+# encoding: utf-8
13018+# Thomas Nagy, 2006 (ita)
13019+
13020+"ocaml support"
13021+
13022+import os, re
13023+import TaskGen, Utils, Task, Build
13024+from Logs import error
13025+from TaskGen import taskgen, feature, before, after, extension
13026+
13027+EXT_MLL = ['.mll']
13028+EXT_MLY = ['.mly']
13029+EXT_MLI = ['.mli']
13030+EXT_MLC = ['.c']
13031+EXT_ML = ['.ml']
13032+
13033+open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
13034+foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
13035+def filter_comments(txt):
13036+ meh = [0]
13037+ def repl(m):
13038+ if m.group(1): meh[0] += 1
13039+ elif m.group(2): meh[0] -= 1
13040+ elif not meh[0]: return m.group(0)
13041+ return ''
13042+ return foo.sub(repl, txt)
13043+
13044+def scan(self):
13045+ node = self.inputs[0]
13046+ code = filter_comments(node.read(self.env))
13047+
13048+ global open_re
13049+ names = []
13050+ import_iterator = open_re.finditer(code)
13051+ if import_iterator:
13052+ for import_match in import_iterator:
13053+ names.append(import_match.group(1))
13054+ found_lst = []
13055+ raw_lst = []
13056+ for name in names:
13057+ nd = None
13058+ for x in self.incpaths:
13059+ nd = x.find_resource(name.lower()+'.ml')
13060+ if not nd: nd = x.find_resource(name+'.ml')
13061+ if nd:
13062+ found_lst.append(nd)
13063+ break
13064+ else:
13065+ raw_lst.append(name)
13066+
13067+ return (found_lst, raw_lst)
13068+
13069+native_lst=['native', 'all', 'c_object']
13070+bytecode_lst=['bytecode', 'all']
13071+class ocaml_taskgen(TaskGen.task_gen):
13072+ def __init__(self, *k, **kw):
13073+ TaskGen.task_gen.__init__(self, *k, **kw)
13074+
13075+@feature('ocaml')
13076+def init_ml(self):
13077+ Utils.def_attrs(self,
13078+ type = 'all',
13079+ incpaths_lst = [],
13080+ bld_incpaths_lst = [],
13081+ mlltasks = [],
13082+ mlytasks = [],
13083+ mlitasks = [],
13084+ native_tasks = [],
13085+ bytecode_tasks = [],
13086+ linktasks = [],
13087+ bytecode_env = None,
13088+ native_env = None,
13089+ compiled_tasks = [],
13090+ includes = '',
13091+ uselib = '',
13092+ are_deps_set = 0)
13093+
13094+@feature('ocaml')
13095+@after('init_ml')
13096+def init_envs_ml(self):
13097+
13098+ self.islibrary = getattr(self, 'islibrary', False)
13099+
13100+ global native_lst, bytecode_lst
13101+ self.native_env = None
13102+ if self.type in native_lst:
13103+ self.native_env = self.env.copy()
13104+ if self.islibrary: self.native_env['OCALINKFLAGS'] = '-a'
13105+
13106+ self.bytecode_env = None
13107+ if self.type in bytecode_lst:
13108+ self.bytecode_env = self.env.copy()
13109+ if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a'
13110+
13111+ if self.type == 'c_object':
13112+ self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
13113+
13114+@feature('ocaml')
13115+@before('apply_vars_ml')
13116+@after('init_envs_ml')
13117+def apply_incpaths_ml(self):
13118+ inc_lst = self.includes.split()
13119+ lst = self.incpaths_lst
13120+ for dir in inc_lst:
13121+ node = self.path.find_dir(dir)
13122+ if not node:
13123+ error("node not found: " + str(dir))
13124+ continue
13125+ self.bld.rescan(node)
13126+ if not node in lst: lst.append(node)
13127+ self.bld_incpaths_lst.append(node)
13128+ # now the nodes are added to self.incpaths_lst
13129+
13130+@feature('ocaml')
13131+@before('apply_core')
13132+def apply_vars_ml(self):
13133+ for i in self.incpaths_lst:
13134+ if self.bytecode_env:
13135+ app = self.bytecode_env.append_value
13136+ app('OCAMLPATH', '-I')
13137+ app('OCAMLPATH', i.srcpath(self.env))
13138+ app('OCAMLPATH', '-I')
13139+ app('OCAMLPATH', i.bldpath(self.env))
13140+
13141+ if self.native_env:
13142+ app = self.native_env.append_value
13143+ app('OCAMLPATH', '-I')
13144+ app('OCAMLPATH', i.bldpath(self.env))
13145+ app('OCAMLPATH', '-I')
13146+ app('OCAMLPATH', i.srcpath(self.env))
13147+
13148+ varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
13149+ for name in self.uselib.split():
13150+ for vname in varnames:
13151+ cnt = self.env[vname+'_'+name]
13152+ if cnt:
13153+ if self.bytecode_env: self.bytecode_env.append_value(vname, cnt)
13154+ if self.native_env: self.native_env.append_value(vname, cnt)
13155+
13156+@feature('ocaml')
13157+@after('apply_core')
13158+def apply_link_ml(self):
13159+
13160+ if self.bytecode_env:
13161+ ext = self.islibrary and '.cma' or '.run'
13162+
13163+ linktask = self.create_task('ocalink')
13164+ linktask.bytecode = 1
13165+ linktask.set_outputs(self.path.find_or_declare(self.target + ext))
13166+ linktask.obj = self
13167+ linktask.env = self.bytecode_env
13168+ self.linktasks.append(linktask)
13169+
13170+ if self.native_env:
13171+ if self.type == 'c_object': ext = '.o'
13172+ elif self.islibrary: ext = '.cmxa'
13173+ else: ext = ''
13174+
13175+ linktask = self.create_task('ocalinkx')
13176+ linktask.set_outputs(self.path.find_or_declare(self.target + ext))
13177+ linktask.obj = self
13178+ linktask.env = self.native_env
13179+ self.linktasks.append(linktask)
13180+
13181+ # we produce a .o file to be used by gcc
13182+ self.compiled_tasks.append(linktask)
13183+
13184+@extension(EXT_MLL)
13185+def mll_hook(self, node):
13186+ mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'), env=self.native_env)
13187+ self.mlltasks.append(mll_task)
13188+
13189+ self.allnodes.append(mll_task.outputs[0])
13190+
13191+@extension(EXT_MLY)
13192+def mly_hook(self, node):
13193+ mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')], env=self.native_env)
13194+ self.mlytasks.append(mly_task)
13195+ self.allnodes.append(mly_task.outputs[0])
13196+
13197+ task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'), env=self.native_env)
13198+
13199+@extension(EXT_MLI)
13200+def mli_hook(self, node):
13201+ task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'), env=self.native_env)
13202+ self.mlitasks.append(task)
13203+
13204+@extension(EXT_MLC)
13205+def mlc_hook(self, node):
13206+ task = self.create_task('ocamlcc', node, node.change_ext('.o'), env=self.native_env)
13207+ self.compiled_tasks.append(task)
13208+
13209+@extension(EXT_ML)
13210+def ml_hook(self, node):
13211+ if self.native_env:
13212+ task = self.create_task('ocamlx', node, node.change_ext('.cmx'), env=self.native_env)
13213+ task.obj = self
13214+ task.incpaths = self.bld_incpaths_lst
13215+ self.native_tasks.append(task)
13216+
13217+ if self.bytecode_env:
13218+ task = self.create_task('ocaml', node, node.change_ext('.cmo'), env=self.bytecode_env)
13219+ task.obj = self
13220+ task.bytecode = 1
13221+ task.incpaths = self.bld_incpaths_lst
13222+ self.bytecode_tasks.append(task)
13223+
13224+def compile_may_start(self):
13225+ if not getattr(self, 'flag_deps', ''):
13226+ self.flag_deps = 1
13227+
13228+ # the evil part is that we can only compute the dependencies after the
13229+ # source files can be read (this means actually producing the source files)
13230+ if getattr(self, 'bytecode', ''): alltasks = self.obj.bytecode_tasks
13231+ else: alltasks = self.obj.native_tasks
13232+
13233+ self.signature() # ensure that files are scanned - unfortunately
13234+ tree = self.generator.bld
13235+ env = self.env
13236+ for node in self.inputs:
13237+ lst = tree.node_deps[self.unique_id()]
13238+ for depnode in lst:
13239+ for t in alltasks:
13240+ if t == self: continue
13241+ if depnode in t.inputs:
13242+ self.set_run_after(t)
13243+
13244+ # TODO necessary to get the signature right - for now
13245+ delattr(self, 'cache_sig')
13246+ self.signature()
13247+
13248+ return Task.Task.runnable_status(self)
13249+
13250+b = Task.simple_task_type
13251+cls = b('ocamlx', '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
13252+cls.runnable_status = compile_may_start
13253+cls.scan = scan
13254+
13255+b = Task.simple_task_type
13256+cls = b('ocaml', '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
13257+cls.runnable_status = compile_may_start
13258+cls.scan = scan
13259+
13260+
13261+b('ocamlcmi', '${OCAMLC} ${OCAMLPATH} ${INCLUDES} -o ${TGT} -c ${SRC}', color='BLUE', before="ocaml ocamlcc ocamlx")
13262+b('ocamlcc', 'cd ${TGT[0].bld_dir(env)} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${INCLUDES} -c ${SRC[0].abspath(env)}', color='GREEN')
13263+
13264+b('ocamllex', '${OCAMLLEX} ${SRC} -o ${TGT}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
13265+b('ocamlyacc', '${OCAMLYACC} -b ${TGT[0].bld_base(env)} ${SRC}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
13266+
13267+
13268+def link_may_start(self):
13269+ if not getattr(self, 'order', ''):
13270+
13271+ # now reorder the inputs given the task dependencies
13272+ if getattr(self, 'bytecode', 0): alltasks = self.obj.bytecode_tasks
13273+ else: alltasks = self.obj.native_tasks
13274+
13275+ # this part is difficult, we do not have a total order on the tasks
13276+ # if the dependencies are wrong, this may not stop
13277+ seen = []
13278+ pendant = []+alltasks
13279+ while pendant:
13280+ task = pendant.pop(0)
13281+ if task in seen: continue
13282+ for x in task.run_after:
13283+ if not x in seen:
13284+ pendant.append(task)
13285+ break
13286+ else:
13287+ seen.append(task)
13288+ self.inputs = [x.outputs[0] for x in seen]
13289+ self.order = 1
13290+ return Task.Task.runnable_status(self)
13291+
13292+act = b('ocalink', '${OCAMLC} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS} ${SRC}', color='YELLOW', after="ocaml ocamlcc")
13293+act.runnable_status = link_may_start
13294+act = b('ocalinkx', '${OCAMLOPT} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS_OPT} ${SRC}', color='YELLOW', after="ocamlx ocamlcc")
13295+act.runnable_status = link_may_start
13296+
13297+def detect(conf):
13298+ opt = conf.find_program('ocamlopt', var='OCAMLOPT')
13299+ occ = conf.find_program('ocamlc', var='OCAMLC')
13300+ if (not opt) or (not occ):
13301+ conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
13302+
13303+ v = conf.env
13304+ v['OCAMLC'] = occ
13305+ v['OCAMLOPT'] = opt
13306+ v['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX')
13307+ v['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC')
13308+ v['OCAMLFLAGS'] = ''
13309+ v['OCAMLLIB'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
13310+ v['LIBPATH_OCAML'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
13311+ v['CPPPATH_OCAML'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
13312+ v['LIB_OCAML'] = 'camlrun'
13313+
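Roughly how the ocaml tool above is intended to be used, as a sketch under waf 1.5 conventions (the source and target names are invented for illustration):

def configure(conf):
    conf.check_tool('ocaml')     # detect() above locates ocamlopt, ocamlc, ocamllex and ocamlyacc

def build(bld):
    bld.new_task_gen(features='ocaml',
                     type='all',                          # native and bytecode, per native_lst/bytecode_lst
                     source='lexer.mll parser.mly main.ml',
                     includes='.',
                     target='calc')
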
13314diff --git a/buildtools/wafadmin/Tools/osx.py b/buildtools/wafadmin/Tools/osx.py
13315new file mode 100644
13316index 0000000..561eca4
13317--- /dev/null
13318+++ b/buildtools/wafadmin/Tools/osx.py
13319@@ -0,0 +1,188 @@
13320+#!/usr/bin/env python
13321+# encoding: utf-8
13322+# Thomas Nagy 2008
13323+
13324+"""MacOSX related tools
13325+
13326+To compile an executable into a Mac application bundle (a .app), set its 'mac_app' attribute
13327+ obj.mac_app = True
13328+
13329+To make a bundled shared library (a .bundle), set the 'mac_bundle' attribute:
13330+ obj.mac_bundle = True
13331+"""
13332+
13333+import os, shutil, sys, platform
13334+import TaskGen, Task, Build, Options, Utils
13335+from TaskGen import taskgen, feature, after, before
13336+from Logs import error, debug
13337+
13338+# plist template
13339+app_info = '''
13340+<?xml version="1.0" encoding="UTF-8"?>
13341+<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
13342+<plist version="0.9">
13343+<dict>
13344+ <key>CFBundlePackageType</key>
13345+ <string>APPL</string>
13346+ <key>CFBundleGetInfoString</key>
13347+ <string>Created by Waf</string>
13348+ <key>CFBundleSignature</key>
13349+ <string>????</string>
13350+ <key>NOTE</key>
13351+ <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
13352+ <key>CFBundleExecutable</key>
13353+ <string>%s</string>
13354+</dict>
13355+</plist>
13356+'''
13357+
13358+# see WAF issue 285
13359+# and also http://trac.macports.org/ticket/17059
13360+@feature('cc', 'cxx')
13361+@before('apply_lib_vars')
13362+def set_macosx_deployment_target(self):
13363+ if self.env['MACOSX_DEPLOYMENT_TARGET']:
13364+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env['MACOSX_DEPLOYMENT_TARGET']
13365+ elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
13366+ if sys.platform == 'darwin':
13367+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
13368+
13369+@feature('cc', 'cxx')
13370+@after('apply_lib_vars')
13371+def apply_framework(self):
13372+ for x in self.to_list(self.env['FRAMEWORKPATH']):
13373+ frameworkpath_st = '-F%s'
13374+ self.env.append_unique('CXXFLAGS', frameworkpath_st % x)
13375+ self.env.append_unique('CCFLAGS', frameworkpath_st % x)
13376+ self.env.append_unique('LINKFLAGS', frameworkpath_st % x)
13377+
13378+ for x in self.to_list(self.env['FRAMEWORK']):
13379+ self.env.append_value('LINKFLAGS', ['-framework', x])
13380+
13381+@taskgen
13382+def create_bundle_dirs(self, name, out):
13383+ bld = self.bld
13384+ dir = out.parent.get_dir(name)
13385+
13386+ if not dir:
13387+ dir = out.__class__(name, out.parent, 1)
13388+ bld.rescan(dir)
13389+ contents = out.__class__('Contents', dir, 1)
13390+ bld.rescan(contents)
13391+ macos = out.__class__('MacOS', contents, 1)
13392+ bld.rescan(macos)
13393+ return dir
13394+
13395+def bundle_name_for_output(out):
13396+ name = out.name
13397+ k = name.rfind('.')
13398+ if k >= 0:
13399+ name = name[:k] + '.app'
13400+ else:
13401+ name = name + '.app'
13402+ return name
13403+
13404+@taskgen
13405+@after('apply_link')
13406+@feature('cprogram')
13407+def create_task_macapp(self):
13408+ """Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
13409+ or use obj.mac_app = True to build specific targets as Mac apps"""
13410+ if self.env['MACAPP'] or getattr(self, 'mac_app', False):
13411+ apptask = self.create_task('macapp')
13412+ apptask.set_inputs(self.link_task.outputs)
13413+
13414+ out = self.link_task.outputs[0]
13415+
13416+ name = bundle_name_for_output(out)
13417+ dir = self.create_bundle_dirs(name, out)
13418+
13419+ n1 = dir.find_or_declare(['Contents', 'MacOS', out.name])
13420+
13421+ apptask.set_outputs([n1])
13422+ apptask.chmod = 0755
13423+ apptask.install_path = os.path.join(self.install_path, name, 'Contents', 'MacOS')
13424+ self.apptask = apptask
13425+
13426+@after('apply_link')
13427+@feature('cprogram')
13428+def create_task_macplist(self):
13429+ """Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
13430+ or use obj.mac_app = True to build specific targets as Mac apps"""
13431+ if self.env['MACAPP'] or getattr(self, 'mac_app', False):
13432+ # check if the user specified a plist before using our template
13433+ if not getattr(self, 'mac_plist', False):
13434+ self.mac_plist = app_info
13435+
13436+ plisttask = self.create_task('macplist')
13437+ plisttask.set_inputs(self.link_task.outputs)
13438+
13439+ out = self.link_task.outputs[0]
13440+ self.mac_plist = self.mac_plist % (out.name)
13441+
13442+ name = bundle_name_for_output(out)
13443+ dir = self.create_bundle_dirs(name, out)
13444+
13445+ n1 = dir.find_or_declare(['Contents', 'Info.plist'])
13446+
13447+ plisttask.set_outputs([n1])
13448+ plisttask.mac_plist = self.mac_plist
13449+ plisttask.install_path = os.path.join(self.install_path, name, 'Contents')
13450+ self.plisttask = plisttask
13451+
13452+@after('apply_link')
13453+@feature('cshlib')
13454+def apply_link_osx(self):
13455+ name = self.link_task.outputs[0].name
13456+ if not self.install_path:
13457+ return
13458+ if getattr(self, 'vnum', None):
13459+ name = name.replace('.dylib', '.%s.dylib' % self.vnum)
13460+
13461+ path = os.path.join(Utils.subst_vars(self.install_path, self.env), name)
13462+ if '-dynamiclib' in self.env['LINKFLAGS']:
13463+ self.env.append_value('LINKFLAGS', '-install_name')
13464+ self.env.append_value('LINKFLAGS', path)
13465+
13466+@before('apply_link', 'apply_lib_vars')
13467+@feature('cc', 'cxx')
13468+def apply_bundle(self):
13469+ """use env['MACBUNDLE'] to force all shlibs into mac bundles
13470+ or use obj.mac_bundle = True for specific targets only"""
13471+ if not ('cshlib' in self.features or 'shlib' in self.features): return
13472+ if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
13473+ self.env['shlib_PATTERN'] = self.env['macbundle_PATTERN']
13474+ uselib = self.uselib = self.to_list(self.uselib)
13475+ if not 'MACBUNDLE' in uselib: uselib.append('MACBUNDLE')
13476+
13477+@after('apply_link')
13478+@feature('cshlib')
13479+def apply_bundle_remove_dynamiclib(self):
13480+ if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
13481+ if not getattr(self, 'vnum', None):
13482+ try:
13483+ self.env['LINKFLAGS'].remove('-dynamiclib')
13484+ self.env['LINKFLAGS'].remove('-single_module')
13485+ except ValueError:
13486+ pass
13487+
13488+# TODO REMOVE IN 1.6 (global variable)
13489+app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
13490+
13491+def app_build(task):
13492+ env = task.env
13493+ shutil.copy2(task.inputs[0].srcpath(env), task.outputs[0].abspath(env))
13494+
13495+ return 0
13496+
13497+def plist_build(task):
13498+ env = task.env
13499+ f = open(task.outputs[0].abspath(env), "w")
13500+ f.write(task.mac_plist)
13501+ f.close()
13502+
13503+ return 0
13504+
13505+Task.task_type_from_func('macapp', vars=[], func=app_build, after="cxx_link cc_link static_link")
13506+Task.task_type_from_func('macplist', vars=[], func=plist_build, after="cxx_link cc_link static_link")
13507+
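The osx docstring above already names the two switches; a concrete sketch (waf 1.5 style, target names invented) would be:

def build(bld):
    app = bld.new_task_gen(features='cc cprogram', source='main.c', target='MyApp')
    app.mac_app = True        # create_task_macapp/create_task_macplist wrap the binary into MyApp.app

    plug = bld.new_task_gen(features='cc cshlib', source='plugin.c', target='plugin')
    plug.mac_bundle = True    # apply_bundle switches shlib_PATTERN to macbundle_PATTERN
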
13508diff --git a/buildtools/wafadmin/Tools/perl.py b/buildtools/wafadmin/Tools/perl.py
13509new file mode 100644
13510index 0000000..a6787a8
13511--- /dev/null
13512+++ b/buildtools/wafadmin/Tools/perl.py
13513@@ -0,0 +1,109 @@
13514+#!/usr/bin/env python
13515+# encoding: utf-8
13516+# andersg at 0x63.nu 2007
13517+
13518+import os
13519+import Task, Options, Utils
13520+from Configure import conf
13521+from TaskGen import extension, taskgen, feature, before
13522+
13523+xsubpp_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
13524+EXT_XS = ['.xs']
13525+
13526+@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars')
13527+@feature('perlext')
13528+def init_perlext(self):
13529+ self.uselib = self.to_list(getattr(self, 'uselib', ''))
13530+ if not 'PERL' in self.uselib: self.uselib.append('PERL')
13531+ if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT')
13532+ self.env['shlib_PATTERN'] = self.env['perlext_PATTERN']
13533+
13534+@extension(EXT_XS)
13535+def xsubpp_file(self, node):
13536+ outnode = node.change_ext('.c')
13537+ self.create_task('xsubpp', node, outnode)
13538+ self.allnodes.append(outnode)
13539+
13540+Task.simple_task_type('xsubpp', xsubpp_str, color='BLUE', before='cc cxx', shell=False)
13541+
13542+@conf
13543+def check_perl_version(conf, minver=None):
13544+ """
13545+ Checks if perl is installed.
13546+
13547+ If installed, the variable PERL will be set in the environment.
13548+
13549+ The perl binary can be overridden with the --with-perl-binary configure option
13550+
13551+ """
13552+
13553+ if getattr(Options.options, 'perlbinary', None):
13554+ conf.env.PERL = Options.options.perlbinary
13555+ else:
13556+ conf.find_program('perl', var='PERL', mandatory=True)
13557+
13558+ try:
13559+ version = Utils.cmd_output([conf.env.PERL, '-e', 'printf "%vd",$^V'])
13560+ except:
13561+ conf.fatal('could not determine the perl version')
13562+
13563+ conf.env.PERL_VERSION = version
13564+ cver = ''
13565+ if minver:
13566+ try:
13567+ ver = tuple(map(int, version.split('.')))
13568+ except:
13569+ conf.fatal('unsupported perl version %r' % version)
13570+ if ver < minver:
13571+ conf.fatal('perl is too old')
13572+
13573+ cver = '.'.join(map(str,minver))
13574+ conf.check_message('perl', cver, True, version)
13575+
13576+@conf
13577+def check_perl_module(conf, module):
13578+ """
13579+ Check if the specified perl module is installed.
13580+
13581+ A minimum version can be specified after the module name,
13582+ like this:
13583+
13584+ conf.check_perl_module("Some::Module 2.92")
13585+ """
13586+ cmd = [conf.env['PERL'], '-e', 'use %s' % module]
13587+ r = Utils.pproc.call(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE) == 0
13588+ conf.check_message("perl module %s" % module, "", r)
13589+ return r
13590+
13591+@conf
13592+def check_perl_ext_devel(conf):
13593+ """
13594+ Check for configuration needed to build perl extensions.
13595+
13596+ Sets different xxx_PERLEXT variables in the environment.
13597+
13598+ Also sets the ARCHDIR_PERL variable, useful as an installation path,
13599+ which can be overridden by --with-perl-archdir
13600+ """
13601+ if not conf.env.PERL:
13602+ conf.fatal('perl detection is required first')
13603+
13604+ def read_out(cmd):
13605+ return Utils.to_list(Utils.cmd_output([conf.env.PERL, '-MConfig', '-e', cmd]))
13606+
13607+ conf.env.LINKFLAGS_PERLEXT = read_out('print $Config{lddlflags}')
13608+ conf.env.CPPPATH_PERLEXT = read_out('print "$Config{archlib}/CORE"')
13609+ conf.env.CCFLAGS_PERLEXT = read_out('print "$Config{ccflags} $Config{cccdlflags}"')
13610+ conf.env.XSUBPP = read_out('print "$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}"')
13611+ conf.env.EXTUTILS_TYPEMAP = read_out('print "$Config{privlib}/ExtUtils/typemap"')
13612+ conf.env.perlext_PATTERN = '%s.' + read_out('print $Config{dlext}')[0]
13613+
13614+ if getattr(Options.options, 'perlarchdir', None):
13615+ conf.env.ARCHDIR_PERL = Options.options.perlarchdir
13616+ else:
13617+ conf.env.ARCHDIR_PERL = read_out('print $Config{sitearch}')[0]
13618+
13619+def set_options(opt):
13620+ opt.add_option("--with-perl-binary", type="string", dest="perlbinary", help = 'Specify alternate perl binary', default=None)
13621+ opt.add_option("--with-perl-archdir", type="string", dest="perlarchdir", help = 'Specify directory where to install arch specific files', default=None)
13622+
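A sketch of driving the perl checks above from a wscript (waf 1.5 conventions; the module name, minimum version and target are placeholders):

def set_options(opt):
    opt.tool_options('perl')                   # adds --with-perl-binary / --with-perl-archdir

def configure(conf):
    conf.check_tool('compiler_cc')
    conf.check_tool('perl')
    conf.check_perl_version((5, 8, 0))         # the minimum version argument is optional
    conf.check_perl_ext_devel()                # sets CCFLAGS_PERLEXT, XSUBPP, ARCHDIR_PERL, ...
    conf.check_perl_module('Some::Module 2.92')

def build(bld):
    bld.new_task_gen(features='cc cshlib perlext',
                     source='Example.xs',      # converted to C by the xsubpp task above
                     target='Example')
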
13623diff --git a/buildtools/wafadmin/Tools/preproc.py b/buildtools/wafadmin/Tools/preproc.py
13624new file mode 100644
13625index 0000000..5055456
13626--- /dev/null
13627+++ b/buildtools/wafadmin/Tools/preproc.py
13628@@ -0,0 +1,836 @@
13629+#!/usr/bin/env python
13630+# encoding: utf-8
13631+# Thomas Nagy, 2006-2009 (ita)
13632+
13633+"""
13634+C/C++ preprocessor for finding dependencies
13635+
13636+Reasons for using the Waf preprocessor by default
13637+1. Some c/c++ extensions (Qt) require a custom preprocessor for obtaining the dependencies (.moc files)
13638+2. Not all compilers provide .d files for obtaining the dependencies (portability)
13639+3. A naive file scanner will not catch constructs such as "#include foo()"
13640+4. A naive file scanner will catch unnecessary dependencies (change an unused header -> recompile everything)
13641+
13642+Regarding the speed concerns:
13643+a. the preprocessing is performed only when files must be compiled
13644+b. the macros are evaluated only for #if/#elif/#include
13645+c. the time penalty is about 10%
13646+d. system headers are not scanned
13647+
13648+Now if you do not want the Waf preprocessor, the tool "gccdeps" uses the .d files produced
13649+during the compilation to track the dependencies (useful when used with the boost libraries).
13650+It only works with gcc though, and it cannot be used with Qt builds. A dumb
13651+file scanner will be added in the future, so we will have most behaviours.
13652+"""
13653+# TODO: more varargs, pragma once
13654+# TODO: dumb file scanner tracking all includes
13655+
13656+import re, sys, os, string
13657+import Logs, Build, Utils
13658+from Logs import debug, error
13659+import traceback
13660+
13661+class PreprocError(Utils.WafError):
13662+ pass
13663+
13664+POPFILE = '-'
13665+
13666+
13667+recursion_limit = 5000
13668+"do not loop too much on header inclusion"
13669+
13670+go_absolute = 0
13671+"set to 1 to track headers on files in /usr/include - else absolute paths are ignored"
13672+
13673+standard_includes = ['/usr/include']
13674+if sys.platform == "win32":
13675+ standard_includes = []
13676+
13677+use_trigraphs = 0
13678+'apply the trigraph rules first'
13679+
13680+strict_quotes = 0
13681+"Keep <> for system includes (do not search for those includes)"
13682+
13683+g_optrans = {
13684+'not':'!',
13685+'and':'&&',
13686+'bitand':'&',
13687+'and_eq':'&=',
13688+'or':'||',
13689+'bitor':'|',
13690+'or_eq':'|=',
13691+'xor':'^',
13692+'xor_eq':'^=',
13693+'compl':'~',
13694+}
13695+"these ops are for c++, to reset, set an empty dict"
13696+
13697+# ignore #warning and #error
13698+re_lines = re.compile(\
13699+ '^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
13700+ re.IGNORECASE | re.MULTILINE)
13701+
13702+re_mac = re.compile("^[a-zA-Z_]\w*")
13703+re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
13704+re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE)
13705+re_nl = re.compile('\\\\\r*\n', re.MULTILINE)
13706+re_cpp = re.compile(
13707+ r"""(/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)|//[^\n]*|("(?:\\.|[^"\\])*"|'(?:\\.|[^'\\])*'|.[^/"'\\]*)""",
13708+ re.MULTILINE)
13709+trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')]
13710+chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39}
13711+
13712+NUM = 'i'
13713+OP = 'O'
13714+IDENT = 'T'
13715+STR = 's'
13716+CHAR = 'c'
13717+
13718+tok_types = [NUM, STR, IDENT, OP]
13719+exp_types = [
13720+ r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",
13721+ r'L?"([^"\\]|\\.)*"',
13722+ r'[a-zA-Z_]\w*',
13723+ r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',
13724+]
13725+re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, exp_types)]), re.M)
13726+
13727+accepted = 'a'
13728+ignored = 'i'
13729+undefined = 'u'
13730+skipped = 's'
13731+
13732+def repl(m):
13733+ if m.group(1):
13734+ return ' '
13735+ s = m.group(2)
13736+ if s is None:
13737+ return ''
13738+ return s
13739+
13740+def filter_comments(filename):
13741+ # return a list of tuples : keyword, line
13742+ code = Utils.readf(filename)
13743+ if use_trigraphs:
13744+ for (a, b) in trig_def: code = code.split(a).join(b)
13745+ code = re_nl.sub('', code)
13746+ code = re_cpp.sub(repl, code)
13747+ return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)]
13748+
13749+prec = {}
13750+# op -> number, needed for such expressions: #if 1 && 2 != 0
13751+ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
13752+for x in range(len(ops)):
13753+ syms = ops[x]
13754+ for u in syms.split():
13755+ prec[u] = x
13756+
13757+def reduce_nums(val_1, val_2, val_op):
13758+ """apply arithmetic rules and try to return an integer result"""
13759+ #print val_1, val_2, val_op
13760+
13761+ # now perform the operation, make certain a and b are numeric
13762+ try: a = 0 + val_1
13763+ except TypeError: a = int(val_1)
13764+ try: b = 0 + val_2
13765+ except TypeError: b = int(val_2)
13766+
13767+ d = val_op
13768+ if d == '%': c = a%b
13769+ elif d=='+': c = a+b
13770+ elif d=='-': c = a-b
13771+ elif d=='*': c = a*b
13772+ elif d=='/': c = a/b
13773+ elif d=='^': c = a^b
13774+ elif d=='|': c = a|b
13775+ elif d=='||': c = int(a or b)
13776+ elif d=='&': c = a&b
13777+ elif d=='&&': c = int(a and b)
13778+ elif d=='==': c = int(a == b)
13779+ elif d=='!=': c = int(a != b)
13780+ elif d=='<=': c = int(a <= b)
13781+ elif d=='<': c = int(a < b)
13782+ elif d=='>': c = int(a > b)
13783+ elif d=='>=': c = int(a >= b)
13784+ elif d=='^': c = int(a^b)
13785+ elif d=='<<': c = a<<b
13786+ elif d=='>>': c = a>>b
13787+ else: c = 0
13788+ return c
13789+
13790+def get_num(lst):
13791+ if not lst: raise PreprocError("empty list for get_num")
13792+ (p, v) = lst[0]
13793+ if p == OP:
13794+ if v == '(':
13795+ count_par = 1
13796+ i = 1
13797+ while i < len(lst):
13798+ (p, v) = lst[i]
13799+
13800+ if p == OP:
13801+ if v == ')':
13802+ count_par -= 1
13803+ if count_par == 0:
13804+ break
13805+ elif v == '(':
13806+ count_par += 1
13807+ i += 1
13808+ else:
13809+ raise PreprocError("rparen expected %r" % lst)
13810+
13811+ (num, _) = get_term(lst[1:i])
13812+ return (num, lst[i+1:])
13813+
13814+ elif v == '+':
13815+ return get_num(lst[1:])
13816+ elif v == '-':
13817+ num, lst = get_num(lst[1:])
13818+ return (reduce_nums('-1', num, '*'), lst)
13819+ elif v == '!':
13820+ num, lst = get_num(lst[1:])
13821+ return (int(not int(num)), lst)
13822+ elif v == '~':
13823+ return (~ int(num), lst)
13824+ else:
13825+ raise PreprocError("invalid op token %r for get_num" % lst)
13826+ elif p == NUM:
13827+ return v, lst[1:]
13828+ elif p == IDENT:
13829+ # all macros should have been replaced, remaining identifiers eval to 0
13830+ return 0, lst[1:]
13831+ else:
13832+ raise PreprocError("invalid token %r for get_num" % lst)
13833+
13834+def get_term(lst):
13835+ if not lst: raise PreprocError("empty list for get_term")
13836+ num, lst = get_num(lst)
13837+ if not lst:
13838+ return (num, [])
13839+ (p, v) = lst[0]
13840+ if p == OP:
13841+ if v == '&&' and not num:
13842+ return (num, [])
13843+ elif v == '||' and num:
13844+ return (num, [])
13845+ elif v == ',':
13846+ # skip
13847+ return get_term(lst[1:])
13848+ elif v == '?':
13849+ count_par = 0
13850+ i = 1
13851+ while i < len(lst):
13852+ (p, v) = lst[i]
13853+
13854+ if p == OP:
13855+ if v == ')':
13856+ count_par -= 1
13857+ elif v == '(':
13858+ count_par += 1
13859+ elif v == ':':
13860+ if count_par == 0:
13861+ break
13862+ i += 1
13863+ else:
13864+ raise PreprocError("rparen expected %r" % lst)
13865+
13866+ if int(num):
13867+ return get_term(lst[1:i])
13868+ else:
13869+ return get_term(lst[i+1:])
13870+
13871+ else:
13872+ num2, lst = get_num(lst[1:])
13873+
13874+ if not lst:
13875+ # no more tokens to process
13876+ num2 = reduce_nums(num, num2, v)
13877+ return get_term([(NUM, num2)] + lst)
13878+
13879+ # operator precedence
13880+ p2, v2 = lst[0]
13881+ if p2 != OP:
13882+ raise PreprocError("op expected %r" % lst)
13883+
13884+ if prec[v2] >= prec[v]:
13885+ num2 = reduce_nums(num, num2, v)
13886+ return get_term([(NUM, num2)] + lst)
13887+ else:
13888+ num3, lst = get_num(lst[1:])
13889+ num3 = reduce_nums(num2, num3, v2)
13890+ return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)
13891+
13892+
13893+ raise PreprocError("cannot reduce %r" % lst)
13894+
13895+def reduce_eval(lst):
13896+ """take a list of tokens and output true or false (#if/#elif conditions)"""
13897+ num, lst = get_term(lst)
13898+ return (NUM, num)
13899+
13900+def stringize(lst):
13901+ """use for converting a list of tokens to a string"""
13902+ lst = [str(v2) for (p2, v2) in lst]
13903+ return "".join(lst)
13904+
13905+def paste_tokens(t1, t2):
13906+ """
13907+ here is what we can paste:
13908+ a ## b -> ab
13909+ > ## = -> >=
13910+ a ## 2 -> a2
13911+ """
13912+ p1 = None
13913+ if t1[0] == OP and t2[0] == OP:
13914+ p1 = OP
13915+ elif t1[0] == IDENT and (t2[0] == IDENT or t2[0] == NUM):
13916+ p1 = IDENT
13917+ elif t1[0] == NUM and t2[0] == NUM:
13918+ p1 = NUM
13919+ if not p1:
13920+ raise PreprocError('tokens do not make a valid paste %r and %r' % (t1, t2))
13921+ return (p1, t1[1] + t2[1])
13922+
13923+def reduce_tokens(lst, defs, ban=[]):
13924+ """replace the tokens in lst, using the macros provided in defs, and a list of macros that cannot be re-applied"""
13925+ i = 0
13926+
13927+ while i < len(lst):
13928+ (p, v) = lst[i]
13929+
13930+ if p == IDENT and v == "defined":
13931+ del lst[i]
13932+ if i < len(lst):
13933+ (p2, v2) = lst[i]
13934+ if p2 == IDENT:
13935+ if v2 in defs:
13936+ lst[i] = (NUM, 1)
13937+ else:
13938+ lst[i] = (NUM, 0)
13939+ elif p2 == OP and v2 == '(':
13940+ del lst[i]
13941+ (p2, v2) = lst[i]
13942+				del lst[i] # remove the identifier, and replace the ')' with the computed value
13943+ if v2 in defs:
13944+ lst[i] = (NUM, 1)
13945+ else:
13946+ lst[i] = (NUM, 0)
13947+ else:
13948+ raise PreprocError("invalid define expression %r" % lst)
13949+
13950+ elif p == IDENT and v in defs:
13951+
13952+ if isinstance(defs[v], str):
13953+ a, b = extract_macro(defs[v])
13954+ defs[v] = b
13955+ macro_def = defs[v]
13956+ to_add = macro_def[1]
13957+
13958+ if isinstance(macro_def[0], list):
13959+ # macro without arguments
13960+ del lst[i]
13961+ for x in xrange(len(to_add)):
13962+ lst.insert(i, to_add[x])
13963+ i += 1
13964+ else:
13965+ # collect the arguments for the funcall
13966+
13967+ args = []
13968+ del lst[i]
13969+
13970+ if i >= len(lst):
13971+ raise PreprocError("expected '(' after %r (got nothing)" % v)
13972+
13973+ (p2, v2) = lst[i]
13974+ if p2 != OP or v2 != '(':
13975+ raise PreprocError("expected '(' after %r" % v)
13976+
13977+ del lst[i]
13978+
13979+ one_param = []
13980+ count_paren = 0
13981+ while i < len(lst):
13982+ p2, v2 = lst[i]
13983+
13984+ del lst[i]
13985+ if p2 == OP and count_paren == 0:
13986+ if v2 == '(':
13987+ one_param.append((p2, v2))
13988+ count_paren += 1
13989+ elif v2 == ')':
13990+ if one_param: args.append(one_param)
13991+ break
13992+ elif v2 == ',':
13993+ if not one_param: raise PreprocError("empty param in funcall %s" % p)
13994+ args.append(one_param)
13995+ one_param = []
13996+ else:
13997+ one_param.append((p2, v2))
13998+ else:
13999+ one_param.append((p2, v2))
14000+ if v2 == '(': count_paren += 1
14001+ elif v2 == ')': count_paren -= 1
14002+ else:
14003+ raise PreprocError('malformed macro')
14004+
14005+ # substitute the arguments within the define expression
14006+ accu = []
14007+ arg_table = macro_def[0]
14008+ j = 0
14009+ while j < len(to_add):
14010+ (p2, v2) = to_add[j]
14011+
14012+ if p2 == OP and v2 == '#':
14013+ # stringize is for arguments only
14014+ if j+1 < len(to_add) and to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
14015+ toks = args[arg_table[to_add[j+1][1]]]
14016+ accu.append((STR, stringize(toks)))
14017+ j += 1
14018+ else:
14019+ accu.append((p2, v2))
14020+ elif p2 == OP and v2 == '##':
14021+ # token pasting, how can man invent such a complicated system?
14022+ if accu and j+1 < len(to_add):
14023+ # we have at least two tokens
14024+
14025+ t1 = accu[-1]
14026+
14027+ if to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
14028+ toks = args[arg_table[to_add[j+1][1]]]
14029+
14030+ if toks:
14031+ accu[-1] = paste_tokens(t1, toks[0]) #(IDENT, accu[-1][1] + toks[0][1])
14032+ accu.extend(toks[1:])
14033+ else:
14034+ # error, case "a##"
14035+ accu.append((p2, v2))
14036+ accu.extend(toks)
14037+ elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
14038+ # TODO not sure
14039+ # first collect the tokens
14040+ va_toks = []
14041+ st = len(macro_def[0])
14042+ pt = len(args)
14043+ for x in args[pt-st+1:]:
14044+ va_toks.extend(x)
14045+ va_toks.append((OP, ','))
14046+ if va_toks: va_toks.pop() # extra comma
14047+ if len(accu)>1:
14048+ (p3, v3) = accu[-1]
14049+ (p4, v4) = accu[-2]
14050+ if v3 == '##':
14051+ # remove the token paste
14052+ accu.pop()
14053+ if v4 == ',' and pt < st:
14054+ # remove the comma
14055+ accu.pop()
14056+ accu += va_toks
14057+ else:
14058+ accu[-1] = paste_tokens(t1, to_add[j+1])
14059+
14060+ j += 1
14061+ else:
14062+ # invalid paste, case "##a" or "b##"
14063+ accu.append((p2, v2))
14064+
14065+ elif p2 == IDENT and v2 in arg_table:
14066+ toks = args[arg_table[v2]]
14067+ reduce_tokens(toks, defs, ban+[v])
14068+ accu.extend(toks)
14069+ else:
14070+ accu.append((p2, v2))
14071+
14072+ j += 1
14073+
14074+
14075+ reduce_tokens(accu, defs, ban+[v])
14076+
14077+ for x in xrange(len(accu)-1, -1, -1):
14078+ lst.insert(i, accu[x])
14079+
14080+ i += 1
14081+
14082+
14083+def eval_macro(lst, adefs):
14084+ """reduce the tokens from the list lst, and try to return a 0/1 result"""
14085+ reduce_tokens(lst, adefs, [])
14086+ if not lst: raise PreprocError("missing tokens to evaluate")
14087+ (p, v) = reduce_eval(lst)
14088+ return int(v) != 0
14089+
14090+def extract_macro(txt):
14091+ """process a macro definition from "#define f(x, y) x * y" into a function or a simple macro without arguments"""
14092+ t = tokenize(txt)
14093+ if re_fun.search(txt):
14094+ p, name = t[0]
14095+
14096+ p, v = t[1]
14097+ if p != OP: raise PreprocError("expected open parenthesis")
14098+
14099+ i = 1
14100+ pindex = 0
14101+ params = {}
14102+ prev = '('
14103+
14104+ while 1:
14105+ i += 1
14106+ p, v = t[i]
14107+
14108+ if prev == '(':
14109+ if p == IDENT:
14110+ params[v] = pindex
14111+ pindex += 1
14112+ prev = p
14113+ elif p == OP and v == ')':
14114+ break
14115+ else:
14116+ raise PreprocError("unexpected token (3)")
14117+ elif prev == IDENT:
14118+ if p == OP and v == ',':
14119+ prev = v
14120+ elif p == OP and v == ')':
14121+ break
14122+ else:
14123+ raise PreprocError("comma or ... expected")
14124+ elif prev == ',':
14125+ if p == IDENT:
14126+ params[v] = pindex
14127+ pindex += 1
14128+ prev = p
14129+ elif p == OP and v == '...':
14130+ raise PreprocError("not implemented (1)")
14131+ else:
14132+ raise PreprocError("comma or ... expected (2)")
14133+ elif prev == '...':
14134+ raise PreprocError("not implemented (2)")
14135+ else:
14136+ raise PreprocError("unexpected else")
14137+
14138+ #~ print (name, [params, t[i+1:]])
14139+ return (name, [params, t[i+1:]])
14140+ else:
14141+ (p, v) = t[0]
14142+ return (v, [[], t[1:]])
14143+
14144+re_include = re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
14145+def extract_include(txt, defs):
14146+ """process a line in the form "#include foo" to return a string representing the file"""
14147+ m = re_include.search(txt)
14148+ if m:
14149+ if m.group('a'): return '<', m.group('a')
14150+ if m.group('b'): return '"', m.group('b')
14151+
14152+ # perform preprocessing and look at the result, it must match an include
14153+ toks = tokenize(txt)
14154+ reduce_tokens(toks, defs, ['waf_include'])
14155+
14156+ if not toks:
14157+ raise PreprocError("could not parse include %s" % txt)
14158+
14159+ if len(toks) == 1:
14160+ if toks[0][0] == STR:
14161+ return '"', toks[0][1]
14162+ else:
14163+ if toks[0][1] == '<' and toks[-1][1] == '>':
14164+			return '<', stringize(toks).lstrip('<').rstrip('>')
14165+
14166+ raise PreprocError("could not parse include %s." % txt)
14167+
14168+def parse_char(txt):
14169+ if not txt: raise PreprocError("attempted to parse a null char")
14170+ if txt[0] != '\\':
14171+ return ord(txt)
14172+ c = txt[1]
14173+ if c == 'x':
14174+ if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16)
14175+ return int(txt[2:], 16)
14176+ elif c.isdigit():
14177+ if c == '0' and len(txt)==2: return 0
14178+ for i in 3, 2, 1:
14179+ if len(txt) > i and txt[1:1+i].isdigit():
14180+ return (1+i, int(txt[1:1+i], 8))
14181+ else:
14182+ try: return chr_esc[c]
14183+ except KeyError: raise PreprocError("could not parse char literal '%s'" % txt)
14184+
14185+@Utils.run_once
14186+def tokenize(s):
14187+ """convert a string into a list of tokens (shlex.split does not apply to c/c++/d)"""
14188+ ret = []
14189+ for match in re_clexer.finditer(s):
14190+ m = match.group
14191+ for name in tok_types:
14192+ v = m(name)
14193+ if v:
14194+ if name == IDENT:
14195+ try: v = g_optrans[v]; name = OP
14196+ except KeyError:
14197+ # c++ specific
14198+ if v.lower() == "true":
14199+ v = 1
14200+ name = NUM
14201+ elif v.lower() == "false":
14202+ v = 0
14203+ name = NUM
14204+ elif name == NUM:
14205+ if m('oct'): v = int(v, 8)
14206+ elif m('hex'): v = int(m('hex'), 16)
14207+ elif m('n0'): v = m('n0')
14208+ else:
14209+ v = m('char')
14210+ if v: v = parse_char(v)
14211+ else: v = m('n2') or m('n4')
14212+ elif name == OP:
14213+ if v == '%:': v = '#'
14214+ elif v == '%:%:': v = '##'
14215+ elif name == STR:
14216+ # remove the quotes around the string
14217+ v = v[1:-1]
14218+ ret.append((name, v))
14219+ break
14220+ return ret
14221+
14222+@Utils.run_once
14223+def define_name(line):
14224+ return re_mac.match(line).group(0)
14225+
14226+class c_parser(object):
14227+ def __init__(self, nodepaths=None, defines=None):
14228+ #self.lines = txt.split('\n')
14229+ self.lines = []
14230+
14231+ if defines is None:
14232+ self.defs = {}
14233+ else:
14234+ self.defs = dict(defines) # make a copy
14235+ self.state = []
14236+
14237+ self.env = None # needed for the variant when searching for files
14238+
14239+ self.count_files = 0
14240+ self.currentnode_stack = []
14241+
14242+ self.nodepaths = nodepaths or []
14243+
14244+ self.nodes = []
14245+ self.names = []
14246+
14247+ # file added
14248+ self.curfile = ''
14249+ self.ban_includes = set([])
14250+
14251+ def cached_find_resource(self, node, filename):
14252+ try:
14253+ nd = node.bld.cache_nd
14254+ except:
14255+ nd = node.bld.cache_nd = {}
14256+
14257+ tup = (node.id, filename)
14258+ try:
14259+ return nd[tup]
14260+ except KeyError:
14261+ ret = node.find_resource(filename)
14262+ nd[tup] = ret
14263+ return ret
14264+
14265+ def tryfind(self, filename):
14266+ self.curfile = filename
14267+
14268+ # for msvc it should be a for loop on the whole stack
14269+ found = self.cached_find_resource(self.currentnode_stack[-1], filename)
14270+
14271+ for n in self.nodepaths:
14272+ if found:
14273+ break
14274+ found = self.cached_find_resource(n, filename)
14275+
14276+ if found:
14277+ self.nodes.append(found)
14278+ if filename[-4:] != '.moc':
14279+ self.addlines(found)
14280+ else:
14281+ if not filename in self.names:
14282+ self.names.append(filename)
14283+ return found
14284+
14285+ def addlines(self, node):
14286+
14287+ self.currentnode_stack.append(node.parent)
14288+ filepath = node.abspath(self.env)
14289+
14290+ self.count_files += 1
14291+ if self.count_files > recursion_limit: raise PreprocError("recursion limit exceeded")
14292+ pc = self.parse_cache
14293+ debug('preproc: reading file %r', filepath)
14294+ try:
14295+ lns = pc[filepath]
14296+ except KeyError:
14297+ pass
14298+ else:
14299+ self.lines.extend(lns)
14300+ return
14301+
14302+ try:
14303+ lines = filter_comments(filepath)
14304+ lines.append((POPFILE, ''))
14305+ lines.reverse()
14306+ pc[filepath] = lines # cache the lines filtered
14307+ self.lines.extend(lines)
14308+ except IOError:
14309+ raise PreprocError("could not read the file %s" % filepath)
14310+ except Exception:
14311+ if Logs.verbose > 0:
14312+ error("parsing %s failed" % filepath)
14313+ traceback.print_exc()
14314+
14315+ def start(self, node, env):
14316+ debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
14317+
14318+ self.env = env
14319+ variant = node.variant(env)
14320+ bld = node.__class__.bld
14321+ try:
14322+ self.parse_cache = bld.parse_cache
14323+ except AttributeError:
14324+ bld.parse_cache = {}
14325+ self.parse_cache = bld.parse_cache
14326+
14327+ self.addlines(node)
14328+ if env['DEFLINES']:
14329+ lst = [('define', x) for x in env['DEFLINES']]
14330+ lst.reverse()
14331+ self.lines.extend(lst)
14332+
14333+ while self.lines:
14334+ (kind, line) = self.lines.pop()
14335+ if kind == POPFILE:
14336+ self.currentnode_stack.pop()
14337+ continue
14338+ try:
14339+ self.process_line(kind, line)
14340+ except Exception, e:
14341+ if Logs.verbose:
14342+ debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
14343+
14344+ def process_line(self, token, line):
14345+ """
14346+		WARNING: each 'if*' directive must push a new state, because the matching 'endif' pops one
14347+ """
14348+ ve = Logs.verbose
14349+ if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state)
14350+ state = self.state
14351+
14352+ # make certain we define the state if we are about to enter in an if block
14353+ if token in ['ifdef', 'ifndef', 'if']:
14354+ state.append(undefined)
14355+ elif token == 'endif':
14356+ state.pop()
14357+
14358+ # skip lines when in a dead 'if' branch, wait for the endif
14359+ if not token in ['else', 'elif', 'endif']:
14360+ if skipped in self.state or ignored in self.state:
14361+ return
14362+
14363+ if token == 'if':
14364+ ret = eval_macro(tokenize(line), self.defs)
14365+ if ret: state[-1] = accepted
14366+ else: state[-1] = ignored
14367+ elif token == 'ifdef':
14368+ m = re_mac.match(line)
14369+ if m and m.group(0) in self.defs: state[-1] = accepted
14370+ else: state[-1] = ignored
14371+ elif token == 'ifndef':
14372+ m = re_mac.match(line)
14373+ if m and m.group(0) in self.defs: state[-1] = ignored
14374+ else: state[-1] = accepted
14375+ elif token == 'include' or token == 'import':
14376+ (kind, inc) = extract_include(line, self.defs)
14377+ if inc in self.ban_includes: return
14378+ if token == 'import': self.ban_includes.add(inc)
14379+ if ve: debug('preproc: include found %s (%s) ', inc, kind)
14380+ if kind == '"' or not strict_quotes:
14381+ self.tryfind(inc)
14382+ elif token == 'elif':
14383+ if state[-1] == accepted:
14384+ state[-1] = skipped
14385+ elif state[-1] == ignored:
14386+ if eval_macro(tokenize(line), self.defs):
14387+ state[-1] = accepted
14388+ elif token == 'else':
14389+ if state[-1] == accepted: state[-1] = skipped
14390+ elif state[-1] == ignored: state[-1] = accepted
14391+ elif token == 'define':
14392+ try:
14393+ self.defs[define_name(line)] = line
14394+ except:
14395+ raise PreprocError("invalid define line %s" % line)
14396+ elif token == 'undef':
14397+ m = re_mac.match(line)
14398+ if m and m.group(0) in self.defs:
14399+ self.defs.__delitem__(m.group(0))
14400+ #print "undef %s" % name
14401+ elif token == 'pragma':
14402+ if re_pragma_once.match(line.lower()):
14403+ self.ban_includes.add(self.curfile)
14404+
14405+def get_deps(node, env, nodepaths=[]):
14406+ """
14407+	Get the dependencies using a C/C++ preprocessor; this is required to find dependencies of the form
14408+ #include some_macro()
14409+ """
14410+
14411+ gruik = c_parser(nodepaths)
14412+ gruik.start(node, env)
14413+ return (gruik.nodes, gruik.names)
14414+
14415+#################### dumb dependency scanner
14416+
14417+re_inc = re.compile(\
14418+ '^[ \t]*(#|%:)[ \t]*(include)[ \t]*(.*)\r*$',
14419+ re.IGNORECASE | re.MULTILINE)
14420+
14421+def lines_includes(filename):
14422+ code = Utils.readf(filename)
14423+ if use_trigraphs:
14424+ for (a, b) in trig_def: code = code.split(a).join(b)
14425+ code = re_nl.sub('', code)
14426+ code = re_cpp.sub(repl, code)
14427+ return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
14428+
14429+def get_deps_simple(node, env, nodepaths=[], defines={}):
14430+ """
14431+ Get the dependencies by just looking recursively at the #include statements
14432+ """
14433+
14434+ nodes = []
14435+ names = []
14436+
14437+ def find_deps(node):
14438+ lst = lines_includes(node.abspath(env))
14439+
14440+ for (_, line) in lst:
14441+ (t, filename) = extract_include(line, defines)
14442+ if filename in names:
14443+ continue
14444+
14445+ if filename.endswith('.moc'):
14446+ names.append(filename)
14447+
14448+ found = None
14449+ for n in nodepaths:
14450+ if found:
14451+ break
14452+ found = n.find_resource(filename)
14453+
14454+ if not found:
14455+ if not filename in names:
14456+ names.append(filename)
14457+ elif not found in nodes:
14458+ nodes.append(found)
14459+ find_deps(node)
14460+
14461+ find_deps(node)
14462+ return (nodes, names)
14463+
14464+
14465diff --git a/buildtools/wafadmin/Tools/python.py b/buildtools/wafadmin/Tools/python.py
14466new file mode 100644
14467index 0000000..4f73081
14468--- /dev/null
14469+++ b/buildtools/wafadmin/Tools/python.py
14470@@ -0,0 +1,413 @@
14471+#!/usr/bin/env python
14472+# encoding: utf-8
14473+# Thomas Nagy, 2007 (ita)
14474+# Gustavo Carneiro (gjc), 2007
14475+
14476+"Python support"
14477+
14478+import os, sys
14479+import TaskGen, Utils, Runner, Options, Build
14480+from Logs import debug, warn, info
14481+from TaskGen import extension, taskgen, before, after, feature
14482+from Configure import conf
14483+
14484+EXT_PY = ['.py']
14485+FRAG_2 = '''
14486+#include "Python.h"
14487+#ifdef __cplusplus
14488+extern "C" {
14489+#endif
14490+ void Py_Initialize(void);
14491+ void Py_Finalize(void);
14492+#ifdef __cplusplus
14493+}
14494+#endif
14495+int main()
14496+{
14497+ Py_Initialize();
14498+ Py_Finalize();
14499+ return 0;
14500+}
14501+'''
14502+
14503+@feature('pyext')
14504+@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars', 'apply_bundle')
14505+@after('vars_target_cshlib')
14506+def init_pyext(self):
14507+ self.default_install_path = '${PYTHONARCHDIR}'
14508+ self.uselib = self.to_list(getattr(self, 'uselib', ''))
14509+ if not 'PYEXT' in self.uselib:
14510+ self.uselib.append('PYEXT')
14511+ self.env['MACBUNDLE'] = True
14512+
14513+@before('apply_link', 'apply_lib_vars', 'apply_type_vars')
14514+@after('apply_bundle')
14515+@feature('pyext')
14516+def pyext_shlib_ext(self):
14517+ # override shlib_PATTERN set by the osx module
14518+ self.env['shlib_PATTERN'] = self.env['pyext_PATTERN']
14519+
14520+@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars')
14521+@feature('pyembed')
14522+def init_pyembed(self):
14523+ self.uselib = self.to_list(getattr(self, 'uselib', ''))
14524+ if not 'PYEMBED' in self.uselib:
14525+ self.uselib.append('PYEMBED')
14526+
14527+@extension(EXT_PY)
14528+def process_py(self, node):
14529+ if not (self.bld.is_install and self.install_path):
14530+ return
14531+ def inst_py(ctx):
14532+ install_pyfile(self, node)
14533+ self.bld.add_post_fun(inst_py)
14534+
14535+def install_pyfile(self, node):
14536+ path = self.bld.get_install_path(self.install_path + os.sep + node.name, self.env)
14537+
14538+ self.bld.install_files(self.install_path, [node], self.env, self.chmod, postpone=False)
14539+ if self.bld.is_install < 0:
14540+ info("* removing byte compiled python files")
14541+ for x in 'co':
14542+ try:
14543+ os.remove(path + x)
14544+ except OSError:
14545+ pass
14546+
14547+ if self.bld.is_install > 0:
14548+ if self.env['PYC'] or self.env['PYO']:
14549+ info("* byte compiling %r" % path)
14550+
14551+ if self.env['PYC']:
14552+ program = ("""
14553+import sys, py_compile
14554+for pyfile in sys.argv[1:]:
14555+ py_compile.compile(pyfile, pyfile + 'c')
14556+""")
14557+ argv = [self.env['PYTHON'], '-c', program, path]
14558+ ret = Utils.pproc.Popen(argv).wait()
14559+ if ret:
14560+ raise Utils.WafError('bytecode compilation failed %r' % path)
14561+
14562+ if self.env['PYO']:
14563+ program = ("""
14564+import sys, py_compile
14565+for pyfile in sys.argv[1:]:
14566+ py_compile.compile(pyfile, pyfile + 'o')
14567+""")
14568+ argv = [self.env['PYTHON'], self.env['PYFLAGS_OPT'], '-c', program, path]
14569+ ret = Utils.pproc.Popen(argv).wait()
14570+ if ret:
14571+ raise Utils.WafError('bytecode compilation failed %r' % path)
14572+
14573+# COMPAT
14574+class py_taskgen(TaskGen.task_gen):
14575+ def __init__(self, *k, **kw):
14576+ TaskGen.task_gen.__init__(self, *k, **kw)
14577+
14578+@before('apply_core')
14579+@after('vars_target_cprogram', 'vars_target_cshlib')
14580+@feature('py')
14581+def init_py(self):
14582+ self.default_install_path = '${PYTHONDIR}'
14583+
14584+def _get_python_variables(python_exe, variables, imports=['import sys']):
14585+ """Run a python interpreter and print some variables"""
14586+ program = list(imports)
14587+ program.append('')
14588+ for v in variables:
14589+ program.append("print(repr(%s))" % v)
14590+ os_env = dict(os.environ)
14591+ try:
14592+ del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
14593+ except KeyError:
14594+ pass
14595+ proc = Utils.pproc.Popen([python_exe, "-c", '\n'.join(program)], stdout=Utils.pproc.PIPE, env=os_env)
14596+ output = proc.communicate()[0].split("\n") # do not touch, python3
14597+ if proc.returncode:
14598+ if Options.options.verbose:
14599+ warn("Python program to extract python configuration variables failed:\n%s"
14600+ % '\n'.join(["line %03i: %s" % (lineno+1, line) for lineno, line in enumerate(program)]))
14601+ raise RuntimeError
14602+ return_values = []
14603+ for s in output:
14604+ s = s.strip()
14605+ if not s:
14606+ continue
14607+ if s == 'None':
14608+ return_values.append(None)
14609+ elif s[0] == "'" and s[-1] == "'":
14610+ return_values.append(s[1:-1])
14611+ elif s[0].isdigit():
14612+ return_values.append(int(s))
14613+ else: break
14614+ return return_values
14615+
14616+@conf
14617+def check_python_headers(conf, mandatory=True):
14618+ """Check for headers and libraries necessary to extend or embed python.
14619+
14620+ On success the environment variables xxx_PYEXT and xxx_PYEMBED are added for uselib
14621+
14622+ PYEXT: for compiling python extensions
14623+ PYEMBED: for embedding a python interpreter"""
14624+
14625+ if not conf.env['CC_NAME'] and not conf.env['CXX_NAME']:
14626+ conf.fatal('load a compiler first (gcc, g++, ..)')
14627+
14628+ if not conf.env['PYTHON_VERSION']:
14629+ conf.check_python_version()
14630+
14631+ env = conf.env
14632+ python = env['PYTHON']
14633+ if not python:
14634+ conf.fatal('could not find the python executable')
14635+
14636+ ## On Mac OSX we need to use mac bundles for python plugins
14637+ if Options.platform == 'darwin':
14638+ conf.check_tool('osx')
14639+
14640+ try:
14641+ # Get some python configuration variables using distutils
14642+ v = 'prefix SO SYSLIBS LDFLAGS SHLIBS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET'.split()
14643+ (python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
14644+ python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED,
14645+ python_MACOSX_DEPLOYMENT_TARGET) = \
14646+ _get_python_variables(python, ["get_config_var('%s')" % x for x in v],
14647+ ['from distutils.sysconfig import get_config_var'])
14648+ except RuntimeError:
14649+ conf.fatal("Python development headers not found (-v for details).")
14650+
14651+ conf.log.write("""Configuration returned from %r:
14652+python_prefix = %r
14653+python_SO = %r
14654+python_SYSLIBS = %r
14655+python_LDFLAGS = %r
14656+python_SHLIBS = %r
14657+python_LIBDIR = %r
14658+python_LIBPL = %r
14659+INCLUDEPY = %r
14660+Py_ENABLE_SHARED = %r
14661+MACOSX_DEPLOYMENT_TARGET = %r
14662+""" % (python, python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
14663+ python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED, python_MACOSX_DEPLOYMENT_TARGET))
14664+
14665+ if python_MACOSX_DEPLOYMENT_TARGET:
14666+ conf.env['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
14667+ conf.environ['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
14668+
14669+ env['pyext_PATTERN'] = '%s'+python_SO
14670+
14671+ # Check for python libraries for embedding
14672+ if python_SYSLIBS is not None:
14673+ for lib in python_SYSLIBS.split():
14674+ if lib.startswith('-l'):
14675+ lib = lib[2:] # strip '-l'
14676+ env.append_value('LIB_PYEMBED', lib)
14677+
14678+ if python_SHLIBS is not None:
14679+ for lib in python_SHLIBS.split():
14680+ if lib.startswith('-l'):
14681+ env.append_value('LIB_PYEMBED', lib[2:]) # strip '-l'
14682+ else:
14683+ env.append_value('LINKFLAGS_PYEMBED', lib)
14684+
14685+ if Options.platform != 'darwin' and python_LDFLAGS:
14686+ env.append_value('LINKFLAGS_PYEMBED', python_LDFLAGS.split())
14687+
14688+ result = False
14689+ name = 'python' + env['PYTHON_VERSION']
14690+
14691+ if python_LIBDIR is not None:
14692+ path = [python_LIBDIR]
14693+ conf.log.write("\n\n# Trying LIBDIR: %r\n" % path)
14694+ result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
14695+
14696+ if not result and python_LIBPL is not None:
14697+ conf.log.write("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
14698+ path = [python_LIBPL]
14699+ result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
14700+
14701+ if not result:
14702+ conf.log.write("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
14703+ path = [os.path.join(python_prefix, "libs")]
14704+ name = 'python' + env['PYTHON_VERSION'].replace('.', '')
14705+ result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
14706+
14707+ if result:
14708+ env['LIBPATH_PYEMBED'] = path
14709+ env.append_value('LIB_PYEMBED', name)
14710+ else:
14711+ conf.log.write("\n\n### LIB NOT FOUND\n")
14712+
14713+ # under certain conditions, python extensions must link to
14714+ # python libraries, not just python embedding programs.
14715+ if (sys.platform == 'win32' or sys.platform.startswith('os2')
14716+ or sys.platform == 'darwin' or Py_ENABLE_SHARED):
14717+ env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED']
14718+ env['LIB_PYEXT'] = env['LIB_PYEMBED']
14719+
14720+ # We check that pythonX.Y-config exists, and if it exists we
14721+ # use it to get only the includes, else fall back to distutils.
14722+ python_config = conf.find_program(
14723+ 'python%s-config' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
14724+ var='PYTHON_CONFIG')
14725+ if not python_config:
14726+ python_config = conf.find_program(
14727+ 'python-config-%s' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
14728+ var='PYTHON_CONFIG')
14729+
14730+ includes = []
14731+ if python_config:
14732+ for incstr in Utils.cmd_output("%s %s --includes" % (python, python_config)).strip().split():
14733+ # strip the -I or /I
14734+ if (incstr.startswith('-I')
14735+ or incstr.startswith('/I')):
14736+ incstr = incstr[2:]
14737+ # append include path, unless already given
14738+ if incstr not in includes:
14739+ includes.append(incstr)
14740+ conf.log.write("Include path for Python extensions "
14741+ "(found via python-config --includes): %r\n" % (includes,))
14742+ env['CPPPATH_PYEXT'] = includes
14743+ env['CPPPATH_PYEMBED'] = includes
14744+ else:
14745+ conf.log.write("Include path for Python extensions "
14746+ "(found via distutils module): %r\n" % (INCLUDEPY,))
14747+ env['CPPPATH_PYEXT'] = [INCLUDEPY]
14748+ env['CPPPATH_PYEMBED'] = [INCLUDEPY]
14749+
14750+ # Code using the Python API needs to be compiled with -fno-strict-aliasing
14751+ if env['CC_NAME'] == 'gcc':
14752+ env.append_value('CCFLAGS_PYEMBED', '-fno-strict-aliasing')
14753+ env.append_value('CCFLAGS_PYEXT', '-fno-strict-aliasing')
14754+ if env['CXX_NAME'] == 'gcc':
14755+ env.append_value('CXXFLAGS_PYEMBED', '-fno-strict-aliasing')
14756+ env.append_value('CXXFLAGS_PYEXT', '-fno-strict-aliasing')
14757+
14758+ # See if it compiles
14759+ conf.check(define_name='HAVE_PYTHON_H',
14760+ uselib='PYEMBED', fragment=FRAG_2,
14761+ errmsg='Could not find the python development headers', mandatory=mandatory)
14762+
14763+@conf
14764+def check_python_version(conf, minver=None):
14765+ """
14766+ Check if the python interpreter is found matching a given minimum version.
14767+ minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.
14768+
14769+ If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR'
14770+ (eg. '2.4') of the actual python version found, and PYTHONDIR is
14771+ defined, pointing to the site-packages directory appropriate for
14772+ this python version, where modules/packages/extensions should be
14773+ installed.
14774+ """
14775+ assert minver is None or isinstance(minver, tuple)
14776+ python = conf.env['PYTHON']
14777+ if not python:
14778+ conf.fatal('could not find the python executable')
14779+
14780+ # Get python version string
14781+ cmd = [python, "-c", "import sys\nfor x in sys.version_info: print(str(x))"]
14782+ debug('python: Running python command %r' % cmd)
14783+ proc = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE)
14784+ lines = proc.communicate()[0].split()
14785+ assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
14786+ pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
14787+
14788+ # compare python version with the minimum required
14789+ result = (minver is None) or (pyver_tuple >= minver)
14790+
14791+ if result:
14792+ # define useful environment variables
14793+ pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
14794+ conf.env['PYTHON_VERSION'] = pyver
14795+
14796+ if 'PYTHONDIR' in conf.environ:
14797+ pydir = conf.environ['PYTHONDIR']
14798+ else:
14799+ if sys.platform == 'win32':
14800+ (python_LIBDEST, pydir) = \
14801+ _get_python_variables(python,
14802+ ["get_config_var('LIBDEST')",
14803+ "get_python_lib(standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
14804+ ['from distutils.sysconfig import get_config_var, get_python_lib'])
14805+ else:
14806+ python_LIBDEST = None
14807+ (pydir,) = \
14808+ _get_python_variables(python,
14809+ ["get_python_lib(standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
14810+ ['from distutils.sysconfig import get_config_var, get_python_lib'])
14811+ if python_LIBDEST is None:
14812+ if conf.env['LIBDIR']:
14813+ python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
14814+ else:
14815+ python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)
14816+
14817+ if 'PYTHONARCHDIR' in conf.environ:
14818+ pyarchdir = conf.environ['PYTHONARCHDIR']
14819+ else:
14820+ (pyarchdir,) = _get_python_variables(python,
14821+ ["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
14822+ ['from distutils.sysconfig import get_config_var, get_python_lib'])
14823+ if not pyarchdir:
14824+ pyarchdir = pydir
14825+
14826+ if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
14827+ conf.define('PYTHONDIR', pydir)
14828+ conf.define('PYTHONARCHDIR', pyarchdir)
14829+
14830+ conf.env['PYTHONDIR'] = pydir
14831+
14832+ # Feedback
14833+ pyver_full = '.'.join(map(str, pyver_tuple[:3]))
14834+ if minver is None:
14835+ conf.check_message_custom('Python version', '', pyver_full)
14836+ else:
14837+ minver_str = '.'.join(map(str, minver))
14838+ conf.check_message('Python version', ">= %s" % minver_str, result, option=pyver_full)
14839+
14840+ if not result:
14841+ conf.fatal('The python version is too old (%r)' % pyver_full)
14842+
14843+@conf
14844+def check_python_module(conf, module_name):
14845+ """
14846+ Check if the selected python interpreter can import the given python module.
14847+ """
14848+ result = not Utils.pproc.Popen([conf.env['PYTHON'], "-c", "import %s" % module_name],
14849+ stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE).wait()
14850+ conf.check_message('Python module', module_name, result)
14851+ if not result:
14852+ conf.fatal('Could not find the python module %r' % module_name)
14853+
14854+def detect(conf):
14855+
14856+ if not conf.env.PYTHON:
14857+ conf.env.PYTHON = sys.executable
14858+
14859+ python = conf.find_program('python', var='PYTHON')
14860+ if not python:
14861+ conf.fatal('Could not find the path of the python executable')
14862+
14863+ v = conf.env
14864+
14865+ v['PYCMD'] = '"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
14866+ v['PYFLAGS'] = ''
14867+ v['PYFLAGS_OPT'] = '-O'
14868+
14869+ v['PYC'] = getattr(Options.options, 'pyc', 1)
14870+ v['PYO'] = getattr(Options.options, 'pyo', 1)
14871+
14872+def set_options(opt):
14873+ opt.add_option('--nopyc',
14874+ action='store_false',
14875+ default=1,
14876+ help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]',
14877+ dest = 'pyc')
14878+ opt.add_option('--nopyo',
14879+ action='store_false',
14880+ default=1,
14881+ help='Do not install optimised compiled .pyo files (configuration) [Default:install]',
14882+ dest='pyo')
14883+
14884diff --git a/buildtools/wafadmin/Tools/qt4.py b/buildtools/wafadmin/Tools/qt4.py
14885new file mode 100644
14886index 0000000..84d121a
14887--- /dev/null
14888+++ b/buildtools/wafadmin/Tools/qt4.py
14889@@ -0,0 +1,505 @@
14890+#!/usr/bin/env python
14891+# encoding: utf-8
14892+# Thomas Nagy, 2006 (ita)
14893+
14894+"""
14895+Qt4 support
14896+
14897+If QT4_ROOT is given (absolute path), the configuration will look in it first
14898+
14899+This module also demonstrates how to add tasks dynamically (when the build has started)
14900+"""
14901+
14902+try:
14903+ from xml.sax import make_parser
14904+ from xml.sax.handler import ContentHandler
14905+except ImportError:
14906+ has_xml = False
14907+ ContentHandler = object
14908+else:
14909+ has_xml = True
14910+
14911+import os, sys
14912+import ccroot, cxx
14913+import TaskGen, Task, Utils, Runner, Options, Node, Configure
14914+from TaskGen import taskgen, feature, after, extension
14915+from Logs import error
14916+from Constants import *
14917+
14918+MOC_H = ['.h', '.hpp', '.hxx', '.hh']
14919+EXT_RCC = ['.qrc']
14920+EXT_UI = ['.ui']
14921+EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
14922+
14923+class qxx_task(Task.Task):
14924+ "A cpp task that may create a moc task dynamically"
14925+
14926+ before = ['cxx_link', 'static_link']
14927+
14928+ def __init__(self, *k, **kw):
14929+ Task.Task.__init__(self, *k, **kw)
14930+ self.moc_done = 0
14931+
14932+ def scan(self):
14933+ (nodes, names) = ccroot.scan(self)
14934+		# for some reason (variants) the moc node may end up in the list of node deps
14935+ for x in nodes:
14936+ if x.name.endswith('.moc'):
14937+ nodes.remove(x)
14938+ names.append(x.relpath_gen(self.inputs[0].parent))
14939+ return (nodes, names)
14940+
14941+ def runnable_status(self):
14942+ if self.moc_done:
14943+ # if there is a moc task, delay the computation of the file signature
14944+ for t in self.run_after:
14945+ if not t.hasrun:
14946+ return ASK_LATER
14947+ # the moc file enters in the dependency calculation
14948+ # so we need to recompute the signature when the moc file is present
14949+ self.signature()
14950+ return Task.Task.runnable_status(self)
14951+ else:
14952+ # yes, really, there are people who generate cxx files
14953+ for t in self.run_after:
14954+ if not t.hasrun:
14955+ return ASK_LATER
14956+ self.add_moc_tasks()
14957+ return ASK_LATER
14958+
14959+ def add_moc_tasks(self):
14960+
14961+ node = self.inputs[0]
14962+ tree = node.__class__.bld
14963+
14964+ try:
14965+ # compute the signature once to know if there is a moc file to create
14966+ self.signature()
14967+ except KeyError:
14968+ # the moc file may be referenced somewhere else
14969+ pass
14970+ else:
14971+ # remove the signature, it must be recomputed with the moc task
14972+ delattr(self, 'cache_sig')
14973+
14974+ moctasks=[]
14975+ mocfiles=[]
14976+ variant = node.variant(self.env)
14977+ try:
14978+ tmp_lst = tree.raw_deps[self.unique_id()]
14979+ tree.raw_deps[self.unique_id()] = []
14980+ except KeyError:
14981+ tmp_lst = []
14982+ for d in tmp_lst:
14983+ if not d.endswith('.moc'): continue
14984+ # paranoid check
14985+ if d in mocfiles:
14986+ error("paranoia owns")
14987+ continue
14988+
14989+ # process that base.moc only once
14990+ mocfiles.append(d)
14991+
14992+ # find the extension (performed only when the .cpp has changes)
14993+ base2 = d[:-4]
14994+ for path in [node.parent] + self.generator.env['INC_PATHS']:
14995+ tree.rescan(path)
14996+ vals = getattr(Options.options, 'qt_header_ext', '') or MOC_H
14997+ for ex in vals:
14998+ h_node = path.find_resource(base2 + ex)
14999+ if h_node:
15000+ break
15001+ else:
15002+ continue
15003+ break
15004+ else:
15005+ raise Utils.WafError("no header found for %s which is a moc file" % str(d))
15006+
15007+ m_node = h_node.change_ext('.moc')
15008+ tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), m_node.name)] = h_node
15009+
15010+ # create the task
15011+ task = Task.TaskBase.classes['moc'](self.env, normal=0)
15012+ task.set_inputs(h_node)
15013+ task.set_outputs(m_node)
15014+
15015+ generator = tree.generator
15016+ generator.outstanding.insert(0, task)
15017+ generator.total += 1
15018+
15019+ moctasks.append(task)
15020+
15021+ # remove raw deps except the moc files to save space (optimization)
15022+ tmp_lst = tree.raw_deps[self.unique_id()] = mocfiles
15023+
15024+ # look at the file inputs, it is set right above
15025+ lst = tree.node_deps.get(self.unique_id(), ())
15026+ for d in lst:
15027+ name = d.name
15028+ if name.endswith('.moc'):
15029+ task = Task.TaskBase.classes['moc'](self.env, normal=0)
15030+ task.set_inputs(tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), name)]) # 1st element in a tuple
15031+ task.set_outputs(d)
15032+
15033+ generator = tree.generator
15034+ generator.outstanding.insert(0, task)
15035+ generator.total += 1
15036+
15037+ moctasks.append(task)
15038+
15039+ # simple scheduler dependency: run the moc task before others
15040+ self.run_after = moctasks
15041+ self.moc_done = 1
15042+
15043+ run = Task.TaskBase.classes['cxx'].__dict__['run']
15044+
15045+def translation_update(task):
15046+ outs = [a.abspath(task.env) for a in task.outputs]
15047+ outs = " ".join(outs)
15048+ lupdate = task.env['QT_LUPDATE']
15049+
15050+ for x in task.inputs:
15051+ file = x.abspath(task.env)
15052+ cmd = "%s %s -ts %s" % (lupdate, file, outs)
15053+ Utils.pprint('BLUE', cmd)
15054+ task.generator.bld.exec_command(cmd)
15055+
15056+class XMLHandler(ContentHandler):
15057+ def __init__(self):
15058+ self.buf = []
15059+ self.files = []
15060+ def startElement(self, name, attrs):
15061+ if name == 'file':
15062+ self.buf = []
15063+ def endElement(self, name):
15064+ if name == 'file':
15065+ self.files.append(''.join(self.buf))
15066+ def characters(self, cars):
15067+ self.buf.append(cars)
15068+
15069+def scan(self):
15070+ "add the dependency on the files referenced in the qrc"
15071+ node = self.inputs[0]
15072+ parser = make_parser()
15073+ curHandler = XMLHandler()
15074+ parser.setContentHandler(curHandler)
15075+ fi = open(self.inputs[0].abspath(self.env))
15076+ parser.parse(fi)
15077+ fi.close()
15078+
15079+ nodes = []
15080+ names = []
15081+ root = self.inputs[0].parent
15082+ for x in curHandler.files:
15083+ nd = root.find_resource(x)
15084+ if nd: nodes.append(nd)
15085+ else: names.append(x)
15086+
15087+ return (nodes, names)
15088+
15089+@extension(EXT_RCC)
15090+def create_rcc_task(self, node):
15091+ "hook for rcc files"
15092+ rcnode = node.change_ext('_rc.cpp')
15093+ rcctask = self.create_task('rcc', node, rcnode)
15094+ cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
15095+ self.compiled_tasks.append(cpptask)
15096+ return cpptask
15097+
15098+@extension(EXT_UI)
15099+def create_uic_task(self, node):
15100+ "hook for uic tasks"
15101+ uictask = self.create_task('ui4', node)
15102+ uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])]
15103+ return uictask
15104+
15105+class qt4_taskgen(cxx.cxx_taskgen):
15106+ def __init__(self, *k, **kw):
15107+ cxx.cxx_taskgen.__init__(self, *k, **kw)
15108+ self.features.append('qt4')
15109+
15110+@extension('.ts')
15111+def add_lang(self, node):
15112+ """add all the .ts file into self.lang"""
15113+ self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
15114+
15115+@feature('qt4')
15116+@after('apply_link')
15117+def apply_qt4(self):
15118+ if getattr(self, 'lang', None):
15119+ update = getattr(self, 'update', None)
15120+ lst=[]
15121+ trans=[]
15122+ for l in self.to_list(self.lang):
15123+
15124+ if not isinstance(l, Node.Node):
15125+ l = self.path.find_resource(l+'.ts')
15126+
15127+ t = self.create_task('ts2qm', l, l.change_ext('.qm'))
15128+ lst.append(t.outputs[0])
15129+
15130+ if update:
15131+ trans.append(t.inputs[0])
15132+
15133+ trans_qt4 = getattr(Options.options, 'trans_qt4', False)
15134+ if update and trans_qt4:
15135+ # we need the cpp files given, except the rcc task we create after
15136+ # FIXME may be broken
15137+ u = Task.TaskCmd(translation_update, self.env, 2)
15138+ u.inputs = [a.inputs[0] for a in self.compiled_tasks]
15139+ u.outputs = trans
15140+
15141+ if getattr(self, 'langname', None):
15142+ t = Task.TaskBase.classes['qm2rcc'](self.env)
15143+ t.set_inputs(lst)
15144+ t.set_outputs(self.path.find_or_declare(self.langname+'.qrc'))
15145+ t.path = self.path
15146+ k = create_rcc_task(self, t.outputs[0])
15147+ self.link_task.inputs.append(k.outputs[0])
15148+
15149+ self.env.append_value('MOC_FLAGS', self.env._CXXDEFFLAGS)
15150+ self.env.append_value('MOC_FLAGS', self.env._CXXINCFLAGS)
15151+
15152+@extension(EXT_QT4)
15153+def cxx_hook(self, node):
15154+ # create the compilation task: cpp or cc
15155+ try: obj_ext = self.obj_ext
15156+ except AttributeError: obj_ext = '_%d.o' % self.idx
15157+
15158+ task = self.create_task('qxx', node, node.change_ext(obj_ext))
15159+ self.compiled_tasks.append(task)
15160+ return task
15161+
15162+def process_qm2rcc(task):
15163+ outfile = task.outputs[0].abspath(task.env)
15164+ f = open(outfile, 'w')
15165+ f.write('<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n')
15166+ for k in task.inputs:
15167+ f.write(' <file>')
15168+ #f.write(k.name)
15169+ f.write(k.path_to_parent(task.path))
15170+ f.write('</file>\n')
15171+ f.write('</qresource>\n</RCC>')
15172+ f.close()
15173+
15174+b = Task.simple_task_type
15175+b('moc', '${QT_MOC} ${MOC_FLAGS} ${SRC} ${MOC_ST} ${TGT}', color='BLUE', vars=['QT_MOC', 'MOC_FLAGS'], shell=False)
15176+cls = b('rcc', '${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath(env)} ${RCC_ST} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', after="qm2rcc", shell=False)
15177+cls.scan = scan
15178+b('ui4', '${QT_UIC} ${SRC} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', shell=False)
15179+b('ts2qm', '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}', color='BLUE', before='qm2rcc', shell=False)
15180+
15181+Task.task_type_from_func('qm2rcc', vars=[], func=process_qm2rcc, color='BLUE', before='rcc', after='ts2qm')
15182+
15183+def detect_qt4(conf):
15184+ env = conf.env
15185+ opt = Options.options
15186+
15187+ qtdir = getattr(opt, 'qtdir', '')
15188+ qtbin = getattr(opt, 'qtbin', '')
15189+ qtlibs = getattr(opt, 'qtlibs', '')
15190+ useframework = getattr(opt, 'use_qt4_osxframework', True)
15191+
15192+ paths = []
15193+
15194+	# the path to qmake has been given explicitly
15195+ if qtbin:
15196+ paths = [qtbin]
15197+
15198+ # the qt directory has been given - we deduce the qt binary path
15199+ if not qtdir:
15200+ qtdir = conf.environ.get('QT4_ROOT', '')
15201+ qtbin = os.path.join(qtdir, 'bin')
15202+ paths = [qtbin]
15203+
15204+ # no qtdir, look in the path and in /usr/local/Trolltech
15205+ if not qtdir:
15206+ paths = os.environ.get('PATH', '').split(os.pathsep)
15207+ paths.append('/usr/share/qt4/bin/')
15208+ try:
15209+ lst = os.listdir('/usr/local/Trolltech/')
15210+ except OSError:
15211+ pass
15212+ else:
15213+ if lst:
15214+ lst.sort()
15215+ lst.reverse()
15216+
15217+ # keep the highest version
15218+ qtdir = '/usr/local/Trolltech/%s/' % lst[0]
15219+ qtbin = os.path.join(qtdir, 'bin')
15220+ paths.append(qtbin)
15221+
15222+ # at the end, try to find qmake in the paths given
15223+ # keep the one with the highest version
15224+ cand = None
15225+ prev_ver = ['4', '0', '0']
15226+ for qmk in ['qmake-qt4', 'qmake4', 'qmake']:
15227+ qmake = conf.find_program(qmk, path_list=paths)
15228+ if qmake:
15229+ try:
15230+ version = Utils.cmd_output([qmake, '-query', 'QT_VERSION']).strip()
15231+ except ValueError:
15232+ pass
15233+ else:
15234+ if version:
15235+ new_ver = version.split('.')
15236+ if new_ver > prev_ver:
15237+ cand = qmake
15238+ prev_ver = new_ver
15239+ if cand:
15240+ qmake = cand
15241+ else:
15242+ conf.fatal('could not find qmake for qt4')
15243+
15244+ conf.env.QMAKE = qmake
15245+ qtincludes = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_HEADERS']).strip()
15246+ qtdir = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
15247+ qtbin = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_BINS']).strip() + os.sep
15248+
15249+ if not qtlibs:
15250+ try:
15251+ qtlibs = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_LIBS']).strip() + os.sep
15252+ except ValueError:
15253+ qtlibs = os.path.join(qtdir, 'lib')
15254+
15255+ def find_bin(lst, var):
15256+ for f in lst:
15257+ ret = conf.find_program(f, path_list=paths)
15258+ if ret:
15259+ env[var]=ret
15260+ break
15261+
15262+ vars = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtWebKit Qt3Support".split()
15263+
15264+ find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
15265+ find_bin(['uic-qt4', 'uic'], 'QT_UIC')
15266+ if not env['QT_UIC']:
15267+ conf.fatal('cannot find the uic compiler for qt4')
15268+
15269+ try:
15270+ version = Utils.cmd_output(env['QT_UIC'] + " -version 2>&1").strip()
15271+ except ValueError:
15272+ conf.fatal('your uic compiler is for qt3, add uic for qt4 to your path')
15273+
15274+ version = version.replace('Qt User Interface Compiler ','')
15275+ version = version.replace('User Interface Compiler for Qt', '')
15276+ if version.find(" 3.") != -1:
15277+ conf.check_message('uic version', '(too old)', 0, option='(%s)'%version)
15278+ sys.exit(1)
15279+ conf.check_message('uic version', '', 1, option='(%s)'%version)
15280+
15281+ find_bin(['moc-qt4', 'moc'], 'QT_MOC')
15282+ find_bin(['rcc'], 'QT_RCC')
15283+ find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
15284+ find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')
15285+
15286+ env['UIC3_ST']= '%s -o %s'
15287+ env['UIC_ST'] = '%s -o %s'
15288+ env['MOC_ST'] = '-o'
15289+ env['ui_PATTERN'] = 'ui_%s.h'
15290+ env['QT_LRELEASE_FLAGS'] = ['-silent']
15291+
15292+ vars_debug = [a+'_debug' for a in vars]
15293+
15294+ try:
15295+ conf.find_program('pkg-config', var='pkgconfig', path_list=paths, mandatory=True)
15296+
15297+ except Configure.ConfigurationError:
15298+
15299+ for lib in vars_debug+vars:
15300+ uselib = lib.upper()
15301+
15302+ d = (lib.find('_debug') > 0) and 'd' or ''
15303+
15304+ # original author seems to prefer static to shared libraries
15305+ for (pat, kind) in ((conf.env.staticlib_PATTERN, 'STATIC'), (conf.env.shlib_PATTERN, '')):
15306+
15307+ conf.check_message_1('Checking for %s %s' % (lib, kind))
15308+
15309+ for ext in ['', '4']:
15310+ path = os.path.join(qtlibs, pat % (lib + d + ext))
15311+ if os.path.exists(path):
15312+ env.append_unique(kind + 'LIB_' + uselib, lib + d + ext)
15313+ conf.check_message_2('ok ' + path, 'GREEN')
15314+ break
15315+ path = os.path.join(qtbin, pat % (lib + d + ext))
15316+ if os.path.exists(path):
15317+ env.append_unique(kind + 'LIB_' + uselib, lib + d + ext)
15318+ conf.check_message_2('ok ' + path, 'GREEN')
15319+ break
15320+ else:
15321+ conf.check_message_2('not found', 'YELLOW')
15322+ continue
15323+ break
15324+
15325+ env.append_unique('LIBPATH_' + uselib, qtlibs)
15326+ env.append_unique('CPPPATH_' + uselib, qtincludes)
15327+ env.append_unique('CPPPATH_' + uselib, qtincludes + os.sep + lib)
15328+ else:
15329+ for i in vars_debug+vars:
15330+ try:
15331+ conf.check_cfg(package=i, args='--cflags --libs --silence-errors', path=conf.env.pkgconfig)
15332+ except ValueError:
15333+ pass
15334+
15335+ # the libpaths are set nicely, unfortunately they make really long command-lines
15336+ # remove the qtcore ones from qtgui, etc
15337+ def process_lib(vars_, coreval):
15338+ for d in vars_:
15339+ var = d.upper()
15340+ if var == 'QTCORE': continue
15341+
15342+ value = env['LIBPATH_'+var]
15343+ if value:
15344+ core = env[coreval]
15345+ accu = []
15346+ for lib in value:
15347+ if lib in core: continue
15348+ accu.append(lib)
15349+ env['LIBPATH_'+var] = accu
15350+
15351+ process_lib(vars, 'LIBPATH_QTCORE')
15352+ process_lib(vars_debug, 'LIBPATH_QTCORE_DEBUG')
15353+
15354+ # rpath if wanted
15355+ want_rpath = getattr(Options.options, 'want_rpath', 1)
15356+ if want_rpath:
15357+ def process_rpath(vars_, coreval):
15358+ for d in vars_:
15359+ var = d.upper()
15360+ value = env['LIBPATH_'+var]
15361+ if value:
15362+ core = env[coreval]
15363+ accu = []
15364+ for lib in value:
15365+ if var != 'QTCORE':
15366+ if lib in core:
15367+ continue
15368+ accu.append('-Wl,--rpath='+lib)
15369+ env['RPATH_'+var] = accu
15370+ process_rpath(vars, 'LIBPATH_QTCORE')
15371+ process_rpath(vars_debug, 'LIBPATH_QTCORE_DEBUG')
15372+
15373+ env['QTLOCALE'] = str(env['PREFIX'])+'/share/locale'
15374+
15375+def detect(conf):
15376+ detect_qt4(conf)
15377+
15378+def set_options(opt):
15379+ opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
15380+
15381+ opt.add_option('--header-ext',
15382+ type='string',
15383+ default='',
15384+ help='header extension for moc files',
15385+ dest='qt_header_ext')
15386+
15387+ for i in 'qtdir qtbin qtlibs'.split():
15388+ opt.add_option('--'+i, type='string', default='', dest=i)
15389+
15390+ if sys.platform == "darwin":
15391+ opt.add_option('--no-qt4-framework', action="store_false", help='do not use the framework version of Qt4 in OS X', dest='use_qt4_osxframework',default=True)
15392+
15393+ opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)
15394+
15395diff --git a/buildtools/wafadmin/Tools/ruby.py b/buildtools/wafadmin/Tools/ruby.py
15396new file mode 100644
15397index 0000000..d3b7569
15398--- /dev/null
15399+++ b/buildtools/wafadmin/Tools/ruby.py
15400@@ -0,0 +1,120 @@
15401+#!/usr/bin/env python
15402+# encoding: utf-8
15403+# daniel.svensson at purplescout.se 2008
15404+
15405+import os
15406+import Task, Options, Utils
15407+from TaskGen import before, feature, after
15408+from Configure import conf
15409+
15410+@feature('rubyext')
15411+@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars', 'apply_bundle')
15412+@after('default_cc', 'vars_target_cshlib')
15413+def init_rubyext(self):
15414+ self.default_install_path = '${ARCHDIR_RUBY}'
15415+ self.uselib = self.to_list(getattr(self, 'uselib', ''))
15416+ if not 'RUBY' in self.uselib:
15417+ self.uselib.append('RUBY')
15418+ if not 'RUBYEXT' in self.uselib:
15419+ self.uselib.append('RUBYEXT')
15420+
15421+@feature('rubyext')
15422+@before('apply_link')
15423+def apply_ruby_so_name(self):
15424+ self.env['shlib_PATTERN'] = self.env['rubyext_PATTERN']
15425+
15426+@conf
15427+def check_ruby_version(conf, minver=()):
15428+ """
15429+ Checks if ruby is installed.
15430+ If installed the variable RUBY will be set in environment.
15431+ Ruby binary can be overridden by --with-ruby-binary config variable
15432+ """
15433+
15434+ if Options.options.rubybinary:
15435+ conf.env.RUBY = Options.options.rubybinary
15436+ else:
15437+ conf.find_program("ruby", var="RUBY", mandatory=True)
15438+
15439+ ruby = conf.env.RUBY
15440+
15441+ try:
15442+ version = Utils.cmd_output([ruby, '-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
15443+ except:
15444+ conf.fatal('could not determine ruby version')
15445+ conf.env.RUBY_VERSION = version
15446+
15447+ try:
15448+ ver = tuple(map(int, version.split(".")))
15449+ except:
15450+ conf.fatal('unsupported ruby version %r' % version)
15451+
15452+ cver = ''
15453+ if minver:
15454+ if ver < minver:
15455+ conf.fatal('ruby is too old')
15456+ cver = ".".join([str(x) for x in minver])
15457+
15458+ conf.check_message('ruby', cver, True, version)
15459+
15460+@conf
15461+def check_ruby_ext_devel(conf):
15462+ if not conf.env.RUBY:
15463+ conf.fatal('ruby detection is required first')
15464+
15465+ if not conf.env.CC_NAME and not conf.env.CXX_NAME:
15466+ conf.fatal('load a c/c++ compiler first')
15467+
15468+ version = tuple(map(int, conf.env.RUBY_VERSION.split(".")))
15469+
15470+ def read_out(cmd):
15471+ return Utils.to_list(Utils.cmd_output([conf.env.RUBY, '-rrbconfig', '-e', cmd]))
15472+
15473+ def read_config(key):
15474+ return read_out('puts Config::CONFIG[%r]' % key)
15475+
15476+ ruby = conf.env['RUBY']
15477+ archdir = read_config('archdir')
15478+ cpppath = archdir
15479+ if version >= (1, 9, 0):
15480+ ruby_hdrdir = read_config('rubyhdrdir')
15481+ cpppath += ruby_hdrdir
15482+ cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]
15483+
15484+ conf.check(header_name='ruby.h', includes=cpppath, mandatory=True, errmsg='could not find ruby header file')
15485+
15486+ conf.env.LIBPATH_RUBYEXT = read_config('libdir')
15487+ conf.env.LIBPATH_RUBYEXT += archdir
15488+ conf.env.CPPPATH_RUBYEXT = cpppath
15489+ conf.env.CCFLAGS_RUBYEXT = read_config("CCDLFLAGS")
15490+ conf.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]
15491+
15492+ # ok this is really stupid, but the command and flags are combined.
15493+ # so we try to find the first argument...
15494+ flags = read_config('LDSHARED')
15495+ while flags and flags[0][0] != '-':
15496+ flags = flags[1:]
15497+
15498+ # we also want to strip out the deprecated ppc flags
15499+ if len(flags) > 1 and flags[1] == "ppc":
15500+ flags = flags[2:]
15501+
15502+ conf.env.LINKFLAGS_RUBYEXT = flags
15503+ conf.env.LINKFLAGS_RUBYEXT += read_config("LIBS")
15504+ conf.env.LINKFLAGS_RUBYEXT += read_config("LIBRUBYARG_SHARED")
15505+
15506+ if Options.options.rubyarchdir:
15507+ conf.env.ARCHDIR_RUBY = Options.options.rubyarchdir
15508+ else:
15509+ conf.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]
15510+
15511+ if Options.options.rubylibdir:
15512+ conf.env.LIBDIR_RUBY = Options.options.rubylibdir
15513+ else:
15514+ conf.env.LIBDIR_RUBY = read_config('sitelibdir')[0]
15515+
15516+def set_options(opt):
15517+ opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
15518+ opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
15519+ opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')
15520+
15521diff --git a/buildtools/wafadmin/Tools/suncc.py b/buildtools/wafadmin/Tools/suncc.py
15522new file mode 100644
15523index 0000000..b1a2aad
15524--- /dev/null
15525+++ b/buildtools/wafadmin/Tools/suncc.py
15526@@ -0,0 +1,76 @@
15527+#!/usr/bin/env python
15528+# encoding: utf-8
15529+# Thomas Nagy, 2006 (ita)
15530+# Ralf Habacker, 2006 (rh)
15531+
15532+import os, optparse
15533+import Utils, Options, Configure
15534+import ccroot, ar
15535+from Configure import conftest
15536+
15537+@conftest
15538+def find_scc(conf):
15539+ v = conf.env
15540+ cc = None
15541+ if v['CC']: cc = v['CC']
15542+ elif 'CC' in conf.environ: cc = conf.environ['CC']
15543+ #if not cc: cc = conf.find_program('gcc', var='CC')
15544+ if not cc: cc = conf.find_program('cc', var='CC')
15545+ if not cc: conf.fatal('suncc was not found')
15546+ cc = conf.cmd_to_list(cc)
15547+
15548+ try:
15549+ if not Utils.cmd_output(cc + ['-flags']):
15550+ conf.fatal('suncc %r was not found' % cc)
15551+ except ValueError:
15552+ conf.fatal('suncc -flags could not be executed')
15553+
15554+ v['CC'] = cc
15555+ v['CC_NAME'] = 'sun'
15556+
15557+@conftest
15558+def scc_common_flags(conf):
15559+ v = conf.env
15560+
15561+ # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
15562+
15563+ v['CC_SRC_F'] = ''
15564+ v['CC_TGT_F'] = ['-c', '-o', '']
15565+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
15566+
15567+ # linker
15568+ if not v['LINK_CC']: v['LINK_CC'] = v['CC']
15569+ v['CCLNK_SRC_F'] = ''
15570+ v['CCLNK_TGT_F'] = ['-o', ''] # solaris hack, separate the -o from the target
15571+
15572+ v['LIB_ST'] = '-l%s' # template for adding libs
15573+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
15574+ v['STATICLIB_ST'] = '-l%s'
15575+ v['STATICLIBPATH_ST'] = '-L%s'
15576+ v['CCDEFINES_ST'] = '-D%s'
15577+
15578+ v['SONAME_ST'] = '-Wl,-h -Wl,%s'
15579+ v['SHLIB_MARKER'] = '-Bdynamic'
15580+ v['STATICLIB_MARKER'] = '-Bstatic'
15581+
15582+ # program
15583+ v['program_PATTERN'] = '%s'
15584+
15585+ # shared library
15586+ v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC']
15587+ v['shlib_LINKFLAGS'] = ['-G']
15588+ v['shlib_PATTERN'] = 'lib%s.so'
15589+
15590+ # static lib
15591+ v['staticlib_LINKFLAGS'] = ['-Bstatic']
15592+ v['staticlib_PATTERN'] = 'lib%s.a'
15593+
15594+detect = '''
15595+find_scc
15596+find_cpp
15597+find_ar
15598+scc_common_flags
15599+cc_load_tools
15600+cc_add_flags
15601+link_add_flags
15602+'''
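The detect string above lists the conftest methods that run, in order, when the tool is loaded. A minimal configure sketch (assuming Sun Studio's cc is on PATH; the extra flag is illustrative, not required):

   def configure(conf):
       conf.check_tool('suncc')                  # find_scc, find_cpp, find_ar, scc_common_flags, ...
       conf.env.append_value('CCFLAGS', '-xO2')  # hypothetical optimisation flag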
15603diff --git a/buildtools/wafadmin/Tools/suncxx.py b/buildtools/wafadmin/Tools/suncxx.py
15604new file mode 100644
15605index 0000000..8754b6c
15606--- /dev/null
15607+++ b/buildtools/wafadmin/Tools/suncxx.py
15608@@ -0,0 +1,75 @@
15609+#!/usr/bin/env python
15610+# encoding: utf-8
15611+# Thomas Nagy, 2006 (ita)
15612+# Ralf Habacker, 2006 (rh)
15613+
15614+import os, optparse
15615+import Utils, Options, Configure
15616+import ccroot, ar
15617+from Configure import conftest
15618+
15619+@conftest
15620+def find_sxx(conf):
15621+ v = conf.env
15622+ cc = None
15623+ if v['CXX']: cc = v['CXX']
15624+ elif 'CXX' in conf.environ: cc = conf.environ['CXX']
15625+ if not cc: cc = conf.find_program('c++', var='CXX')
15626+ if not cc: conf.fatal('sunc++ was not found')
15627+ cc = conf.cmd_to_list(cc)
15628+
15629+ try:
15630+ if not Utils.cmd_output(cc + ['-flags']):
15631+ conf.fatal('sunc++ %r was not found' % cc)
15632+ except ValueError:
15633+ conf.fatal('sunc++ -flags could not be executed')
15634+
15635+ v['CXX'] = cc
15636+ v['CXX_NAME'] = 'sun'
15637+
15638+@conftest
15639+def sxx_common_flags(conf):
15640+ v = conf.env
15641+
15642+ # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
15643+
15644+ v['CXX_SRC_F'] = ''
15645+ v['CXX_TGT_F'] = ['-c', '-o', '']
15646+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
15647+
15648+ # linker
15649+ if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
15650+ v['CXXLNK_SRC_F'] = ''
15651+ v['CXXLNK_TGT_F'] = ['-o', ''] # solaris hack, separate the -o from the target
15652+
15653+ v['LIB_ST'] = '-l%s' # template for adding libs
15654+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
15655+ v['STATICLIB_ST'] = '-l%s'
15656+ v['STATICLIBPATH_ST'] = '-L%s'
15657+ v['CXXDEFINES_ST'] = '-D%s'
15658+
15659+ v['SONAME_ST'] = '-Wl,-h -Wl,%s'
15660+ v['SHLIB_MARKER'] = '-Bdynamic'
15661+ v['STATICLIB_MARKER'] = '-Bstatic'
15662+
15663+ # program
15664+ v['program_PATTERN'] = '%s'
15665+
15666+ # shared library
15667+ v['shlib_CXXFLAGS'] = ['-Kpic', '-DPIC']
15668+ v['shlib_LINKFLAGS'] = ['-G']
15669+ v['shlib_PATTERN'] = 'lib%s.so'
15670+
15671+ # static lib
15672+ v['staticlib_LINKFLAGS'] = ['-Bstatic']
15673+ v['staticlib_PATTERN'] = 'lib%s.a'
15674+
15675+detect = '''
15676+find_sxx
15677+find_cpp
15678+find_ar
15679+sxx_common_flags
15680+cxx_load_tools
15681+cxx_add_flags
15682+link_add_flags
15683+'''
15684diff --git a/buildtools/wafadmin/Tools/tex.py b/buildtools/wafadmin/Tools/tex.py
15685new file mode 100644
15686index 0000000..2dd748b
15687--- /dev/null
15688+++ b/buildtools/wafadmin/Tools/tex.py
15689@@ -0,0 +1,251 @@
15690+#!/usr/bin/env python
15691+# encoding: utf-8
15692+# Thomas Nagy, 2006 (ita)
15693+
15694+"TeX/LaTeX/PDFLaTeX support"
15695+
15696+import os, re
15697+import Utils, TaskGen, Task, Runner, Build
15698+from TaskGen import feature, before
15699+from Logs import error, warn, debug
15700+
15701+re_tex = re.compile(r'\\(?P<type>include|input|import|bringin|lstinputlisting){(?P<file>[^{}]*)}', re.M)
15702+def scan(self):
15703+ node = self.inputs[0]
15704+ env = self.env
15705+
15706+ nodes = []
15707+ names = []
15708+ if not node: return (nodes, names)
15709+
15710+ code = Utils.readf(node.abspath(env))
15711+
15712+ curdirnode = self.curdirnode
15713+ abs = curdirnode.abspath()
15714+ for match in re_tex.finditer(code):
15715+ path = match.group('file')
15716+ if path:
15717+ for k in ['', '.tex', '.ltx']:
15718+ # add another loop for the tex include paths?
15719+ debug('tex: trying %s%s' % (path, k))
15720+ try:
15721+ os.stat(abs+os.sep+path+k)
15722+ except OSError:
15723+ continue
15724+ found = path+k
15725+ node = curdirnode.find_resource(found)
15726+ if node:
15727+ nodes.append(node)
15728+ else:
15729+ debug('tex: could not find %s' % path)
15730+ names.append(path)
15731+
15732+ debug("tex: found the following : %s and names %s" % (nodes, names))
15733+ return (nodes, names)
15734+
15735+latex_fun, _ = Task.compile_fun('latex', '${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
15736+pdflatex_fun, _ = Task.compile_fun('pdflatex', '${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
15737+bibtex_fun, _ = Task.compile_fun('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
15738+makeindex_fun, _ = Task.compile_fun('makeindex', '${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
15739+
15740+g_bibtex_re = re.compile('bibdata', re.M)
15741+def tex_build(task, command='LATEX'):
15742+ env = task.env
15743+ bld = task.generator.bld
15744+
15745+ if not env['PROMPT_LATEX']:
15746+ env.append_value('LATEXFLAGS', '-interaction=batchmode')
15747+ env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
15748+
15749+ fun = latex_fun
15750+ if command == 'PDFLATEX':
15751+ fun = pdflatex_fun
15752+
15753+ node = task.inputs[0]
15754+ reldir = node.bld_dir(env)
15755+
15756+ #lst = []
15757+ #for c in Utils.split_path(reldir):
15758+ # if c: lst.append('..')
15759+ #srcfile = os.path.join(*(lst + [node.srcpath(env)]))
15760+ #sr2 = os.path.join(*(lst + [node.parent.srcpath(env)]))
15761+ srcfile = node.abspath(env)
15762+ sr2 = node.parent.abspath() + os.pathsep + node.parent.abspath(env) + os.pathsep
15763+
15764+ aux_node = node.change_ext('.aux')
15765+ idx_node = node.change_ext('.idx')
15766+
15767+ nm = aux_node.name
15768+ docuname = nm[ : len(nm) - 4 ] # 4 is the size of ".aux"
15769+
15770+ # important, set the cwd for everybody
15771+ task.cwd = task.inputs[0].parent.abspath(task.env)
15772+
15773+
15774+ warn('first pass on %s' % command)
15775+
15776+ task.env.env = {'TEXINPUTS': sr2}
15777+ task.env.SRCFILE = srcfile
15778+ ret = fun(task)
15779+ if ret:
15780+ return ret
15781+
15782+ # look in the .aux file if there is a bibfile to process
15783+ try:
15784+ ct = Utils.readf(aux_node.abspath(env))
15785+ except (OSError, IOError):
15786+ error('error bibtex scan')
15787+ else:
15788+ fo = g_bibtex_re.findall(ct)
15789+
15790+ # there is a .aux file to process
15791+ if fo:
15792+ warn('calling bibtex')
15793+
15794+ task.env.env = {'BIBINPUTS': sr2, 'BSTINPUTS': sr2}
15795+ task.env.SRCFILE = docuname
15796+ ret = bibtex_fun(task)
15797+ if ret:
15798+ error('error when calling bibtex %s' % docuname)
15799+ return ret
15800+
15801+ # look on the filesystem if there is a .idx file to process
15802+ try:
15803+ idx_path = idx_node.abspath(env)
15804+ os.stat(idx_path)
15805+ except OSError:
15806+ error('error file.idx scan')
15807+ else:
15808+ warn('calling makeindex')
15809+
15810+ task.env.SRCFILE = idx_node.name
15811+ task.env.env = {}
15812+ ret = makeindex_fun(task)
15813+ if ret:
15814+ error('error when calling makeindex %s' % idx_path)
15815+ return ret
15816+
15817+
15818+ hash = ''
15819+ i = 0
15820+ while i < 10:
15821+ # protect against infinite loops - one never knows
15822+ i += 1
15823+
15824+ # watch the contents of file.aux
15825+ prev_hash = hash
15826+ try:
15827+ hash = Utils.h_file(aux_node.abspath(env))
15828+ except KeyError:
15829+ error('could not read aux.h -> %s' % aux_node.abspath(env))
15830+ pass
15831+
15832+ # debug
15833+ #print "hash is, ", hash, " ", old_hash
15834+
15835+ # stop if file.aux does not change anymore
15836+ if hash and hash == prev_hash:
15837+ break
15838+
15839+ # run the command
15840+ warn('calling %s' % command)
15841+
15842+ task.env.env = {'TEXINPUTS': sr2 + os.pathsep}
15843+ task.env.SRCFILE = srcfile
15844+ ret = fun(task)
15845+ if ret:
15846+ error('error when calling %s %s' % (command, srcfile))
15847+ return ret
15848+
15849+ return None # ok
15850+
15851+latex_vardeps = ['LATEX', 'LATEXFLAGS']
15852+def latex_build(task):
15853+ return tex_build(task, 'LATEX')
15854+
15855+pdflatex_vardeps = ['PDFLATEX', 'PDFLATEXFLAGS']
15856+def pdflatex_build(task):
15857+ return tex_build(task, 'PDFLATEX')
15858+
15859+class tex_taskgen(TaskGen.task_gen):
15860+ def __init__(self, *k, **kw):
15861+ TaskGen.task_gen.__init__(self, *k, **kw)
15862+
15863+@feature('tex')
15864+@before('apply_core')
15865+def apply_tex(self):
15866+ if not getattr(self, 'type', None) in ['latex', 'pdflatex']:
15867+ self.type = 'pdflatex'
15868+
15869+ tree = self.bld
15870+ outs = Utils.to_list(getattr(self, 'outs', []))
15871+
15872+ # prompt for incomplete files (else the batchmode is used)
15873+ self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1)
15874+
15875+ deps_lst = []
15876+
15877+ if getattr(self, 'deps', None):
15878+ deps = self.to_list(self.deps)
15879+ for filename in deps:
15880+ n = self.path.find_resource(filename)
15881+ if not n in deps_lst: deps_lst.append(n)
15882+
15883+ self.source = self.to_list(self.source)
15884+ for filename in self.source:
15885+ base, ext = os.path.splitext(filename)
15886+
15887+ node = self.path.find_resource(filename)
15888+ if not node: raise Utils.WafError('cannot find %s' % filename)
15889+
15890+ if self.type == 'latex':
15891+ task = self.create_task('latex', node, node.change_ext('.dvi'))
15892+ elif self.type == 'pdflatex':
15893+ task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
15894+
15895+ task.env = self.env
15896+ task.curdirnode = self.path
15897+
15898+ # add the manual dependencies
15899+ if deps_lst:
15900+ variant = node.variant(self.env)
15901+ try:
15902+ lst = tree.node_deps[task.unique_id()]
15903+ for n in deps_lst:
15904+ if not n in lst:
15905+ lst.append(n)
15906+ except KeyError:
15907+ tree.node_deps[task.unique_id()] = deps_lst
15908+
15909+ if self.type == 'latex':
15910+ if 'ps' in outs:
15911+ tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
15912+ tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)}
15913+ if 'pdf' in outs:
15914+ tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
15915+ tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)}
15916+ elif self.type == 'pdflatex':
15917+ if 'ps' in outs:
15918+ self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
15919+ self.source = []
15920+
15921+def detect(conf):
15922+ v = conf.env
15923+ for p in 'tex latex pdflatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
15924+ conf.find_program(p, var=p.upper())
15925+ v[p.upper()+'FLAGS'] = ''
15926+ v['DVIPSFLAGS'] = '-Ppdf'
15927+
15928+b = Task.simple_task_type
15929+b('tex', '${TEX} ${TEXFLAGS} ${SRC}', color='BLUE', shell=False) # not used anywhere
15930+b('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRC}', color='BLUE', shell=False) # not used anywhere
15931+b('dvips', '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
15932+b('dvipdf', '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
15933+b('pdf2ps', '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}', color='BLUE', after="dvipdf pdflatex", shell=False)
15934+
15935+b = Task.task_type_from_func
15936+cls = b('latex', latex_build, vars=latex_vardeps)
15937+cls.scan = scan
15938+cls = b('pdflatex', pdflatex_build, vars=pdflatex_vardeps)
15939+cls.scan = scan
15940+
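apply_tex reads the attributes type, outs, deps and prompt off the task generator, so a hypothetical wscript using this tool might look like the following sketch (file names are made up):

   def configure(conf):
       conf.check_tool('tex')        # locates latex, pdflatex, bibtex, makeindex, dvips, ...

   def build(bld):
       bld(features='tex',
           type='pdflatex',          # or 'latex' for DVI output
           source='report.tex',
           outs='ps',                # extra dvips/pdf2ps conversions, see apply_tex
           deps='intro.tex fig.eps', # manual deps, in addition to the \include/\input scan
           prompt=0)                 # 0 -> run in -interaction=batchmode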
15941diff --git a/buildtools/wafadmin/Tools/unittestw.py b/buildtools/wafadmin/Tools/unittestw.py
15942new file mode 100644
15943index 0000000..0e30a51
15944--- /dev/null
15945+++ b/buildtools/wafadmin/Tools/unittestw.py
15946@@ -0,0 +1,310 @@
15947+#!/usr/bin/env python
15948+# encoding: utf-8
15949+# Carlos Rafael Giani, 2006
15950+
15951+"""
15952+Unit tests run in the shutdown() method, and for c/c++ programs
15953+
15954+One should NOT have to give parameters to programs to execute
15955+
15956+In the shutdown method, add the following code:
15957+
15958+ >>> def shutdown():
15959+ ... ut = UnitTest.unit_test()
15960+ ... ut.run()
15961+ ... ut.print_results()
15962+
15963+
15964+Each object to use as a unit test must be a program and must have X{obj.unit_test=1}
15965+"""
15966+import os, sys
15967+import Build, TaskGen, Utils, Options, Logs, Task
15968+from TaskGen import before, after, feature
15969+from Constants import *
15970+
15971+class unit_test(object):
15972+ "Unit test representation"
15973+ def __init__(self):
15974+ self.returncode_ok = 0 # Unit test returncode considered OK. All returncodes differing from this one
15975+ # will cause the unit test to be marked as "FAILED".
15976+
15977+ # The following variables are filled with data by run().
15978+
15979+ # print_results() uses these for printing the unit test summary,
15980+ # but if there is need for direct access to the results,
15981+ # they can be retrieved here, after calling run().
15982+
15983+ self.num_tests_ok = 0 # Number of successful unit tests
15984+ self.num_tests_failed = 0 # Number of failed unit tests
15985+ self.num_tests_err = 0 # Tests that have not even run
15986+ self.total_num_tests = 0 # Total amount of unit tests
15987+ self.max_label_length = 0 # Maximum label length (pretty-print the output)
15988+
15989+ self.unit_tests = Utils.ordered_dict() # Unit test dictionary. Key: the label (unit test filename relative
15990+ # to the build dir), value: unit test filename with absolute path
15991+ self.unit_test_results = {} # Dictionary containing the unit test results.
15992+ # Key: the label, value: result (true = success false = failure)
15993+ self.unit_test_erroneous = {} # Dictionary indicating erroneous unit tests.
15994+ # Key: the label, value: true = unit test has an error false = unit test is ok
15995+ self.change_to_testfile_dir = False #True if the test file needs to be executed from the same dir
15996+ self.want_to_see_test_output = False #True to see the stdout from the testfile (for example check suites)
15997+ self.want_to_see_test_error = False #True to see the stderr from the testfile (for example check suites)
15998+ self.run_if_waf_does = 'check' #build was the old default
15999+
16000+ def run(self):
16001+ "Run the unit tests and gather results (note: no output here)"
16002+
16003+ self.num_tests_ok = 0
16004+ self.num_tests_failed = 0
16005+ self.num_tests_err = 0
16006+ self.total_num_tests = 0
16007+ self.max_label_length = 0
16008+
16009+ self.unit_tests = Utils.ordered_dict()
16010+ self.unit_test_results = {}
16011+ self.unit_test_erroneous = {}
16012+
16013+ ld_library_path = []
16014+
16015+ # If waf is not building, don't run anything
16016+ if not Options.commands[self.run_if_waf_does]: return
16017+
16018+ # Get the paths for the shared libraries, and obtain the unit tests to execute
16019+ for obj in Build.bld.all_task_gen:
16020+ try:
16021+ link_task = obj.link_task
16022+ except AttributeError:
16023+ pass
16024+ else:
16025+ lib_path = link_task.outputs[0].parent.abspath(obj.env)
16026+ if lib_path not in ld_library_path:
16027+ ld_library_path.append(lib_path)
16028+
16029+ unit_test = getattr(obj, 'unit_test', '')
16030+ if unit_test and 'cprogram' in obj.features:
16031+ try:
16032+ output = obj.path
16033+ filename = os.path.join(output.abspath(obj.env), obj.target)
16034+ srcdir = output.abspath()
16035+ label = os.path.join(output.bldpath(obj.env), obj.target)
16036+ self.max_label_length = max(self.max_label_length, len(label))
16037+ self.unit_tests[label] = (filename, srcdir)
16038+ except KeyError:
16039+ pass
16040+ self.total_num_tests = len(self.unit_tests)
16041+ # Now run the unit tests
16042+ Utils.pprint('GREEN', 'Running the unit tests')
16043+ count = 0
16044+ result = 1
16045+
16046+ for label in self.unit_tests.allkeys:
16047+ file_and_src = self.unit_tests[label]
16048+ filename = file_and_src[0]
16049+ srcdir = file_and_src[1]
16050+ count += 1
16051+ line = Build.bld.progress_line(count, self.total_num_tests, Logs.colors.GREEN, Logs.colors.NORMAL)
16052+ if Options.options.progress_bar and line:
16053+ sys.stderr.write(line)
16054+ sys.stderr.flush()
16055+ try:
16056+ kwargs = {}
16057+ kwargs['env'] = os.environ.copy()
16058+ if self.change_to_testfile_dir:
16059+ kwargs['cwd'] = srcdir
16060+ if not self.want_to_see_test_output:
16061+ kwargs['stdout'] = Utils.pproc.PIPE # PIPE for ignoring output
16062+ if not self.want_to_see_test_error:
16063+ kwargs['stderr'] = Utils.pproc.PIPE # PIPE for ignoring output
16064+ if ld_library_path:
16065+ v = kwargs['env']
16066+ def add_path(dct, path, var):
16067+ dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
16068+ if sys.platform == 'win32':
16069+ add_path(v, ld_library_path, 'PATH')
16070+ elif sys.platform == 'darwin':
16071+ add_path(v, ld_library_path, 'DYLD_LIBRARY_PATH')
16072+ add_path(v, ld_library_path, 'LD_LIBRARY_PATH')
16073+ else:
16074+ add_path(v, ld_library_path, 'LD_LIBRARY_PATH')
16075+
16076+ pp = Utils.pproc.Popen(filename, **kwargs)
16077+ (out, err) = pp.communicate() # uh, and the output is ignored?? - fortunately this is going to disappear
16078+
16079+ result = int(pp.returncode == self.returncode_ok)
16080+
16081+ if result:
16082+ self.num_tests_ok += 1
16083+ else:
16084+ self.num_tests_failed += 1
16085+
16086+ self.unit_test_results[label] = result
16087+ self.unit_test_erroneous[label] = 0
16088+ except OSError:
16089+ self.unit_test_erroneous[label] = 1
16090+ self.num_tests_err += 1
16091+ except KeyboardInterrupt:
16092+ pass
16093+ if Options.options.progress_bar: sys.stdout.write(Logs.colors.cursor_on)
16094+
16095+ def print_results(self):
16096+ "Pretty-prints a summary of all unit tests, along with some statistics"
16097+
16098+ # If waf is not building, don't output anything
16099+ if not Options.commands[self.run_if_waf_does]: return
16100+
16101+ p = Utils.pprint
16102+ # Early quit if no tests were performed
16103+ if self.total_num_tests == 0:
16104+ p('YELLOW', 'No unit tests present')
16105+ return
16106+
16107+ for label in self.unit_tests.allkeys:
16108+ filename = self.unit_tests[label]
16109+ err = 0
16110+ result = 0
16111+
16112+ try: err = self.unit_test_erroneous[label]
16113+ except KeyError: pass
16114+
16115+ try: result = self.unit_test_results[label]
16116+ except KeyError: pass
16117+
16118+ n = self.max_label_length - len(label)
16119+ if err: n += 4
16120+ elif result: n += 7
16121+ else: n += 3
16122+
16123+ line = '%s %s' % (label, '.' * n)
16124+
16125+ if err: p('RED', '%sERROR' % line)
16126+ elif result: p('GREEN', '%sOK' % line)
16127+ else: p('YELLOW', '%sFAILED' % line)
16128+
16129+ percentage_ok = float(self.num_tests_ok) / float(self.total_num_tests) * 100.0
16130+ percentage_failed = float(self.num_tests_failed) / float(self.total_num_tests) * 100.0
16131+ percentage_erroneous = float(self.num_tests_err) / float(self.total_num_tests) * 100.0
16132+
16133+ p('NORMAL', '''
16134+Successful tests: %i (%.1f%%)
16135+Failed tests: %i (%.1f%%)
16136+Erroneous tests: %i (%.1f%%)
16137+
16138+Total number of tests: %i
16139+''' % (self.num_tests_ok, percentage_ok, self.num_tests_failed, percentage_failed,
16140+ self.num_tests_err, percentage_erroneous, self.total_num_tests))
16141+ p('GREEN', 'Unit tests finished')
16142+
16143+
16144+############################################################################################
16145+
16146+"""
16147+New unit test system
16148+
16149+The targets with feature 'test' are executed after they are built
16150+bld(features='cprogram cc test', ...)
16151+
16152+To display the results:
16153+import UnitTest
16154+bld.add_post_fun(UnitTest.summary)
16155+"""
16156+
16157+import threading
16158+testlock = threading.Lock()
16159+
16160+def set_options(opt):
16161+ opt.add_option('--alltests', action='store_true', default=True, help='Exec all unit tests', dest='all_tests')
16162+
16163+@feature('test')
16164+@after('apply_link', 'vars_target_cprogram')
16165+def make_test(self):
16166+ if not 'cprogram' in self.features:
16167+ Logs.error('test cannot be executed %s' % self)
16168+ return
16169+
16170+ self.default_install_path = None
16171+ self.create_task('utest', self.link_task.outputs)
16172+
16173+def exec_test(self):
16174+
16175+ status = 0
16176+
16177+ variant = self.env.variant()
16178+
16179+ filename = self.inputs[0].abspath(self.env)
16180+ self.ut_exec = getattr(self, 'ut_exec', [filename])
16181+ if getattr(self.generator, 'ut_fun', None):
16182+ self.generator.ut_fun(self)
16183+
16184+ try:
16185+ fu = getattr(self.generator.bld, 'all_test_paths')
16186+ except AttributeError:
16187+ fu = os.environ.copy()
16188+ self.generator.bld.all_test_paths = fu
16189+
16190+ lst = []
16191+ for obj in self.generator.bld.all_task_gen:
16192+ link_task = getattr(obj, 'link_task', None)
16193+ if link_task and link_task.env.variant() == variant:
16194+ lst.append(link_task.outputs[0].parent.abspath(obj.env))
16195+
16196+ def add_path(dct, path, var):
16197+ dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
16198+
16199+ if sys.platform == 'win32':
16200+ add_path(fu, lst, 'PATH')
16201+ elif sys.platform == 'darwin':
16202+ add_path(fu, lst, 'DYLD_LIBRARY_PATH')
16203+ add_path(fu, lst, 'LD_LIBRARY_PATH')
16204+ else:
16205+ add_path(fu, lst, 'LD_LIBRARY_PATH')
16206+
16207+
16208+ cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath(self.env)
16209+ proc = Utils.pproc.Popen(self.ut_exec, cwd=cwd, env=fu, stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE)
16210+ (stdout, stderr) = proc.communicate()
16211+
16212+ tup = (filename, proc.returncode, stdout, stderr)
16213+ self.generator.utest_result = tup
16214+
16215+ testlock.acquire()
16216+ try:
16217+ bld = self.generator.bld
16218+ Logs.debug("ut: %r", tup)
16219+ try:
16220+ bld.utest_results.append(tup)
16221+ except AttributeError:
16222+ bld.utest_results = [tup]
16223+ finally:
16224+ testlock.release()
16225+
16226+cls = Task.task_type_from_func('utest', func=exec_test, color='PINK', ext_in='.bin')
16227+
16228+old = cls.runnable_status
16229+def test_status(self):
16230+ ret = old(self)
16231+ if ret == SKIP_ME and getattr(Options.options, 'all_tests', False):
16232+ return RUN_ME
16233+ return ret
16234+
16235+cls.runnable_status = test_status
16236+cls.quiet = 1
16237+
16238+def summary(bld):
16239+ lst = getattr(bld, 'utest_results', [])
16240+ if lst:
16241+ Utils.pprint('CYAN', 'execution summary')
16242+
16243+ total = len(lst)
16244+ tfail = len([x for x in lst if x[1]])
16245+
16246+ Utils.pprint('CYAN', ' tests that pass %d/%d' % (total-tfail, total))
16247+ for (f, code, out, err) in lst:
16248+ if not code:
16249+ Utils.pprint('CYAN', ' %s' % f)
16250+
16251+ Utils.pprint('CYAN', ' tests that fail %d/%d' % (tfail, total))
16252+ for (f, code, out, err) in lst:
16253+ if code:
16254+ Utils.pprint('CYAN', ' %s' % f)
16255+
16256+
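Two styles coexist in this module: the older unit_test class driven from shutdown(), and the newer 'test' feature. A sketch of the newer style, assuming the module is importable under the name it is installed as here (unittestw) and with a hypothetical target name:

   import unittestw

   def build(bld):
       bld(features='cc cprogram test', source='test_foo.c', target='test_foo')
       bld.add_post_fun(unittestw.summary)   # print the pass/fail summary after the build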
16257diff --git a/buildtools/wafadmin/Tools/vala.py b/buildtools/wafadmin/Tools/vala.py
16258new file mode 100644
16259index 0000000..753ee8d
16260--- /dev/null
16261+++ b/buildtools/wafadmin/Tools/vala.py
16262@@ -0,0 +1,308 @@
16263+#!/usr/bin/env python
16264+# encoding: utf-8
16265+# Ali Sabil, 2007
16266+
16267+import os.path, shutil
16268+import Task, Runner, Utils, Logs, Build, Node, Options
16269+from TaskGen import extension, after, before
16270+
16271+EXT_VALA = ['.vala', '.gs']
16272+
16273+class valac_task(Task.Task):
16274+
16275+ vars = ("VALAC", "VALAC_VERSION", "VALAFLAGS")
16276+ before = ("cc", "cxx")
16277+
16278+ def run(self):
16279+ env = self.env
16280+ inputs = [a.srcpath(env) for a in self.inputs]
16281+ valac = env['VALAC']
16282+ vala_flags = env.get_flat('VALAFLAGS')
16283+ top_src = self.generator.bld.srcnode.abspath()
16284+ top_bld = self.generator.bld.srcnode.abspath(env)
16285+
16286+ if env['VALAC_VERSION'] > (0, 1, 6):
16287+ cmd = [valac, '-C', '--quiet', vala_flags]
16288+ else:
16289+ cmd = [valac, '-C', vala_flags]
16290+
16291+ if self.threading:
16292+ cmd.append('--thread')
16293+
16294+ if self.profile:
16295+ cmd.append('--profile=%s' % self.profile)
16296+
16297+ if self.target_glib:
16298+ cmd.append('--target-glib=%s' % self.target_glib)
16299+
16300+ features = self.generator.features
16301+
16302+ if 'cshlib' in features or 'cstaticlib' in features:
16303+ output_dir = self.outputs[0].bld_dir(env)
16304+ cmd.append('--library ' + self.target)
16305+ if env['VALAC_VERSION'] >= (0, 7, 0):
16306+ for x in self.outputs:
16307+ if x.name.endswith('.h'):
16308+ cmd.append('--header ' + x.bldpath(self.env))
16309+ cmd.append('--basedir ' + top_src)
16310+ cmd.append('-d ' + top_bld)
16311+ if env['VALAC_VERSION'] > (0, 7, 2) and hasattr(self, 'gir'):
16312+ cmd.append('--gir=%s.gir' % self.gir)
16313+
16314+ else:
16315+ output_dir = self.outputs[0].bld_dir(env)
16316+ cmd.append('-d %s' % output_dir)
16317+
16318+ for vapi_dir in self.vapi_dirs:
16319+ cmd.append('--vapidir=%s' % vapi_dir)
16320+
16321+ for package in self.packages:
16322+ cmd.append('--pkg %s' % package)
16323+
16324+ for package in self.packages_private:
16325+ cmd.append('--pkg %s' % package)
16326+
16327+ cmd.append(" ".join(inputs))
16328+ result = self.generator.bld.exec_command(" ".join(cmd))
16329+
16330+ if not 'cprogram' in features:
16331+ # generate the .deps file
16332+ if self.packages:
16333+ filename = os.path.join(self.generator.path.abspath(env), "%s.deps" % self.target)
16334+ deps = open(filename, 'w')
16335+ for package in self.packages:
16336+ deps.write(package + '\n')
16337+ deps.close()
16338+
16339+ # handle vala 0.1.6 which doesn't honor --directory for the generated .vapi
16340+ self._fix_output("../%s.vapi" % self.target)
16341+ # handle vala >= 0.1.7 which has a weird definition for --directory
16342+ self._fix_output("%s.vapi" % self.target)
16343+ # handle vala >= 0.2.0 which doesn't honor --directory for the generated .gidl
16344+ self._fix_output("%s.gidl" % self.target)
16345+ # handle vala >= 0.3.6 which doesn't honor --directory for the generated .gir
16346+ self._fix_output("%s.gir" % self.target)
16347+ if hasattr(self, 'gir'):
16348+ self._fix_output("%s.gir" % self.gir)
16349+
16350+ first = None
16351+ for node in self.outputs:
16352+ if not first:
16353+ first = node
16354+ else:
16355+ if first.parent.id != node.parent.id:
16356+ # issue #483
16357+ if env['VALAC_VERSION'] < (0, 7, 0):
16358+ shutil.move(first.parent.abspath(self.env) + os.sep + node.name, node.abspath(self.env))
16359+ return result
16360+
16361+ def install(self):
16362+ bld = self.generator.bld
16363+ features = self.generator.features
16364+
16365+ if self.attr("install_path") and ("cshlib" in features or "cstaticlib" in features):
16366+ headers_list = [o for o in self.outputs if o.suffix() == ".h"]
16367+ vapi_list = [o for o in self.outputs if (o.suffix() in (".vapi", ".deps"))]
16368+ gir_list = [o for o in self.outputs if o.suffix() == ".gir"]
16369+
16370+ for header in headers_list:
16371+ top_src = self.generator.bld.srcnode
16372+ package = self.env['PACKAGE']
16373+ try:
16374+ api_version = Utils.g_module.API_VERSION
16375+ except AttributeError:
16376+ version = Utils.g_module.VERSION.split(".")
16377+ if version[0] == "0":
16378+ api_version = "0." + version[1]
16379+ else:
16380+ api_version = version[0] + ".0"
16381+ install_path = '${INCLUDEDIR}/%s-%s/%s' % (package, api_version, header.relpath_gen(top_src))
16382+ bld.install_as(install_path, header, self.env)
16383+ bld.install_files('${DATAROOTDIR}/vala/vapi', vapi_list, self.env)
16384+ bld.install_files('${DATAROOTDIR}/gir-1.0', gir_list, self.env)
16385+
16386+ def _fix_output(self, output):
16387+ top_bld = self.generator.bld.srcnode.abspath(self.env)
16388+ try:
16389+ src = os.path.join(top_bld, output)
16390+ dst = self.generator.path.abspath (self.env)
16391+ shutil.move(src, dst)
16392+ except:
16393+ pass
16394+
16395+@extension(EXT_VALA)
16396+def vala_file(self, node):
16397+ valatask = getattr(self, "valatask", None)
16398+ # there is only one vala task and it compiles all vala files .. :-/
16399+ if not valatask:
16400+ valatask = self.create_task('valac')
16401+ self.valatask = valatask
16402+ self.includes = Utils.to_list(getattr(self, 'includes', []))
16403+ self.uselib = self.to_list(self.uselib)
16404+ valatask.packages = []
16405+ valatask.packages_private = Utils.to_list(getattr(self, 'packages_private', []))
16406+ valatask.vapi_dirs = []
16407+ valatask.target = self.target
16408+ valatask.threading = False
16409+ valatask.install_path = self.install_path
16410+ valatask.profile = getattr (self, 'profile', 'gobject')
16411+ valatask.target_glib = None #Deprecated
16412+
16413+ packages = Utils.to_list(getattr(self, 'packages', []))
16414+ vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
16415+ includes = []
16416+
16417+ if hasattr(self, 'uselib_local'):
16418+ local_packages = Utils.to_list(self.uselib_local)
16419+ seen = []
16420+ while len(local_packages) > 0:
16421+ package = local_packages.pop()
16422+ if package in seen:
16423+ continue
16424+ seen.append(package)
16425+
16426+ # check if the package exists
16427+ package_obj = self.name_to_obj(package)
16428+ if not package_obj:
16429+ raise Utils.WafError("object '%s' was not found in uselib_local (required by '%s')" % (package, self.name))
16430+
16431+ package_name = package_obj.target
16432+ package_node = package_obj.path
16433+ package_dir = package_node.relpath_gen(self.path)
16434+
16435+ for task in package_obj.tasks:
16436+ for output in task.outputs:
16437+ if output.name == package_name + ".vapi":
16438+ valatask.set_run_after(task)
16439+ if package_name not in packages:
16440+ packages.append(package_name)
16441+ if package_dir not in vapi_dirs:
16442+ vapi_dirs.append(package_dir)
16443+ if package_dir not in includes:
16444+ includes.append(package_dir)
16445+
16446+ if hasattr(package_obj, 'uselib_local'):
16447+ lst = self.to_list(package_obj.uselib_local)
16448+ lst.reverse()
16449+ local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages
16450+
16451+ valatask.packages = packages
16452+ for vapi_dir in vapi_dirs:
16453+ try:
16454+ valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
16455+ valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath(self.env))
16456+ except AttributeError:
16457+ Logs.warn("Unable to locate Vala API directory: '%s'" % vapi_dir)
16458+
16459+ self.includes.append(node.bld.srcnode.abspath())
16460+ self.includes.append(node.bld.srcnode.abspath(self.env))
16461+ for include in includes:
16462+ try:
16463+ self.includes.append(self.path.find_dir(include).abspath())
16464+ self.includes.append(self.path.find_dir(include).abspath(self.env))
16465+ except AttributeError:
16466+ Logs.warn("Unable to locate include directory: '%s'" % include)
16467+
16468+ if valatask.profile == 'gobject':
16469+ if hasattr(self, 'target_glib'):
16470+ Logs.warn ('target_glib on vala tasks is deprecated, use --vala-target-glib=MAJOR.MINOR from the vala tool options')
16471+
16472+ if getattr(Options.options, 'vala_target_glib', None):
16473+ valatask.target_glib = Options.options.vala_target_glib
16474+
16475+ if not 'GOBJECT' in self.uselib:
16476+ self.uselib.append('GOBJECT')
16477+
16478+ if hasattr(self, 'threading'):
16479+ if valatask.profile == 'gobject':
16480+ valatask.threading = self.threading
16481+ if not 'GTHREAD' in self.uselib:
16482+ self.uselib.append('GTHREAD')
16483+ else:
16484+ #Vala doesn't have threading support for dova nor posix
16485+ Logs.warn("Profile %s does not have threading support" % valatask.profile)
16486+
16487+ if hasattr(self, 'gir'):
16488+ valatask.gir = self.gir
16489+
16490+ env = valatask.env
16491+
16492+ output_nodes = []
16493+
16494+ c_node = node.change_ext('.c')
16495+ output_nodes.append(c_node)
16496+ self.allnodes.append(c_node)
16497+
16498+ if env['VALAC_VERSION'] < (0, 7, 0):
16499+ output_nodes.append(node.change_ext('.h'))
16500+ else:
16501+ if not 'cprogram' in self.features:
16502+ output_nodes.append(self.path.find_or_declare('%s.h' % self.target))
16503+
16504+ if not 'cprogram' in self.features:
16505+ output_nodes.append(self.path.find_or_declare('%s.vapi' % self.target))
16506+ if env['VALAC_VERSION'] > (0, 7, 2):
16507+ if hasattr(self, 'gir'):
16508+ output_nodes.append(self.path.find_or_declare('%s.gir' % self.gir))
16509+ elif env['VALAC_VERSION'] > (0, 3, 5):
16510+ output_nodes.append(self.path.find_or_declare('%s.gir' % self.target))
16511+ elif env['VALAC_VERSION'] > (0, 1, 7):
16512+ output_nodes.append(self.path.find_or_declare('%s.gidl' % self.target))
16513+ if valatask.packages:
16514+ output_nodes.append(self.path.find_or_declare('%s.deps' % self.target))
16515+
16516+ valatask.inputs.append(node)
16517+ valatask.outputs.extend(output_nodes)
16518+
16519+def detect(conf):
16520+ min_version = (0, 1, 6)
16521+ min_version_str = "%d.%d.%d" % min_version
16522+
16523+ valac = conf.find_program('valac', var='VALAC', mandatory=True)
16524+
16525+ if not conf.env["HAVE_GOBJECT"]:
16526+ pkg_args = {'package': 'gobject-2.0',
16527+ 'uselib_store': 'GOBJECT',
16528+ 'args': '--cflags --libs'}
16529+ if getattr(Options.options, 'vala_target_glib', None):
16530+ pkg_args['atleast_version'] = Options.options.vala_target_glib
16531+
16532+ conf.check_cfg(**pkg_args)
16533+
16534+ if not conf.env["HAVE_GTHREAD"]:
16535+ pkg_args = {'package': 'gthread-2.0',
16536+ 'uselib_store': 'GTHREAD',
16537+ 'args': '--cflags --libs'}
16538+ if getattr(Options.options, 'vala_target_glib', None):
16539+ pkg_args['atleast_version'] = Options.options.vala_target_glib
16540+
16541+ conf.check_cfg(**pkg_args)
16542+
16543+ try:
16544+ output = Utils.cmd_output(valac + " --version", silent=True)
16545+ version = output.split(' ', 1)[-1].strip().split(".")[0:3]
16546+ version = [int(x) for x in version]
16547+ valac_version = tuple(version)
16548+ except Exception:
16549+ valac_version = (0, 0, 0)
16550+
16551+ conf.check_message('program version',
16552+ 'valac >= ' + min_version_str,
16553+ valac_version >= min_version,
16554+ "%d.%d.%d" % valac_version)
16555+
16556+ conf.check_tool('gnu_dirs')
16557+
16558+ if valac_version < min_version:
16559+ conf.fatal("valac version too old to be used with this tool")
16560+ return
16561+
16562+ conf.env['VALAC_VERSION'] = valac_version
16563+ conf.env['VALAFLAGS'] = ''
16564+
16565+def set_options (opt):
16566+ valaopts = opt.add_option_group('Vala Compiler Options')
16567+ valaopts.add_option ('--vala-target-glib', default=None,
16568+ dest='vala_target_glib', metavar='MAJOR.MINOR',
16569+ help='Target version of glib for Vala GObject code generation')
16570+
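vala_file pulls packages, vapi_dirs, threading, gir and related attributes off the task generator, so a library built from Vala sources could be declared roughly as below (a sketch; the package, uselib and target names are assumptions):

   def configure(conf):
       conf.check_tool('compiler_cc')
       conf.check_tool('vala')

   def build(bld):
       bld(features='cc cshlib',
           source='widget.vala',
           target='mywidget',
           packages='gobject-2.0',   # forwarded as --pkg arguments
           uselib='GOBJECT',         # C flags/libs for the generated C code
           vapi_dirs='vapi',         # extra --vapidir entries
           threading=True)           # adds --thread and GTHREAD for the gobject profile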
16571diff --git a/buildtools/wafadmin/Tools/winres.py b/buildtools/wafadmin/Tools/winres.py
16572new file mode 100644
16573index 0000000..2500d43
16574--- /dev/null
16575+++ b/buildtools/wafadmin/Tools/winres.py
16576@@ -0,0 +1,45 @@
16577+#!/usr/bin/env python
16578+# encoding: utf-8
16579+# Brant Young, 2007
16580+
16581+"This hook is called when the class cpp/cc task generator encounters a '.rc' file: X{.rc -> [.res|.rc.o]}"
16582+
16583+import os, sys, re
16584+import TaskGen, Task
16585+from Utils import quote_whitespace
16586+from TaskGen import extension
16587+
16588+EXT_WINRC = ['.rc']
16589+
16590+winrc_str = '${WINRC} ${_CPPDEFFLAGS} ${_CCDEFFLAGS} ${WINRCFLAGS} ${_CPPINCFLAGS} ${_CCINCFLAGS} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
16591+
16592+@extension(EXT_WINRC)
16593+def rc_file(self, node):
16594+ obj_ext = '.rc.o'
16595+ if self.env['WINRC_TGT_F'] == '/fo': obj_ext = '.res'
16596+
16597+ rctask = self.create_task('winrc', node, node.change_ext(obj_ext))
16598+ self.compiled_tasks.append(rctask)
16599+
16600+# create our action, for use with rc file
16601+Task.simple_task_type('winrc', winrc_str, color='BLUE', before='cc cxx', shell=False)
16602+
16603+def detect(conf):
16604+ v = conf.env
16605+
16606+ winrc = v['WINRC']
16607+ v['WINRC_TGT_F'] = '-o'
16608+ v['WINRC_SRC_F'] = '-i'
16609+ # find rc.exe
16610+ if not winrc:
16611+ if v['CC_NAME'] in ['gcc', 'cc', 'g++', 'c++']:
16612+ winrc = conf.find_program('windres', var='WINRC', path_list = v['PATH'])
16613+ elif v['CC_NAME'] == 'msvc':
16614+ winrc = conf.find_program('RC', var='WINRC', path_list = v['PATH'])
16615+ v['WINRC_TGT_F'] = '/fo'
16616+ v['WINRC_SRC_F'] = ''
16617+ if not winrc:
16618+ conf.fatal('winrc was not found!')
16619+
16620+ v['WINRCFLAGS'] = ''
16621+
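Because the extension hook registers '.rc', resource files are simply listed among the sources once a C compiler and this tool have been configured. A minimal sketch with hypothetical file names:

   def configure(conf):
       conf.check_tool('compiler_cc')   # must come first so CC_NAME is set
       conf.check_tool('winres')        # then locate windres (gcc) or RC (msvc)

   def build(bld):
       bld(features='cc cprogram', source='main.c app.rc', target='app')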
16622diff --git a/buildtools/wafadmin/Tools/xlc.py b/buildtools/wafadmin/Tools/xlc.py
16623new file mode 100644
16624index 0000000..e33b7a1
16625--- /dev/null
16626+++ b/buildtools/wafadmin/Tools/xlc.py
16627@@ -0,0 +1,78 @@
16628+#!/usr/bin/env python
16629+# encoding: utf-8
16630+# Thomas Nagy, 2006-2008 (ita)
16631+# Ralf Habacker, 2006 (rh)
16632+# Yinon Ehrlich, 2009
16633+# Michael Kuhn, 2009
16634+
16635+import os, sys
16636+import Configure, Options, Utils
16637+import ccroot, ar
16638+from Configure import conftest
16639+
16640+@conftest
16641+def find_xlc(conf):
16642+ cc = conf.find_program(['xlc_r', 'xlc'], var='CC', mandatory=True)
16643+ cc = conf.cmd_to_list(cc)
16644+ conf.env.CC_NAME = 'xlc'
16645+ conf.env.CC = cc
16646+
16647+@conftest
16648+def find_cpp(conf):
16649+ v = conf.env
16650+ cpp = None
16651+ if v['CPP']: cpp = v['CPP']
16652+ elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
16653+ #if not cpp: cpp = v['CC']
16654+ v['CPP'] = cpp
16655+
16656+@conftest
16657+def xlc_common_flags(conf):
16658+ v = conf.env
16659+
16660+ # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
16661+ v['CCFLAGS_DEBUG'] = ['-g']
16662+ v['CCFLAGS_RELEASE'] = ['-O2']
16663+
16664+ v['CC_SRC_F'] = ''
16665+ v['CC_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
16666+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
16667+
16668+ # linker
16669+ if not v['LINK_CC']: v['LINK_CC'] = v['CC']
16670+ v['CCLNK_SRC_F'] = ''
16671+ v['CCLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
16672+
16673+ v['LIB_ST'] = '-l%s' # template for adding libs
16674+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
16675+ v['STATICLIB_ST'] = '-l%s'
16676+ v['STATICLIBPATH_ST'] = '-L%s'
16677+ v['RPATH_ST'] = '-Wl,-rpath,%s'
16678+ v['CCDEFINES_ST'] = '-D%s'
16679+
16680+ v['SONAME_ST'] = ''
16681+ v['SHLIB_MARKER'] = ''
16682+ v['STATICLIB_MARKER'] = ''
16683+ v['FULLSTATIC_MARKER'] = '-static'
16684+
16685+ # program
16686+ v['program_LINKFLAGS'] = ['-Wl,-brtl']
16687+ v['program_PATTERN'] = '%s'
16688+
16689+ # shared library
16690+ v['shlib_CCFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
16691+ v['shlib_LINKFLAGS'] = ['-G', '-Wl,-brtl,-bexpfull']
16692+ v['shlib_PATTERN'] = 'lib%s.so'
16693+
16694+ # static lib
16695+ v['staticlib_LINKFLAGS'] = ''
16696+ v['staticlib_PATTERN'] = 'lib%s.a'
16697+
16698+def detect(conf):
16699+ conf.find_xlc()
16700+ conf.find_cpp()
16701+ conf.find_ar()
16702+ conf.xlc_common_flags()
16703+ conf.cc_load_tools()
16704+ conf.cc_add_flags()
16705+ conf.link_add_flags()
16706diff --git a/buildtools/wafadmin/Tools/xlcxx.py b/buildtools/wafadmin/Tools/xlcxx.py
16707new file mode 100644
16708index 0000000..6e84662
16709--- /dev/null
16710+++ b/buildtools/wafadmin/Tools/xlcxx.py
16711@@ -0,0 +1,78 @@
16712+#!/usr/bin/env python
16713+# encoding: utf-8
16714+# Thomas Nagy, 2006 (ita)
16715+# Ralf Habacker, 2006 (rh)
16716+# Yinon Ehrlich, 2009
16717+# Michael Kuhn, 2009
16718+
16719+import os, sys
16720+import Configure, Options, Utils
16721+import ccroot, ar
16722+from Configure import conftest
16723+
16724+@conftest
16725+def find_xlcxx(conf):
16726+ cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX', mandatory=True)
16727+ cxx = conf.cmd_to_list(cxx)
16728+ conf.env.CXX_NAME = 'xlc++'
16729+ conf.env.CXX = cxx
16730+
16731+@conftest
16732+def find_cpp(conf):
16733+ v = conf.env
16734+ cpp = None
16735+ if v['CPP']: cpp = v['CPP']
16736+ elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
16737+ #if not cpp: cpp = v['CXX']
16738+ v['CPP'] = cpp
16739+
16740+@conftest
16741+def xlcxx_common_flags(conf):
16742+ v = conf.env
16743+
16744+ # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
16745+ v['CXXFLAGS_DEBUG'] = ['-g']
16746+ v['CXXFLAGS_RELEASE'] = ['-O2']
16747+
16748+ v['CXX_SRC_F'] = ''
16749+ v['CXX_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
16750+ v['CPPPATH_ST'] = '-I%s' # template for adding include paths
16751+
16752+ # linker
16753+ if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
16754+ v['CXXLNK_SRC_F'] = ''
16755+ v['CXXLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
16756+
16757+ v['LIB_ST'] = '-l%s' # template for adding libs
16758+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
16759+ v['STATICLIB_ST'] = '-l%s'
16760+ v['STATICLIBPATH_ST'] = '-L%s'
16761+ v['RPATH_ST'] = '-Wl,-rpath,%s'
16762+ v['CXXDEFINES_ST'] = '-D%s'
16763+
16764+ v['SONAME_ST'] = ''
16765+ v['SHLIB_MARKER'] = ''
16766+ v['STATICLIB_MARKER'] = ''
16767+ v['FULLSTATIC_MARKER'] = '-static'
16768+
16769+ # program
16770+ v['program_LINKFLAGS'] = ['-Wl,-brtl']
16771+ v['program_PATTERN'] = '%s'
16772+
16773+ # shared library
16774+ v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro
16775+ v['shlib_LINKFLAGS'] = ['-G', '-Wl,-brtl,-bexpfull']
16776+ v['shlib_PATTERN'] = 'lib%s.so'
16777+
16778+ # static lib
16779+ v['staticlib_LINKFLAGS'] = ''
16780+ v['staticlib_PATTERN'] = 'lib%s.a'
16781+
16782+def detect(conf):
16783+ conf.find_xlcxx()
16784+ conf.find_cpp()
16785+ conf.find_ar()
16786+ conf.xlcxx_common_flags()
16787+ conf.cxx_load_tools()
16788+ conf.cxx_add_flags()
16789+ conf.link_add_flags()
16790diff --git a/buildtools/wafadmin/Utils.py b/buildtools/wafadmin/Utils.py
16791new file mode 100644
16792index 0000000..41dad57
16793--- /dev/null
16794+++ b/buildtools/wafadmin/Utils.py
16795@@ -0,0 +1,726 @@
16796+#!/usr/bin/env python
16797+# encoding: utf-8
16798+# Thomas Nagy, 2005 (ita)
16799+
16800+"""
16801+Utilities, the stable ones are the following:
16802+
16803+* h_file: compute a unique value for a file (hash), it uses
16804+ the module fnv if it is installed (see waf/utils/fnv & http://code.google.com/p/waf/wiki/FAQ)
16805+ else, md5 (see the python docs)
16806+
16807+ For large projects (projects with more than 15000 files) or slow hard disks and filesystems (HFS)
16808+ it is possible to use a hashing based on the path and the size (may give broken cache results)
16809+ The method h_file MUST raise an OSError if the file is a folder
16810+
16811+ import stat
16812+ def h_file(filename):
16813+ st = os.stat(filename)
16814+ if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
16815+ m = Utils.md5()
16816+ m.update(str(st.st_mtime))
16817+ m.update(str(st.st_size))
16818+ m.update(filename)
16819+ return m.digest()
16820+
16821+ To replace the function in your project, use something like this:
16822+ import Utils
16823+ Utils.h_file = h_file
16824+
16825+* h_list
16826+* h_fun
16827+* get_term_cols
16828+* ordered_dict
16829+
16830+"""
16831+
16832+import os, sys, imp, string, errno, traceback, inspect, re, shutil, datetime, gc
16833+
16834+# In python 3.0 we can get rid of all this
16835+try: from UserDict import UserDict
16836+except ImportError: from collections import UserDict
16837+if sys.hexversion >= 0x2060000 or os.name == 'java':
16838+ import subprocess as pproc
16839+else:
16840+ import pproc
16841+import Logs
16842+from Constants import *
16843+
16844+try:
16845+ from collections import deque
16846+except ImportError:
16847+ class deque(list):
16848+ def popleft(self):
16849+ return self.pop(0)
16850+
16851+is_win32 = sys.platform == 'win32'
16852+
16853+try:
16854+ # defaultdict in python 2.5
16855+ from collections import defaultdict as DefaultDict
16856+except ImportError:
16857+ class DefaultDict(dict):
16858+ def __init__(self, default_factory):
16859+ super(DefaultDict, self).__init__()
16860+ self.default_factory = default_factory
16861+ def __getitem__(self, key):
16862+ try:
16863+ return super(DefaultDict, self).__getitem__(key)
16864+ except KeyError:
16865+ value = self.default_factory()
16866+ self[key] = value
16867+ return value
16868+
16869+class WafError(Exception):
16870+ def __init__(self, *args):
16871+ self.args = args
16872+ try:
16873+ self.stack = traceback.extract_stack()
16874+ except:
16875+ pass
16876+ Exception.__init__(self, *args)
16877+ def __str__(self):
16878+ return str(len(self.args) == 1 and self.args[0] or self.args)
16879+
16880+class WscriptError(WafError):
16881+ def __init__(self, message, wscript_file=None):
16882+ if wscript_file:
16883+ self.wscript_file = wscript_file
16884+ self.wscript_line = None
16885+ else:
16886+ try:
16887+ (self.wscript_file, self.wscript_line) = self.locate_error()
16888+ except:
16889+ (self.wscript_file, self.wscript_line) = (None, None)
16890+
16891+ msg_file_line = ''
16892+ if self.wscript_file:
16893+ msg_file_line = "%s:" % self.wscript_file
16894+ if self.wscript_line:
16895+ msg_file_line += "%s:" % self.wscript_line
16896+ err_message = "%s error: %s" % (msg_file_line, message)
16897+ WafError.__init__(self, err_message)
16898+
16899+ def locate_error(self):
16900+ stack = traceback.extract_stack()
16901+ stack.reverse()
16902+ for frame in stack:
16903+ file_name = os.path.basename(frame[0])
16904+ is_wscript = (file_name == WSCRIPT_FILE or file_name == WSCRIPT_BUILD_FILE)
16905+ if is_wscript:
16906+ return (frame[0], frame[1])
16907+ return (None, None)
16908+
16909+indicator = is_win32 and '\x1b[A\x1b[K%s%s%s\r' or '\x1b[K%s%s%s\r'
16910+
16911+try:
16912+ from fnv import new as md5
16913+ import Constants
16914+ Constants.SIG_NIL = 'signofnv'
16915+
16916+ def h_file(filename):
16917+ m = md5()
16918+ try:
16919+ m.hfile(filename)
16920+ x = m.digest()
16921+ if x is None: raise OSError("not a file")
16922+ return x
16923+ except SystemError:
16924+ raise OSError("not a file" + filename)
16925+
16926+except ImportError:
16927+ try:
16928+ try:
16929+ from hashlib import md5
16930+ except ImportError:
16931+ from md5 import md5
16932+
16933+ def h_file(filename):
16934+ f = open(filename, 'rb')
16935+ m = md5()
16936+ while (filename):
16937+ filename = f.read(100000)
16938+ m.update(filename)
16939+ f.close()
16940+ return m.digest()
16941+ except ImportError:
16942+ # portability fixes may be added elsewhere (although, md5 should be everywhere by now)
16943+ md5 = None
16944+
16945+class ordered_dict(UserDict):
16946+ def __init__(self, dict = None):
16947+ self.allkeys = []
16948+ UserDict.__init__(self, dict)
16949+
16950+ def __delitem__(self, key):
16951+ self.allkeys.remove(key)
16952+ UserDict.__delitem__(self, key)
16953+
16954+ def __setitem__(self, key, item):
16955+ if key not in self.allkeys: self.allkeys.append(key)
16956+ UserDict.__setitem__(self, key, item)
16957+
16958+def exec_command(s, **kw):
16959+ if 'log' in kw:
16960+ kw['stdout'] = kw['stderr'] = kw['log']
16961+ del(kw['log'])
16962+ kw['shell'] = isinstance(s, str)
16963+
16964+ try:
16965+ proc = pproc.Popen(s, **kw)
16966+ return proc.wait()
16967+ except OSError:
16968+ return -1
16969+
16970+if is_win32:
16971+ def exec_command(s, **kw):
16972+ if 'log' in kw:
16973+ kw['stdout'] = kw['stderr'] = kw['log']
16974+ del(kw['log'])
16975+ kw['shell'] = isinstance(s, str)
16976+
16977+ if len(s) > 2000:
16978+ startupinfo = pproc.STARTUPINFO()
16979+ startupinfo.dwFlags |= pproc.STARTF_USESHOWWINDOW
16980+ kw['startupinfo'] = startupinfo
16981+
16982+ try:
16983+ if 'stdout' not in kw:
16984+ kw['stdout'] = pproc.PIPE
16985+ kw['stderr'] = pproc.PIPE
16986+ kw['universal_newlines'] = True
16987+ proc = pproc.Popen(s,**kw)
16988+ (stdout, stderr) = proc.communicate()
16989+ Logs.info(stdout)
16990+ if stderr:
16991+ Logs.error(stderr)
16992+ return proc.returncode
16993+ else:
16994+ proc = pproc.Popen(s,**kw)
16995+ return proc.wait()
16996+ except OSError:
16997+ return -1
16998+
16999+listdir = os.listdir
17000+if is_win32:
17001+ def listdir_win32(s):
17002+ if re.match('^[A-Za-z]:$', s):
17003+ # os.path.isdir fails if s contains only the drive name... (x:)
17004+ s += os.sep
17005+ if not os.path.isdir(s):
17006+ e = OSError()
17007+ e.errno = errno.ENOENT
17008+ raise e
17009+ return os.listdir(s)
17010+ listdir = listdir_win32
17011+
17012+def waf_version(mini = 0x010000, maxi = 0x100000):
17013+ "Halts if the waf version is wrong"
17014+ ver = HEXVERSION
17015+ try: min_val = mini + 0
17016+ except TypeError: min_val = int(mini.replace('.', '0'), 16)
17017+
17018+ if min_val > ver:
17019+ Logs.error("waf version should be at least %s (%s found)" % (mini, ver))
17020+ sys.exit(1)
17021+
17022+ try: max_val = maxi + 0
17023+ except TypeError: max_val = int(maxi.replace('.', '0'), 16)
17024+
17025+ if max_val < ver:
17026+ Logs.error("waf version should be at most %s (%s found)" % (maxi, ver))
17027+ sys.exit(1)
17028+
17029+def python_24_guard():
17030+ if sys.hexversion < 0x20400f0 or sys.hexversion >= 0x3000000:
17031+ raise ImportError("Waf requires Python >= 2.3 but the raw source requires Python 2.4, 2.5 or 2.6")
17032+
17033+def ex_stack():
17034+ exc_type, exc_value, tb = sys.exc_info()
17035+ if Logs.verbose > 1:
17036+ exc_lines = traceback.format_exception(exc_type, exc_value, tb)
17037+ return ''.join(exc_lines)
17038+ return str(exc_value)
17039+
17040+def to_list(sth):
17041+ if isinstance(sth, str):
17042+ return sth.split()
17043+ else:
17044+ return sth
17045+
17046+g_loaded_modules = {}
17047+"index modules by absolute path"
17048+
17049+g_module=None
17050+"the main module is special"
17051+
17052+def load_module(file_path, name=WSCRIPT_FILE):
17053+ "this function requires an absolute path"
17054+ try:
17055+ return g_loaded_modules[file_path]
17056+ except KeyError:
17057+ pass
17058+
17059+ module = imp.new_module(name)
17060+
17061+ try:
17062+ code = readf(file_path, m='rU')
17063+ except (IOError, OSError):
17064+ raise WscriptError('Could not read the file %r' % file_path)
17065+
17066+ module.waf_hash_val = code
17067+
17068+ dt = os.path.dirname(file_path)
17069+ sys.path.insert(0, dt)
17070+ try:
17071+ exec(compile(code, file_path, 'exec'), module.__dict__)
17072+ except Exception:
17073+ exc_type, exc_value, tb = sys.exc_info()
17074+ raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), file_path)
17075+ sys.path.remove(dt)
17076+
17077+ g_loaded_modules[file_path] = module
17078+
17079+ return module
17080+
17081+def set_main_module(file_path):
17082+ "Load custom options, if defined"
17083+ global g_module
17084+ g_module = load_module(file_path, 'wscript_main')
17085+ g_module.root_path = file_path
17086+
17087+ try:
17088+ g_module.APPNAME
17089+ except:
17090+ g_module.APPNAME = 'noname'
17091+ try:
17092+ g_module.VERSION
17093+ except:
17094+ g_module.VERSION = '1.0'
17095+
17096+ # note: to register the module globally, use the following:
17097+ # sys.modules['wscript_main'] = g_module
17098+
17099+def to_hashtable(s):
17100+ "used for importing env files"
17101+ tbl = {}
17102+ lst = s.split('\n')
17103+ for line in lst:
17104+ if not line: continue
17105+ mems = line.split('=')
17106+ tbl[mems[0]] = mems[1]
17107+ return tbl
17108+
17109+def get_term_cols():
17110+ "console width"
17111+ return 80
17112+try:
17113+ import struct, fcntl, termios
17114+except ImportError:
17115+ pass
17116+else:
17117+ if Logs.got_tty:
17118+ def myfun():
17119+ dummy_lines, cols = struct.unpack("HHHH", \
17120+ fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ , \
17121+ struct.pack("HHHH", 0, 0, 0, 0)))[:2]
17122+ return cols
17123+ # we actually try the function once to see if it is suitable
17124+ try:
17125+ myfun()
17126+ except:
17127+ pass
17128+ else:
17129+ get_term_cols = myfun
17130+
17131+rot_idx = 0
17132+rot_chr = ['\\', '|', '/', '-']
17133+"the rotation character in the progress bar"
17134+
17135+
17136+def split_path(path):
17137+ return path.split('/')
17138+
17139+def split_path_cygwin(path):
17140+ if path.startswith('//'):
17141+ ret = path.split('/')[2:]
17142+ ret[0] = '/' + ret[0]
17143+ return ret
17144+ return path.split('/')
17145+
17146+re_sp = re.compile('[/\\\\]')
17147+def split_path_win32(path):
17148+ if path.startswith('\\\\'):
17149+ ret = re.split(re_sp, path)[2:]
17150+ ret[0] = '\\' + ret[0]
17151+ return ret
17152+ return re.split(re_sp, path)
17153+
17154+if sys.platform == 'cygwin':
17155+ split_path = split_path_cygwin
17156+elif is_win32:
17157+ split_path = split_path_win32
17158+
17159+def copy_attrs(orig, dest, names, only_if_set=False):
17160+ for a in to_list(names):
17161+ u = getattr(orig, a, ())
17162+ if u or not only_if_set:
17163+ setattr(dest, a, u)
17164+
17165+def def_attrs(cls, **kw):
17166+ '''
17167+ set attributes for class.
17168+ @param cls [any class]: the class to update the given attributes in.
17169+ @param kw [dictionary]: dictionary of attributes names and values.
17170+
17171+ if the given class hasn't one (or more) of these attributes, add the attribute with its value to the class.
17172+ '''
17173+ for k, v in kw.iteritems():
17174+ if not hasattr(cls, k):
17175+ setattr(cls, k, v)
17176+
17177+def quote_define_name(path):
17178+ fu = re.compile("[^a-zA-Z0-9]").sub("_", path)
17179+ fu = fu.upper()
17180+ return fu
17181+
17182+def quote_whitespace(path):
17183+ return (path.strip().find(' ') > 0 and '"%s"' % path or path).replace('""', '"')
17184+
17185+def trimquotes(s):
17186+ if not s: return ''
17187+ s = s.rstrip()
17188+ if s[0] == "'" and s[-1] == "'": return s[1:-1]
17189+ return s
17190+
17191+def h_list(lst):
17192+ m = md5()
17193+ m.update(str(lst))
17194+ return m.digest()
17195+
17196+def h_fun(fun):
17197+ try:
17198+ return fun.code
17199+ except AttributeError:
17200+ try:
17201+ h = inspect.getsource(fun)
17202+ except IOError:
17203+ h = "nocode"
17204+ try:
17205+ fun.code = h
17206+ except AttributeError:
17207+ pass
17208+ return h
17209+
17210+def pprint(col, str, label='', sep='\n'):
17211+ "print messages in color"
17212+ sys.stderr.write("%s%s%s %s%s" % (Logs.colors(col), str, Logs.colors.NORMAL, label, sep))
17213+
17214+def check_dir(dir):
17215+ """If a folder doesn't exists, create it."""
17216+ try:
17217+ os.stat(dir)
17218+ except OSError:
17219+ try:
17220+ os.makedirs(dir)
17221+ except OSError, e:
17222+ raise WafError("Cannot create folder '%s' (original error: %s)" % (dir, e))
17223+
17224+def cmd_output(cmd, **kw):
17225+
17226+ silent = False
17227+ if 'silent' in kw:
17228+ silent = kw['silent']
17229+ del(kw['silent'])
17230+
17231+ if 'e' in kw:
17232+ tmp = kw['e']
17233+ del(kw['e'])
17234+ kw['env'] = tmp
17235+
17236+ kw['shell'] = isinstance(cmd, str)
17237+ kw['stdout'] = pproc.PIPE
17238+ if silent:
17239+ kw['stderr'] = pproc.PIPE
17240+
17241+ try:
17242+ p = pproc.Popen(cmd, **kw)
17243+ output = p.communicate()[0]
17244+ except OSError, e:
17245+ raise ValueError(str(e))
17246+
17247+ if p.returncode:
17248+ if not silent:
17249+ msg = "command execution failed: %s -> %r" % (cmd, str(output))
17250+ raise ValueError(msg)
17251+ output = ''
17252+ return output
17253+
17254+reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
17255+def subst_vars(expr, params):
17256+ "substitute ${PREFIX}/bin in /usr/local/bin"
17257+ def repl_var(m):
17258+ if m.group(1):
17259+ return '\\'
17260+ if m.group(2):
17261+ return '$'
17262+ try:
17263+ # environments may contain lists
17264+ return params.get_flat(m.group(3))
17265+ except AttributeError:
17266+ return params[m.group(3)]
17267+ return reg_subst.sub(repl_var, expr)
17268+
17269+def unversioned_sys_platform_to_binary_format(unversioned_sys_platform):
17270+ "infers the binary format from the unversioned_sys_platform name."
17271+
17272+ if unversioned_sys_platform in ('linux', 'freebsd', 'netbsd', 'openbsd', 'sunos', 'gnu'):
17273+ return 'elf'
17274+ elif unversioned_sys_platform == 'darwin':
17275+ return 'mac-o'
17276+ elif unversioned_sys_platform in ('win32', 'cygwin', 'uwin', 'msys'):
17277+ return 'pe'
17278+ # TODO we assume all other operating systems are elf, which is not true.
17279+ # we may set this to 'unknown' and have ccroot and other tools handle the case "gracefully" (whatever that means).
17280+ return 'elf'
17281+
17282+def unversioned_sys_platform():
17283+ """returns an unversioned name from sys.platform.
17284+ sys.platform is not very well defined and depends directly on the python source tree.
17285+ The version appended to the names is unreliable as it's taken from the build environment at the time python was built,
17286+ i.e., it's possible to get freebsd7 on a freebsd8 system.
17287+ So we remove the version from the name, except for special cases where the os has a stupid name like os2 or win32.
17288+ Some possible values of sys.platform are, amongst others:
17289+ aix3 aix4 atheos beos5 darwin freebsd2 freebsd3 freebsd4 freebsd5 freebsd6 freebsd7
17290+ generic gnu0 irix5 irix6 linux2 mac netbsd1 next3 os2emx riscos sunos5 unixware7
17291+ Investigating the python source tree may reveal more values.
17292+ """
17293+ s = sys.platform
17294+ if s == 'java':
17295+ # The real OS is hidden under the JVM.
17296+ from java.lang import System
17297+ s = System.getProperty('os.name')
17298+ # see http://lopica.sourceforge.net/os.html for a list of possible values
17299+ if s == 'Mac OS X':
17300+ return 'darwin'
17301+ elif s.startswith('Windows '):
17302+ return 'win32'
17303+ elif s == 'OS/2':
17304+ return 'os2'
17305+ elif s == 'HP-UX':
17306+ return 'hpux'
17307+ elif s in ('SunOS', 'Solaris'):
17308+ return 'sunos'
17309+ else: s = s.lower()
17310+ if s == 'win32' or s.endswith('os2') and s != 'sunos2': return s
17311+ return re.split('\d+$', s)[0]
17312+
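The net effect of the function above is to strip the trailing version digits that CPython bakes into sys.platform, with a few special cases kept verbatim. A self-contained sketch of that final step with sample inputs:

    import re

    def strip_version(s):
        s = s.lower()
        if s == 'win32' or (s.endswith('os2') and s != 'sunos2'):
            return s
        return re.split(r'\d+$', s)[0]

    for name in ('linux2', 'freebsd8', 'sunos5', 'irix6', 'win32', 'darwin'):
        print('%s -> %s' % (name, strip_version(name)))
    # linux2 -> linux, freebsd8 -> freebsd, sunos5 -> sunos, irix6 -> irix,
    # win32 -> win32, darwin -> darwin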
17313+#@deprecated('use unversioned_sys_platform instead')
17314+def detect_platform():
17315+ """this function has been in the Utils module for some time.
17316+ It's hard to guess what people have used it for.
17317+	It seems its goal is to return an unversioned sys.platform, but it's not handling all platforms.
17318+ For example, the version is not removed on freebsd and netbsd, amongst others.
17319+ """
17320+ s = sys.platform
17321+
17322+ # known POSIX
17323+ for x in 'cygwin linux irix sunos hpux aix darwin gnu'.split():
17324+ # sys.platform may be linux2
17325+ if s.find(x) >= 0:
17326+ return x
17327+
17328+ # unknown POSIX
17329+ if os.name in 'posix java os2'.split():
17330+ return os.name
17331+
17332+ return s
17333+
17334+def load_tool(tool, tooldir=None):
17335+ '''
17336+ load_tool: import a Python module, optionally using several directories.
17337+ @param tool [string]: name of tool to import.
17338+ @param tooldir [list]: directories to look for the tool.
17339+ @return: the loaded module.
17340+
17341+	Warning: this function is not thread-safe: it modifies sys.path,
17342+	so calls must run sequentially.
17343+ '''
17344+ if tooldir:
17345+ assert isinstance(tooldir, list)
17346+ sys.path = tooldir + sys.path
17347+ else:
17348+ tooldir = []
17349+ try:
17350+ return __import__(tool)
17351+ finally:
17352+ for dt in tooldir:
17353+ sys.path.remove(dt)
17354+
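Usage sketch for the sys.path dance performed by load_tool() above; the tool name 'mytool' and the temporary directory are made up for the illustration:

    import os, sys, tempfile

    tooldir = tempfile.mkdtemp()
    with open(os.path.join(tooldir, 'mytool.py'), 'w') as f:
        f.write("def detect(conf):\n    return 'detected'\n")

    # prepend the tool directory, import, then restore sys.path -- exactly what load_tool() does
    sys.path = [tooldir] + sys.path
    try:
        mytool = __import__('mytool')
    finally:
        sys.path.remove(tooldir)

    print(mytool.detect(None))   # detected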
17355+def readf(fname, m='r'):
17356+	"Get the contents of a file; it is not used anywhere for the moment"
17357+ f = open(fname, m)
17358+ try:
17359+ txt = f.read()
17360+ finally:
17361+ f.close()
17362+ return txt
17363+
17364+def nada(*k, **kw):
17365+ """A function that does nothing"""
17366+ pass
17367+
17368+def diff_path(top, subdir):
17369+ """difference between two absolute paths"""
17370+ top = os.path.normpath(top).replace('\\', '/').split('/')
17371+ subdir = os.path.normpath(subdir).replace('\\', '/').split('/')
17372+ if len(top) == len(subdir): return ''
17373+ diff = subdir[len(top) - len(subdir):]
17374+ return os.path.join(*diff)
17375+
17376+class Context(object):
17377+ """A base class for commands to be executed from Waf scripts"""
17378+
17379+ def set_curdir(self, dir):
17380+ self.curdir_ = dir
17381+
17382+ def get_curdir(self):
17383+ try:
17384+ return self.curdir_
17385+ except AttributeError:
17386+ self.curdir_ = os.getcwd()
17387+ return self.get_curdir()
17388+
17389+ curdir = property(get_curdir, set_curdir)
17390+
17391+ def recurse(self, dirs, name=''):
17392+		"""Call scripts from the given folders: first try the file wscript_<function_name>,
17393+		and if that file does not exist, call the method 'function_name' from a file named wscript.
17394+		dirs can be a list of folders or a string containing space-separated folder paths
17395+ """
17396+ if not name:
17397+ name = inspect.stack()[1][3]
17398+
17399+ if isinstance(dirs, str):
17400+ dirs = to_list(dirs)
17401+
17402+ for x in dirs:
17403+ if os.path.isabs(x):
17404+ nexdir = x
17405+ else:
17406+ nexdir = os.path.join(self.curdir, x)
17407+
17408+ base = os.path.join(nexdir, WSCRIPT_FILE)
17409+ file_path = base + '_' + name
17410+
17411+ try:
17412+ txt = readf(file_path, m='rU')
17413+ except (OSError, IOError):
17414+ try:
17415+ module = load_module(base)
17416+ except OSError:
17417+ raise WscriptError('No such script %s' % base)
17418+
17419+ try:
17420+ f = module.__dict__[name]
17421+ except KeyError:
17422+ raise WscriptError('No function %s defined in %s' % (name, base))
17423+
17424+ if getattr(self.__class__, 'pre_recurse', None):
17425+ self.pre_recurse(f, base, nexdir)
17426+ old = self.curdir
17427+ self.curdir = nexdir
17428+ try:
17429+ f(self)
17430+ finally:
17431+ self.curdir = old
17432+ if getattr(self.__class__, 'post_recurse', None):
17433+ self.post_recurse(module, base, nexdir)
17434+ else:
17435+ dc = {'ctx': self}
17436+ if getattr(self.__class__, 'pre_recurse', None):
17437+ dc = self.pre_recurse(txt, file_path, nexdir)
17438+ old = self.curdir
17439+ self.curdir = nexdir
17440+ try:
17441+ try:
17442+ exec(compile(txt, file_path, 'exec'), dc)
17443+ except Exception:
17444+ exc_type, exc_value, tb = sys.exc_info()
17445+ raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), base)
17446+ finally:
17447+ self.curdir = old
17448+ if getattr(self.__class__, 'post_recurse', None):
17449+ self.post_recurse(txt, file_path, nexdir)
17450+
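A greatly simplified, standalone sketch of the lookup order recurse() uses: a file called wscript_<name> is preferred and executed directly with 'ctx' in its namespace; otherwise the wscript module is loaded and its <name> function is called. The file content and the list used as a stand-in context are assumptions of the sketch:

    import os, tempfile

    d = tempfile.mkdtemp()
    with open(os.path.join(d, 'wscript_build'), 'w') as f:
        f.write("ctx.append('build step ran')\n")

    name = 'build'
    base = os.path.join(d, 'wscript')
    file_path = base + '_' + name
    collected = []

    if os.path.exists(file_path):
        txt = open(file_path).read()
        exec(compile(txt, file_path, 'exec'), {'ctx': collected})
    else:
        # recurse() falls back to load_module(base) and calls module.<name>(self) here
        pass

    print(collected)   # ['build step ran']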
17451+if is_win32:
17452+ old = shutil.copy2
17453+ def copy2(src, dst):
17454+ old(src, dst)
17455+		shutil.copystat(src, dst)
17456+ setattr(shutil, 'copy2', copy2)
17457+
17458+def zip_folder(dir, zip_file_name, prefix):
17459+ """
17460+	prefix is prepended to the path of every file stored in the archive
17461+ """
17462+ import zipfile
17463+ zip = zipfile.ZipFile(zip_file_name, 'w', compression=zipfile.ZIP_DEFLATED)
17464+ base = os.path.abspath(dir)
17465+
17466+ if prefix:
17467+ if prefix[-1] != os.sep:
17468+ prefix += os.sep
17469+
17470+ n = len(base)
17471+ for root, dirs, files in os.walk(base):
17472+ for f in files:
17473+ archive_name = prefix + root[n:] + os.sep + f
17474+ zip.write(root + os.sep + f, archive_name, zipfile.ZIP_DEFLATED)
17475+ zip.close()
17476+
17477+def get_elapsed_time(start):
17478+ "Format a time delta (datetime.timedelta) using the format DdHhMmS.MSs"
17479+ delta = datetime.datetime.now() - start
17480+ # cast to int necessary for python 3.0
17481+ days = int(delta.days)
17482+ hours = int(delta.seconds / 3600)
17483+ minutes = int((delta.seconds - hours * 3600) / 60)
17484+ seconds = delta.seconds - hours * 3600 - minutes * 60 \
17485+ + float(delta.microseconds) / 1000 / 1000
17486+ result = ''
17487+ if days:
17488+ result += '%dd' % days
17489+ if days or hours:
17490+ result += '%dh' % hours
17491+ if days or hours or minutes:
17492+ result += '%dm' % minutes
17493+ return '%s%.3fs' % (result, seconds)
17494+
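A quick, standalone check of the elapsed-time formatting above (same arithmetic, fed with fixed timedeltas instead of datetime.now()):

    import datetime

    def fmt(delta):
        days = int(delta.days)
        hours = int(delta.seconds / 3600)
        minutes = int((delta.seconds - hours * 3600) / 60)
        seconds = delta.seconds - hours * 3600 - minutes * 60 \
            + float(delta.microseconds) / 1000 / 1000
        result = ''
        if days: result += '%dd' % days
        if days or hours: result += '%dh' % hours
        if days or hours or minutes: result += '%dm' % minutes
        return '%s%.3fs' % (result, seconds)

    print(fmt(datetime.timedelta(seconds=42, microseconds=500000)))   # 42.500s
    print(fmt(datetime.timedelta(hours=1, minutes=2, seconds=3)))     # 1h2m3.000s
    print(fmt(datetime.timedelta(days=2, seconds=5)))                 # 2d0h0m5.000s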
17495+if os.name == 'java':
17496+ # For Jython (they should really fix the inconsistency)
17497+ try:
17498+ gc.disable()
17499+ gc.enable()
17500+ except NotImplementedError:
17501+ gc.disable = gc.enable
17502+
17503+def run_once(fun):
17504+ """
17505+	decorator: make a function cache its results; use like this:
17506+
17507+ @run_once
17508+ def foo(k):
17509+ return 345*2343
17510+ """
17511+ cache = {}
17512+ def wrap(k):
17513+ try:
17514+ return cache[k]
17515+ except KeyError:
17516+ ret = fun(k)
17517+ cache[k] = ret
17518+ return ret
17519+ wrap.__cache__ = cache
17520+ return wrap
17521+
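A short demonstration of the caching decorator above (standalone sketch; slow_lookup is an invented example):

    def run_once(fun):
        cache = {}
        def wrap(k):
            try:
                return cache[k]
            except KeyError:
                ret = fun(k)
                cache[k] = ret
                return ret
        wrap.__cache__ = cache
        return wrap

    calls = []

    @run_once
    def slow_lookup(k):
        calls.append(k)
        return k * 2

    print('%s %s' % (slow_lookup(21), slow_lookup(21)))   # 42 42
    print(calls)                                          # [21] -- the body ran only once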
17522diff --git a/buildtools/wafadmin/__init__.py b/buildtools/wafadmin/__init__.py
17523new file mode 100644
17524index 0000000..01273cf
17525--- /dev/null
17526+++ b/buildtools/wafadmin/__init__.py
17527@@ -0,0 +1,3 @@
17528+#!/usr/bin/env python
17529+# encoding: utf-8
17530+# Thomas Nagy, 2005 (ita)
17531diff --git a/buildtools/wafadmin/ansiterm.py b/buildtools/wafadmin/ansiterm.py
17532new file mode 100644
17533index 0000000..720b79c
17534--- /dev/null
17535+++ b/buildtools/wafadmin/ansiterm.py
17536@@ -0,0 +1,236 @@
17537+import sys, os
17538+try:
17539+ if (not sys.stderr.isatty()) or (not sys.stdout.isatty()):
17540+ raise ValueError('not a tty')
17541+
17542+ from ctypes import *
17543+
17544+ class COORD(Structure):
17545+ _fields_ = [("X", c_short), ("Y", c_short)]
17546+
17547+ class SMALL_RECT(Structure):
17548+ _fields_ = [("Left", c_short), ("Top", c_short), ("Right", c_short), ("Bottom", c_short)]
17549+
17550+ class CONSOLE_SCREEN_BUFFER_INFO(Structure):
17551+ _fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_short), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)]
17552+
17553+ class CONSOLE_CURSOR_INFO(Structure):
17554+ _fields_ = [('dwSize',c_ulong), ('bVisible', c_int)]
17555+
17556+ sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
17557+ csinfo = CONSOLE_CURSOR_INFO()
17558+ hconsole = windll.kernel32.GetStdHandle(-11)
17559+ windll.kernel32.GetConsoleScreenBufferInfo(hconsole, byref(sbinfo))
17560+ if sbinfo.Size.X < 10 or sbinfo.Size.Y < 10: raise Exception('small console')
17561+ windll.kernel32.GetConsoleCursorInfo(hconsole, byref(csinfo))
17562+except Exception:
17563+ pass
17564+else:
17565+ import re, threading
17566+
17567+ to_int = lambda number, default: number and int(number) or default
17568+ wlock = threading.Lock()
17569+
17570+ STD_OUTPUT_HANDLE = -11
17571+ STD_ERROR_HANDLE = -12
17572+
17573+ class AnsiTerm(object):
17574+ def __init__(self):
17575+ self.hconsole = windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
17576+ self.cursor_history = []
17577+ self.orig_sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
17578+ self.orig_csinfo = CONSOLE_CURSOR_INFO()
17579+ windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self.orig_sbinfo))
17580+ windll.kernel32.GetConsoleCursorInfo(hconsole, byref(self.orig_csinfo))
17581+
17582+
17583+ def screen_buffer_info(self):
17584+ sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
17585+ windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(sbinfo))
17586+ return sbinfo
17587+
17588+ def clear_line(self, param):
17589+ mode = param and int(param) or 0
17590+ sbinfo = self.screen_buffer_info()
17591+			if mode == 1: # Clear from beginning of line to cursor position
17592+ line_start = COORD(0, sbinfo.CursorPosition.Y)
17593+ line_length = sbinfo.Size.X
17594+ elif mode == 2: # Clear entire line
17595+ line_start = COORD(sbinfo.CursorPosition.X, sbinfo.CursorPosition.Y)
17596+ line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
17597+ else: # Clear from cursor position to end of line
17598+ line_start = sbinfo.CursorPosition
17599+ line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
17600+ chars_written = c_int()
17601+ windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_char(' '), line_length, line_start, byref(chars_written))
17602+ windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))
17603+
17604+ def clear_screen(self, param):
17605+ mode = to_int(param, 0)
17606+ sbinfo = self.screen_buffer_info()
17607+			if mode == 1: # Clear from beginning of screen to cursor position
17608+ clear_start = COORD(0, 0)
17609+ clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y
17610+ elif mode == 2: # Clear entire screen and return cursor to home
17611+ clear_start = COORD(0, 0)
17612+ clear_length = sbinfo.Size.X * sbinfo.Size.Y
17613+ windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
17614+ else: # Clear from cursor position to end of screen
17615+ clear_start = sbinfo.CursorPosition
17616+ clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
17617+ chars_written = c_int()
17618+ windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_char(' '), clear_length, clear_start, byref(chars_written))
17619+ windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
17620+
17621+ def push_cursor(self, param):
17622+ sbinfo = self.screen_buffer_info()
17623+			self.cursor_history.append(sbinfo.CursorPosition)
17624+
17625+ def pop_cursor(self, param):
17626+ if self.cursor_history:
17627+ old_pos = self.cursor_history.pop()
17628+ windll.kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)
17629+
17630+ def set_cursor(self, param):
17631+ x, sep, y = param.partition(';')
17632+ x = to_int(x, 1) - 1
17633+ y = to_int(y, 1) - 1
17634+ sbinfo = self.screen_buffer_info()
17635+ new_pos = COORD(
17636+ min(max(0, x), sbinfo.Size.X),
17637+ min(max(0, y), sbinfo.Size.Y)
17638+ )
17639+ windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
17640+
17641+ def set_column(self, param):
17642+ x = to_int(param, 1) - 1
17643+ sbinfo = self.screen_buffer_info()
17644+ new_pos = COORD(
17645+ min(max(0, x), sbinfo.Size.X),
17646+ sbinfo.CursorPosition.Y
17647+ )
17648+ windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
17649+
17650+ def move_cursor(self, x_offset=0, y_offset=0):
17651+ sbinfo = self.screen_buffer_info()
17652+ new_pos = COORD(
17653+ min(max(0, sbinfo.CursorPosition.X + x_offset), sbinfo.Size.X),
17654+ min(max(0, sbinfo.CursorPosition.Y + y_offset), sbinfo.Size.Y)
17655+ )
17656+ windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
17657+
17658+ def move_up(self, param):
17659+ self.move_cursor(y_offset = -to_int(param, 1))
17660+
17661+ def move_down(self, param):
17662+ self.move_cursor(y_offset = to_int(param, 1))
17663+
17664+ def move_left(self, param):
17665+ self.move_cursor(x_offset = -to_int(param, 1))
17666+
17667+ def move_right(self, param):
17668+ self.move_cursor(x_offset = to_int(param, 1))
17669+
17670+ def next_line(self, param):
17671+ sbinfo = self.screen_buffer_info()
17672+ self.move_cursor(
17673+ x_offset = -sbinfo.CursorPosition.X,
17674+ y_offset = to_int(param, 1)
17675+ )
17676+
17677+ def prev_line(self, param):
17678+ sbinfo = self.screen_buffer_info()
17679+ self.move_cursor(
17680+ x_offset = -sbinfo.CursorPosition.X,
17681+ y_offset = -to_int(param, 1)
17682+ )
17683+
17684+ escape_to_color = { (0, 30): 0x0, #black
17685+ (0, 31): 0x4, #red
17686+ (0, 32): 0x2, #green
17687+ (0, 33): 0x4+0x2, #dark yellow
17688+ (0, 34): 0x1, #blue
17689+ (0, 35): 0x1+0x4, #purple
17690+ (0, 36): 0x2+0x4, #cyan
17691+ (0, 37): 0x1+0x2+0x4, #grey
17692+ (1, 30): 0x1+0x2+0x4, #dark gray
17693+ (1, 31): 0x4+0x8, #red
17694+ (1, 32): 0x2+0x8, #light green
17695+ (1, 33): 0x4+0x2+0x8, #yellow
17696+ (1, 34): 0x1+0x8, #light blue
17697+ (1, 35): 0x1+0x4+0x8, #light purple
17698+ (1, 36): 0x1+0x2+0x8, #light cyan
17699+ (1, 37): 0x1+0x2+0x4+0x8, #white
17700+ }
17701+
17702+ def set_color(self, param):
17703+ cols = param.split(';')
17704+ attr = self.orig_sbinfo.Attributes
17705+ for c in cols:
17706+ c = to_int(c, 0)
17707+ if c in range(30,38):
17708+ attr = (attr & 0xf0) | (self.escape_to_color.get((0,c), 0x7))
17709+ elif c in range(40,48):
17710+ attr = (attr & 0x0f) | (self.escape_to_color.get((0,c), 0x7) << 8)
17711+ elif c in range(90,98):
17712+ attr = (attr & 0xf0) | (self.escape_to_color.get((1,c-60), 0x7))
17713+ elif c in range(100,108):
17714+ attr = (attr & 0x0f) | (self.escape_to_color.get((1,c-60), 0x7) << 8)
17715+ elif c == 1:
17716+ attr |= 0x08
17717+ windll.kernel32.SetConsoleTextAttribute(self.hconsole, attr)
17718+
17719+ def show_cursor(self,param):
17720+ csinfo.bVisible = 1
17721+ windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
17722+
17723+ def hide_cursor(self,param):
17724+ csinfo.bVisible = 0
17725+ windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
17726+
17727+ ansi_command_table = {
17728+ 'A': move_up,
17729+ 'B': move_down,
17730+ 'C': move_right,
17731+ 'D': move_left,
17732+ 'E': next_line,
17733+ 'F': prev_line,
17734+ 'G': set_column,
17735+ 'H': set_cursor,
17736+ 'f': set_cursor,
17737+ 'J': clear_screen,
17738+ 'K': clear_line,
17739+ 'h': show_cursor,
17740+ 'l': hide_cursor,
17741+ 'm': set_color,
17742+ 's': push_cursor,
17743+ 'u': pop_cursor,
17744+ }
17745+ # Match either the escape sequence or text not containing escape sequence
17746+ ansi_tokans = re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
17747+ def write(self, text):
17748+ try:
17749+ wlock.acquire()
17750+ for param, cmd, txt in self.ansi_tokans.findall(text):
17751+ if cmd:
17752+ cmd_func = self.ansi_command_table.get(cmd)
17753+ if cmd_func:
17754+ cmd_func(self, param)
17755+ else:
17756+ chars_written = c_int()
17757+ if isinstance(txt, unicode):
17758+ windll.kernel32.WriteConsoleW(self.hconsole, txt, len(txt), byref(chars_written), None)
17759+ else:
17760+ windll.kernel32.WriteConsoleA(self.hconsole, txt, len(txt), byref(chars_written), None)
17761+ finally:
17762+ wlock.release()
17763+
17764+ def flush(self):
17765+ pass
17766+
17767+ def isatty(self):
17768+ return True
17769+
17770+ sys.stderr = sys.stdout = AnsiTerm()
17771+ os.environ['TERM'] = 'vt100'
17772+
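The Windows console has no native ANSI escape support, so AnsiTerm.write() above tokenizes the stream and maps each escape command to a kernel32 call via ansi_command_table. The tokenizing step itself is portable; a standalone illustration:

    import re

    # same pattern as the module above: (escape parameters, escape command) or plain text
    ansi_tokens = re.compile('(?:\x1b\\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')

    sample = '\x1b[1;32mok\x1b[0m building\x1b[K\n'
    for param, cmd, txt in ansi_tokens.findall(sample):
        if cmd:
            print('escape: params=%r command=%r' % (param, cmd))   # 'm' -> set_color, 'K' -> clear_line
        else:
            print('text  : %r' % txt)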
17773diff --git a/buildtools/wafadmin/pproc.py b/buildtools/wafadmin/pproc.py
17774new file mode 100644
17775index 0000000..cb15178
17776--- /dev/null
17777+++ b/buildtools/wafadmin/pproc.py
17778@@ -0,0 +1,620 @@
17779+# borrowed from python 2.5.2c1
17780+# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
17781+# Licensed to PSF under a Contributor Agreement.
17782+
17783+import sys
17784+mswindows = (sys.platform == "win32")
17785+
17786+import os
17787+import types
17788+import traceback
17789+import gc
17790+
17791+class CalledProcessError(Exception):
17792+ def __init__(self, returncode, cmd):
17793+ self.returncode = returncode
17794+ self.cmd = cmd
17795+ def __str__(self):
17796+ return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
17797+
17798+if mswindows:
17799+ import threading
17800+ import msvcrt
17801+ if 0:
17802+ import pywintypes
17803+ from win32api import GetStdHandle, STD_INPUT_HANDLE, \
17804+ STD_OUTPUT_HANDLE, STD_ERROR_HANDLE
17805+ from win32api import GetCurrentProcess, DuplicateHandle, \
17806+ GetModuleFileName, GetVersion
17807+ from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE
17808+ from win32pipe import CreatePipe
17809+ from win32process import CreateProcess, STARTUPINFO, \
17810+ GetExitCodeProcess, STARTF_USESTDHANDLES, \
17811+ STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE
17812+ from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0
17813+ else:
17814+ from _subprocess import *
17815+ class STARTUPINFO:
17816+ dwFlags = 0
17817+ hStdInput = None
17818+ hStdOutput = None
17819+ hStdError = None
17820+ wShowWindow = 0
17821+ class pywintypes:
17822+ error = IOError
17823+else:
17824+ import select
17825+ import errno
17826+ import fcntl
17827+ import pickle
17828+
17829+__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"]
17830+
17831+try:
17832+ MAXFD = os.sysconf("SC_OPEN_MAX")
17833+except:
17834+ MAXFD = 256
17835+
17836+try:
17837+ False
17838+except NameError:
17839+ False = 0
17840+ True = 1
17841+
17842+_active = []
17843+
17844+def _cleanup():
17845+ for inst in _active[:]:
17846+ if inst.poll(_deadstate=sys.maxint) >= 0:
17847+ try:
17848+ _active.remove(inst)
17849+ except ValueError:
17850+ pass
17851+
17852+PIPE = -1
17853+STDOUT = -2
17854+
17855+
17856+def call(*popenargs, **kwargs):
17857+ return Popen(*popenargs, **kwargs).wait()
17858+
17859+def check_call(*popenargs, **kwargs):
17860+ retcode = call(*popenargs, **kwargs)
17861+ cmd = kwargs.get("args")
17862+ if cmd is None:
17863+ cmd = popenargs[0]
17864+ if retcode:
17865+ raise CalledProcessError(retcode, cmd)
17866+ return retcode
17867+
17868+
17869+def list2cmdline(seq):
17870+ result = []
17871+ needquote = False
17872+ for arg in seq:
17873+ bs_buf = []
17874+
17875+ if result:
17876+ result.append(' ')
17877+
17878+ needquote = (" " in arg) or ("\t" in arg) or arg == ""
17879+ if needquote:
17880+ result.append('"')
17881+
17882+ for c in arg:
17883+ if c == '\\':
17884+ bs_buf.append(c)
17885+ elif c == '"':
17886+ result.append('\\' * len(bs_buf)*2)
17887+ bs_buf = []
17888+ result.append('\\"')
17889+ else:
17890+ if bs_buf:
17891+ result.extend(bs_buf)
17892+ bs_buf = []
17893+ result.append(c)
17894+
17895+ if bs_buf:
17896+ result.extend(bs_buf)
17897+
17898+ if needquote:
17899+ result.extend(bs_buf)
17900+ result.append('"')
17901+
17902+ return ''.join(result)
17903+
17904+class Popen(object):
17905+ def __init__(self, args, bufsize=0, executable=None,
17906+ stdin=None, stdout=None, stderr=None,
17907+ preexec_fn=None, close_fds=False, shell=False,
17908+ cwd=None, env=None, universal_newlines=False,
17909+ startupinfo=None, creationflags=0):
17910+ _cleanup()
17911+
17912+ self._child_created = False
17913+ if not isinstance(bufsize, (int, long)):
17914+ raise TypeError("bufsize must be an integer")
17915+
17916+ if mswindows:
17917+ if preexec_fn is not None:
17918+ raise ValueError("preexec_fn is not supported on Windows platforms")
17919+ if close_fds:
17920+ raise ValueError("close_fds is not supported on Windows platforms")
17921+ else:
17922+ if startupinfo is not None:
17923+ raise ValueError("startupinfo is only supported on Windows platforms")
17924+ if creationflags != 0:
17925+ raise ValueError("creationflags is only supported on Windows platforms")
17926+
17927+ self.stdin = None
17928+ self.stdout = None
17929+ self.stderr = None
17930+ self.pid = None
17931+ self.returncode = None
17932+ self.universal_newlines = universal_newlines
17933+
17934+ (p2cread, p2cwrite,
17935+ c2pread, c2pwrite,
17936+ errread, errwrite) = self._get_handles(stdin, stdout, stderr)
17937+
17938+ self._execute_child(args, executable, preexec_fn, close_fds,
17939+ cwd, env, universal_newlines,
17940+ startupinfo, creationflags, shell,
17941+ p2cread, p2cwrite,
17942+ c2pread, c2pwrite,
17943+ errread, errwrite)
17944+
17945+ if mswindows:
17946+ if stdin is None and p2cwrite is not None:
17947+ os.close(p2cwrite)
17948+ p2cwrite = None
17949+ if stdout is None and c2pread is not None:
17950+ os.close(c2pread)
17951+ c2pread = None
17952+ if stderr is None and errread is not None:
17953+ os.close(errread)
17954+ errread = None
17955+
17956+ if p2cwrite:
17957+ self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
17958+ if c2pread:
17959+ if universal_newlines:
17960+ self.stdout = os.fdopen(c2pread, 'rU', bufsize)
17961+ else:
17962+ self.stdout = os.fdopen(c2pread, 'rb', bufsize)
17963+ if errread:
17964+ if universal_newlines:
17965+ self.stderr = os.fdopen(errread, 'rU', bufsize)
17966+ else:
17967+ self.stderr = os.fdopen(errread, 'rb', bufsize)
17968+
17969+
17970+ def _translate_newlines(self, data):
17971+ data = data.replace("\r\n", "\n")
17972+ data = data.replace("\r", "\n")
17973+ return data
17974+
17975+
17976+ def __del__(self, sys=sys):
17977+ if not self._child_created:
17978+ return
17979+ self.poll(_deadstate=sys.maxint)
17980+ if self.returncode is None and _active is not None:
17981+ _active.append(self)
17982+
17983+
17984+ def communicate(self, input=None):
17985+ if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
17986+ stdout = None
17987+ stderr = None
17988+ if self.stdin:
17989+ if input:
17990+ self.stdin.write(input)
17991+ self.stdin.close()
17992+ elif self.stdout:
17993+ stdout = self.stdout.read()
17994+ elif self.stderr:
17995+ stderr = self.stderr.read()
17996+ self.wait()
17997+ return (stdout, stderr)
17998+
17999+ return self._communicate(input)
18000+
18001+
18002+ if mswindows:
18003+ def _get_handles(self, stdin, stdout, stderr):
18004+ if stdin is None and stdout is None and stderr is None:
18005+ return (None, None, None, None, None, None)
18006+
18007+ p2cread, p2cwrite = None, None
18008+ c2pread, c2pwrite = None, None
18009+ errread, errwrite = None, None
18010+
18011+ if stdin is None:
18012+ p2cread = GetStdHandle(STD_INPUT_HANDLE)
18013+ if p2cread is not None:
18014+ pass
18015+ elif stdin is None or stdin == PIPE:
18016+ p2cread, p2cwrite = CreatePipe(None, 0)
18017+ p2cwrite = p2cwrite.Detach()
18018+ p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0)
18019+ elif isinstance(stdin, int):
18020+ p2cread = msvcrt.get_osfhandle(stdin)
18021+ else:
18022+ p2cread = msvcrt.get_osfhandle(stdin.fileno())
18023+ p2cread = self._make_inheritable(p2cread)
18024+
18025+ if stdout is None:
18026+ c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE)
18027+ if c2pwrite is not None:
18028+ pass
18029+ elif stdout is None or stdout == PIPE:
18030+ c2pread, c2pwrite = CreatePipe(None, 0)
18031+ c2pread = c2pread.Detach()
18032+ c2pread = msvcrt.open_osfhandle(c2pread, 0)
18033+ elif isinstance(stdout, int):
18034+ c2pwrite = msvcrt.get_osfhandle(stdout)
18035+ else:
18036+ c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
18037+ c2pwrite = self._make_inheritable(c2pwrite)
18038+
18039+ if stderr is None:
18040+ errwrite = GetStdHandle(STD_ERROR_HANDLE)
18041+ if errwrite is not None:
18042+ pass
18043+ elif stderr is None or stderr == PIPE:
18044+ errread, errwrite = CreatePipe(None, 0)
18045+ errread = errread.Detach()
18046+ errread = msvcrt.open_osfhandle(errread, 0)
18047+ elif stderr == STDOUT:
18048+ errwrite = c2pwrite
18049+ elif isinstance(stderr, int):
18050+ errwrite = msvcrt.get_osfhandle(stderr)
18051+ else:
18052+ errwrite = msvcrt.get_osfhandle(stderr.fileno())
18053+ errwrite = self._make_inheritable(errwrite)
18054+
18055+ return (p2cread, p2cwrite,
18056+ c2pread, c2pwrite,
18057+ errread, errwrite)
18058+ def _make_inheritable(self, handle):
18059+ return DuplicateHandle(GetCurrentProcess(), handle, GetCurrentProcess(), 0, 1, DUPLICATE_SAME_ACCESS)
18060+
18061+ def _find_w9xpopen(self):
18062+ w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), "w9xpopen.exe")
18063+ if not os.path.exists(w9xpopen):
18064+ w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), "w9xpopen.exe")
18065+ if not os.path.exists(w9xpopen):
18066+ raise RuntimeError("Cannot locate w9xpopen.exe, which is needed for Popen to work with your shell or platform.")
18067+ return w9xpopen
18068+
18069+ def _execute_child(self, args, executable, preexec_fn, close_fds,
18070+ cwd, env, universal_newlines,
18071+ startupinfo, creationflags, shell,
18072+ p2cread, p2cwrite,
18073+ c2pread, c2pwrite,
18074+ errread, errwrite):
18075+
18076+ if not isinstance(args, types.StringTypes):
18077+ args = list2cmdline(args)
18078+
18079+ if startupinfo is None:
18080+ startupinfo = STARTUPINFO()
18081+ if None not in (p2cread, c2pwrite, errwrite):
18082+ startupinfo.dwFlags |= STARTF_USESTDHANDLES
18083+ startupinfo.hStdInput = p2cread
18084+ startupinfo.hStdOutput = c2pwrite
18085+ startupinfo.hStdError = errwrite
18086+
18087+ if shell:
18088+ startupinfo.dwFlags |= STARTF_USESHOWWINDOW
18089+ startupinfo.wShowWindow = SW_HIDE
18090+ comspec = os.environ.get("COMSPEC", "cmd.exe")
18091+ args = comspec + " /c " + args
18092+ if (GetVersion() >= 0x80000000L or
18093+ os.path.basename(comspec).lower() == "command.com"):
18094+ w9xpopen = self._find_w9xpopen()
18095+ args = '"%s" %s' % (w9xpopen, args)
18096+ creationflags |= CREATE_NEW_CONSOLE
18097+
18098+ try:
18099+ hp, ht, pid, tid = CreateProcess(executable, args, None, None, 1, creationflags, env, cwd, startupinfo)
18100+ except pywintypes.error, e:
18101+ raise WindowsError(*e.args)
18102+
18103+ self._child_created = True
18104+ self._handle = hp
18105+ self.pid = pid
18106+ ht.Close()
18107+
18108+ if p2cread is not None:
18109+ p2cread.Close()
18110+ if c2pwrite is not None:
18111+ c2pwrite.Close()
18112+ if errwrite is not None:
18113+ errwrite.Close()
18114+
18115+
18116+ def poll(self, _deadstate=None):
18117+ if self.returncode is None:
18118+ if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
18119+ self.returncode = GetExitCodeProcess(self._handle)
18120+ return self.returncode
18121+
18122+
18123+ def wait(self):
18124+ if self.returncode is None:
18125+ obj = WaitForSingleObject(self._handle, INFINITE)
18126+ self.returncode = GetExitCodeProcess(self._handle)
18127+ return self.returncode
18128+
18129+ def _readerthread(self, fh, buffer):
18130+ buffer.append(fh.read())
18131+
18132+ def _communicate(self, input):
18133+ stdout = None
18134+ stderr = None
18135+
18136+ if self.stdout:
18137+ stdout = []
18138+ stdout_thread = threading.Thread(target=self._readerthread, args=(self.stdout, stdout))
18139+ stdout_thread.setDaemon(True)
18140+ stdout_thread.start()
18141+ if self.stderr:
18142+ stderr = []
18143+ stderr_thread = threading.Thread(target=self._readerthread, args=(self.stderr, stderr))
18144+ stderr_thread.setDaemon(True)
18145+ stderr_thread.start()
18146+
18147+ if self.stdin:
18148+ if input is not None:
18149+ self.stdin.write(input)
18150+ self.stdin.close()
18151+
18152+ if self.stdout:
18153+ stdout_thread.join()
18154+ if self.stderr:
18155+ stderr_thread.join()
18156+
18157+ if stdout is not None:
18158+ stdout = stdout[0]
18159+ if stderr is not None:
18160+ stderr = stderr[0]
18161+
18162+ if self.universal_newlines and hasattr(file, 'newlines'):
18163+ if stdout:
18164+ stdout = self._translate_newlines(stdout)
18165+ if stderr:
18166+ stderr = self._translate_newlines(stderr)
18167+
18168+ self.wait()
18169+ return (stdout, stderr)
18170+
18171+ else:
18172+ def _get_handles(self, stdin, stdout, stderr):
18173+ p2cread, p2cwrite = None, None
18174+ c2pread, c2pwrite = None, None
18175+ errread, errwrite = None, None
18176+
18177+ if stdin is None:
18178+ pass
18179+ elif stdin == PIPE:
18180+ p2cread, p2cwrite = os.pipe()
18181+ elif isinstance(stdin, int):
18182+ p2cread = stdin
18183+ else:
18184+ p2cread = stdin.fileno()
18185+
18186+ if stdout is None:
18187+ pass
18188+ elif stdout == PIPE:
18189+ c2pread, c2pwrite = os.pipe()
18190+ elif isinstance(stdout, int):
18191+ c2pwrite = stdout
18192+ else:
18193+ c2pwrite = stdout.fileno()
18194+
18195+ if stderr is None:
18196+ pass
18197+ elif stderr == PIPE:
18198+ errread, errwrite = os.pipe()
18199+ elif stderr == STDOUT:
18200+ errwrite = c2pwrite
18201+ elif isinstance(stderr, int):
18202+ errwrite = stderr
18203+ else:
18204+ errwrite = stderr.fileno()
18205+
18206+ return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite)
18207+
18208+ def _set_cloexec_flag(self, fd):
18209+ try:
18210+ cloexec_flag = fcntl.FD_CLOEXEC
18211+ except AttributeError:
18212+ cloexec_flag = 1
18213+
18214+ old = fcntl.fcntl(fd, fcntl.F_GETFD)
18215+ fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag)
18216+
18217+ def _close_fds(self, but):
18218+ for i in xrange(3, MAXFD):
18219+ if i == but:
18220+ continue
18221+ try:
18222+ os.close(i)
18223+ except:
18224+ pass
18225+
18226+ def _execute_child(self, args, executable, preexec_fn, close_fds,
18227+ cwd, env, universal_newlines, startupinfo, creationflags, shell,
18228+ p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite):
18229+
18230+ if isinstance(args, types.StringTypes):
18231+ args = [args]
18232+ else:
18233+ args = list(args)
18234+
18235+ if shell:
18236+ args = ["/bin/sh", "-c"] + args
18237+
18238+ if executable is None:
18239+ executable = args[0]
18240+
18241+ errpipe_read, errpipe_write = os.pipe()
18242+ self._set_cloexec_flag(errpipe_write)
18243+
18244+ gc_was_enabled = gc.isenabled()
18245+ gc.disable()
18246+ try:
18247+ self.pid = os.fork()
18248+ except:
18249+ if gc_was_enabled:
18250+ gc.enable()
18251+ raise
18252+ self._child_created = True
18253+ if self.pid == 0:
18254+ try:
18255+ if p2cwrite:
18256+ os.close(p2cwrite)
18257+ if c2pread:
18258+ os.close(c2pread)
18259+ if errread:
18260+ os.close(errread)
18261+ os.close(errpipe_read)
18262+
18263+ if p2cread:
18264+ os.dup2(p2cread, 0)
18265+ if c2pwrite:
18266+ os.dup2(c2pwrite, 1)
18267+ if errwrite:
18268+ os.dup2(errwrite, 2)
18269+
18270+ if p2cread and p2cread not in (0,):
18271+ os.close(p2cread)
18272+ if c2pwrite and c2pwrite not in (p2cread, 1):
18273+ os.close(c2pwrite)
18274+ if errwrite and errwrite not in (p2cread, c2pwrite, 2):
18275+ os.close(errwrite)
18276+
18277+ if close_fds:
18278+ self._close_fds(but=errpipe_write)
18279+
18280+ if cwd is not None:
18281+ os.chdir(cwd)
18282+
18283+ if preexec_fn:
18284+ apply(preexec_fn)
18285+
18286+ if env is None:
18287+ os.execvp(executable, args)
18288+ else:
18289+ os.execvpe(executable, args, env)
18290+
18291+ except:
18292+ exc_type, exc_value, tb = sys.exc_info()
18293+ exc_lines = traceback.format_exception(exc_type, exc_value, tb)
18294+ exc_value.child_traceback = ''.join(exc_lines)
18295+ os.write(errpipe_write, pickle.dumps(exc_value))
18296+
18297+ os._exit(255)
18298+
18299+ if gc_was_enabled:
18300+ gc.enable()
18301+ os.close(errpipe_write)
18302+ if p2cread and p2cwrite:
18303+ os.close(p2cread)
18304+ if c2pwrite and c2pread:
18305+ os.close(c2pwrite)
18306+ if errwrite and errread:
18307+ os.close(errwrite)
18308+
18309+ data = os.read(errpipe_read, 1048576)
18310+ os.close(errpipe_read)
18311+ if data != "":
18312+ os.waitpid(self.pid, 0)
18313+ child_exception = pickle.loads(data)
18314+ raise child_exception
18315+
18316+ def _handle_exitstatus(self, sts):
18317+ if os.WIFSIGNALED(sts):
18318+ self.returncode = -os.WTERMSIG(sts)
18319+ elif os.WIFEXITED(sts):
18320+ self.returncode = os.WEXITSTATUS(sts)
18321+ else:
18322+ raise RuntimeError("Unknown child exit status!")
18323+
18324+ def poll(self, _deadstate=None):
18325+ if self.returncode is None:
18326+ try:
18327+ pid, sts = os.waitpid(self.pid, os.WNOHANG)
18328+ if pid == self.pid:
18329+ self._handle_exitstatus(sts)
18330+ except os.error:
18331+ if _deadstate is not None:
18332+ self.returncode = _deadstate
18333+ return self.returncode
18334+
18335+ def wait(self):
18336+ if self.returncode is None:
18337+ pid, sts = os.waitpid(self.pid, 0)
18338+ self._handle_exitstatus(sts)
18339+ return self.returncode
18340+
18341+ def _communicate(self, input):
18342+ read_set = []
18343+ write_set = []
18344+ stdout = None
18345+ stderr = None
18346+
18347+ if self.stdin:
18348+ self.stdin.flush()
18349+ if input:
18350+ write_set.append(self.stdin)
18351+ else:
18352+ self.stdin.close()
18353+ if self.stdout:
18354+ read_set.append(self.stdout)
18355+ stdout = []
18356+ if self.stderr:
18357+ read_set.append(self.stderr)
18358+ stderr = []
18359+
18360+ input_offset = 0
18361+ while read_set or write_set:
18362+ rlist, wlist, xlist = select.select(read_set, write_set, [])
18363+
18364+ if self.stdin in wlist:
18365+ bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512))
18366+ input_offset += bytes_written
18367+ if input_offset >= len(input):
18368+ self.stdin.close()
18369+ write_set.remove(self.stdin)
18370+
18371+ if self.stdout in rlist:
18372+ data = os.read(self.stdout.fileno(), 1024)
18373+ if data == "":
18374+ self.stdout.close()
18375+ read_set.remove(self.stdout)
18376+ stdout.append(data)
18377+
18378+ if self.stderr in rlist:
18379+ data = os.read(self.stderr.fileno(), 1024)
18380+ if data == "":
18381+ self.stderr.close()
18382+ read_set.remove(self.stderr)
18383+ stderr.append(data)
18384+
18385+ if stdout is not None:
18386+ stdout = ''.join(stdout)
18387+ if stderr is not None:
18388+ stderr = ''.join(stderr)
18389+
18390+ if self.universal_newlines and hasattr(file, 'newlines'):
18391+ if stdout:
18392+ stdout = self._translate_newlines(stdout)
18393+ if stderr:
18394+ stderr = self._translate_newlines(stderr)
18395+
18396+ self.wait()
18397+ return (stdout, stderr)
18398+
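pproc is a vendored copy of the subprocess module so that waf keeps working on very old interpreters; the API is the familiar one. A minimal usage sketch (written against the stdlib subprocess so the snippet runs anywhere; the echo/false commands assume a POSIX system):

    import subprocess

    p = subprocess.Popen(['echo', 'hello'], stdout=subprocess.PIPE)
    out = p.communicate()[0]
    print('%d %r' % (p.returncode, out))          # 0 'hello\n' (bytes on Python 3)

    # check_call() raises CalledProcessError on a non-zero exit status
    try:
        subprocess.check_call(['false'])
    except subprocess.CalledProcessError as e:
        print('failed with %d' % e.returncode)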
18399diff --git a/buildtools/wafadmin/py3kfixes.py b/buildtools/wafadmin/py3kfixes.py
18400new file mode 100644
18401index 0000000..2f3c9c2
18402--- /dev/null
18403+++ b/buildtools/wafadmin/py3kfixes.py
18404@@ -0,0 +1,130 @@
18405+#!/usr/bin/env python
18406+# encoding: utf-8
18407+# Thomas Nagy, 2009 (ita)
18408+
18409+"""
18410+Fixes for py3k go here
18411+"""
18412+
18413+import os
18414+
18415+all_modifs = {}
18416+
18417+def modif(dir, name, fun):
18418+ if name == '*':
18419+ lst = []
18420+ for y in '. Tools 3rdparty'.split():
18421+ for x in os.listdir(os.path.join(dir, y)):
18422+ if x.endswith('.py'):
18423+ lst.append(y + os.sep + x)
18424+ #lst = [y + os.sep + x for x in os.listdir(os.path.join(dir, y)) for y in '. Tools 3rdparty'.split() if x.endswith('.py')]
18425+ for x in lst:
18426+ modif(dir, x, fun)
18427+ return
18428+
18429+ filename = os.path.join(dir, name)
18430+ f = open(filename, 'r')
18431+ txt = f.read()
18432+ f.close()
18433+
18434+ txt = fun(txt)
18435+
18436+ f = open(filename, 'w')
18437+ f.write(txt)
18438+ f.close()
18439+
18440+def subst(filename):
18441+ def do_subst(fun):
18442+ global all_modifs
18443+ try:
18444+			all_modifs[filename].append(fun)
18445+ except KeyError:
18446+ all_modifs[filename] = [fun]
18447+ return fun
18448+ return do_subst
18449+
18450+@subst('Constants.py')
18451+def r1(code):
18452+ code = code.replace("'iluvcuteoverload'", "b'iluvcuteoverload'")
18453+ code = code.replace("ABI=7", "ABI=37")
18454+ return code
18455+
18456+@subst('Tools/ccroot.py')
18457+def r2(code):
18458+ code = code.replace("p.stdin.write('\\n')", "p.stdin.write(b'\\n')")
18459+ code = code.replace('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")')
18460+ return code
18461+
18462+@subst('Utils.py')
18463+def r3(code):
18464+ code = code.replace("m.update(str(lst))", "m.update(str(lst).encode())")
18465+ code = code.replace('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")')
18466+ return code
18467+
18468+@subst('ansiterm.py')
18469+def r33(code):
18470+ code = code.replace('unicode', 'str')
18471+ return code
18472+
18473+@subst('Task.py')
18474+def r4(code):
18475+ code = code.replace("up(self.__class__.__name__)", "up(self.__class__.__name__.encode())")
18476+ code = code.replace("up(self.env.variant())", "up(self.env.variant().encode())")
18477+ code = code.replace("up(x.parent.abspath())", "up(x.parent.abspath().encode())")
18478+ code = code.replace("up(x.name)", "up(x.name.encode())")
18479+ code = code.replace('class TaskBase(object):\n\t__metaclass__=store_task_type', 'import binascii\n\nclass TaskBase(object, metaclass=store_task_type):')
18480+ code = code.replace('keys=self.cstr_groups.keys()', 'keys=list(self.cstr_groups.keys())')
18481+ code = code.replace("sig.encode('hex')", 'binascii.hexlify(sig)')
18482+ code = code.replace("os.path.join(Options.cache_global,ssig)", "os.path.join(Options.cache_global,ssig.decode())")
18483+ return code
18484+
18485+@subst('Build.py')
18486+def r5(code):
18487+ code = code.replace("cPickle.dump(data,file,-1)", "cPickle.dump(data,file)")
18488+ code = code.replace('for node in src_dir_node.childs.values():', 'for node in list(src_dir_node.childs.values()):')
18489+ return code
18490+
18491+@subst('*')
18492+def r6(code):
18493+ code = code.replace('xrange', 'range')
18494+ code = code.replace('iteritems', 'items')
18495+ code = code.replace('maxint', 'maxsize')
18496+ code = code.replace('iterkeys', 'keys')
18497+ code = code.replace('Error,e:', 'Error as e:')
18498+ code = code.replace('Exception,e:', 'Exception as e:')
18499+ return code
18500+
18501+@subst('TaskGen.py')
18502+def r7(code):
18503+ code = code.replace('class task_gen(object):\n\t__metaclass__=register_obj', 'class task_gen(object, metaclass=register_obj):')
18504+ return code
18505+
18506+@subst('Tools/python.py')
18507+def r8(code):
18508+ code = code.replace('proc.communicate()[0]', 'proc.communicate()[0].decode("utf-8")')
18509+ return code
18510+
18511+@subst('Tools/glib2.py')
18512+def r9(code):
18513+ code = code.replace('f.write(c)', 'f.write(c.encode("utf-8"))')
18514+ return code
18515+
18516+@subst('Tools/config_c.py')
18517+def r10(code):
18518+ code = code.replace("key=kw['success']", "key=kw['success']\n\t\t\t\ttry:\n\t\t\t\t\tkey=key.decode('utf-8')\n\t\t\t\texcept:\n\t\t\t\t\tpass")
18519+ code = code.replace('out=str(out)','out=out.decode("utf-8")')
18520+ code = code.replace('err=str(err)','err=err.decode("utf-8")')
18521+ return code
18522+
18523+@subst('Tools/d.py')
18524+def r11(code):
18525+ code = code.replace('ret.strip()', 'ret.strip().decode("utf-8")')
18526+ return code
18527+
18528+def fixdir(dir):
18529+ global all_modifs
18530+ for k in all_modifs:
18531+ for v in all_modifs[k]:
18532+ modif(os.path.join(dir, 'wafadmin'), k, v)
18533+ #print('substitutions finished')
18534+
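A self-contained sketch of the mechanism above: @subst registers plain-text substitutions per file name, and fixdir()/modif() later rewrite the installed sources in place. The sample snippet being rewritten is invented for the illustration:

    all_modifs = {}

    def subst(filename):
        def do_subst(fun):
            all_modifs.setdefault(filename, []).append(fun)
            return fun
        return do_subst

    @subst('Utils.py')
    def fix_hash(code):
        # same substitution as the r3() fixer above
        return code.replace("m.update(str(lst))", "m.update(str(lst).encode())")

    source = "def h_list(lst):\n\tm = md5()\n\tm.update(str(lst))\n\treturn m.digest()\n"
    for fun in all_modifs['Utils.py']:
        source = fun(source)
    print(source)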