diff options
author | Richard Purdie <richard@openedhand.com> | 2005-08-31 10:45:47 +0000 |
---|---|---|
committer | Richard Purdie <richard@openedhand.com> | 2005-08-31 10:45:47 +0000 |
commit | 4b46c1f6e891b1ddd5968536440b888661fade3e (patch) | |
tree | e0ba2c1f56f61b868bf746da5c4feabb25b800b2 /openembedded/classes/base.bbclass | |
download | poky-4b46c1f6e891b1ddd5968536440b888661fade3e.tar.gz |
Initial population
git-svn-id: https://svn.o-hand.com/repos/poky@1 311d38ba-8fff-0310-9ca6-ca027cbcb966
Diffstat (limited to 'openembedded/classes/base.bbclass')
-rw-r--r-- | openembedded/classes/base.bbclass | 801 |
1 file changed, 801 insertions, 0 deletions
diff --git a/openembedded/classes/base.bbclass b/openembedded/classes/base.bbclass new file mode 100644 index 0000000000..37254b94ba --- /dev/null +++ b/openembedded/classes/base.bbclass | |||
@@ -0,0 +1,801 @@ | |||
1 | PATCHES_DIR="${S}" | ||
2 | |||
def base_dep_prepend(d):
	# Compute the implicit build dependencies prepended to DEPENDS:
	# the patch tool provider (unless this recipe itself provides it) and,
	# for cross builds, the cross toolchain and libc.
	import bb;
	#
	# Ideally this will check a flag so we will operate properly in
	# the case where host == build == target, for now we don't work in
	# that case though.
	#
	deps = ""

	# INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
	# we need that built is the responsibility of the patch function / class, not
	# the application.
	patchdeps = bb.data.getVar("PATCH_DEPENDS", d, 1)
	if patchdeps and not patchdeps in bb.data.getVar("PROVIDES", d, 1):
		deps = patchdeps

	if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
		# Toolchain deps only make sense when cross-compiling
		# (host system differs from build system).
		if (bb.data.getVar('HOST_SYS', d, 1) !=
		    bb.data.getVar('BUILD_SYS', d, 1)):
			deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
	return deps
24 | |||
def base_read_file(filename):
	# Return the contents of 'filename' with surrounding whitespace
	# stripped.  Raises bb.build.FuncFailed when the file cannot be read.
	try:
		f = open( filename, "r" )
	except IOError:
		import sys, bb
		reason = sys.exc_info()[1]
		# Bug fix: the original passed the format string and the value
		# tuple as two separate arguments, so the message was never
		# %-interpolated; format it explicitly here.
		raise bb.build.FuncFailed("can't read from file '%s' (%s)" % (filename, reason))
	else:
		try:
			return f.read().strip()
		finally:
			# Bug fix: the original leaked the file handle.
			f.close()
34 | |||
def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
	# Return 'truevalue' when the expanded value of 'variable' equals
	# 'checkvalue', otherwise return 'falsevalue'.
	import bb
	current = bb.data.getVar(variable, d, 1)
	if current == checkvalue:
		return truevalue
	return falsevalue
41 | |||
42 | DEPENDS_prepend="${@base_dep_prepend(d)} " | ||
43 | |||
def base_set_filespath(path, d):
	# Populate FILESPATH: every directory in 'path' crossed with every
	# OVERRIDES entry.  The appended ":" yields one empty override per
	# directory, which keeps the bare directory itself on the search path.
	import os, bb
	search = []
	for base in path:
		overrides = (bb.data.getVar("OVERRIDES", d, 1) or "") + ":"
		for override in overrides.split(":"):
			search.append(os.path.join(base, override))
	bb.data.setVar("FILESPATH", ":".join(search), d)
53 | |||
54 | FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}" | ||
55 | |||
def oe_filter(f, str, d):
	# Return the whitespace-separated words of 'str' that match the
	# regular expression 'f' (anchored at the start of each word),
	# joined back together with single spaces.
	import re
	kept = [word for word in str.split() if re.match(f, word, 0)]
	return " ".join(kept)
59 | |||
def oe_filter_out(f, str, d):
	# Return the whitespace-separated words of 'str' that do NOT match
	# the regular expression 'f' (anchored at the start of each word).
	import re
	kept = [word for word in str.split() if not re.match(f, word, 0)]
	return " ".join(kept)
63 | |||
# Abort the build immediately with a fatal error message.
die() {
	oefatal "$*"
}
67 | |||
# Print an informational message.
oenote() {
	echo "NOTE:" "$*"
}
71 | |||
# Print a warning message; does not stop the build.
oewarn() {
	echo "WARNING:" "$*"
}
75 | |||
# Print a fatal error message and terminate the current task.
oefatal() {
	echo "FATAL:" "$*"
	exit 1
}
80 | |||
# Print a debug message when the OEDEBUG level is at least $1.
# Usage: oedebug <level> <message...>
oedebug() {
	test $# -ge 2 || {
		echo "Usage: oedebug level \"message\""
		exit 1
	}

	# OEDEBUG defaults to 0 (no debug output) when unset.
	test ${OEDEBUG:-0} -ge $1 && {
		shift
		echo "DEBUG:" $*
	}
}
92 | |||
# Run make (or $MAKE) with EXTRA_OEMAKE and the given arguments,
# dying if the invocation fails.
oe_runmake() {
	if [ x"$MAKE" = x ]; then MAKE=make; fi
	oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
	${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
}
98 | |||
oe_soinstall() {
	# Purpose: Install shared library file and
	# create the necessary links
	# Example:
	#
	# oe_soinstall libfoo.so.1.2.3 ${D}${libdir}
	#
	#oenote installing shared library $1 to $2
	#
	libname=`basename $1`
	install -m 755 $1 $2/$libname
	# Extract the DT_SONAME from the ELF dynamic section and create the
	# soname symlink plus the plain .so development symlink.
	sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
	solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
	ln -sf $libname $2/$sonamelink
	ln -sf $libname $2/$solink
}
115 | |||
oe_libinstall() {
	# Purpose: Install a library, in all its forms
	#   -C dir  change into 'dir' first
	#   -s      silent (do not echo commands)
	#   -a      require/install the static archive (.a)
	#   -so     require a shared library to be found
	# Example
	#
	# oe_libinstall libltdl ${STAGING_LIBDIR}/
	# oe_libinstall -C src/libblah libblah ${D}/${libdir}/
	dir=""
	libtool=""
	silent=""
	require_static=""
	require_shared=""
	while [ "$#" -gt 0 ]; do
		case "$1" in
		-C)
			shift
			dir="$1"
			;;
		-s)
			silent=1
			;;
		-a)
			require_static=1
			;;
		-so)
			require_shared=1
			;;
		-*)
			oefatal "oe_libinstall: unknown option: $1"
			;;
		*)
			break;
			;;
		esac
		shift
	done

	libname="$1"
	shift
	destpath="$1"
	if [ -z "$destpath" ]; then
		oefatal "oe_libinstall: no destination path specified"
	fi

	# Helper: echo the command to stderr (unless -s) and then run it.
	__runcmd () {
		if [ -z "$silent" ]; then
			echo >&2 "oe_libinstall: $*"
		fi
		$*
	}

	if [ -z "$dir" ]; then
		dir=`pwd`
	fi
	# libtool puts real objects under .libs; prefer that if present.
	if [ -d "$dir/.libs" ]; then
		dir=$dir/.libs
	fi
	olddir=`pwd`
	__runcmd cd $dir

	lafile=$libname.la
	if [ -f "$lafile" ]; then
		# libtool archive: take the library file list from the .la file
		eval `cat $lafile|grep "^library_names="`
		libtool=1
	else
		library_names="$libname.so* $libname.dll.a"
	fi

	__runcmd install -d $destpath/
	dota=$libname.a
	if [ -f "$dota" -o -n "$require_static" ]; then
		__runcmd install -m 0644 $dota $destpath/
	fi
	dotlai=$libname.lai
	if [ -f "$dotlai" -a -n "$libtool" ]; then
		# the .lai file is the installed form of the .la file
		__runcmd install -m 0644 $dotlai $destpath/$libname.la
	fi

	for name in $library_names; do
		files=`eval echo $name`
		for f in $files; do
			if [ ! -e "$f" ]; then
				# a missing file is fatal only for libtool builds,
				# where library_names is authoritative
				if [ -n "$libtool" ]; then
					oefatal "oe_libinstall: $dir/$f not found."
				fi
			elif [ -L "$f" ]; then
				# preserve symlinks as symlinks
				__runcmd cp -P "$f" $destpath/
			elif [ ! -L "$f" ]; then
				libfile="$f"
				__runcmd install -m 0755 $libfile $destpath/
			fi
		done
	done

	if [ -z "$libfile" ]; then
		if [ -n "$require_shared" ]; then
			oefatal "oe_libinstall: unable to locate shared library"
		fi
	elif [ -z "$libtool" ]; then
		# special case hack for non-libtool .so.#.#.# links
		baselibfile=`basename "$libfile"`
		if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
			sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
			solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
			if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
				__runcmd ln -sf $baselibfile $destpath/$sonamelink
			fi
			__runcmd ln -sf $baselibfile $destpath/$solink
		fi
	fi

	__runcmd cd "$olddir"
}
229 | |||
oe_machinstall() {
	# Purpose: Install machine dependent files, if available
	#          If not available, check if there is a default
	#          If no default, just touch the destination
	# Example:
	#                $1  $2   $3         $4
	# oe_machinstall -m 0644 fstab ${D}/etc/fstab
	#
	# TODO: Check argument number?
	#
	filename=`basename $3`
	dirname=`dirname $3`

	# Search each OVERRIDES entry for an override-specific copy of the file.
	for o in `echo ${OVERRIDES} | tr ':' ' '`; do
		if [ -e $dirname/$o/$filename ]; then
			oenote $dirname/$o/$filename present, installing to $4
			install $1 $2 $dirname/$o/$filename $4
			return
		fi
	done
#	oenote overrides specific file NOT present, trying default=$3...
	if [ -e $3 ]; then
		oenote $3 present, installing to $4
		install $1 $2 $3 $4
	else
		oenote $3 NOT present, touching empty $4
		touch $4
	fi
}
259 | |||
# Debug task: dump the full datastore (variables, shell and python functions)
# to stdout.  Never stamped, so it reruns every time.
addtask showdata
do_showdata[nostamp] = "1"
python do_showdata() {
	import sys
	# emit variables and shell functions
	bb.data.emit_env(sys.__stdout__, d, True)
	# emit the metadata which isnt valid shell
	for e in d.keys():
		if bb.data.getVarFlag(e, 'python', d):
			sys.__stdout__.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1)))
}
271 | |||
# Debug task: print the name of every variable flagged as a task.
addtask listtasks
do_listtasks[nostamp] = "1"
python do_listtasks() {
	import sys
	# emit variables and shell functions
	#bb.data.emit_env(sys.__stdout__, d)
	# emit the metadata which isnt valid shell
	for e in d.keys():
		if bb.data.getVarFlag(e, 'task', d):
			sys.__stdout__.write("%s\n" % e)
}
283 | |||
# Remove the recipe's work directory and stamp files.
addtask clean
do_clean[dirs] = "${TOPDIR}"
do_clean[nostamp] = "1"
do_clean[bbdepcmd] = ""
python base_do_clean() {
	"""clear the build and temp directories"""
	dir = bb.data.expand("${WORKDIR}", d)
	# sanity guard against an empty/unset WORKDIR expansion
	if dir == '//': raise bb.build.FuncFailed("wrong DATADIR")
	bb.note("removing " + dir)
	os.system('rm -rf ' + dir)

	# stamps share a common prefix; glob them all away
	dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
	bb.note("removing " + dir)
	os.system('rm -f '+ dir)
}
299 | |||
# Remove downloaded sources as well as the work directory (runs do_clean too).
addtask mrproper
do_mrproper[dirs] = "${TOPDIR}"
do_mrproper[nostamp] = "1"
do_mrproper[bbdepcmd] = ""
python base_do_mrproper() {
	"""clear downloaded sources, build and temp directories"""
	dir = bb.data.expand("${DL_DIR}", d)
	# Bug fix: the original constructed bb.build.FuncFailed(...) without
	# 'raise', so the guard against deleting '/' was a no-op and execution
	# fell through to the rm -rf below.
	if dir == '/': raise bb.build.FuncFailed("wrong DATADIR")
	bb.debug(2, "removing " + dir)
	os.system('rm -rf ' + dir)
	bb.build.exec_task('do_clean', d)
}
312 | |||
# Fetch all entries of SRC_URI into DL_DIR, translating fetcher errors
# into FuncFailed so the build aborts with a readable message.
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[nostamp] = "1"
python base_do_fetch() {
	import sys

	# work on a fully-expanded copy of the datastore
	localdata = bb.data.createCopy(d)
	bb.data.update_data(localdata)

	src_uri = bb.data.getVar('SRC_URI', localdata, 1)
	if not src_uri:
		return 1

	try:
		bb.fetch.init(src_uri.split(),d)
	except bb.fetch.NoMethodError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("No method: %s" % value)

	try:
		bb.fetch.go(localdata)
	except bb.fetch.MissingParameterError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Missing parameters: %s" % value)
	except bb.fetch.FetchError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Fetch failed: %s" % value)
}
341 | |||
def oe_unpack_file(file, data, url = None):
	# Unpack a single fetched file into the current directory, choosing the
	# extraction command from the file extension.  Directories and plain
	# files are copied.  Returns True on success (or nothing to do),
	# False when the unpack command exits non-zero.
	import bb, os
	if not url:
		url = "file://%s" % file
	dots = file.split(".")
	if dots[-1] in ['gz', 'bz2', 'Z']:
		# name of the decompressed output inside WORKDIR
		efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
	else:
		efile = file
	cmd = None
	if file.endswith('.tar'):
		cmd = 'tar x --no-same-owner -f %s' % file
	elif file.endswith('.tgz') or file.endswith('.tar.gz'):
		cmd = 'tar xz --no-same-owner -f %s' % file
	elif file.endswith('.tbz') or file.endswith('.tar.bz2'):
		cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
	elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
		cmd = 'gzip -dc %s > %s' % (file, efile)
	elif file.endswith('.bz2'):
		cmd = 'bzip2 -dc %s > %s' % (file, efile)
	elif file.endswith('.zip'):
		cmd = 'unzip -q %s' % file
	elif os.path.isdir(file):
		# copy a directory, preserving its path relative to FILESDIR
		filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, 1))
		destdir = "."
		if file[0:len(filesdir)] == filesdir:
			destdir = file[len(filesdir):file.rfind('/')]
			destdir = destdir.strip('/')
			if len(destdir) < 1:
				destdir = "."
			elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
				os.makedirs("%s/%s" % (os.getcwd(), destdir))
		cmd = 'cp -a %s %s/%s/' % (file, os.getcwd(), destdir)
	else:
		(type, host, path, user, pswd, parm) = bb.decodeurl(url)
		if not 'patch' in parm:
			# The "destdir" handling was specifically done for FILESPATH
			# items. So, only do so for file:// entries.
			if type == "file":
				# NOTE(review): decodeurl()[1] is the host part of the
				# URL — presumably empty for most file:// URIs; confirm.
				destdir = bb.decodeurl(url)[1] or "."
			else:
				destdir = "."
			bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
			cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
	if not cmd:
		return True
	# run the unpack command with the configured PATH
	cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
	bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
	ret = os.system(cmd)
	return ret == 0
392 | |||
# Unpack every SRC_URI entry into WORKDIR via oe_unpack_file().
addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
python base_do_unpack() {
	import re, os

	localdata = bb.data.createCopy(d)
	bb.data.update_data(localdata)

	src_uri = bb.data.getVar('SRC_URI', localdata)
	if not src_uri:
		return
	src_uri = bb.data.expand(src_uri, localdata)
	for url in src_uri.split():
		try:
			local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
		except bb.MalformedUrl, e:
			# Bug fix: the original raised a bare 'FuncFailed', which is
			# not a name in this scope and would itself raise NameError;
			# qualify it as bb.build.FuncFailed.
			raise bb.build.FuncFailed('Unable to generate local path for malformed uri: %s' % e)
		# dont need any parameters for extraction, strip them off
		local = re.sub(';.*$', '', local)
		local = os.path.realpath(local)
		ret = oe_unpack_file(local, localdata, url)
		if not ret:
			raise bb.build.FuncFailed()
}
417 | |||
# Apply every SRC_URI entry carrying a 'patch' parameter, via the
# PATCHCMD/PATCHCLEANCMD shell commands rendered into temporary functions.
addtask patch after do_unpack
do_patch[dirs] = "${WORKDIR}"
python base_do_patch() {
	import re
	import bb.fetch

	src_uri = (bb.data.getVar('SRC_URI', d, 1) or '').split()
	if not src_uri:
		return

	# run the patch-tree clean command first, if configured
	patchcleancmd = bb.data.getVar('PATCHCLEANCMD', d, 1)
	if patchcleancmd:
		bb.data.setVar("do_patchcleancmd", patchcleancmd, d)
		bb.data.setVarFlag("do_patchcleancmd", "func", 1, d)
		bb.build.exec_func("do_patchcleancmd", d)

	workdir = bb.data.getVar('WORKDIR', d, 1)
	for url in src_uri:

		(type, host, path, user, pswd, parm) = bb.decodeurl(url)
		if not "patch" in parm:
			continue

		bb.fetch.init([url], d)
		# re-encode without parameters to get the plain local path
		url = bb.encodeurl((type, host, path, user, pswd, []))
		local = os.path.join('/', bb.fetch.localpath(url, d))

		# did it need to be unpacked?
		dots = os.path.basename(local).split(".")
		if dots[-1] in ['gz', 'bz2', 'Z']:
			unpacked = os.path.join(bb.data.getVar('WORKDIR', d),'.'.join(dots[0:-1]))
		else:
			unpacked = local
		unpacked = bb.data.expand(unpacked, d)

		# strip level for patch(1); defaults to -p1
		if "pnum" in parm:
			pnum = parm["pnum"]
		else:
			pnum = "1"

		if "pname" in parm:
			pname = parm["pname"]
		else:
			pname = os.path.basename(unpacked)

		bb.note("Applying patch '%s'" % pname)
		# PATCHCMD is a %-template taking (strip level, name, file)
		bb.data.setVar("do_patchcmd", bb.data.getVar("PATCHCMD", d, 1) % (pnum, pname, unpacked), d)
		bb.data.setVarFlag("do_patchcmd", "func", 1, d)
		bb.data.setVarFlag("do_patchcmd", "dirs", "${WORKDIR} ${S}", d)
		bb.build.exec_func("do_patchcmd", d)
}
469 | |||
470 | |||
# Global event handler: prints human-readable progress notes, shows the
# build configuration banner, validates required variables at BuildStarted,
# and optionally appends messages to the EVENTLOG file.
addhandler base_eventhandler
python base_eventhandler() {
	from bb import note, error, data
	from bb.event import Handled, NotHandled, getName
	import os

	# map event-name suffixes to friendly verbs
	messages = {}
	messages["Completed"] = "completed"
	messages["Succeeded"] = "completed"
	messages["Started"] = "started"
	messages["Failed"] = "failed"

	name = getName(e)
	msg = ""
	if name.startswith("Pkg"):
		msg += "package %s: " % data.getVar("P", e.data, 1)
		msg += messages.get(name[3:]) or name[3:]
	elif name.startswith("Task"):
		msg += "package %s: task %s: " % (data.getVar("PF", e.data, 1), e.task)
		msg += messages.get(name[4:]) or name[4:]
	elif name.startswith("Build"):
		msg += "build %s: " % e.name
		msg += messages.get(name[5:]) or name[5:]
	elif name == "UnsatisfiedDep":
		msg += "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
	note(msg)

	if name.startswith("BuildStarted"):
		statusvars = ['TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO',
		'TARGET_FPU']
		statuslines = ["%-13s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
		statusmsg = "\nOE Build Configuration:\n%s\n" % '\n'.join(statuslines)
		print statusmsg

		# refuse to build if essential variables are unset
		needed_vars = [ "TARGET_ARCH", "TARGET_OS" ]
		pesteruser = []
		for v in needed_vars:
			val = bb.data.getVar(v, e.data, 1)
			if not val or val == 'INVALID':
				pesteruser.append(v)
		if pesteruser:
			bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

	# NOTE(review): 'data' here is the bb.data module object, so this
	# membership test against e.__dict__'s string keys looks like it can
	# never be true — presumably "'data' in e.__dict__" was intended;
	# as written the EVENTLOG code below appears unreachable.  Confirm.
	if not data in e.__dict__:
		return NotHandled

	log = data.getVar("EVENTLOG", e.data, 1)
	if log:
		logfile = file(log, "a")
		logfile.write("%s\n" % msg)
		logfile.close()

	return NotHandled
}
525 | |||
# Configure step; the base implementation is a no-op for recipes to override.
addtask configure after do_unpack do_patch
do_configure[dirs] = "${S} ${B}"
do_configure[bbdepcmd] = "do_populate_staging"
base_do_configure() {
	:
}
532 | |||
# Compile step: run make when a Makefile exists, otherwise do nothing.
addtask compile after do_configure
do_compile[dirs] = "${S} ${B}"
do_compile[bbdepcmd] = "do_populate_staging"
base_do_compile() {
	if [ -e Makefile -o -e makefile ]; then
		oe_runmake || die "make failed"
	else
		oenote "nothing to compile"
	fi
}
543 | |||
544 | |||
# Staging step; the base implementation is a no-op for recipes to override.
addtask stage after do_compile
base_do_stage () {
	:
}
549 | |||
# Directories that must exist before staging runs.
do_populate_staging[dirs] = "${STAGING_DIR}/${TARGET_SYS}/bin ${STAGING_DIR}/${TARGET_SYS}/lib \
			     ${STAGING_DIR}/${TARGET_SYS}/include \
			     ${STAGING_DIR}/${BUILD_SYS}/bin ${STAGING_DIR}/${BUILD_SYS}/lib \
			     ${STAGING_DIR}/${BUILD_SYS}/include \
			     ${STAGING_DATADIR} \
			     ${S} ${B}"

addtask populate_staging after do_compile

#python do_populate_staging () {
#	if not bb.data.getVar('manifest', d):
#		bb.build.exec_func('do_emit_manifest', d)
#	if bb.data.getVar('do_stage', d):
#		bb.build.exec_func('do_stage', d)
#	else:
#		bb.build.exec_func('manifest_do_populate_staging', d)
#}

# Prefer the manifest-generated staging function when parse_manifest
# produced one; otherwise fall back to the recipe's do_stage.
python do_populate_staging () {
	if bb.data.getVar('manifest_do_populate_staging', d):
		bb.build.exec_func('manifest_do_populate_staging', d)
	else:
		bb.build.exec_func('do_stage', d)
}
574 | |||
#addtask install
# Install step; the base implementation is a no-op for recipes to override.
addtask install after do_compile
do_install[dirs] = "${S} ${B}"

base_do_install() {
	:
}
582 | |||
#addtask populate_pkgs after do_compile
#python do_populate_pkgs () {
#	if not bb.data.getVar('manifest', d):
#		bb.build.exec_func('do_emit_manifest', d)
#	bb.build.exec_func('manifest_do_populate_pkgs', d)
#	bb.build.exec_func('package_do_shlibs', d)
#}

# Package step; the base implementation is a no-op for recipes to override.
base_do_package() {
	:
}
594 | |||
# 'build' is the default umbrella task; it has no body of its own.
addtask build after do_populate_staging
do_build = ""
do_build[func] = "1"

# Functions that update metadata based on files outputted
# during the build process.

# Shared-library provides discovered at packaging time; folded into RDEPENDS.
SHLIBS = ""
RDEPENDS_prepend = " ${SHLIBS}"
604 | |||
# Parse the MANIFEST file, if readable, and store the parsed structure
# in the 'manifest' datastore variable for parse_manifest to consume.
python read_manifest () {
	import sys
	mfn = bb.data.getVar("MANIFEST", d, 1)
	if os.access(mfn, os.R_OK):
		# we have a manifest, so emit do_stage and do_populate_pkgs,
		# and stuff some additional bits of data into the metadata store
		mfile = file(mfn, "r")
		manifest = bb.manifest.parse(mfile, d)
		if not manifest:
			return

		bb.data.setVar('manifest', manifest, d)
	}
618 | |||
# From the parsed manifest, synthesize shell function bodies for staging
# and packaging (manifest_do_*), and derive the PACKAGES list.
python parse_manifest () {
	manifest = bb.data.getVar("manifest", d)
	if not manifest:
		return
	for func in ("do_populate_staging", "do_populate_pkgs"):
		value = bb.manifest.emit(func, manifest, d)
		if value:
			bb.data.setVar("manifest_" + func, value, d)
			# emitted bodies are shell, not python, and not fakeroot
			bb.data.delVarFlag("manifest_" + func, "python", d)
			bb.data.delVarFlag("manifest_" + func, "fakeroot", d)
			bb.data.setVarFlag("manifest_" + func, "func", 1, d)
	packages = []
	for l in manifest:
		if "pkg" in l and l["pkg"] is not None:
			packages.append(l["pkg"])
	bb.data.setVar("PACKAGES", " ".join(packages), d)
}
636 | |||
def explode_deps(s):
	# Split a dependency string into a list of dependencies, re-attaching
	# parenthesised version constraints to the package they follow,
	# e.g. "a (>= 1.0) b" -> ["a (>= 1.0)", "b"].
	exploded = []
	in_version = False
	pending = []
	for token in s.split():
		if token.startswith('('):
			# a version constraint group begins
			in_version = True
			pending = []
		if not in_version:
			exploded.append(token)
			continue
		pending.append(token)
		if token.endswith(')'):
			# group complete: fold it onto the preceding dependency
			in_version = False
			exploded[-1] += ' ' + ' '.join(pending)
	return exploded
653 | |||
# Merge per-package shared-library (.shlibdeps) and pkg-config (.pcdeps)
# dependency files, written at packaging time, into each RDEPENDS_<pkg>.
python read_shlibdeps () {
	packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
	for pkg in packages:
		rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 0) or bb.data.getVar('RDEPENDS', d, 0) or "")
		shlibsfile = bb.data.expand("${WORKDIR}/install/" + pkg + ".shlibdeps", d)
		if os.access(shlibsfile, os.R_OK):
			fd = file(shlibsfile)
			lines = fd.readlines()
			fd.close()
			for l in lines:
				rdepends.append(l.rstrip())
		pcfile = bb.data.expand("${WORKDIR}/install/" + pkg + ".pcdeps", d)
		if os.access(pcfile, os.R_OK):
			fd = file(pcfile)
			lines = fd.readlines()
			fd.close()
			for l in lines:
				rdepends.append(l.rstrip())
		bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d)
}
674 | |||
# Read "key: value" lines from the ${PN}.package file (written during
# packaging) back into the datastore, unescaping the stored values.
python read_subpackage_metadata () {
	import re

	# values were stored string-escaped; reverse that here
	def decode(str):
		import codecs
		c = codecs.getdecoder("string_escape")
		return c(str)[0]

	data_file = bb.data.expand("${WORKDIR}/install/${PN}.package", d)
	if os.access(data_file, os.R_OK):
		f = file(data_file, 'r')
		lines = f.readlines()
		f.close()
		r = re.compile("([^:]+):\s*(.*)")
		for l in lines:
			m = r.match(l)
			if m:
				bb.data.setVar(m.group(1), decode(m.group(2)), d)
}
694 | |||
# Anonymous parse-time hook: skip incompatible hosts, honour per-recipe
# CVSDATE/USE_NLS overrides, and read any pre-existing manifest.
python __anonymous () {
	import exceptions
	need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
	if need_host:
		import re
		this_host = bb.data.getVar('HOST_SYS', d, 1)
		if not re.match(need_host, this_host):
			raise bb.parse.SkipPackage("incompatible with host %s" % this_host)

	pn = bb.data.getVar('PN', d, 1)

	# per-recipe CVSDATE_<pn> overrides the global CVSDATE
	cvsdate = bb.data.getVar('CVSDATE_%s' % pn, d, 1)
	if cvsdate != None:
		bb.data.setVar('CVSDATE', cvsdate, d)

	# per-recipe USE_NLS_<pn> overrides the global USE_NLS
	use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
	if use_nls != None:
		bb.data.setVar('USE_NLS', use_nls, d)

	# best-effort: a missing/broken manifest must not abort parsing
	try:
		bb.build.exec_func('read_manifest', d)
		bb.build.exec_func('parse_manifest', d)
	except exceptions.KeyboardInterrupt:
		raise
	except Exception, e:
		bb.error("anonymous function: %s" % e)
		pass
}
723 | |||
# Anonymous parse-time hook: if any SRC_URI file comes from a
# machine-specific FILESPATH directory, promote PACKAGE_ARCH to
# MACHINE_ARCH (unless SRC_URI_OVERRIDES_PACKAGE_ARCH is "0").
python () {
	import bb, os
	mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
	old_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
	if (old_arch == mach_arch):
		# Nothing to do
		return
	if (bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1) == '0'):
		return
	# machine-specific subdirectories of the usual FILESPATH entries
	paths = []
	for p in [ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ]:
		paths.append(bb.data.expand(os.path.join(p, mach_arch), d))
	for s in bb.data.getVar('SRC_URI', d, 1).split():
		local = bb.data.expand(bb.fetch.localpath(s, d), d)
		for mp in paths:
			if local.startswith(mp):
#				bb.note("overriding PACKAGE_ARCH from %s to %s" % (old_arch, mach_arch))
				bb.data.setVar('PACKAGE_ARCH', mach_arch, d)
				return
}
744 | |||
745 | |||
# Generate a manifest by running do_install under wrapper scripts that
# record file operations, then read the result back in.
addtask emit_manifest
python do_emit_manifest () {
# FIXME: emit a manifest here
	# 1) adjust PATH to hit the wrapper scripts
	wrappers = bb.which(bb.data.getVar("BBPATH", d, 1), 'build/install', 0)
	path = (bb.data.getVar('PATH', d, 1) or '').split(':')
	path.insert(0, os.path.dirname(wrappers))
	bb.data.setVar('PATH', ':'.join(path), d)
	# 2) exec_func("do_install", d)
	bb.build.exec_func('do_install', d)
	# 3) read in data collected by the wrappers
	bb.build.exec_func('read_manifest', d)
	# 4) mangle the manifest we just generated, get paths back into
	#    our variable form
	# 5) write it back out
	# 6) re-parse it to ensure the generated functions are proper
	bb.build.exec_func('parse_manifest', d)
}
764 | |||
765 | EXPORT_FUNCTIONS do_clean do_mrproper do_fetch do_unpack do_configure do_compile do_install do_package do_patch do_populate_pkgs do_stage | ||
766 | |||
# Mirror table consumed by the fetcher: each line maps a source URI prefix
# (regex allowed) to an alternate download location.  Declared as a
# non-executable function body purely to hold the multi-line table.
MIRRORS[func] = "0"
MIRRORS () {
${DEBIAN_MIRROR}/main	http://snapshot.debian.net/archive/pool
${DEBIAN_MIRROR}	ftp://ftp.de.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.au.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.cl.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.hr.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.fi.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.hk.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.hu.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.ie.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.it.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.jp.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.no.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.pl.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.ro.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.si.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.es.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.se.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.tr.debian.org/debian/pool
${GNU_MIRROR}	ftp://mirrors.kernel.org/gnu
${GNU_MIRROR}	ftp://ftp.matrix.com.br/pub/gnu
${GNU_MIRROR}	ftp://ftp.cs.ubc.ca/mirror2/gnu
${GNU_MIRROR}	ftp://sunsite.ust.hk/pub/gnu
${GNU_MIRROR}	ftp://ftp.ayamura.org/pub/gnu
ftp://ftp.kernel.org/pub	http://www.kernel.org/pub
ftp://ftp.kernel.org/pub	ftp://ftp.us.kernel.org/pub
ftp://ftp.kernel.org/pub	ftp://ftp.uk.kernel.org/pub
ftp://ftp.kernel.org/pub	ftp://ftp.hk.kernel.org/pub
ftp://ftp.kernel.org/pub	ftp://ftp.au.kernel.org/pub
ftp://ftp.kernel.org/pub	ftp://ftp.jp.kernel.org/pub
ftp://.*/.*/	http://www.oesources.org/source/current/
http://.*/.*/	http://www.oesources.org/source/current/
}
801 | |||