diff options
author | Richard Purdie <richard@openedhand.com> | 2006-07-21 10:10:31 +0000 |
---|---|---|
committer | Richard Purdie <richard@openedhand.com> | 2006-07-21 10:10:31 +0000 |
commit | b2f192faabe412adce79534e22efe9fb69ee40e2 (patch) | |
tree | 7076c49d4286f8a1733650bd8fbc7161af200d57 /meta/classes/base.bbclass | |
parent | 2cf0eadf9f730027833af802d7e6c90b44248f80 (diff) | |
download | poky-b2f192faabe412adce79534e22efe9fb69ee40e2.tar.gz |
Rename /openembedded/ -> /meta/
git-svn-id: https://svn.o-hand.com/repos/poky/trunk@530 311d38ba-8fff-0310-9ca6-ca027cbcb966
Diffstat (limited to 'meta/classes/base.bbclass')
-rw-r--r-- | meta/classes/base.bbclass | 793 |
1 files changed, 793 insertions, 0 deletions
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass new file mode 100644 index 0000000000..8467ebddc2 --- /dev/null +++ b/meta/classes/base.bbclass | |||
@@ -0,0 +1,793 @@ | |||
# Directory in which patches are applied; defaults to the source tree (${S}).
PATCHES_DIR="${S}"

def base_dep_prepend(d):
    """Compute the default dependencies prepended to every recipe's DEPENDS.

    d: the BitBake datastore for the recipe being parsed.
    Returns a space-separated dependency string (possibly empty).
    """
    import bb;
    #
    # Ideally this will check a flag so we will operate properly in
    # the case where host == build == target, for now we don't work in
    # that case though.
    #
    deps = ""

    # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command.  Whether or not
    # we need that built is the responsibility of the patch function / class, not
    # the application.
    patchdeps = bb.data.getVar("PATCH_DEPENDS", d, 1)
    if patchdeps and not patchdeps in bb.data.getVar("PROVIDES", d, 1):
        deps = patchdeps

    if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
        # Only pull in the cross toolchain and libc when actually
        # cross-compiling (host system differs from build system).
        if (bb.data.getVar('HOST_SYS', d, 1) !=
            bb.data.getVar('BUILD_SYS', d, 1)):
            deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
    return deps

def base_read_file(filename):
    """Return the stripped contents of *filename*, or "" if it is unreadable.

    WARNING: can't raise an error on failure because of the new RDEPENDS
    handling.  This is a bit ugly. :M:
    """
    # Changes from the original: open() instead of the py2-only file()
    # builtin, the unused exception binding and unused `import bb` dropped,
    # the unreachable trailing `return None` removed, and the handle is
    # closed instead of leaked.
    try:
        f = open(filename, "r")
    except IOError:
        return ""
    try:
        return f.read().strip()
    finally:
        f.close()

def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
    """Return *truevalue* if the datastore value of *variable* equals
    *checkvalue*, otherwise *falsevalue*."""
    import bb
    if bb.data.getVar(variable, d, 1) == checkvalue:
        return truevalue
    return falsevalue

# Prepend the automatically computed default dependencies (toolchain, libc,
# patch tool) to every recipe's DEPENDS; see base_dep_prepend.
DEPENDS_prepend="${@base_dep_prepend(d)} "

def base_set_filespath(path, d):
    """Build FILESPATH from a list of base directories.

    For each directory in *path*, one entry per OVERRIDES element is added;
    the trailing ":" appended to OVERRIDES yields an empty final override,
    i.e. the plain base directory itself.  The result is stored in the
    FILESPATH variable of datastore *d*.

    path: list of base directories (usually derived from FILE_DIRNAME).
    d:    the BitBake datastore.
    """
    import os, bb
    filespath = []
    # OVERRIDES does not change per path element; the original re-read it
    # on every loop iteration, so hoist the lookup out of the loop.
    overrides = (bb.data.getVar("OVERRIDES", d, 1) or "") + ":"
    for p in path:
        for o in overrides.split(":"):
            filespath.append(os.path.join(p, o))
    bb.data.setVar("FILESPATH", ":".join(filespath), d)

# Default search path for files and patches: most specific (${PF}) first,
# the bare recipe directory last.
# NOTE(review): base_set_filespath returns None, so the inline-expanded
# value of this assignment is the string "None"; the useful value is the
# one set via setVar inside the function -- confirm expansion ordering.
FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"

def oe_filter(f, str, d):
    """Return the whitespace-separated words of *str* whose start matches
    the regular expression *f*, rejoined with single spaces."""
    from re import match
    kept = [word for word in str.split() if match(f, word, 0)]
    return " ".join(kept)

def oe_filter_out(f, str, d):
    """Return the whitespace-separated words of *str* whose start does NOT
    match the regular expression *f*, rejoined with single spaces."""
    from re import match
    kept = [word for word in str.split() if not match(f, word, 0)]
    return " ".join(kept)

# Abort the current task with a fatal error message (delegates to oefatal).
die() {
	oefatal "$*"
}

# Print an informational note to stdout.
oenote() {
	echo "NOTE:" "$*"
}

# Print a warning to stdout (does not abort).
oewarn() {
	echo "WARNING:" "$*"
}

# Print a fatal error to stdout and terminate the task with exit status 1.
oefatal() {
	echo "FATAL:" "$*"
	exit 1
}

# Print a debug message when the OEDEBUG level is at least $1.
# Usage: oedebug level "message"
oedebug() {
	test $# -ge 2 || {
		echo "Usage: oedebug level \"message\""
		exit 1
	}

	# OEDEBUG defaults to 0 (debugging off) when unset.
	test ${OEDEBUG:-0} -ge $1 && {
		shift
		echo "DEBUG:" $*
	}
}

# Run make (or $MAKE if set) with EXTRA_OEMAKE plus the given arguments,
# echoing the command first and dying on failure.
oe_runmake() {
	if [ x"$MAKE" = x ]; then MAKE=make; fi
	oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
	${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
}

oe_soinstall() {
	# Purpose: Install shared library file and
	#          create the necessary links
	# Example:
	#
	#  oe_soinstall libfoo.so.1.2.3 ${D}${libdir}
	#
	#oenote installing shared library $1 to $2
	#
	libname=`basename $1`
	install -m 755 $1 $2/$libname
	# Extract the DT_SONAME entry (e.g. "libfoo.so.1") from the ELF
	# dynamic section and create soname and development (.so) symlinks.
	sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
	solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
	ln -sf $libname $2/$sonamelink
	ln -sf $libname $2/$solink
}

oe_libinstall() {
	# Purpose: Install a library, in all its forms
	# Example
	#
	#  oe_libinstall libltdl ${STAGING_LIBDIR}/
	#  oe_libinstall -C src/libblah libblah ${D}/${libdir}/
	#
	# Options:
	#  -C dir   look for the library under dir instead of the current dir
	#  -s       silent: do not echo the commands being run
	#  -a       require/install the static archive (.a)
	#  -so      require a shared library to be found
	dir=""
	libtool=""
	silent=""
	require_static=""
	require_shared=""
	staging_install=""
	while [ "$#" -gt 0 ]; do
		case "$1" in
		-C)
			shift
			dir="$1"
			;;
		-s)
			silent=1
			;;
		-a)
			require_static=1
			;;
		-so)
			require_shared=1
			;;
		-*)
			oefatal "oe_libinstall: unknown option: $1"
			;;
		*)
			break;
			;;
		esac
		shift
	done

	libname="$1"
	shift
	destpath="$1"
	if [ -z "$destpath" ]; then
		oefatal "oe_libinstall: no destination path specified"
	fi
	# Installs into the staging library dir get special .la rewriting below.
	if echo "$destpath/" | egrep '^${STAGING_LIBDIR}/' >/dev/null
	then
		staging_install=1
	fi

	# Echo-and-run helper; honours the -s (silent) flag.
	__runcmd () {
		if [ -z "$silent" ]; then
			echo >&2 "oe_libinstall: $*"
		fi
		$*
	}

	if [ -z "$dir" ]; then
		dir=`pwd`
	fi
	dotlai=$libname.lai
	# Locate the subdirectory that actually contains $libname.lai
	# (libtool may have built it below $dir); 'q' keeps only the first hit.
	dir=$dir`(cd $dir; find -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
	olddir=`pwd`
	__runcmd cd $dir

	lafile=$libname.la
	if [ -f "$lafile" ]; then
		# libtool archive: pull library_names out of the .la file
		eval `cat $lafile|grep "^library_names="`
		libtool=1
	else
		# no libtool metadata: fall back to conventional glob patterns
		library_names="$libname.so* $libname.dll.a"
	fi

	__runcmd install -d $destpath/
	dota=$libname.a
	if [ -f "$dota" -o -n "$require_static" ]; then
		__runcmd install -m 0644 $dota $destpath/
	fi
	if [ -f "$dotlai" -a -n "$libtool" ]; then
		if test -n "$staging_install"
		then
			# stop libtool using the final directory name for libraries
			# in staging:
			__runcmd rm -f $destpath/$libname.la
			__runcmd sed -e 's/^installed=yes$/installed=no/' -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' $dotlai >$destpath/$libname.la
		else
			__runcmd install -m 0644 $dotlai $destpath/$libname.la
		fi
	fi

	# Install every file named by library_names: symlinks are copied as
	# symlinks, regular files are installed; a missing file is fatal only
	# in the libtool case (where the names came from the .la metadata).
	for name in $library_names; do
		files=`eval echo $name`
		for f in $files; do
			if [ ! -e "$f" ]; then
				if [ -n "$libtool" ]; then
					oefatal "oe_libinstall: $dir/$f not found."
				fi
			elif [ -L "$f" ]; then
				__runcmd cp -P "$f" $destpath/
			elif [ ! -L "$f" ]; then
				libfile="$f"
				__runcmd install -m 0755 $libfile $destpath/
			fi
		done
	done

	if [ -z "$libfile" ]; then
		if [ -n "$require_shared" ]; then
			oefatal "oe_libinstall: unable to locate shared library"
		fi
	elif [ -z "$libtool" ]; then
		# special case hack for non-libtool .so.#.#.# links
		baselibfile=`basename "$libfile"`
		if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
			sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
			solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
			if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
				__runcmd ln -sf $baselibfile $destpath/$sonamelink
			fi
			__runcmd ln -sf $baselibfile $destpath/$solink
		fi
	fi

	__runcmd cd "$olddir"
}

oe_machinstall() {
	# Purpose: Install machine dependent files, if available
	#          If not available, check if there is a default
	#          If no default, just touch the destination
	# Example:
	#                $1  $2   $3         $4
	# oe_machinstall -m 0644 fstab ${D}/etc/fstab
	#
	# TODO: Check argument number?
	#
	filename=`basename $3`
	dirname=`dirname $3`

	# Try each OVERRIDES element as a subdirectory of the source dir,
	# first match wins.
	for o in `echo ${OVERRIDES} | tr ':' ' '`; do
		if [ -e $dirname/$o/$filename ]; then
			oenote $dirname/$o/$filename present, installing to $4
			install $1 $2 $dirname/$o/$filename $4
			return
		fi
	done
#	oenote overrides specific file NOT present, trying default=$3...
	if [ -e $3 ]; then
		oenote $3 present, installing to $4
		install $1 $2 $3 $4
	else
		oenote $3 NOT present, touching empty $4
		touch $4
	fi
}

# Debugging task: dump the expanded datastore (variables, shell functions,
# and python functions) to stdout.  Never stamped, so it always re-runs.
addtask showdata
do_showdata[nostamp] = "1"
python do_showdata() {
    import sys
    # emit variables and shell functions
    bb.data.emit_env(sys.__stdout__, d, True)
    # emit the metadata which isnt valid shell
    for e in d.keys():
        if bb.data.getVarFlag(e, 'python', d):
            sys.__stdout__.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1)))
}

# Debugging task: print the name of every variable flagged as a task.
# Never stamped, so it always re-runs.
addtask listtasks
do_listtasks[nostamp] = "1"
python do_listtasks() {
    import sys
    # emit variables and shell functions
    #bb.data.emit_env(sys.__stdout__, d)
    # emit the metadata which isnt valid shell
    for e in d.keys():
        if bb.data.getVarFlag(e, 'task', d):
            sys.__stdout__.write("%s\n" % e)
}

addtask clean
do_clean[dirs] = "${TOPDIR}"
do_clean[nostamp] = "1"
do_clean[bbdepcmd] = ""
python base_do_clean() {
    """clear the build and temp directories"""
    dir = bb.data.expand("${WORKDIR}", d)
    # Sanity check: refuse to remove if WORKDIR expanded to nothing.
    # NOTE(review): the guard tests '//' (two empty path components);
    # presumably matching an empty ${TOPDIR}/${PF}-style expansion -- confirm.
    if dir == '//': raise bb.build.FuncFailed("wrong DATADIR")
    bb.note("removing " + dir)
    os.system('rm -rf ' + dir)

    # Also remove all task stamps for this recipe.
    dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
    bb.note("removing " + dir)
    os.system('rm -f '+ dir)
}

311 | addtask mrproper | ||
312 | do_mrproper[dirs] = "${TOPDIR}" | ||
313 | do_mrproper[nostamp] = "1" | ||
314 | do_mrproper[bbdepcmd] = "" | ||
315 | python base_do_mrproper() { | ||
316 | """clear downloaded sources, build and temp directories""" | ||
317 | dir = bb.data.expand("${DL_DIR}", d) | ||
318 | if dir == '/': bb.build.FuncFailed("wrong DATADIR") | ||
319 | bb.debug(2, "removing " + dir) | ||
320 | os.system('rm -rf ' + dir) | ||
321 | bb.build.exec_task('do_clean', d) | ||
322 | } | ||
323 | |||
# Fetch all SRC_URI entries into DL_DIR.  Never stamped: the fetcher's own
# checks decide whether anything needs downloading.
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[nostamp] = "1"
python base_do_fetch() {
    import sys

    # Work on a copy with all overrides applied.
    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    src_uri = bb.data.getVar('SRC_URI', localdata, 1)
    if not src_uri:
        return 1

    try:
        bb.fetch.init(src_uri.split(),d)
    except bb.fetch.NoMethodError:
        (type, value, traceback) = sys.exc_info()
        raise bb.build.FuncFailed("No method: %s" % value)

    try:
        bb.fetch.go(localdata)
    except bb.fetch.MissingParameterError:
        (type, value, traceback) = sys.exc_info()
        raise bb.build.FuncFailed("Missing parameters: %s" % value)
    except bb.fetch.FetchError:
        (type, value, traceback) = sys.exc_info()
        raise bb.build.FuncFailed("Fetch failed: %s" % value)
}

def oe_unpack_file(file, data, url = None):
    """Unpack (or copy) a single fetched file into the current directory.

    file: local path of the file to unpack.
    data: the BitBake datastore.
    url:  original SRC_URI entry; defaults to file://<file>.
    Returns True on success (or nothing to do), False if the unpack
    command exited non-zero.
    """
    import bb, os
    if not url:
        url = "file://%s" % file
    dots = file.split(".")
    if dots[-1] in ['gz', 'bz2', 'Z']:
        # Target path for a plain compressed (non-tar) file once the
        # compression suffix is stripped.
        efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
    else:
        efile = file
    cmd = None
    # Choose the unpack command by file extension.
    if file.endswith('.tar'):
        cmd = 'tar x --no-same-owner -f %s' % file
    elif file.endswith('.tgz') or file.endswith('.tar.gz'):
        cmd = 'tar xz --no-same-owner -f %s' % file
    elif file.endswith('.tbz') or file.endswith('.tar.bz2'):
        cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
    elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
        cmd = 'gzip -dc %s > %s' % (file, efile)
    elif file.endswith('.bz2'):
        cmd = 'bzip2 -dc %s > %s' % (file, efile)
    elif file.endswith('.zip'):
        cmd = 'unzip -q'
        # The 'dos' URL parameter asks unzip to convert text-file line
        # endings (-a).
        (type, host, path, user, pswd, parm) = bb.decodeurl(url)
        if 'dos' in parm:
            cmd = '%s -a' % cmd
        cmd = '%s %s' % (cmd, file)
    elif os.path.isdir(file):
        # A directory: copy it in, preserving its position relative to
        # FILESDIR when it lives underneath it.
        filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, 1))
        destdir = "."
        if file[0:len(filesdir)] == filesdir:
            destdir = file[len(filesdir):file.rfind('/')]
            destdir = destdir.strip('/')
            if len(destdir) < 1:
                destdir = "."
            elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
                os.makedirs("%s/%s" % (os.getcwd(), destdir))
        cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
    else:
        # Plain file.  Patch files are handled by do_patch, not here.
        (type, host, path, user, pswd, parm) = bb.decodeurl(url)
        if not 'patch' in parm:
            # The "destdir" handling was specifically done for FILESPATH
            # items.  So, only do so for file:// entries.
            if type == "file":
                destdir = bb.decodeurl(url)[1] or "."
            else:
                destdir = "."
            bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
            cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
    if not cmd:
        # Nothing to do for this file (e.g. a patch handled elsewhere).
        return True


    # Avoid copying a file onto itself.
    dest = os.path.join(os.getcwd(), os.path.basename(file))
    if os.path.exists(dest):
        if os.path.samefile(file, dest):
            return True

    # Run the unpack command with the configured PATH.
    cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
    bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
    ret = os.system(cmd)
    return ret == 0

# Unpack every SRC_URI entry into WORKDIR via oe_unpack_file.
addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
python base_do_unpack() {
    import re, os

    # Work on a copy with all overrides applied.
    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    src_uri = bb.data.getVar('SRC_URI', localdata)
    if not src_uri:
        return
    src_uri = bb.data.expand(src_uri, localdata)
    for url in src_uri.split():
        try:
            local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
        except bb.MalformedUrl, e:
            raise FuncFailed('Unable to generate local path for malformed uri: %s' % e)
        # dont need any parameters for extraction, strip them off
        local = re.sub(';.*$', '', local)
        local = os.path.realpath(local)
        ret = oe_unpack_file(local, localdata, url)
        if not ret:
            raise bb.build.FuncFailed()
}

# Apply every SRC_URI entry carrying a ';patch=...' parameter, honouring
# the optional pnum/pname/mindate/maxdate parameters.
addtask patch after do_unpack
do_patch[dirs] = "${WORKDIR}"
python base_do_patch() {
    import re
    import bb.fetch

    src_uri = (bb.data.getVar('SRC_URI', d, 1) or '').split()
    if not src_uri:
        return

    # Optionally run a user-supplied patch-tree clean command first.
    patchcleancmd = bb.data.getVar('PATCHCLEANCMD', d, 1)
    if patchcleancmd:
        bb.data.setVar("do_patchcleancmd", patchcleancmd, d)
        bb.data.setVarFlag("do_patchcleancmd", "func", 1, d)
        bb.build.exec_func("do_patchcleancmd", d)

    workdir = bb.data.getVar('WORKDIR', d, 1)
    for url in src_uri:

        (type, host, path, user, pswd, parm) = bb.decodeurl(url)
        if not "patch" in parm:
            continue

        bb.fetch.init([url],d)
        url = bb.encodeurl((type, host, path, user, pswd, []))
        local = os.path.join('/', bb.fetch.localpath(url, d))

        # did it need to be unpacked?
        dots = os.path.basename(local).split(".")
        if dots[-1] in ['gz', 'bz2', 'Z']:
            unpacked = os.path.join(bb.data.getVar('WORKDIR', d),'.'.join(dots[0:-1]))
        else:
            unpacked = local
        unpacked = bb.data.expand(unpacked, d)

        # Patch-level (-p) for the patch command; defaults to 1.
        if "pnum" in parm:
            pnum = parm["pnum"]
        else:
            pnum = "1"

        # Display name for the patch; defaults to the file name.
        if "pname" in parm:
            pname = parm["pname"]
        else:
            pname = os.path.basename(unpacked)

        # mindate/maxdate restrict the patch to a SRCDATE window.
        # NOTE(review): mindate defaults to the int 0 while srcdate is a
        # string -- the comparisons below rely on Python 2 mixed-type
        # ordering; confirm before porting.
        if "mindate" in parm:
            mindate = parm["mindate"]
        else:
            mindate = 0

        if "maxdate" in parm:
            maxdate = parm["maxdate"]
        else:
            maxdate = "20711226"

        pn = bb.data.getVar('PN', d, 1)
        srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)

        if not srcdate:
            srcdate = bb.data.getVar('SRCDATE', d, 1)

        if srcdate == "now":
            srcdate = bb.data.getVar('DATE', d, 1)

        # Skip patches outside the [mindate, maxdate] window.
        if (maxdate < srcdate) or (mindate > srcdate):
            if (maxdate < srcdate):
                bb.note("Patch '%s' is outdated" % pname)

            if (mindate > srcdate):
                bb.note("Patch '%s' is predated" % pname)

            continue

        # Apply the patch by instantiating PATCHCMD as a shell function.
        bb.note("Applying patch '%s'" % pname)
        bb.data.setVar("do_patchcmd", bb.data.getVar("PATCHCMD", d, 1) % (pnum, pname, unpacked), d)
        bb.data.setVarFlag("do_patchcmd", "func", 1, d)
        bb.data.setVarFlag("do_patchcmd", "dirs", "${WORKDIR} ${S}", d)
        bb.build.exec_func("do_patchcmd", d)
}


520 | |||
# Global event handler: prints progress notes for Pkg*/Task*/Build* events,
# emits the build-configuration banner on BuildStarted, validates required
# variables, and optionally appends messages to an EVENTLOG file.
addhandler base_eventhandler
python base_eventhandler() {
    from bb import note, error, data
    from bb.event import Handled, NotHandled, getName
    import os

    # Map event-name suffixes to human-readable verbs.
    messages = {}
    messages["Completed"] = "completed"
    messages["Succeeded"] = "completed"
    messages["Started"] = "started"
    messages["Failed"] = "failed"

    name = getName(e)
    msg = ""
    if name.startswith("Pkg"):
        msg += "package %s: " % data.getVar("P", e.data, 1)
        msg += messages.get(name[3:]) or name[3:]
    elif name.startswith("Task"):
        msg += "package %s: task %s: " % (data.getVar("PF", e.data, 1), e.task)
        msg += messages.get(name[4:]) or name[4:]
    elif name.startswith("Build"):
        msg += "build %s: " % e.name
        msg += messages.get(name[5:]) or name[5:]
    elif name == "UnsatisfiedDep":
        msg += "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
    note(msg)

    if name.startswith("BuildStarted"):
        # Record bitbake/metadata versions and print the configuration
        # summary banner.
        bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
        path_to_bbfiles = bb.data.getVar( 'BBFILES', e.data, 1 )
        path_to_packages = path_to_bbfiles[:path_to_bbfiles.rindex( "packages" )]
        monotone_revision = "<unknown>"
        try:
            monotone_revision = file( "%s/MT/revision" % path_to_packages ).read().strip()
        except IOError:
            pass
        bb.data.setVar( 'OE_REVISION', monotone_revision, e.data )
        statusvars = ['BB_VERSION', 'OE_REVISION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TARGET_FPU']
        statuslines = ["%-14s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
        statusmsg = "\nOE Build Configuration:\n%s\n" % '\n'.join(statuslines)
        print statusmsg

        # Abort early if essential variables are unset.
        needed_vars = [ "TARGET_ARCH", "TARGET_OS" ]
        pesteruser = []
        for v in needed_vars:
            val = bb.data.getVar(v, e.data, 1)
            if not val or val == 'INVALID':
                pesteruser.append(v)
        if pesteruser:
            bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

    # NOTE(review): 'data' here is the bb.data module, and `in` on a dict
    # tests keys (strings) -- so this condition looks always true and the
    # EVENTLOG block below unreachable.  Presumably "'data' in e.__dict__"
    # was intended; confirm before relying on EVENTLOG.
    if not data in e.__dict__:
        return NotHandled

    log = data.getVar("EVENTLOG", e.data, 1)
    if log:
        logfile = file(log, "a")
        logfile.write("%s\n" % msg)
        logfile.close()

    return NotHandled
}

# Default configure step: a no-op; classes/recipes override as needed.
addtask configure after do_unpack do_patch
do_configure[dirs] = "${S} ${B}"
do_configure[bbdepcmd] = "do_populate_staging"
base_do_configure() {
	:
}

# Default compile step: run make if a makefile exists, otherwise note
# there is nothing to do.
addtask compile after do_configure
do_compile[dirs] = "${S} ${B}"
do_compile[bbdepcmd] = "do_populate_staging"
base_do_compile() {
	if [ -e Makefile -o -e makefile ]; then
		oe_runmake || die "make failed"
	else
		oenote "nothing to compile"
	fi
}

602 | |||
# Default staging step: a no-op; recipes that provide headers/libs override it.
addtask stage after do_compile
base_do_stage () {
	:
}

# Pre-create the staging directory layout, then run the recipe's do_stage.
do_populate_staging[dirs] = "${STAGING_DIR}/${TARGET_SYS}/bin ${STAGING_DIR}/${TARGET_SYS}/lib \
			     ${STAGING_DIR}/${TARGET_SYS}/include \
			     ${STAGING_DIR}/${BUILD_SYS}/bin ${STAGING_DIR}/${BUILD_SYS}/lib \
			     ${STAGING_DIR}/${BUILD_SYS}/include \
			     ${STAGING_DATADIR} \
			     ${S} ${B}"

addtask populate_staging after do_compile

python do_populate_staging () {
    bb.build.exec_func('do_stage', d)
}

# Default install step: a no-op; recipes override to populate ${D}.
addtask install after do_compile
do_install[dirs] = "${S} ${B}"

base_do_install() {
	:
}

# Default package step: a no-op; packaging classes override it.
base_do_package() {
	:
}

# The top-level 'build' task: empty function, exists only to anchor the
# task dependency chain.
addtask build after do_populate_staging
do_build = ""
do_build[func] = "1"

# Functions that update metadata based on files outputted
# during the build process.

# Shared-library provides discovered at packaging time; prepended to RDEPENDS.
SHLIBS = ""
RDEPENDS_prepend = " ${SHLIBS}"

def explode_deps(s):
    """Split a dependency string into a list of dependencies, keeping a
    parenthesised version constraint (e.g. "(>= 1.0)") attached to the
    dependency that precedes it."""
    result = []
    group = []
    in_version = False
    for token in s.split():
        if token.startswith('('):
            # Start collecting a version-constraint group.
            in_version = True
            group = []
        if in_version:
            group.append(token)
            if token.endswith(')'):
                # Constraint complete: fold it into the previous entry.
                in_version = False
                result[-1] += ' ' + ' '.join(group)
        else:
            result.append(token)
    return result

# Merge per-package shared-library (.shlibdeps) and pkg-config (.pcdeps)
# dependency files produced during packaging into each package's RDEPENDS.
python read_shlibdeps () {
    packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
    for pkg in packages:
        rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 0) or bb.data.getVar('RDEPENDS', d, 0) or "")
        shlibsfile = bb.data.expand("${WORKDIR}/install/" + pkg + ".shlibdeps", d)
        if os.access(shlibsfile, os.R_OK):
            fd = file(shlibsfile)
            lines = fd.readlines()
            fd.close()
            for l in lines:
                rdepends.append(l.rstrip())
        pcfile = bb.data.expand("${WORKDIR}/install/" + pkg + ".pcdeps", d)
        if os.access(pcfile, os.R_OK):
            fd = file(pcfile)
            lines = fd.readlines()
            fd.close()
            for l in lines:
                rdepends.append(l.rstrip())
        bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d)
}

# Read key/value metadata written by packaging into ${PN}.package and set
# each pair back into the datastore (values are string-escape encoded).
python read_subpackage_metadata () {
    import re

    # Undo the string-escape encoding applied when the file was written.
    def decode(str):
        import codecs
        c = codecs.getdecoder("string_escape")
        return c(str)[0]

    data_file = bb.data.expand("${WORKDIR}/install/${PN}.package", d)
    if os.access(data_file, os.R_OK):
        f = file(data_file, 'r')
        lines = f.readlines()
        f.close()
        # Lines have the form "KEY: value".
        r = re.compile("([^:]+):\s*(.*)")
        for l in lines:
            m = r.match(l)
            if m:
                bb.data.setVar(m.group(1), decode(m.group(2)), d)
}

# Anonymous parse-time hook: skip recipes incompatible with the current
# host/machine and apply per-recipe SRCDATE / USE_NLS overrides.
python __anonymous () {
    import exceptions
    # COMPATIBLE_HOST is a regex matched against HOST_SYS.
    need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
    if need_host:
        import re
        this_host = bb.data.getVar('HOST_SYS', d, 1)
        if not re.match(need_host, this_host):
            raise bb.parse.SkipPackage("incompatible with host %s" % this_host)

    # COMPATIBLE_MACHINE is a regex matched against MACHINE.
    need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
    if need_machine:
        import re
        this_machine = bb.data.getVar('MACHINE', d, 1)
        if not re.match(need_machine, this_machine):
            raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)

    pn = bb.data.getVar('PN', d, 1)

    # Per-recipe SRCDATE_<pn> overrides the global SRCDATE.
    srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
    if srcdate != None:
        bb.data.setVar('SRCDATE', srcdate, d)

    # Per-recipe USE_NLS_<pn> overrides the global USE_NLS.
    use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
    if use_nls != None:
        bb.data.setVar('USE_NLS', use_nls, d)
}

# Anonymous parse-time hook: if any SRC_URI file comes from a
# machine-specific FILESPATH subdirectory, promote PACKAGE_ARCH to
# MACHINE_ARCH (unless SRC_URI_OVERRIDES_PACKAGE_ARCH is "0").
python () {
    import bb, os
    mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
    old_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
    if (old_arch == mach_arch):
        # Nothing to do
        return
    if (bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1) == '0'):
        return
    # Candidate machine-specific directories, mirroring FILESPATH above.
    paths = []
    for p in [ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ]:
        paths.append(bb.data.expand(os.path.join(p, mach_arch), d))
    for s in bb.data.getVar('SRC_URI', d, 1).split():
        local = bb.data.expand(bb.fetch.localpath(s, d), d)
        for mp in paths:
            if local.startswith(mp):
#               bb.note("overriding PACKAGE_ARCH from %s to %s" % (old_arch, mach_arch))
                bb.data.setVar('PACKAGE_ARCH', mach_arch, d)
                return
}

# Export the base_* implementations as the default do_* task functions,
# overridable by inheriting classes.
EXPORT_FUNCTIONS do_clean do_mrproper do_fetch do_unpack do_configure do_compile do_install do_package do_patch do_populate_pkgs do_stage

# Mirror table consumed by the fetcher: each line maps a source URI prefix
# (left, regex) to an alternative mirror prefix (right).  Declared as a
# non-function variable ([func] = "0") so the body is plain data.
MIRRORS[func] = "0"
MIRRORS () {
${DEBIAN_MIRROR}/main	http://snapshot.debian.net/archive/pool
${DEBIAN_MIRROR}	ftp://ftp.de.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.au.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.cl.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.hr.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.fi.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.hk.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.hu.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.ie.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.it.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.jp.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.no.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.pl.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.ro.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.si.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.es.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.se.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.tr.debian.org/debian/pool
${GNU_MIRROR}	ftp://mirrors.kernel.org/gnu
${GNU_MIRROR}	ftp://ftp.matrix.com.br/pub/gnu
${GNU_MIRROR}	ftp://ftp.cs.ubc.ca/mirror2/gnu
${GNU_MIRROR}	ftp://sunsite.ust.hk/pub/gnu
${GNU_MIRROR}	ftp://ftp.ayamura.org/pub/gnu
ftp://ftp.kernel.org/pub	http://www.kernel.org/pub
ftp://ftp.kernel.org/pub	ftp://ftp.us.kernel.org/pub
ftp://ftp.kernel.org/pub	ftp://ftp.uk.kernel.org/pub
ftp://ftp.kernel.org/pub	ftp://ftp.hk.kernel.org/pub
ftp://ftp.kernel.org/pub	ftp://ftp.au.kernel.org/pub
ftp://ftp.kernel.org/pub	ftp://ftp.jp.kernel.org/pub
ftp://ftp.gnupg.org/gcrypt/	ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
ftp://ftp.gnupg.org/gcrypt/	ftp://ftp.surfnet.nl/pub/security/gnupg/
ftp://ftp.gnupg.org/gcrypt/	http://gulus.USherbrooke.ca/pub/appl/GnuPG/
ftp://ftp.gnutls.org/pub/gnutls	ftp://ftp.gnutls.org/pub/gnutls/
ftp://ftp.gnutls.org/pub/gnutls	ftp://ftp.gnupg.org/gcrypt/gnutls/
ftp://ftp.gnutls.org/pub/gnutls	http://www.mirrors.wiretapped.net/security/network-security/gnutls/
ftp://ftp.gnutls.org/pub/gnutls	ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
ftp://ftp.gnutls.org/pub/gnutls	http://josefsson.org/gnutls/releases/

# Catch-all fallbacks: try the oesources.org archive for anything else.
ftp://.*/.*/	http://www.oesources.org/source/current/
http://.*/.*/	http://www.oesources.org/source/current/
}
