diff options
Diffstat (limited to 'meta/classes')
76 files changed, 5791 insertions, 0 deletions
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass new file mode 100644 index 0000000000..927e3432b7 --- /dev/null +++ b/meta/classes/autotools.bbclass | |||
@@ -0,0 +1,182 @@ | |||
1 | inherit base | ||
2 | |||
def autotools_dep_prepend(d):
    """Return the autotools build-time dependencies for this recipe.

    Returns '' when INHIBIT_AUTOTOOLS_DEPS is set.  Recipes that provide
    the tools themselves (autoconf/automake/libtool variants) are excluded
    so they do not depend on themselves.
    """
    import bb

    if bb.data.getVar('INHIBIT_AUTOTOOLS_DEPS', d, 1):
        return ''

    pn = bb.data.getVar('PN', d, 1)
    deps = ''

    # autoconf/automake must not depend on themselves.
    if pn in ['autoconf-native', 'automake-native']:
        return deps
    deps += 'autoconf-native automake-native '

    # Likewise the libtool recipes must not pull in libtool-native.
    if pn not in ['libtool', 'libtool-native', 'libtool-cross']:
        deps += 'libtool-native '

    return deps + 'gnu-config-native '
20 | |||
# No extra make flags by default; recipes append what they need.
EXTRA_OEMAKE = ""
DEPENDS_prepend = "${@autotools_dep_prepend(d)}"
# "default" tells autotools_do_configure to compute aclocal -I paths
# from the *.m4 files found under ${S}.
acpaths = "default"
# Keep autoreconf from running autopoint by default.
EXTRA_AUTORECONF = "--exclude=autopoint"
25 | |||
def autotools_set_crosscompiling(d):
    """Return extra configure environment marking a cross build.

    Native recipes get nothing; all other recipes are flagged with
    cross_compiling=yes.
    """
    import bb
    if bb.data.inherits_class('native', d):
        return ""
    return " cross_compiling=yes"
31 | |||
32 | # EXTRA_OECONF_append = "${@autotools_set_crosscompiling(d)}" | ||
33 | |||
# Run the package's ./configure with the standard cross-compile and
# installation-directory options.  Extra arguments ("$@") are appended
# after ${EXTRA_OECONF}.  Fails the task if ${S}/configure is missing
# or exits non-zero.
oe_runconf () {
    if [ -x ${S}/configure ] ; then
        cfgcmd="${S}/configure \
        --build=${BUILD_SYS} \
        --host=${HOST_SYS} \
        --target=${TARGET_SYS} \
        --prefix=${prefix} \
        --exec_prefix=${exec_prefix} \
        --bindir=${bindir} \
        --sbindir=${sbindir} \
        --libexecdir=${libexecdir} \
        --datadir=${datadir} \
        --sysconfdir=${sysconfdir} \
        --sharedstatedir=${sharedstatedir} \
        --localstatedir=${localstatedir} \
        --libdir=${libdir} \
        --includedir=${includedir} \
        --oldincludedir=${oldincludedir} \
        --infodir=${infodir} \
        --mandir=${mandir} \
        ${EXTRA_OECONF} \
        $@"
        oenote "Running $cfgcmd..."
        $cfgcmd || oefatal "oe_runconf failed"
    else
        oefatal "no configure script found"
    fi
}
62 | |||
# Regenerate the autotools build system (aclocal/autoconf/automake via
# autoreconf) and then run configure.  autoconf* and automake* recipes
# are skipped to avoid bootstrap cycles.
autotools_do_configure() {
    case ${PN} in
    autoconf*)
        ;;
    automake*)
        ;;
    *)
        # WARNING: gross hack follows:
        # An autotools built package generally needs these scripts, however only
        # automake or libtoolize actually install the current versions of them.
        # This is a problem in builds that do not use libtool or automake, in the case
        # where we -need- the latest version of these scripts.  e.g. running a build
        # for a package whose autotools are old, on an x86_64 machine, which the old
        # config.sub does not support.  Work around this by installing them manually
        # regardless.
        # Remove stale configure scripts so they are regenerated.
        ( for ac in `find ${S} -name configure.in -o -name configure.ac`; do
                rm -f `dirname $ac`/configure
                done )
        if [ -e ${S}/configure.in -o -e ${S}/configure.ac ]; then
            olddir=`pwd`
            cd ${S}
            # acpaths = "default": collect every directory (up to two
            # levels deep) containing *.m4 files, except aclocal.m4 and
            # acinclude.m4, as aclocal -I paths.
            if [ x"${acpaths}" = xdefault ]; then
                acpaths=
                for i in `find ${S} -maxdepth 2 -name \*.m4|grep -v 'aclocal.m4'| \
                    grep -v 'acinclude.m4' | sed -e 's,\(.*/\).*$,\1,'|sort -u`; do
                    acpaths="$acpaths -I $i"
                done
            else
                acpaths="${acpaths}"
            fi
            # automake major.minor (e.g. "1.9") for the staged
            # aclocal-$AUTOV macro directory.
            AUTOV=`automake --version |head -n 1 |sed "s/.* //;s/\.[0-9]\+$//"`
            automake --version
            echo "AUTOV is $AUTOV"
            install -d ${STAGING_DIR}/${HOST_SYS}/share/aclocal
            install -d ${STAGING_DIR}/${HOST_SYS}/share/aclocal-$AUTOV
            acpaths="$acpaths -I ${STAGING_DIR}/${HOST_SYS}/share/aclocal-$AUTOV -I ${STAGING_DIR}/${HOST_SYS}/share/aclocal"
            # autoreconf is too shy to overwrite aclocal.m4 if it doesn't look
            # like it was auto-generated.  Work around this by blowing it away
            # by hand, unless the package specifically asked not to run aclocal.
            if ! echo ${EXTRA_AUTORECONF} | grep -q "aclocal"; then
                rm -f aclocal.m4
            fi
            if [ -e configure.in ]; then
                CONFIGURE_AC=configure.in
            else
                CONFIGURE_AC=configure.ac
            fi
            # glib-style gettext needs glib-gettextize instead of autopoint.
            if grep "^AM_GLIB_GNU_GETTEXT" $CONFIGURE_AC >/dev/null; then
                if grep "sed.*POTFILES" $CONFIGURE_AC >/dev/null; then
                    : do nothing -- we still have an old unmodified configure.ac
                else
                    oenote Executing glib-gettextize --force --copy
                    echo "no" | glib-gettextize --force --copy
                fi
            fi
            if grep "^AC_PROG_INTLTOOL" $CONFIGURE_AC >/dev/null; then
                oenote Executing intltoolize --copy --force --automake
                intltoolize --copy --force --automake
            fi
            oenote Executing autoreconf --verbose --install --force ${EXTRA_AUTORECONF} $acpaths
            mkdir -p m4
            autoreconf -Wcross --verbose --install --force ${EXTRA_AUTORECONF} $acpaths || oefatal "autoreconf execution failed."
            cd $olddir
        fi
        ;;
    esac
    if [ -e ${S}/configure ]; then
        oe_runconf
    else
        oenote "nothing to configure"
    fi
}
135 | |||
# Default install for autotools packages: "make DESTDIR=${D} install".
autotools_do_install() {
    oe_runmake 'DESTDIR=${D}' install
}
139 | |||
140 | STAGE_TEMP="${WORKDIR}/temp-staging" | ||
141 | |||
# Stage the package's headers into ${STAGING_INCDIR}: install into a
# throwaway DESTDIR, copy only ${includedir} across, then clean up.
# Set INHIBIT_AUTO_STAGE_INCLUDES = "1" to skip.
autotools_stage_includes() {
    if [ "${INHIBIT_AUTO_STAGE_INCLUDES}" != "1" ]
    then
        rm -rf ${STAGE_TEMP}
        mkdir -p ${STAGE_TEMP}
        # Use oe_runmake (not bare make) so EXTRA_OEMAKE is honoured,
        # matching autotools_stage_all below.
        oe_runmake DESTDIR="${STAGE_TEMP}" install
        cp -pPR ${STAGE_TEMP}/${includedir}/* ${STAGING_INCDIR}
        rm -rf ${STAGE_TEMP}
    fi
}
152 | |||
# Stage headers, libraries and aclocal macros into the staging area.
# Installs into a scratch DESTDIR first so only the relevant pieces
# get copied.  Set INHIBIT_AUTO_STAGE = "1" to skip.
autotools_stage_all() {
    if [ "${INHIBIT_AUTO_STAGE}" = "1" ]
    then
        return
    fi
    rm -rf ${STAGE_TEMP}
    mkdir -p ${STAGE_TEMP}
    oe_runmake DESTDIR="${STAGE_TEMP}" install
    if [ -d ${STAGE_TEMP}/${includedir} ]; then
        cp -fpPR ${STAGE_TEMP}/${includedir}/* ${STAGING_INCDIR}
    fi
    if [ -d ${STAGE_TEMP}/${libdir} ]
    then
        for i in ${STAGE_TEMP}/${libdir}/*.la
        do
            if [ ! -f "$i" ]; then
                # No libtool archives at all: fall back to a raw copy.
                cp -fpPR ${STAGE_TEMP}/${libdir}/* ${STAGING_LIBDIR}
                break
            fi
            # Libtool-built: let oe_libinstall rewrite the .la and
            # install the right files/symlinks.
            oe_libinstall -so $(basename $i .la) ${STAGING_LIBDIR}
        done
    fi
    if [ -d ${STAGE_TEMP}/${datadir}/aclocal ]; then
        install -d ${STAGING_DATADIR}/aclocal
        cp -fpPR ${STAGE_TEMP}/${datadir}/aclocal/* ${STAGING_DATADIR}/aclocal
    fi
    rm -rf ${STAGE_TEMP}
}
181 | |||
182 | EXPORT_FUNCTIONS do_configure do_install | ||
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass new file mode 100644 index 0000000000..8467ebddc2 --- /dev/null +++ b/meta/classes/base.bbclass | |||
@@ -0,0 +1,793 @@ | |||
1 | PATCHES_DIR="${S}" | ||
2 | |||
def base_dep_prepend(d):
    # Compute the implicit DEPENDS every recipe gets: the patch tool
    # (when PATCH_DEPENDS is set and not provided by this recipe) and,
    # for cross builds, the cross toolchain and libc.
    import bb;
    #
    # Ideally this will check a flag so we will operate properly in
    # the case where host == build == target, for now we don't work in
    # that case though.
    #
    deps = ""

    # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command.  Whether or not
    # we need that built is the responsibility of the patch function / class, not
    # the application.
    patchdeps = bb.data.getVar("PATCH_DEPENDS", d, 1)
    if patchdeps and not patchdeps in bb.data.getVar("PROVIDES", d, 1):
        deps = patchdeps

    if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
        # HOST_SYS != BUILD_SYS means a cross build.
        if (bb.data.getVar('HOST_SYS', d, 1) !=
            bb.data.getVar('BUILD_SYS', d, 1)):
            deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
    return deps
24 | |||
def base_read_file(filename):
    """Return the stripped contents of *filename*, or "" if unreadable.

    Errors are deliberately swallowed because of the new RDEPENDS
    handling -- callers treat a missing file as empty.  This is a bit
    ugly. :M:
    """
    try:
        f = open(filename, "r")
    except IOError:
        return ""
    try:
        return f.read().strip()
    finally:
        # Always release the file handle (the original leaked it).
        f.close()
34 | |||
def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
    """Return truevalue if ${variable} equals checkvalue, else falsevalue."""
    import bb
    current = bb.data.getVar(variable, d, 1)
    if current != checkvalue:
        return falsevalue
    return truevalue
41 | |||
42 | DEPENDS_prepend="${@base_dep_prepend(d)} " | ||
43 | |||
def base_set_filespath(path, d):
    """Build FILESPATH by crossing *path* with each OVERRIDES element.

    The trailing ":" appended to OVERRIDES produces an empty override,
    so each base directory itself is searched last.  Sets FILESPATH on
    *d* and returns the joined value so the function is usable directly
    inside a ${@...} expansion (the FILESPATH assignment below relies
    on this).
    """
    import os, bb
    filespath = []
    # OVERRIDES does not change across the loop; fetch it once.
    overrides = (bb.data.getVar("OVERRIDES", d, 1) or "") + ":"
    for p in path:
        for o in overrides.split(":"):
            filespath.append(os.path.join(p, o))
    joined = ":".join(filespath)
    bb.data.setVar("FILESPATH", joined, d)
    return joined
53 | |||
54 | FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}" | ||
55 | |||
def oe_filter(f, str, d):
    """Return the whitespace-separated words of *str* matching regex *f*."""
    from re import match
    kept = [word for word in str.split() if match(f, word, 0)]
    return " ".join(kept)
59 | |||
def oe_filter_out(f, str, d):
    """Return the whitespace-separated words of *str* NOT matching regex *f*."""
    from re import match
    kept = [word for word in str.split() if not match(f, word, 0)]
    return " ".join(kept)
63 | |||
# Abort the current task with a fatal error message.
die() {
    oefatal "$*"
}
67 | |||
# Print an informational NOTE: message.
oenote() {
    echo "NOTE:" "$*"
}
71 | |||
# Print a WARNING: message (build continues).
oewarn() {
    echo "WARNING:" "$*"
}
75 | |||
# Print a FATAL: message and terminate the task with failure.
oefatal() {
    echo "FATAL:" "$*"
    exit 1
}
80 | |||
# Print a DEBUG: message when ${OEDEBUG} is at least the given level.
# Usage: oedebug level "message"
oedebug() {
    test $# -ge 2 || {
        echo "Usage: oedebug level \"message\""
        exit 1
    }

    test ${OEDEBUG:-0} -ge $1 && {
        shift
        echo "DEBUG:" $*
    }
}
92 | |||
# Run ${MAKE} (default: make) with ${EXTRA_OEMAKE} and the given
# arguments; dies if make fails.
oe_runmake() {
    if [ x"$MAKE" = x ]; then MAKE=make; fi
    oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
    ${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
}
98 | |||
oe_soinstall() {
    # Purpose: Install shared library file and
    #          create the necessary links
    # Example:
    #
    #     oe_soinstall libfoo.so.1.2.3 ${D}${libdir}
    #
    libname=`basename $1`
    install -m 755 $1 $2/$libname
    # Derive the SONAME link from the ELF dynamic section and the
    # plain .so link from the filename.
    sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
    solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
    ln -sf $libname $2/$sonamelink
    ln -sf $libname $2/$solink
}
115 | |||
oe_libinstall() {
    # Purpose: Install a library, in all its forms
    # Example
    #
    # oe_libinstall libltdl ${STAGING_LIBDIR}/
    # oe_libinstall -C src/libblah libblah ${D}/${libdir}/
    #
    # Options:
    #   -C dir  look for the library under dir
    #   -s      silent (do not echo the commands run)
    #   -a      install the static archive ($libname.a) unconditionally
    #   -so     fail unless a shared library is found
    dir=""
    libtool=""
    silent=""
    require_static=""
    require_shared=""
    staging_install=""
    while [ "$#" -gt 0 ]; do
        case "$1" in
        -C)
            shift
            dir="$1"
            ;;
        -s)
            silent=1
            ;;
        -a)
            require_static=1
            ;;
        -so)
            require_shared=1
            ;;
        -*)
            oefatal "oe_libinstall: unknown option: $1"
            ;;
        *)
            break;
            ;;
        esac
        shift
    done

    libname="$1"
    shift
    destpath="$1"
    if [ -z "$destpath" ]; then
        oefatal "oe_libinstall: no destination path specified"
    fi
    # Installing into staging needs the .la rewritten (see below).
    if echo "$destpath/" | egrep '^${STAGING_LIBDIR}/' >/dev/null
    then
        staging_install=1
    fi

    # Echo (unless -s) and then run a command.
    __runcmd () {
        if [ -z "$silent" ]; then
            echo >&2 "oe_libinstall: $*"
        fi
        $*
    }

    if [ -z "$dir" ]; then
        dir=`pwd`
    fi
    dotlai=$libname.lai
    # Find the directory that actually holds $libname.lai (libtool
    # keeps the installable archive under .libs as .lai).
    dir=$dir`(cd $dir; find -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
    olddir=`pwd`
    __runcmd cd $dir

    lafile=$libname.la
    if [ -f "$lafile" ]; then
        # libtool archive
        eval `cat $lafile|grep "^library_names="`
        libtool=1
    else
        library_names="$libname.so* $libname.dll.a"
    fi

    __runcmd install -d $destpath/
    dota=$libname.a
    if [ -f "$dota" -o -n "$require_static" ]; then
        __runcmd install -m 0644 $dota $destpath/
    fi
    if [ -f "$dotlai" -a -n "$libtool" ]; then
        if test -n "$staging_install"
        then
            # stop libtool using the final directory name for libraries
            # in staging:
            __runcmd rm -f $destpath/$libname.la
            __runcmd sed -e 's/^installed=yes$/installed=no/' -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' $dotlai >$destpath/$libname.la
        else
            __runcmd install -m 0644 $dotlai $destpath/$libname.la
        fi
    fi

    # Copy every real file and symlink making up the library.
    for name in $library_names; do
        files=`eval echo $name`
        for f in $files; do
            if [ ! -e "$f" ]; then
                if [ -n "$libtool" ]; then
                    oefatal "oe_libinstall: $dir/$f not found."
                fi
            elif [ -L "$f" ]; then
                __runcmd cp -P "$f" $destpath/
            elif [ ! -L "$f" ]; then
                libfile="$f"
                __runcmd install -m 0755 $libfile $destpath/
            fi
        done
    done

    if [ -z "$libfile" ]; then
        if [ -n "$require_shared" ]; then
            oefatal "oe_libinstall: unable to locate shared library"
        fi
    elif [ -z "$libtool" ]; then
        # special case hack for non-libtool .so.#.#.# links
        baselibfile=`basename "$libfile"`
        if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
            sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
            solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
            if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
                __runcmd ln -sf $baselibfile $destpath/$sonamelink
            fi
            __runcmd ln -sf $baselibfile $destpath/$solink
        fi
    fi

    __runcmd cd "$olddir"
}
240 | |||
oe_machinstall() {
    # Purpose: Install machine dependent files, if available
    #          If not available, check if there is a default
    #          If no default, just touch the destination
    # Example:
    #                $1  $2   $3         $4
    # oe_machinstall -m 0644 fstab ${D}/etc/fstab
    #
    # TODO: Check argument number?
    #
    filename=`basename $3`
    dirname=`dirname $3`

    # Try each OVERRIDES element as a subdirectory, most specific first.
    for o in `echo ${OVERRIDES} | tr ':' ' '`; do
        if [ -e $dirname/$o/$filename ]; then
            oenote $dirname/$o/$filename present, installing to $4
            install $1 $2 $dirname/$o/$filename $4
            return
        fi
    done
#    oenote overrides specific file NOT present, trying default=$3...
    if [ -e $3 ]; then
        oenote $3 present, installing to $4
        install $1 $2 $3 $4
    else
        oenote $3 NOT present, touching empty $4
        touch $4
    fi
}
270 | |||
271 | addtask showdata | ||
272 | do_showdata[nostamp] = "1" | ||
python do_showdata() {
    # Debug task: dump the whole datastore (variables, shell functions,
    # and python functions) to stdout.
    import sys
    # emit variables and shell functions
    bb.data.emit_env(sys.__stdout__, d, True)
    # emit the metadata which isnt valid shell
    for e in d.keys():
        if bb.data.getVarFlag(e, 'python', d):
            sys.__stdout__.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1)))
}
282 | |||
283 | addtask listtasks | ||
284 | do_listtasks[nostamp] = "1" | ||
python do_listtasks() {
    # Debug task: print the name of every variable flagged as a task.
    import sys
    # emit variables and shell functions
    #bb.data.emit_env(sys.__stdout__, d)
    # emit the metadata which isnt valid shell
    for e in d.keys():
        if bb.data.getVarFlag(e, 'task', d):
            sys.__stdout__.write("%s\n" % e)
}
294 | |||
295 | addtask clean | ||
296 | do_clean[dirs] = "${TOPDIR}" | ||
297 | do_clean[nostamp] = "1" | ||
298 | do_clean[bbdepcmd] = "" | ||
python base_do_clean() {
    """clear the build and temp directories"""
    dir = bb.data.expand("${WORKDIR}", d)
    # Guard against WORKDIR expanding to nothing and wiping "/".
    if dir == '//': raise bb.build.FuncFailed("wrong DATADIR")
    bb.note("removing " + dir)
    os.system('rm -rf ' + dir)

    # Also remove all task stamps for this recipe.
    dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
    bb.note("removing " + dir)
    os.system('rm -f '+ dir)
}
310 | |||
311 | addtask mrproper | ||
312 | do_mrproper[dirs] = "${TOPDIR}" | ||
313 | do_mrproper[nostamp] = "1" | ||
314 | do_mrproper[bbdepcmd] = "" | ||
315 | python base_do_mrproper() { | ||
316 | """clear downloaded sources, build and temp directories""" | ||
317 | dir = bb.data.expand("${DL_DIR}", d) | ||
318 | if dir == '/': bb.build.FuncFailed("wrong DATADIR") | ||
319 | bb.debug(2, "removing " + dir) | ||
320 | os.system('rm -rf ' + dir) | ||
321 | bb.build.exec_task('do_clean', d) | ||
322 | } | ||
323 | |||
324 | addtask fetch | ||
325 | do_fetch[dirs] = "${DL_DIR}" | ||
326 | do_fetch[nostamp] = "1" | ||
python base_do_fetch() {
    # Fetch every SRC_URI entry, translating fetcher exceptions into
    # build failures with readable messages.
    import sys

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    src_uri = bb.data.getVar('SRC_URI', localdata, 1)
    if not src_uri:
        return 1

    try:
        bb.fetch.init(src_uri.split(),d)
    except bb.fetch.NoMethodError:
        (type, value, traceback) = sys.exc_info()
        raise bb.build.FuncFailed("No method: %s" % value)

    try:
        bb.fetch.go(localdata)
    except bb.fetch.MissingParameterError:
        (type, value, traceback) = sys.exc_info()
        raise bb.build.FuncFailed("Missing parameters: %s" % value)
    except bb.fetch.FetchError:
        (type, value, traceback) = sys.exc_info()
        raise bb.build.FuncFailed("Fetch failed: %s" % value)
}
352 | |||
def oe_unpack_file(file, data, url = None):
    # Unpack (or copy) a fetched file into the current directory.
    # Returns True on success or no-op, False when the command fails.
    import bb, os
    if not url:
        url = "file://%s" % file
    dots = file.split(".")
    if dots[-1] in ['gz', 'bz2', 'Z']:
        # Compressed single file: decompress into WORKDIR.
        efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
    else:
        efile = file
    cmd = None
    # Pick an unpack command based on the file extension.
    if file.endswith('.tar'):
        cmd = 'tar x --no-same-owner -f %s' % file
    elif file.endswith('.tgz') or file.endswith('.tar.gz'):
        cmd = 'tar xz --no-same-owner -f %s' % file
    elif file.endswith('.tbz') or file.endswith('.tar.bz2'):
        cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
    elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
        cmd = 'gzip -dc %s > %s' % (file, efile)
    elif file.endswith('.bz2'):
        cmd = 'bzip2 -dc %s > %s' % (file, efile)
    elif file.endswith('.zip'):
        cmd = 'unzip -q'
        (type, host, path, user, pswd, parm) = bb.decodeurl(url)
        if 'dos' in parm:
            # ;dos URL parameter: convert DOS text files while unzipping.
            cmd = '%s -a' % cmd
        cmd = '%s %s' % (cmd, file)
    elif os.path.isdir(file):
        # A directory from FILESPATH: copy it, preserving its position
        # relative to FILESDIR.
        filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, 1))
        destdir = "."
        if file[0:len(filesdir)] == filesdir:
            destdir = file[len(filesdir):file.rfind('/')]
            destdir = destdir.strip('/')
            if len(destdir) < 1:
                destdir = "."
            elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
                os.makedirs("%s/%s" % (os.getcwd(), destdir))
        cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
    else:
        (type, host, path, user, pswd, parm) = bb.decodeurl(url)
        # Patches are applied from their download location, not copied.
        if not 'patch' in parm:
            # The "destdir" handling was specifically done for FILESPATH
            # items.  So, only do so for file:// entries.
            if type == "file":
                destdir = bb.decodeurl(url)[1] or "."
            else:
                destdir = "."
            bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
            cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
    if not cmd:
        return True


    # Avoid copying a file onto itself.
    dest = os.path.join(os.getcwd(), os.path.basename(file))
    if os.path.exists(dest):
        if os.path.samefile(file, dest):
            return True

    cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
    bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
    ret = os.system(cmd)
    return ret == 0
414 | |||
415 | addtask unpack after do_fetch | ||
416 | do_unpack[dirs] = "${WORKDIR}" | ||
python base_do_unpack() {
    # Unpack every SRC_URI entry into WORKDIR via oe_unpack_file.
    import re, os

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    src_uri = bb.data.getVar('SRC_URI', localdata)
    if not src_uri:
        return
    src_uri = bb.data.expand(src_uri, localdata)
    for url in src_uri.split():
        try:
            local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
        except bb.MalformedUrl, e:
            raise FuncFailed('Unable to generate local path for malformed uri: %s' % e)
        # dont need any parameters for extraction, strip them off
        local = re.sub(';.*$', '', local)
        local = os.path.realpath(local)
        ret = oe_unpack_file(local, localdata, url)
        if not ret:
            raise bb.build.FuncFailed()
}
439 | |||
440 | addtask patch after do_unpack | ||
441 | do_patch[dirs] = "${WORKDIR}" | ||
python base_do_patch() {
    # Apply every SRC_URI entry carrying a ;patch=1 parameter, honouring
    # the optional pnum/pname/mindate/maxdate URL parameters.
    import re
    import bb.fetch

    src_uri = (bb.data.getVar('SRC_URI', d, 1) or '').split()
    if not src_uri:
        return

    # Let the recipe clean the tree first via PATCHCLEANCMD.
    patchcleancmd = bb.data.getVar('PATCHCLEANCMD', d, 1)
    if patchcleancmd:
        bb.data.setVar("do_patchcleancmd", patchcleancmd, d)
        bb.data.setVarFlag("do_patchcleancmd", "func", 1, d)
        bb.build.exec_func("do_patchcleancmd", d)

    workdir = bb.data.getVar('WORKDIR', d, 1)
    for url in src_uri:

        (type, host, path, user, pswd, parm) = bb.decodeurl(url)
        if not "patch" in parm:
            continue

        bb.fetch.init([url],d)
        url = bb.encodeurl((type, host, path, user, pswd, []))
        local = os.path.join('/', bb.fetch.localpath(url, d))

        # did it need to be unpacked?
        dots = os.path.basename(local).split(".")
        if dots[-1] in ['gz', 'bz2', 'Z']:
            unpacked = os.path.join(bb.data.getVar('WORKDIR', d),'.'.join(dots[0:-1]))
        else:
            unpacked = local
        unpacked = bb.data.expand(unpacked, d)

        # -p strip level for patch(1); defaults to 1.
        if "pnum" in parm:
            pnum = parm["pnum"]
        else:
            pnum = "1"

        if "pname" in parm:
            pname = parm["pname"]
        else:
            pname = os.path.basename(unpacked)

        if "mindate" in parm:
            mindate = parm["mindate"]
        else:
            mindate = 0

        if "maxdate" in parm:
            maxdate = parm["maxdate"]
        else:
            maxdate = "20711226"

        pn = bb.data.getVar('PN', d, 1)
        srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)

        if not srcdate:
            srcdate = bb.data.getVar('SRCDATE', d, 1)

        if srcdate == "now":
            srcdate = bb.data.getVar('DATE', d, 1)

        # Skip patches whose [mindate, maxdate] window excludes SRCDATE.
        if (maxdate < srcdate) or (mindate > srcdate):
            if (maxdate < srcdate):
                bb.note("Patch '%s' is outdated" % pname)

            if (mindate > srcdate):
                bb.note("Patch '%s' is predated" % pname)

            continue

        bb.note("Applying patch '%s'" % pname)
        # PATCHCMD is a template taking (strip level, patch name, file).
        bb.data.setVar("do_patchcmd", bb.data.getVar("PATCHCMD", d, 1) % (pnum, pname, unpacked), d)
        bb.data.setVarFlag("do_patchcmd", "func", 1, d)
        bb.data.setVarFlag("do_patchcmd", "dirs", "${WORKDIR} ${S}", d)
        bb.build.exec_func("do_patchcmd", d)
}
519 | |||
520 | |||
521 | addhandler base_eventhandler | ||
python base_eventhandler() {
    # Log build/task/package events, print the configuration banner on
    # BuildStarted, and append event messages to EVENTLOG when set.
    from bb import note, error, data
    from bb.event import Handled, NotHandled, getName
    import os

    messages = {}
    messages["Completed"] = "completed"
    messages["Succeeded"] = "completed"
    messages["Started"] = "started"
    messages["Failed"] = "failed"

    # Build a human-readable message from the event class name.
    name = getName(e)
    msg = ""
    if name.startswith("Pkg"):
        msg += "package %s: " % data.getVar("P", e.data, 1)
        msg += messages.get(name[3:]) or name[3:]
    elif name.startswith("Task"):
        msg += "package %s: task %s: " % (data.getVar("PF", e.data, 1), e.task)
        msg += messages.get(name[4:]) or name[4:]
    elif name.startswith("Build"):
        msg += "build %s: " % e.name
        msg += messages.get(name[5:]) or name[5:]
    elif name == "UnsatisfiedDep":
        msg += "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
    note(msg)

    if name.startswith("BuildStarted"):
        bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
        # Derive the checkout root from BBFILES to read the monotone revision.
        path_to_bbfiles = bb.data.getVar( 'BBFILES', e.data, 1 )
        path_to_packages = path_to_bbfiles[:path_to_bbfiles.rindex( "packages" )]
        monotone_revision = "<unknown>"
        try:
            monotone_revision = file( "%s/MT/revision" % path_to_packages ).read().strip()
        except IOError:
            pass
        bb.data.setVar( 'OE_REVISION', monotone_revision, e.data )
        statusvars = ['BB_VERSION', 'OE_REVISION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TARGET_FPU']
        statuslines = ["%-14s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
        statusmsg = "\nOE Build Configuration:\n%s\n" % '\n'.join(statuslines)
        print statusmsg

        # Refuse to build if essential target variables are unset.
        needed_vars = [ "TARGET_ARCH", "TARGET_OS" ]
        pesteruser = []
        for v in needed_vars:
            val = bb.data.getVar(v, e.data, 1)
            if not val or val == 'INVALID':
                pesteruser.append(v)
        if pesteruser:
            bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

    # NOTE(review): this tests the bb.data *module object* against
    # e.__dict__ -- it reads like it meant to check for an event data
    # attribute; confirm the intent before changing.
    if not data in e.__dict__:
        return NotHandled

    log = data.getVar("EVENTLOG", e.data, 1)
    if log:
        logfile = file(log, "a")
        logfile.write("%s\n" % msg)
        logfile.close()

    return NotHandled
}
583 | |||
584 | addtask configure after do_unpack do_patch | ||
585 | do_configure[dirs] = "${S} ${B}" | ||
586 | do_configure[bbdepcmd] = "do_populate_staging" | ||
# Default configure: no-op; classes such as autotools.bbclass override it.
base_do_configure() {
    :
}
590 | |||
591 | addtask compile after do_configure | ||
592 | do_compile[dirs] = "${S} ${B}" | ||
593 | do_compile[bbdepcmd] = "do_populate_staging" | ||
# Default compile: run make when a Makefile is present, else do nothing.
base_do_compile() {
    if [ -e Makefile -o -e makefile ]; then
        oe_runmake || die "make failed"
    else
        oenote "nothing to compile"
    fi
}
601 | |||
602 | |||
603 | addtask stage after do_compile | ||
# Default stage: no-op; recipes that need staging override do_stage.
base_do_stage () {
    :
}
607 | |||
608 | do_populate_staging[dirs] = "${STAGING_DIR}/${TARGET_SYS}/bin ${STAGING_DIR}/${TARGET_SYS}/lib \ | ||
609 | ${STAGING_DIR}/${TARGET_SYS}/include \ | ||
610 | ${STAGING_DIR}/${BUILD_SYS}/bin ${STAGING_DIR}/${BUILD_SYS}/lib \ | ||
611 | ${STAGING_DIR}/${BUILD_SYS}/include \ | ||
612 | ${STAGING_DATADIR} \ | ||
613 | ${S} ${B}" | ||
614 | |||
615 | addtask populate_staging after do_compile | ||
616 | |||
# Delegate staging to the (possibly overridden) do_stage function.
python do_populate_staging () {
    bb.build.exec_func('do_stage', d)
}
620 | |||
621 | addtask install after do_compile | ||
622 | do_install[dirs] = "${S} ${B}" | ||
623 | |||
# Default install: no-op; recipes provide their own do_install.
base_do_install() {
    :
}
627 | |||
# Default package: no-op; packaging classes override do_package.
base_do_package() {
    :
}
631 | |||
632 | addtask build after do_populate_staging | ||
633 | do_build = "" | ||
634 | do_build[func] = "1" | ||
635 | |||
636 | # Functions that update metadata based on files outputted | ||
637 | # during the build process. | ||
638 | |||
639 | SHLIBS = "" | ||
640 | RDEPENDS_prepend = " ${SHLIBS}" | ||
641 | |||
def explode_deps(s):
    """Split a dependency string into a list, keeping parenthesised
    version constraints attached to the package before them.

    e.g. "a b (>= 1.0) c" -> ["a", "b (>= 1.0)", "c"]
    """
    exploded = []
    pending = None
    for token in s.split():
        if token.startswith('('):
            # Start collecting a version-constraint group.
            pending = []
        if pending is not None:
            pending.append(token)
            if token.endswith(')'):
                # Close the group onto the preceding dependency.
                exploded[-1] += ' ' + ' '.join(pending)
                pending = None
        else:
            exploded.append(token)
    return exploded
658 | |||
659 | python read_shlibdeps () { | ||
660 | packages = (bb.data.getVar('PACKAGES', d, 1) or "").split() | ||
661 | for pkg in packages: | ||
662 | rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 0) or bb.data.getVar('RDEPENDS', d, 0) or "") | ||
663 | shlibsfile = bb.data.expand("${WORKDIR}/install/" + pkg + ".shlibdeps", d) | ||
664 | if os.access(shlibsfile, os.R_OK): | ||
665 | fd = file(shlibsfile) | ||
666 | lines = fd.readlines() | ||
667 | fd.close() | ||
668 | for l in lines: | ||
669 | rdepends.append(l.rstrip()) | ||
670 | pcfile = bb.data.expand("${WORKDIR}/install/" + pkg + ".pcdeps", d) | ||
671 | if os.access(pcfile, os.R_OK): | ||
672 | fd = file(pcfile) | ||
673 | lines = fd.readlines() | ||
674 | fd.close() | ||
675 | for l in lines: | ||
676 | rdepends.append(l.rstrip()) | ||
677 | bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d) | ||
678 | } | ||
679 | |||
python read_subpackage_metadata () {
    # Re-import variables recorded at packaging time from
    # ${WORKDIR}/install/${PN}.package ("Key: value" lines).
    import re

    def decode(str):
        # Values were stored string-escaped; undo that.
        import codecs
        c = codecs.getdecoder("string_escape")
        return c(str)[0]

    data_file = bb.data.expand("${WORKDIR}/install/${PN}.package", d)
    if os.access(data_file, os.R_OK):
        f = file(data_file, 'r')
        lines = f.readlines()
        f.close()
        r = re.compile("([^:]+):\s*(.*)")
        for l in lines:
            m = r.match(l)
            if m:
                bb.data.setVar(m.group(1), decode(m.group(2)), d)
}
699 | |||
python __anonymous () {
    # Parse-time checks: skip recipes whose COMPATIBLE_HOST /
    # COMPATIBLE_MACHINE regex does not match this configuration, and
    # apply per-recipe SRCDATE_<pn> / USE_NLS_<pn> overrides.
    import exceptions
    need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
    if need_host:
        import re
        this_host = bb.data.getVar('HOST_SYS', d, 1)
        if not re.match(need_host, this_host):
            raise bb.parse.SkipPackage("incompatible with host %s" % this_host)

    need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
    if need_machine:
        import re
        this_machine = bb.data.getVar('MACHINE', d, 1)
        if not re.match(need_machine, this_machine):
            raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)

    pn = bb.data.getVar('PN', d, 1)

    srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
    if srcdate != None:
        bb.data.setVar('SRCDATE', srcdate, d)

    use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
    if use_nls != None:
        bb.data.setVar('USE_NLS', use_nls, d)
}
726 | |||
python () {
    # If any SRC_URI file is fetched from a machine-specific overrides
    # directory, promote PACKAGE_ARCH to MACHINE_ARCH (can be disabled by
    # setting SRC_URI_OVERRIDES_PACKAGE_ARCH = "0").
    import bb, os
    mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
    old_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
    if (old_arch == mach_arch):
        # Nothing to do
        return
    if (bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1) == '0'):
        return
    # Candidate machine-specific file locations, most specific first.
    paths = []
    for p in [ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ]:
        paths.append(bb.data.expand(os.path.join(p, mach_arch), d))
    for s in bb.data.getVar('SRC_URI', d, 1).split():
        local = bb.data.expand(bb.fetch.localpath(s, d), d)
        for mp in paths:
            if local.startswith(mp):
                # bb.note("overriding PACKAGE_ARCH from %s to %s" % (old_arch, mach_arch))
                bb.data.setVar('PACKAGE_ARCH', mach_arch, d)
                return
}
747 | |||
# Make these class-level task implementations the default do_* tasks of
# inheriting recipes.
EXPORT_FUNCTIONS do_clean do_mrproper do_fetch do_unpack do_configure do_compile do_install do_package do_patch do_populate_pkgs do_stage

# Fallback mirror map: one "source-prefix replacement" pair per line.
# [func] = "0" marks MIRRORS as a data block, not an executable task,
# so no comments may appear inside the braces below.
MIRRORS[func] = "0"
MIRRORS () {
${DEBIAN_MIRROR}/main http://snapshot.debian.net/archive/pool
${DEBIAN_MIRROR} ftp://ftp.de.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.au.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.cl.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.hr.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.fi.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.hk.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.hu.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.ie.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.it.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.jp.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.no.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.pl.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.ro.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.si.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.es.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.se.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.tr.debian.org/debian/pool
${GNU_MIRROR} ftp://mirrors.kernel.org/gnu
${GNU_MIRROR} ftp://ftp.matrix.com.br/pub/gnu
${GNU_MIRROR} ftp://ftp.cs.ubc.ca/mirror2/gnu
${GNU_MIRROR} ftp://sunsite.ust.hk/pub/gnu
${GNU_MIRROR} ftp://ftp.ayamura.org/pub/gnu
ftp://ftp.kernel.org/pub http://www.kernel.org/pub
ftp://ftp.kernel.org/pub ftp://ftp.us.kernel.org/pub
ftp://ftp.kernel.org/pub ftp://ftp.uk.kernel.org/pub
ftp://ftp.kernel.org/pub ftp://ftp.hk.kernel.org/pub
ftp://ftp.kernel.org/pub ftp://ftp.au.kernel.org/pub
ftp://ftp.kernel.org/pub ftp://ftp.jp.kernel.org/pub
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.surfnet.nl/pub/security/gnupg/
ftp://ftp.gnupg.org/gcrypt/ http://gulus.USherbrooke.ca/pub/appl/GnuPG/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/

ftp://.*/.*/ http://www.oesources.org/source/current/
http://.*/.*/ http://www.oesources.org/source/current/
}
793 | |||
diff --git a/meta/classes/base_srpm.bbclass b/meta/classes/base_srpm.bbclass new file mode 100644 index 0000000000..aea6335278 --- /dev/null +++ b/meta/classes/base_srpm.bbclass | |||
@@ -0,0 +1,20 @@ | |||
inherit base package rpm_core

# Path of the spec file extracted from the source rpm by do_unpack.
SPECFILE="${RPMBUILDPATH}/SPECS/${PN}.spec"
4 | |||
base_srpm_do_unpack() {
	# Install the source rpm (which drops the spec file into
	# ${RPMBUILDPATH}/SPECS) and run its %prep stage.
	# Fixed: missing space in the error message ("...\"does not exist").
	test -e ${SRPMFILE} || die "Source rpm \"${SRPMFILE}\" does not exist"
	if ! test -e ${SPECFILE}; then
		${RPM} -i ${SRPMFILE}
	fi
	test -e ${SPECFILE} || die "Spec file \"${SPECFILE}\" does not exist"
	${RPMBUILD} -bp ${SPECFILE}
}
13 | |||
base_srpm_do_compile() {
	# Run the spec file's %build stage.
	${RPMBUILD} -bc ${SPECFILE}
}

base_srpm_do_install() {
	# Run the spec file's %install stage.
	${RPMBUILD} -bi ${SPECFILE}
}
diff --git a/meta/classes/binconfig.bbclass b/meta/classes/binconfig.bbclass new file mode 100644 index 0000000000..bf15ebcdf9 --- /dev/null +++ b/meta/classes/binconfig.bbclass | |||
@@ -0,0 +1,36 @@ | |||
1 | inherit base | ||
2 | |||
3 | # The namespaces can clash here hence the two step replace | ||
def get_binconfig_mangle(d):
    """Build the sed expression used to rewrite *-config scripts so they
    report staging paths instead of target paths.

    The rewrite goes target path -> placeholder -> staging path in two
    steps because the path namespaces can otherwise clash.
    """
    import bb.data
    mangle = "-e ''"
    if not bb.data.inherits_class('native', d):
        rules = [
            "-e 's:=${libdir}:=OELIBDIR:;'",
            "-e 's:=${includedir}:=OEINCDIR:;'",
            "-e 's:=${datadir}:=OEDATADIR:'",
            "-e 's:=${prefix}:=OEPREFIX:'",
            "-e 's:=${exec_prefix}:=OEEXECPREFIX:'",
            "-e 's:-L${libdir}:-LOELIBDIR:;'",
            "-e 's:-I${includedir}:-IOEINCDIR:;'",
            "-e 's:OELIBDIR:${STAGING_LIBDIR}:;'",
            "-e 's:OEINCDIR:${STAGING_INCDIR}:;'",
            "-e 's:OEDATADIR:${STAGING_DATADIR}:'",
            "-e 's:OEPREFIX:${STAGING_LIBDIR}/..:'",
            "-e 's:OEEXECPREFIX:${STAGING_LIBDIR}/..:'",
        ]
        mangle += " " + " ".join(rules)
    return mangle
21 | |||
22 | # Native package configurations go in ${BINDIR}/<name>-config-native to prevent a collision with cross packages | ||
def is_native(d):
    """Return "-native" when building a native package, else ""."""
    import bb.data
    if bb.data.inherits_class('native', d):
        return "-native"
    return ""
26 | |||
# Glob matching the *-config scripts to capture from ${S}.
BINCONFIG_GLOB ?= "*-config"

do_stage_append() {
	# Copy every matching *-config script into the staging bindir,
	# rewriting target paths to staging paths; native scripts get a
	# "-native" suffix to avoid clashing with cross packages.
	for config in `find ${S} -name '${BINCONFIG_GLOB}'`; do
		configname=`basename $config`${@is_native(d)}
		install -d ${STAGING_BINDIR}
		cat $config | sed ${@get_binconfig_mangle(d)} > ${STAGING_BINDIR}/$configname
		chmod u+x ${STAGING_BINDIR}/$configname
	done
}
diff --git a/meta/classes/ccache.inc b/meta/classes/ccache.inc new file mode 100644 index 0000000000..5e9356104b --- /dev/null +++ b/meta/classes/ccache.inc | |||
@@ -0,0 +1,11 @@ | |||
1 | # Make ccache use a TMPDIR specific ccache directory if using the crosscompiler, | ||
2 | # since it isn't likely to be useful with any other toolchain than the one we just | ||
3 | # built, and would otherwise push more useful things out of the default cache. | ||
4 | |||
CCACHE_DIR_TARGET = "${TMPDIR}/ccache"

python () {
    # Only redirect ccache for target builds; native and cross builds keep
    # the user's default cache directory.
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        bb.data.setVar('CCACHE_DIR', '${CCACHE_DIR_TARGET}', d)
        bb.data.setVarFlag('CCACHE_DIR', 'export', '1', d)
}
diff --git a/meta/classes/ccdv.bbclass b/meta/classes/ccdv.bbclass new file mode 100644 index 0000000000..edd151ef8c --- /dev/null +++ b/meta/classes/ccdv.bbclass | |||
@@ -0,0 +1,21 @@ | |||
python () {
    # Prefix compiler invocations with ccdv for compact build output.
    # Fixed: the original guard used "in", which wrapped ONLY ccdv-native
    # itself (and made it depend on itself); every package EXCEPT
    # ccdv-native should get the wrapper.
    if bb.data.getVar('PN', d, 1) not in ['ccdv-native']:
        if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d, 1):
            bb.data.setVar("DEPENDS", '%s %s' % ("ccdv-native", bb.data.getVar("DEPENDS", d, 1) or ""), d)
        bb.data.setVar("CC", '%s %s' % ("ccdv", bb.data.getVar("CC", d, 1) or ""), d)
        bb.data.setVar("BUILD_CC", '%s %s' % ("ccdv", bb.data.getVar("BUILD_CC", d, 1) or ""), d)
        bb.data.setVar("CCLD", '%s %s' % ("ccdv", bb.data.getVar("CCLD", d, 1) or ""), d)
}
9 | |||
def quiet_libtool(bb,d):
    """Return an EXTRA_OEMAKE fragment that silences libtool when a
    libtool variant is among DEPENDS; otherwise the empty string."""
    depends = (bb.data.getVar('DEPENDS', d, 1) or "").split()
    if 'libtool-cross' in depends:
        return "'LIBTOOL=${STAGING_BINDIR}/${HOST_SYS}-libtool --silent'"
    if 'libtool-native' in depends:
        return "'LIBTOOL=${B}/${HOST_SYS}-libtool --silent'"
    return ""
18 | |||
CCDV = "ccdv"
# Silence libtool and make so ccdv's compact output stays readable.
EXTRA_OEMAKE_append = " ${@quiet_libtool(bb,d)}"
MAKE += "-s"
diff --git a/meta/classes/cml1.bbclass b/meta/classes/cml1.bbclass new file mode 100644 index 0000000000..79218b4a12 --- /dev/null +++ b/meta/classes/cml1.bbclass | |||
@@ -0,0 +1,8 @@ | |||
cml1_do_configure() {
	# Refresh a kernel-style (CML1) configuration from an existing .config.
	set -e
	# Host build flags would leak into the kernel's own build machinery.
	unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
	oe_runmake oldconfig
}

EXPORT_FUNCTIONS do_configure
addtask configure after do_unpack do_patch before do_compile
diff --git a/meta/classes/cpan.bbclass b/meta/classes/cpan.bbclass new file mode 100644 index 0000000000..853abfd1b3 --- /dev/null +++ b/meta/classes/cpan.bbclass | |||
@@ -0,0 +1,20 @@ | |||
# Class for perl modules built with the CPAN MakeMaker flow.
FILES_${PN} += '${libdir}/perl5'

cpan_do_configure () {
	perl Makefile.PL
	# When cross-compiling, point the generated Makefile at the target
	# perl configuration and vendor install paths instead of the host's.
	if [ "${BUILD_SYS}" != "${HOST_SYS}" ]; then
		. ${STAGING_DIR}/${TARGET_SYS}/perl/config.sh
		sed -e "s:\(SITELIBEXP = \).*:\1${sitelibexp}:; s:\(SITEARCHEXP = \).*:\1${sitearchexp}:; s:\(INSTALLVENDORLIB = \).*:\1${D}${libdir}/perl5:; s:\(INSTALLVENDORARCH = \).*:\1${D}${libdir}/perl5:" < Makefile > Makefile.new
		mv Makefile.new Makefile
	fi
}

cpan_do_compile () {
	oe_runmake PASTHRU_INC="${CFLAGS}"
}

cpan_do_install () {
	# install_vendor honours the INSTALLVENDOR* paths set at configure time.
	oe_runmake install_vendor
}

EXPORT_FUNCTIONS do_configure do_compile do_install
diff --git a/meta/classes/cross.bbclass b/meta/classes/cross.bbclass new file mode 100644 index 0000000000..09357acbe8 --- /dev/null +++ b/meta/classes/cross.bbclass | |||
@@ -0,0 +1,55 @@ | |||
# Cross packages are built indirectly via dependency,
# no need for them to be a direct target of 'world'
EXCLUDE_FROM_WORLD = "1"

# Cross tools produce no target packages.
PACKAGES = ""

# Cross tools run on the build machine: the "host" is the build system.
HOST_ARCH = "${BUILD_ARCH}"
HOST_VENDOR = "${BUILD_VENDOR}"
HOST_OS = "${BUILD_OS}"
HOST_PREFIX = "${BUILD_PREFIX}"
HOST_CC_ARCH = "${BUILD_CC_ARCH}"

CPPFLAGS = "${BUILD_CPPFLAGS}"
CFLAGS = "${BUILD_CFLAGS}"
# NOTE(review): CXXFLAGS is set from BUILD_CFLAGS, not BUILD_CXXFLAGS --
# confirm whether that is intentional.
CXXFLAGS = "${BUILD_CFLAGS}"
LDFLAGS = "${BUILD_LDFLAGS}"
LDFLAGS_build-darwin = "-L${STAGING_DIR}/${BUILD_SYS}/lib "

# Overrides for paths

# Path prefixes: everything is rooted in CROSS_DIR.
base_prefix = "${exec_prefix}"
prefix = "${CROSS_DIR}"
exec_prefix = "${prefix}"

# Base paths
base_bindir = "${base_prefix}/bin"
base_sbindir = "${base_prefix}/bin"
base_libdir = "${base_prefix}/lib"

# Architecture independent paths
datadir = "${prefix}/share"
sysconfdir = "${prefix}/etc"
sharedstatedir = "${prefix}/com"
localstatedir = "${prefix}/var"
infodir = "${datadir}/info"
mandir = "${datadir}/man"
docdir = "${datadir}/doc"
servicedir = "${prefix}/srv"

# Architecture dependent paths
bindir = "${exec_prefix}/bin"
sbindir = "${exec_prefix}/bin"
libexecdir = "${exec_prefix}/libexec"
libdir = "${exec_prefix}/lib"
includedir = "${exec_prefix}/include"
oldincludedir = "${exec_prefix}/include"

# Cross tools are installed straight into CROSS_DIR at staging time...
do_stage () {
	oe_runmake install
}

# ...and install nothing into the package area.
do_install () {
	:
}
diff --git a/meta/classes/debian.bbclass b/meta/classes/debian.bbclass new file mode 100644 index 0000000000..5688dad93b --- /dev/null +++ b/meta/classes/debian.bbclass | |||
@@ -0,0 +1,101 @@ | |||
# Where the debian package-name maps are staged.
STAGING_PKGMAPS_DIR = "${STAGING_DIR}/pkgmaps/debian"

# Debian package renaming only occurs when a package is built
# We therefore have to make sure we build all runtime packages
# before building the current package to make the packages runtime
# depends are correct
BUILD_ALL_DEPS = "1"
8 | |||
python debian_package_name_hook () {
    # Rename library packages to Debian-style names: a package shipping
    # only shared libraries is renamed after its SONAME (libfoo.so.1 ->
    # libfoo1); sibling packages (e.g. -dev, -dbg) are renamed using the
    # versionless library stem. Applied to AUTO_LIBNAME_PKGS.
    import glob, copy, stat, errno, re

    workdir = bb.data.getVar('WORKDIR', d, 1)
    packages = bb.data.getVar('PACKAGES', d, 1)

    def socrunch(s):
        # Turn a SONAME like "libfoo.so.1" into the pair
        # (binary package name, dev package stem), or None if the
        # string is not a versioned .so name.
        s = s.lower().replace('_', '-')
        m = re.match("^(.*)(.)\.so\.(.*)$", s)
        if m is None:
            return None
        if m.group(2) in '0123456789':
            # Stem ends in a digit: add a dash so versions don't run together.
            bin = '%s%s-%s' % (m.group(1), m.group(2), m.group(3))
        else:
            bin = m.group(1) + m.group(2) + m.group(3)
        dev = m.group(1) + m.group(2)
        return (bin, dev)

    def isexec(path):
        # True if path exists and has the owner-execute bit.
        # NOTE(review): unused in the visible code.
        try:
            s = os.stat(path)
        except (os.error, AttributeError):
            return 0
        return (s[stat.ST_MODE] & stat.S_IEXEC)

    def auto_libname(packages, orig_pkg):
        # Scan the package image for binaries and shared libraries,
        # collecting SONAMEs via objdump.
        bin_re = re.compile(".*/s?bin$")
        lib_re = re.compile(".*/lib$")
        so_re = re.compile("lib.*\.so")
        sonames = []
        has_bins = 0
        has_libs = 0
        pkg_dir = os.path.join(workdir, "install", orig_pkg)
        for root, dirs, files in os.walk(pkg_dir):
            if bin_re.match(root) and files:
                has_bins = 1
            if lib_re.match(root) and files:
                has_libs = 1
                for f in files:
                    if so_re.match(f):
                        fp = os.path.join(root, f)
                        cmd = (bb.data.getVar('BUILD_PREFIX', d, 1) or "") + "objdump -p " + fp + " 2>/dev/null"
                        fd = os.popen(cmd)
                        lines = fd.readlines()
                        fd.close()
                        for l in lines:
                            m = re.match("\s+SONAME\s+([^\s]*)", l)
                            if m and not m.group(1) in sonames:
                                sonames.append(m.group(1))

        bb.debug(1, 'LIBNAMES: pkg %s libs %d bins %d sonames %s' % (orig_pkg, has_libs, has_bins, sonames))
        soname = None
        if len(sonames) == 1:
            soname = sonames[0]
        elif len(sonames) > 1:
            # Several libraries found: LEAD_SONAME (a regex) selects the
            # one that names the package.
            lead = bb.data.getVar('LEAD_SONAME', d, 1)
            if lead:
                r = re.compile(lead)
                filtered = []
                for s in sonames:
                    if r.match(s):
                        filtered.append(s)
                if len(filtered) == 1:
                    soname = filtered[0]
                elif len(filtered) > 1:
                    bb.note("Multiple matches (%s) for LEAD_SONAME '%s'" % (", ".join(filtered), lead))
                else:
                    bb.note("Multiple libraries (%s) found, but LEAD_SONAME '%s' doesn't match any of them" % (", ".join(sonames), lead))
            else:
                bb.note("Multiple libraries (%s) found and LEAD_SONAME not defined" % ", ".join(sonames))

        # Only rename pure library packages with an unambiguous SONAME.
        if has_libs and not has_bins and soname:
            soname_result = socrunch(soname)
            if soname_result:
                (pkgname, devname) = soname_result
                for pkg in packages.split():
                    # Respect explicit PKG_* renames and per-package opt-out.
                    if (bb.data.getVar('PKG_' + pkg, d) or bb.data.getVar('DEBIAN_NOAUTONAME_' + pkg, d)):
                        continue
                    if pkg == orig_pkg:
                        newpkg = pkgname
                    else:
                        newpkg = pkg.replace(orig_pkg, devname, 1)
                    if newpkg != pkg:
                        bb.data.setVar('PKG_' + pkg, newpkg, d)

    for pkg in (bb.data.getVar('AUTO_LIBNAME_PKGS', d, 1) or "").split():
        auto_libname(packages, pkg)
}
97 | |||
EXPORT_FUNCTIONS package_name_hook

# Fixed: bitbake assignment values must be quoted; an unquoted
# "DEBIAN_NAMES = 1" is not parseable.
DEBIAN_NAMES = "1"

diff --git a/meta/classes/distutils-base.bbclass b/meta/classes/distutils-base.bbclass new file mode 100644 index 0000000000..68d7112166 --- /dev/null +++ b/meta/classes/distutils-base.bbclass | |||
@@ -0,0 +1,14 @@ | |||
EXTRA_OEMAKE = ""
# Only pull in python when this recipe actually emits packages.
DEPENDS += "${@["python-native python", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}"
RDEPENDS += "python-core"
4 | |||
def python_dir(d):
    """Return the versioned python directory name (e.g. "python2.4")
    present under STAGING_INCDIR.

    Raises RuntimeError when no staged python is found. (The original
    raised a bare string, which string exceptions made invalid as of
    Python 2.6.)
    """
    import os, bb
    staging_incdir = bb.data.getVar( "STAGING_INCDIR", d, 1 )
    if os.path.exists( "%s/python2.3" % staging_incdir ): return "python2.3"
    if os.path.exists( "%s/python2.4" % staging_incdir ): return "python2.4"
    raise RuntimeError("No Python in STAGING_INCDIR. Forgot to build python-native ?")
11 | |||
# Expanded at parse time; requires python-native to already be staged.
PYTHON_DIR = "${@python_dir(d)}"
FILES_${PN} = "${bindir} ${libdir} ${libdir}/${PYTHON_DIR}"
14 | |||
diff --git a/meta/classes/distutils.bbclass b/meta/classes/distutils.bbclass new file mode 100644 index 0000000000..a2b0e2b770 --- /dev/null +++ b/meta/classes/distutils.bbclass | |||
@@ -0,0 +1,15 @@ | |||
inherit distutils-base

distutils_do_compile() {
	# Run setup.py build with the staged (native) python interpreter.
	BUILD_SYS=${BUILD_SYS} HOST_SYS=${HOST_SYS} \
	${STAGING_BINDIR}/python setup.py build || \
	oefatal "python setup.py build execution failed."
}

distutils_do_install() {
	# Install into the package area ${D} under the target prefix.
	BUILD_SYS=${BUILD_SYS} HOST_SYS=${HOST_SYS} \
	${STAGING_BINDIR}/python setup.py install --prefix=${D}/${prefix} --install-data=${D}/${datadir} || \
	oefatal "python setup.py install execution failed."
}

EXPORT_FUNCTIONS do_compile do_install
diff --git a/meta/classes/e.bbclass b/meta/classes/e.bbclass new file mode 100644 index 0000000000..afd9b6d2b3 --- /dev/null +++ b/meta/classes/e.bbclass | |||
@@ -0,0 +1,37 @@ | |||
MAINTAINER = "Justin Patrin <papercrane@reversefold.com>"
HOMEPAGE = "http://www.enlightenment.org"
SECTION = "e/apps"

inherit autotools pkgconfig binconfig

do_prepsources () {
	# Scrub any prebuilt state shipped in the sources; failures are
	# non-fatal (nothing to clean on a pristine tree).
	make clean distclean || true
}
addtask prepsources after do_fetch before do_unpack
11 | |||
def binconfig_suffix(d):
    """Return "-native" for native builds, "" otherwise."""
    import bb
    if bb.data.inherits_class('native', d):
        return "-native"
    return ""
15 | |||
# Point configure at the staged *-config scripts of the dependencies;
# native variants carry a "-native" suffix (see binconfig_suffix).
export CURL_CONFIG = "${STAGING_BINDIR}/curl-config${@binconfig_suffix(d)}"
export EDB_CONFIG = "${STAGING_BINDIR}/edb-config${@binconfig_suffix(d)}"
export EET_CONFIG = "${STAGING_BINDIR}/eet-config${@binconfig_suffix(d)}"
export EVAS_CONFIG = "${STAGING_BINDIR}/evas-config${@binconfig_suffix(d)}"
export ECORE_CONFIG = "${STAGING_BINDIR}/ecore-config${@binconfig_suffix(d)}"
export EMBRYO_CONFIG = "${STAGING_BINDIR}/embryo-config${@binconfig_suffix(d)}"
export ENGRAVE_CONFIG = "${STAGING_BINDIR}/engrave-config${@binconfig_suffix(d)}"
export ENLIGHTENMENT_CONFIG = "${STAGING_BINDIR}/enlightenment-config${@binconfig_suffix(d)}"
export EPSILON_CONFIG = "${STAGING_BINDIR}/epsilon-config${@binconfig_suffix(d)}"
export EPEG_CONFIG = "${STAGING_BINDIR}/epeg-config${@binconfig_suffix(d)}"
export ESMART_CONFIG = "${STAGING_BINDIR}/esmart-config${@binconfig_suffix(d)}"
export FREETYPE_CONFIG = "${STAGING_BINDIR}/freetype-config${@binconfig_suffix(d)}"
export IMLIB2_CONFIG = "${STAGING_BINDIR}/imlib2-config${@binconfig_suffix(d)}"

do_compile_prepend() {
	# Rewrite hardcoded host include paths in generated Makefiles to the
	# staging include dir so cross builds don't pick up host headers.
	find ${S} -name Makefile | xargs sed -i 's:/usr/include:${STAGING_INCDIR}:'
	find ${S} -name Makefile | xargs sed -i 's:/usr/X11R6/include:${STAGING_INCDIR}:'
}

PACKAGES = "${PN} ${PN}-themes"
FILES_${PN} = "${libdir}/lib*.so*"
FILES_${PN}-themes = "${datadir}/${PN}/themes ${datadir}/${PN}/data ${datadir}/${PN}/fonts ${datadir}/${PN}/pointers ${datadir}/${PN}/images ${datadir}/${PN}/users ${datadir}/${PN}/images ${datadir}/${PN}/styles"
diff --git a/meta/classes/efl.bbclass b/meta/classes/efl.bbclass new file mode 100644 index 0000000000..9c490284c2 --- /dev/null +++ b/meta/classes/efl.bbclass | |||
@@ -0,0 +1,49 @@ | |||
inherit e

SECTION = "e/libs"

# Upstream tarball name: the PN without any -native suffix.
SRCNAME = "${@bb.data.getVar('PN', d, 1).replace('-native', '')}"
SRC_URI = "${E_URI}/${SRCNAME}-${PV}.tar.gz"
S = "${WORKDIR}/${SRCNAME}-${PV}"

INHIBIT_AUTO_STAGE_INCLUDES = "1"
INHIBIT_NATIVE_STAGE_INSTALL = "1"

# Defaults describing the usual EFL source layout; recipes override
# these when a library deviates from it.
libdirectory = "src/lib"
libraries = "lib${SRCNAME}"
headers = "${@bb.data.getVar('SRCNAME',d,1).capitalize()}.h"

do_stage_append () {
	# Stage the libraries and public headers by hand (automatic include
	# staging is inhibited above).
	for i in ${libraries}
	do
		oe_libinstall -C ${libdirectory} $i ${STAGING_LIBDIR}
	done
	for i in ${headers}
	do
		install -m 0644 ${libdirectory}/$i ${STAGING_INCDIR}
	done

	# Install binaries automatically for native builds
	if [ "${@binconfig_suffix(d)}" = "-native" ]
	then

		# Most EFL binaries start with the package name
		for i in src/bin/${SRCNAME}*
		do
			if [ -x $i -a -f $i ]
			then

				# Don't install anything with an extension (.so, etc)
				if echo $i | grep -v \\.
				then
					${HOST_SYS}-libtool --mode=install install -m 0755 $i ${STAGING_BINDIR}
				fi
			fi
		done
	fi
}

PACKAGES = "${PN} ${PN}-themes ${PN}-dev ${PN}-examples"
FILES_${PN}-dev = "${bindir}/${PN}-config ${libdir}/pkgconfig ${libdir}/lib*.?a ${libdir}/lib*.a"
FILES_${PN}-examples = "${bindir} ${datadir}"

diff --git a/meta/classes/flow-lossage.bbclass b/meta/classes/flow-lossage.bbclass new file mode 100644 index 0000000000..3e841e3cae --- /dev/null +++ b/meta/classes/flow-lossage.bbclass | |||
@@ -0,0 +1,5 @@ | |||
# gcc-3.4 blows up in gtktext with -frename-registers on arm-linux
python () {
    # Strip the offending flag from CFLAGS at parse time.
    current = bb.data.getVar('CFLAGS', d, 1) or ''
    bb.data.setVar('CFLAGS', current.replace('-frename-registers', ''), d)
}
diff --git a/meta/classes/gconf.bbclass b/meta/classes/gconf.bbclass new file mode 100644 index 0000000000..686f8e6596 --- /dev/null +++ b/meta/classes/gconf.bbclass | |||
@@ -0,0 +1,59 @@ | |||
DEPENDS += "gconf"

gconf_postinst() {
	if [ "$1" = configure ]; then
		# Schema registration needs a live gconf; refuse to run at
		# image-creation time ($D set) so it is retried on the target.
		if [ "x$D" != "x" ]; then
			exit 1
		fi
		SCHEMA_LOCATION=/etc/gconf/schemas
		for SCHEMA in ${SCHEMA_FILES}; do
			if [ -e $SCHEMA_LOCATION/$SCHEMA ]; then
				HOME=/root GCONF_CONFIG_SOURCE=`gconftool-2 --get-default-source` \
					gconftool-2 \
					--makefile-install-rule $SCHEMA_LOCATION/$SCHEMA > /dev/null
			fi
		done
	fi
}

gconf_prerm() {
	if [ "$1" = remove ] || [ "$1" = upgrade ]; then
		# Unregister this package's schemas before removal/upgrade.
		SCHEMA_LOCATION=/etc/gconf/schemas
		for SCHEMA in ${SCHEMA_FILES}; do
			if [ -e $SCHEMA_LOCATION/$SCHEMA ]; then
				HOME=/root GCONF_CONFIG_SOURCE=`gconftool-2 --get-default-source` \
					gconftool-2 \
					--makefile-uninstall-rule $SCHEMA_LOCATION/$SCHEMA > /dev/null
			fi
		done
	fi
}
31 | |||
python populate_packages_append () {
    # Attach the gconf postinst/prerm schema-registration snippets to any
    # package that ships /etc/gconf/schemas files.
    import os.path, re
    packages = bb.data.getVar('PACKAGES', d, 1).split()
    workdir = bb.data.getVar('WORKDIR', d, 1)

    for pkg in packages:
        schema_dir = '%s/install/%s/etc/gconf/schemas' % (workdir, pkg)
        schemas = []
        schema_re = re.compile(".*\.schemas$")
        if os.path.exists(schema_dir):
            for f in os.listdir(schema_dir):
                if schema_re.match(f):
                    schemas.append(f)
        if schemas != []:
            bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
            # SCHEMA_FILES is expanded into the script text below (flag 1),
            # so each package embeds its own schema list.
            bb.data.setVar('SCHEMA_FILES', " ".join(schemas), d)
            postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += bb.data.getVar('gconf_postinst', d, 1)
            bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
            prerm = bb.data.getVar('pkg_prerm_%s' % pkg, d, 1) or bb.data.getVar('pkg_prerm', d, 1)
            if not prerm:
                prerm = '#!/bin/sh\n'
            prerm += bb.data.getVar('gconf_prerm', d, 1)
            bb.data.setVar('pkg_prerm_%s' % pkg, prerm, d)

}
diff --git a/meta/classes/gettext.bbclass b/meta/classes/gettext.bbclass new file mode 100644 index 0000000000..3785f5acd3 --- /dev/null +++ b/meta/classes/gettext.bbclass | |||
@@ -0,0 +1,12 @@ | |||
python () {
    # Remove the NLS bits if USE_NLS is no.
    if bb.data.getVar('USE_NLS', d, 1) == 'no':
        # Drop any existing --enable/--disable-nls, then force disable.
        cfg = oe_filter_out('^--(dis|en)able-nls$', bb.data.getVar('EXTRA_OECONF', d, 1) or "", d)
        cfg += " --disable-nls"
        depends = bb.data.getVar('DEPENDS', d, 1) or ""
        bb.data.setVar('DEPENDS', oe_filter_out('^(virtual/libiconv|virtual/libintl)$', depends, d), d)
        bb.data.setVar('EXTRA_OECONF', cfg, d)
}

DEPENDS =+ "gettext-native"
EXTRA_OECONF += "--enable-nls"
diff --git a/meta/classes/gnome.bbclass b/meta/classes/gnome.bbclass new file mode 100644 index 0000000000..8643989b73 --- /dev/null +++ b/meta/classes/gnome.bbclass | |||
@@ -0,0 +1,20 @@ | |||
def gnome_verdir(v):
    """Return the "major.minor" directory component used in GNOME
    download URLs, e.g. "2.12.3" -> "2.12".

    Also accepts a bare "major.minor" version: the previous regex
    required a third component and crashed (AttributeError) on input
    such as "2.12".
    """
    import re
    m = re.match(r"^([0-9]+)\.([0-9]+)", v)
    return "%s.%s" % (m.group(1), m.group(2))
5 | |||
SECTION ?= "x11/gnome"
SRC_URI = "${GNOME_MIRROR}/${PN}/${@gnome_verdir("${PV}")}/${PN}-${PV}.tar.bz2"

DEPENDS += "gnome-common"

FILES_${PN} += "${datadir}/application-registry ${datadir}/mime-info \
	${datadir}/gnome-2.0"

inherit autotools pkgconfig gconf

# Let autoreconf find the gnome2 aclocal macros staged by gnome-common.
EXTRA_AUTORECONF += "-I ${STAGING_DIR}/${HOST_SYS}/share/aclocal/gnome2-macros"

gnome_stage_includes() {
	autotools_stage_includes
}
diff --git a/meta/classes/gpe.bbclass b/meta/classes/gpe.bbclass new file mode 100644 index 0000000000..861ec416a0 --- /dev/null +++ b/meta/classes/gpe.bbclass | |||
@@ -0,0 +1,17 @@ | |||
DEPENDS_prepend = "coreutils-native virtual/libintl intltool-native "
GPE_TARBALL_SUFFIX ?= "gz"
SRC_URI = "${GPE_MIRROR}/${PN}-${PV}.tar.${GPE_TARBALL_SUFFIX}"
FILES_${PN} += "${datadir}/gpe ${datadir}/application-registry"
MAINTAINER ?= "GPE Team <gpe@handhelds.org>"

inherit gettext

gpe_do_compile() {
	# GPE makefiles take the install prefix via PREFIX, not configure.
	oe_runmake PREFIX=${prefix}
}

gpe_do_install() {
	oe_runmake PREFIX=${prefix} DESTDIR=${D} install
}

EXPORT_FUNCTIONS do_compile do_install
diff --git a/meta/classes/gtk-icon-cache.bbclass b/meta/classes/gtk-icon-cache.bbclass new file mode 100644 index 0000000000..0f68e6812b --- /dev/null +++ b/meta/classes/gtk-icon-cache.bbclass | |||
@@ -0,0 +1,38 @@ | |||
FILES_${PN} += "${datadir}/icons/hicolor"

gtk-icon-cache_postinst() {
	# The icon cache can only be rebuilt on the running target; fail at
	# image-creation time ($D set) so postinst is retried on first boot.
	if [ "x$D" != "x" ]; then
		exit 1
	fi
	gtk-update-icon-cache -q /usr/share/icons/hicolor
}

gtk-icon-cache_postrm() {
	gtk-update-icon-cache -q /usr/share/icons/hicolor
}
13 | |||
python populate_packages_append () {
    # Attach the icon-cache update scripts to any package that ships
    # hicolor icons under ${datadir}/icons/hicolor.
    import os.path
    packages = bb.data.getVar('PACKAGES', d, 1).split()
    workdir = bb.data.getVar('WORKDIR', d, 1)

    for pkg in packages:
        icon_dir = '%s/install/%s/%s/icons/hicolor' % (workdir, pkg, bb.data.getVar('datadir', d, 1))
        if not os.path.exists(icon_dir):
            continue

        bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg)

        # Append to any existing scripts, creating a shebang when absent.
        postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
        if not postinst:
            postinst = '#!/bin/sh\n'
        postinst += bb.data.getVar('gtk-icon-cache_postinst', d, 1)
        bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)

        postrm = bb.data.getVar('pkg_postrm_%s' % pkg, d, 1) or bb.data.getVar('pkg_postrm', d, 1)
        if not postrm:
            postrm = '#!/bin/sh\n'
        postrm += bb.data.getVar('gtk-icon-cache_postrm', d, 1)
        bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d)
}
38 | |||
diff --git a/meta/classes/icecc.bbclass b/meta/classes/icecc.bbclass new file mode 100644 index 0000000000..7dfcfc29a4 --- /dev/null +++ b/meta/classes/icecc.bbclass | |||
@@ -0,0 +1,156 @@ | |||
1 | # IceCream distributed compiling support | ||
2 | # | ||
3 | # We need to create a tar.bz2 of our toolchain and set | ||
4 | # ICECC_VERSION, ICECC_CXX and ICEC_CC | ||
5 | # | ||
6 | |||
def create_env(bb,d):
	"""
	Create a tar.bz2 of the current cross toolchain so remote icecc
	daemons can use the identical compiler environment.

	Returns the tarball path, or "" when no environment is needed
	(native build) or the toolchain is not fully built yet.
	"""

	# In a native-native build (empty host prefix) no environment is
	# needed; duplicate the query here for ease of use.
	prefix = bb.data.expand('${HOST_PREFIX}', d)
	if len(prefix) == 0:
		return ""

	import tarfile
	import socket
	import time
	import os
	ice_dir = bb.data.expand('${CROSS_DIR}', d)
	prefix = bb.data.expand('${HOST_PREFIX}' , d)
	distro = bb.data.expand('${DISTRO}', d)
	target_sys = bb.data.expand('${TARGET_SYS}', d)
	#float = bb.data.getVar('${TARGET_FPU}', d)
	float = "anyfloat"
	name = socket.gethostname()

	# Bail out if the cross toolchain has not been fully built yet.
	try:
		os.stat(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2')
		os.stat(ice_dir + '/' + target_sys + '/bin/g++')
	except OSError:
		return ""

	# FIXME find out the version of the compiler (and libc: the
	# ld-2.3.3.so / libc-2.3.3.so names below are hardcoded too)
	VERSION = '3.4.3'
	cross_name = prefix + distro + target_sys + float +VERSION+ name
	tar_file = ice_dir + '/ice/' + cross_name + '.tar.bz2'

	# Reuse a previously generated environment tarball if present.
	try:
		os.stat(tar_file)
		return tar_file
	except OSError:
		try:
			os.makedirs(ice_dir+'/ice')
		except OSError:
			pass

	tar = tarfile.open(tar_file, 'w:bz2')
	tar.add(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2',
		target_sys + 'cross/lib/ld-linux.so.2')
	tar.add(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2',
		target_sys + 'cross/lib/ld-2.3.3.so')
	tar.add(ice_dir + '/' + target_sys + '/lib/libc-2.3.3.so',
		target_sys + 'cross/lib/libc-2.3.3.so')
	tar.add(ice_dir + '/' + target_sys + '/lib/libc.so.6',
		target_sys + 'cross/lib/libc.so.6')
	tar.add(ice_dir + '/' + target_sys + '/bin/gcc',
		target_sys + 'cross/usr/bin/gcc')
	tar.add(ice_dir + '/' + target_sys + '/bin/g++',
		target_sys + 'cross/usr/bin/g++')
	tar.add(ice_dir + '/' + target_sys + '/bin/as',
		target_sys + 'cross/usr/bin/as')
	tar.add(ice_dir + '/lib/gcc/' + target_sys +'/'+ VERSION + '/specs',
		target_sys+'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/specs')
	tar.add(ice_dir + '/libexec/gcc/'+target_sys+'/' + VERSION + '/cc1',
		target_sys + 'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/cc1')
	# Fix: cc1plus source path used a hardcoded 'arm-linux' instead of
	# target_sys, breaking every non-arm-linux target.
	tar.add(ice_dir + '/libexec/gcc/'+target_sys+'/' + VERSION + '/cc1plus',
		target_sys+'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/cc1plus')
	tar.close()
	return tar_file
73 | |||
74 | |||
def create_path(compilers, type, bb, d):
	"""
	Create symlinks for the icecc wrapper in the staging directory.

	Returns the PATH fragment (directory plus trailing ':') that should
	be prepended so the wrapper shadows the real compilers.
	"""
	import os

	staging = bb.data.expand('${STAGING_DIR}', d) + "/ice/" + type
	icecc = bb.data.getVar('ICECC_PATH', d)

	# Create the dir if necessary (explicit check instead of a bare
	# except around os.stat, which hid unrelated errors).
	if not os.path.isdir(staging):
		os.makedirs(staging)

	for compiler in compilers:
		gcc_path = staging + "/" + compiler
		# lexists: an existing-but-dangling symlink previously made
		# os.stat fail and the subsequent symlink raise EEXIST.
		if not os.path.lexists(gcc_path):
			os.symlink(icecc, gcc_path)

	return staging + ":"
99 | |||
100 | |||
def use_icc_version(bb,d):
	"""Return "yes" if this recipe should advertise a packaged toolchain
	environment via ICECC_VERSION, "no" otherwise."""
	# Native-native builds (no host prefix) never ship an environment.
	if len(bb.data.expand('${HOST_PREFIX}', d)) == 0:
		return "no"

	# Cross and native tool recipes build with the plain local compiler.
	pn = bb.data.expand('${PN}', d)
	for suffix in [ "-cross", "-native" ]:
		if suffix in pn:
			return "no"

	return "yes"
116 | |||
def icc_path(bb,d,compile):
	# Returns the PATH fragment holding icecc wrapper symlinks for this
	# recipe, or "" for recipes that must never go through icecc.
	native = bb.data.expand('${PN}', d)
	# Recipes known to break under distributed compilation.
	# NOTE(review): "ulibc" looks like a typo for "uclibc" -- confirm.
	blacklist = [ "ulibc", "glibc", "ncurses" ]
	for black in blacklist:
		if black in native:
			return ""

	# Cross and native tool recipes always compile with the local
	# (native-named) wrapper links regardless of the caller's request.
	if "-native" in native:
		compile = False
	if "-cross" in native:
		compile = False

	prefix = bb.data.expand('${HOST_PREFIX}', d)
	if compile and len(prefix) != 0:
		return create_path( [prefix+"gcc", prefix+"g++"], "cross", bb, d )
	elif not compile or len(prefix) == 0:
		return create_path( ["gcc", "g++"], "native", bb, d)
134 | |||
135 | |||
def icc_version(bb,d):
	# Value exported as ICECC_VERSION: path of the toolchain tarball.
	return create_env(bb,d)
138 | |||
139 | |||
140 | # | ||
141 | # set the IceCream environment variables | ||
# configure runs on the build host: use the native compiler wrappers.
do_configure_prepend() {
	export PATH=${@icc_path(bb,d,False)}$PATH
	export ICECC_CC="gcc"
	export ICECC_CXX="g++"
}

# compile may be distributed: use the cross wrappers and, when a
# toolchain environment is available, advertise it to the daemons.
do_compile_prepend() {
	export PATH=${@icc_path(bb,d,True)}$PATH
	export ICECC_CC="${HOST_PREFIX}gcc"
	export ICECC_CXX="${HOST_PREFIX}g++"

	if [ "${@use_icc_version(bb,d)}" = "yes" ]; then
		export ICECC_VERSION="${@icc_version(bb,d)}"
	fi
}
diff --git a/meta/classes/image_ipk.bbclass b/meta/classes/image_ipk.bbclass new file mode 100644 index 0000000000..c2f1c8d682 --- /dev/null +++ b/meta/classes/image_ipk.bbclass | |||
@@ -0,0 +1,76 @@ | |||
inherit rootfs_ipk

# We need to follow RDEPENDS and RRECOMMENDS for images
BUILD_ALL_DEPS = "1"

# Images are generally built explicitly, do not need to be part of world.
EXCLUDE_FROM_WORLD = "1"

# Set to "1" when the target uses devfs; skips static /dev creation.
USE_DEVFS ?= "0"

# makedevs populates /dev from IMAGE_DEVICE_TABLE in do_rootfs.
DEPENDS += "makedevs-native"
# Image contents depend on the machine, not just the target arch.
PACKAGE_ARCH = "${MACHINE_ARCH}"
13 | |||
def get_image_deps(d):
	"""Collect the IMAGE_DEPENDS_<fstype> entries for every requested
	image filesystem type into one space-separated string."""
	import bb
	deps = ""
	fstypes = (bb.data.getVar('IMAGE_FSTYPES', d, 1) or "").split()
	for fstype in fstypes:
		extra = bb.data.getVar('IMAGE_DEPENDS_%s' % fstype, d) or ""
		if extra:
			deps += " %s" % extra
	return deps
22 | |||
# Pull in the tools needed to generate each requested image fs type.
DEPENDS += "${@get_image_deps(d)}"

IMAGE_DEVICE_TABLE ?= "${@bb.which(bb.data.getVar('BBPATH', d, 1), 'files/device_table-minimal.txt')}"
# Hook for distros/images to run extra commands on the finished rootfs.
IMAGE_POSTPROCESS_COMMAND ?= ""
27 | |||
# Must call real_do_rootfs() from inside here, rather than as a separate
# task, so that we have a single fakeroot context for the whole process.
fakeroot do_rootfs () {
	set -x
	rm -rf ${IMAGE_ROOTFS}

	# Create static device nodes unless the target uses devfs.
	if [ "${USE_DEVFS}" != "1" ]; then
		mkdir -p ${IMAGE_ROOTFS}/dev
		makedevs -r ${IMAGE_ROOTFS} -D ${IMAGE_DEVICE_TABLE}
	fi

	real_do_rootfs

	insert_feed_uris

	# Drop the build-time package list; it is meaningless on target.
	rm -f ${IMAGE_ROOTFS}${libdir}/ipkg/lists/oe

	${IMAGE_PREPROCESS_COMMAND}

	export TOPDIR=${TOPDIR}

	# Generate one image per requested filesystem type, re-entering
	# fakeroot with the saved ownership database when not already inside.
	for type in ${IMAGE_FSTYPES}; do
		if test -z "$FAKEROOTKEY"; then
			fakeroot -i ${TMPDIR}/fakedb.image bbimage -t $type -e ${FILE}
		else
			bbimage -n "${IMAGE_NAME}" -t "$type" -e "${FILE}"
		fi
	done

	${IMAGE_POSTPROCESS_COMMAND}
}
59 | |||
# Write one ipkg feed configuration file per entry in FEED_URIS.
# Each entry has the form "<name>##<uri>".
insert_feed_uris () {

	echo "Building feeds for [${DISTRO}].."

	for line in ${FEED_URIS}
	do
		# strip leading and trailing spaces/tabs, then split into name and uri
		line_clean="`echo "$line"|sed 's/^[ \t]*//;s/[ \t]*$//'`"
		feed_name="`echo "$line_clean" | sed -n 's/\(.*\)##\(.*\)/\1/p'`"
		feed_uri="`echo "$line_clean" | sed -n 's/\(.*\)##\(.*\)/\2/p'`"

		echo "Added $feed_name feed with URL $feed_uri"

		# insert new feed-sources
		echo "src/gz $feed_name $feed_uri" >> ${IMAGE_ROOTFS}/etc/ipkg/${feed_name}-feed.conf
	done
}
diff --git a/meta/classes/kernel-arch.bbclass b/meta/classes/kernel-arch.bbclass new file mode 100644 index 0000000000..92a6c982fb --- /dev/null +++ b/meta/classes/kernel-arch.bbclass | |||
@@ -0,0 +1,26 @@ | |||
1 | # | ||
2 | # set the ARCH environment variable for kernel compilation (including | ||
3 | # modules). return value must match one of the architecture directories | ||
4 | # in the kernel source "arch" directory | ||
5 | # | ||
6 | |||
# Kernel architectures (arch/ subdirectories) that need no translation
# from the OE TARGET_ARCH value.
valid_archs = "alpha cris ia64 m68knommu ppc sh \
	       sparc64 x86_64 arm h8300 m32r mips \
	       ppc64 sh64 um arm26 i386 m68k \
	       parisc s390 sparc v850"
11 | |||
def map_kernel_arch(a, d):
	"""Translate an OE TARGET_ARCH value into the matching kernel
	arch/ directory name; logs an error for unknown architectures."""
	import bb, re

	valid_archs = bb.data.getVar('valid_archs', d, 1).split()

	# All x86 variants build in arch/i386.
	if re.match('(i.86|athlon)$', a):
		return 'i386'

	# Simple one-to-one renames.
	renames = { 'arm26': 'arm26', 'armeb': 'arm', 'powerpc': 'ppc', 'mipsel': 'mips' }
	if a in renames:
		return renames[a]

	if a in valid_archs:
		return a

	bb.error("cannot map '%s' to a linux kernel architecture" % a)

export ARCH = "${@map_kernel_arch(bb.data.getVar('TARGET_ARCH', d, 1), d)}"
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass new file mode 100644 index 0000000000..ad51c4e035 --- /dev/null +++ b/meta/classes/kernel.bbclass | |||
@@ -0,0 +1,435 @@ | |||
inherit module_strip

PROVIDES += "virtual/kernel"
DEPENDS += "virtual/${TARGET_PREFIX}depmod-${@get_kernelmajorversion('${PV}')} virtual/${TARGET_PREFIX}gcc${KERNEL_CCSUFFIX} update-modules"

inherit kernel-arch

# Module and image packages are created on the fly by populate_packages.
PACKAGES_DYNAMIC += "kernel-module-*"
PACKAGES_DYNAMIC += "kernel-image-*"

export OS = "${TARGET_OS}"
export CROSS_COMPILE = "${TARGET_PREFIX}"
KERNEL_IMAGETYPE = "zImage"

# update-alternatives priority, derived from the kernel sublevel number.
KERNEL_PRIORITY = "${@bb.data.getVar('PV',d,1).split('-')[0].split('.')[-1]}"

# [jbowler 20051109] ${PV}${KERNEL_LOCALVERSION} is used throughout this
# .bbclass to (apparently) find the full 'uname -r' kernel version, this
# should be the same as UTS_RELEASE or (in this file) KERNEL_VERSION:
# KERNELRELEASE=$(VERSION).$(PATCHLEVEL).$(SUBLEVEL)$(EXTRAVERSION)$(LOCALVERSION)
# but since this is not certain this overridable setting is used here:
KERNEL_RELEASE ?= "${PV}${KERNEL_LOCALVERSION}"

# Suffixes for versioned toolchain binaries (e.g. gcc-3.4).
KERNEL_CCSUFFIX ?= ""
KERNEL_LDSUFFIX ?= ""

# Set TARGET_??_KERNEL_ARCH in the machine .conf to set architecture
# specific options necessary for building the kernel and modules.
#FIXME: should be this: TARGET_CC_KERNEL_ARCH ?= "${TARGET_CC_ARCH}"
TARGET_CC_KERNEL_ARCH ?= ""
HOST_CC_KERNEL_ARCH ?= "${TARGET_CC_KERNEL_ARCH}"
TARGET_LD_KERNEL_ARCH ?= ""
HOST_LD_KERNEL_ARCH ?= "${TARGET_LD_KERNEL_ARCH}"

KERNEL_CC = "${CCACHE}${HOST_PREFIX}gcc${KERNEL_CCSUFFIX} ${HOST_CC_KERNEL_ARCH}"
KERNEL_LD = "${LD}${KERNEL_LDSUFFIX} ${HOST_LD_KERNEL_ARCH}"

KERNEL_OUTPUT = "arch/${ARCH}/boot/${KERNEL_IMAGETYPE}"
KERNEL_IMAGEDEST = "boot"

#
# configuration
#
export CMDLINE_CONSOLE = "console=${@bb.data.getVar("KERNEL_CONSOLE",d,1) or "ttyS0"}"
46 | # parse kernel ABI version out of <linux/version.h> | ||
def get_kernelversion(p):
	"""Return the UTS_RELEASE string defined in the version header at
	path p, or None if the file is unreadable or lacks the define."""
	import re
	release_def = re.compile('#define UTS_RELEASE "(.*)"')
	try:
		f = open(p, 'r')
	except IOError:
		return None
	lines = f.readlines()
	f.close()
	version = None
	for line in lines:
		found = release_def.match(line)
		if found:
			version = found.group(1)
			break
	return version
61 | |||
def get_kernelmajorversion(p):
	"""Return the "major.minor" prefix of a kernel version string
	(e.g. "2.6" from "2.6.11.3"), or None when there is none."""
	import re
	match = re.match(r"([0-9]+\.[0-9]+)", p)
	if match is None:
		return None
	return match.group(1)
69 | |||
# ABI version parsed out of the configured kernel source tree.
KERNEL_VERSION = "${@get_kernelversion('${S}/include/linux/version.h')}"
KERNEL_MAJOR_VERSION = "${@get_kernelmajorversion('${KERNEL_VERSION}')}"

KERNEL_LOCALVERSION ?= ""

# kernels are generally machine specific
PACKAGE_ARCH = "${MACHINE_ARCH}"
77 | |||
kernel_do_compile() {
	# Host build flags must not leak into the kernel build.
	unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
	oe_runmake include/linux/version.h CC="${KERNEL_CC}" LD="${KERNEL_LD}"
	# 2.4 kernels need an explicit dependency pass; 2.6 does not.
	if [ "${KERNEL_MAJOR_VERSION}" != "2.6" ]; then
		oe_runmake dep CC="${KERNEL_CC}" LD="${KERNEL_LD}"
	fi
	oe_runmake ${KERNEL_IMAGETYPE} CC="${KERNEL_CC}" LD="${KERNEL_LD}"
	# Build modules only when the configuration enables them.
	if (grep -q -i -e '^CONFIG_MODULES=y$' .config); then
		oe_runmake modules CC="${KERNEL_CC}" LD="${KERNEL_LD}"
	else
		oenote "no modules to compile"
	fi
}
91 | |||
# Copy the headers, configuration and build scripts that external module
# recipes (module-base.bbclass) need into STAGING_KERNEL_DIR.
kernel_do_stage() {
	# include/asm is a symlink to the arch-specific header directory.
	ASMDIR=`readlink include/asm`

	mkdir -p ${STAGING_KERNEL_DIR}/include/$ASMDIR
	cp -fR include/$ASMDIR/* ${STAGING_KERNEL_DIR}/include/$ASMDIR/
	rm -f $ASMDIR ${STAGING_KERNEL_DIR}/include/asm
	ln -sf $ASMDIR ${STAGING_KERNEL_DIR}/include/asm

	mkdir -p ${STAGING_KERNEL_DIR}/include/asm-generic
	cp -fR include/asm-generic/* ${STAGING_KERNEL_DIR}/include/asm-generic/

	mkdir -p ${STAGING_KERNEL_DIR}/include/linux
	cp -fR include/linux/* ${STAGING_KERNEL_DIR}/include/linux/

	mkdir -p ${STAGING_KERNEL_DIR}/include/net
	cp -fR include/net/* ${STAGING_KERNEL_DIR}/include/net/

	mkdir -p ${STAGING_KERNEL_DIR}/include/pcmcia
	cp -fR include/pcmcia/* ${STAGING_KERNEL_DIR}/include/pcmcia/

	if [ -d include/sound ]; then
		mkdir -p ${STAGING_KERNEL_DIR}/include/sound
		cp -fR include/sound/* ${STAGING_KERNEL_DIR}/include/sound/
	fi

	if [ -d drivers/sound ]; then
		# 2.4 alsa needs some headers from this directory
		mkdir -p ${STAGING_KERNEL_DIR}/include/drivers/sound
		cp -fR drivers/sound/*.h ${STAGING_KERNEL_DIR}/include/drivers/sound/
	fi

	# Record the configuration and toolchain facts for module recipes.
	install -m 0644 .config ${STAGING_KERNEL_DIR}/config-${KERNEL_RELEASE}
	ln -sf config-${KERNEL_RELEASE} ${STAGING_KERNEL_DIR}/.config
	ln -sf config-${KERNEL_RELEASE} ${STAGING_KERNEL_DIR}/kernel-config
	echo "${KERNEL_VERSION}" >${STAGING_KERNEL_DIR}/kernel-abiversion
	echo "${S}" >${STAGING_KERNEL_DIR}/kernel-source
	echo "${KERNEL_CCSUFFIX}" >${STAGING_KERNEL_DIR}/kernel-ccsuffix
	echo "${KERNEL_LDSUFFIX}" >${STAGING_KERNEL_DIR}/kernel-ldsuffix
	[ -e Rules.make ] && install -m 0644 Rules.make ${STAGING_KERNEL_DIR}/
	[ -e Makefile ] && install -m 0644 Makefile ${STAGING_KERNEL_DIR}/

	# Check if arch/${ARCH}/Makefile exists and install it
	if [ -e arch/${ARCH}/Makefile ]; then
		install -d ${STAGING_KERNEL_DIR}/arch/${ARCH}
		install -m 0644 arch/${ARCH}/Makefile ${STAGING_KERNEL_DIR}/arch/${ARCH}
	fi
	cp -fR include/config* ${STAGING_KERNEL_DIR}/include/
	install -m 0644 ${KERNEL_OUTPUT} ${STAGING_KERNEL_DIR}/${KERNEL_IMAGETYPE}
	install -m 0644 System.map ${STAGING_KERNEL_DIR}/System.map-${KERNEL_RELEASE}
	[ -e Module.symvers ] && install -m 0644 Module.symvers ${STAGING_KERNEL_DIR}/

	cp -fR scripts ${STAGING_KERNEL_DIR}/
}
145 | |||
kernel_do_install() {
	# Host build flags must not leak into the kernel build.
	unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
	if (grep -q -i -e '^CONFIG_MODULES=y$' .config); then
		# DEPMOD=echo: skip depmod here; it runs in the postinst.
		oe_runmake DEPMOD=echo INSTALL_MOD_PATH="${D}" modules_install
	else
		oenote "no modules to install"
	fi

	# Install the image, map and config with versioned names.
	install -d ${D}/${KERNEL_IMAGEDEST}
	install -d ${D}/boot
	install -m 0644 ${KERNEL_OUTPUT} ${D}/${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE}-${KERNEL_RELEASE}
	install -m 0644 System.map ${D}/boot/System.map-${KERNEL_RELEASE}
	install -m 0644 .config ${D}/boot/config-${KERNEL_RELEASE}
	install -d ${D}/etc/modutils

	# Check if scripts/genksyms exists and if so, build it
	if [ -e scripts/genksyms/ ]; then
		oe_runmake SUBDIRS="scripts/genksyms"
	fi

	install -d ${STAGING_KERNEL_DIR}
	cp -fR scripts ${STAGING_KERNEL_DIR}/
}
169 | |||
# Bring an existing .config up to date, accepting the default answer
# for any configuration option added since it was written.
kernel_do_configure() {
	yes '' | oe_runmake oldconfig
}
173 | |||
# Register/unregister the versioned image with update-alternatives so
# /${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE} points at the highest-priority
# installed kernel.
pkg_postinst_kernel () {
	update-alternatives --install /${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE} ${KERNEL_IMAGETYPE} /${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE}-${KERNEL_RELEASE} ${KERNEL_PRIORITY} || true
}

pkg_postrm_kernel () {
	update-alternatives --remove ${KERNEL_IMAGETYPE} /${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE}-${KERNEL_RELEASE} || true
}
181 | |||
inherit cml1

EXPORT_FUNCTIONS do_compile do_install do_stage do_configure

# Package layout: the empty meta package "kernel" pulls in the
# versioned image package via RDEPENDS.
PACKAGES = "kernel kernel-image kernel-dev"
FILES = ""
FILES_kernel-image = "/boot/${KERNEL_IMAGETYPE}*"
FILES_kernel-dev = "/boot/System.map* /boot/config*"
RDEPENDS_kernel = "kernel-image-${KERNEL_VERSION}"
PKG_kernel-image = "kernel-image-${KERNEL_VERSION}"
ALLOW_EMPTY_kernel = "1"
ALLOW_EMPTY_kernel-image = "1"
194 | |||
# Run depmod for the freshly installed kernel image.  With $D set
# (offline install) the cross depmod is used against the staged map.
pkg_postinst_kernel-image () {
	if [ ! -e "$D/lib/modules/${KERNEL_RELEASE}" ]; then
		mkdir -p $D/lib/modules/${KERNEL_RELEASE}
	fi
	if [ -n "$D" ]; then
		${HOST_PREFIX}depmod-${KERNEL_MAJOR_VERSION} -A -b $D -F ${STAGING_KERNEL_DIR}/System.map-${KERNEL_RELEASE} ${KERNEL_VERSION}
	else
		depmod -A
	fi
}

# Shared postinst for every generated kernel-module-* package.
pkg_postinst_modules () {
	if [ -n "$D" ]; then
		${HOST_PREFIX}depmod-${KERNEL_MAJOR_VERSION} -A -b $D -F ${STAGING_KERNEL_DIR}/System.map-${KERNEL_RELEASE} ${KERNEL_VERSION}
	else
		depmod -A
		update-modules || true
	fi
}

pkg_postrm_modules () {
	update-modules || true
}

# Template appended to a module's postinst when autoloading is requested;
# the %s is filled in with the module name by populate_packages.
autoload_postinst_fragment() {
	if [ x"$D" = "x" ]; then
		modprobe %s || true
	fi
}
224 | |||
# module_autoload_<name>/module_conf_<name> entries are consumed by the
# frob_metadata hook below and written to /etc/modutils/<name>[.conf].

# autoload defaults (alphabetically sorted)
module_autoload_hidp = "hidp"
module_autoload_ipv6 = "ipv6"
module_autoload_ipsec = "ipsec"
module_autoload_ircomm-tty = "ircomm-tty"
module_autoload_rfcomm = "rfcomm"
module_autoload_sa1100-rtc = "sa1100-rtc"

# alias defaults (alphabetically sorted)
module_conf_af_packet = "alias net-pf-17 af_packet"
module_conf_bluez = "alias net-pf-31 bluez"
module_conf_bnep = "alias bt-proto-4 bnep"
module_conf_hci_uart = "alias tty-ldisc-15 hci_uart"
module_conf_l2cap = "alias bt-proto-0 l2cap"
module_conf_sco = "alias bt-proto-2 sco"
module_conf_rfcomm = "alias bt-proto-3 rfcomm"
241 | |||
# Split /lib/modules into one package per kernel module, wire up their
# inter-module RDEPENDS from depmod output, and add "kernel-modules"
# meta package(s) that pull in everything else.
python populate_packages_prepend () {
	# Extract the key=value pairs from a module's .modinfo section.
	def extract_modinfo(file):
		import os, re
		# NOTE(review): os.tmpnam is race-prone (and removed in py3);
		# tempfile would be safer -- confirm before changing.
		tmpfile = os.tmpnam()
		cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (bb.data.getVar("PATH", d, 1), bb.data.getVar("HOST_PREFIX", d, 1) or "", file, tmpfile)
		os.system(cmd)
		f = open(tmpfile)
		# .modinfo entries are NUL-separated key=value strings.
		l = f.read().split("\000")
		f.close()
		os.unlink(tmpfile)
		exp = re.compile("([^=]+)=(.*)")
		vals = {}
		for i in l:
			m = exp.match(i)
			if not m:
				continue
			vals[m.group(1)] = m.group(2)
		return vals

	# Run the cross depmod in dry-run mode and parse its output into a
	# {module path: [dependency paths]} mapping.
	def parse_depmod():
		import os, re

		dvar = bb.data.getVar('D', d, 1)
		if not dvar:
			bb.error("D not defined")
			return

		kernelver = bb.data.getVar('KERNEL_RELEASE', d, 1)
		kernelver_stripped = kernelver
		# handhelds.org style versions carry a suffix depmod dislikes
		m = re.match('^(.*-hh.*)[\.\+].*$', kernelver)
		if m:
			kernelver_stripped = m.group(1)
		path = bb.data.getVar("PATH", d, 1)
		host_prefix = bb.data.getVar("HOST_PREFIX", d, 1) or ""
		major_version = bb.data.getVar('KERNEL_MAJOR_VERSION', d, 1)

		# -n: dry run, print dependencies instead of writing modules.dep
		cmd = "PATH=\"%s\" %sdepmod-%s -n -a -r -b %s -F %s/boot/System.map-%s %s" % (path, host_prefix, major_version, dvar, dvar, kernelver, kernelver_stripped)
		f = os.popen(cmd, 'r')

		deps = {}
		# pattern0: any "module.o:" line; pattern1: deps on one line;
		# pattern2/3/4: deps continued across backslash lines.
		pattern0 = "^(.*\.k?o):..*$"
		pattern1 = "^(.*\.k?o):\s*(.*\.k?o)\s*$"
		pattern2 = "^(.*\.k?o):\s*(.*\.k?o)\s*\\\$"
		pattern3 = "^\t(.*\.k?o)\s*\\\$"
		pattern4 = "^\t(.*\.k?o)\s*$"

		line = f.readline()
		while line:
			if not re.match(pattern0, line):
				line = f.readline()
				continue
			m1 = re.match(pattern1, line)
			if m1:
				deps[m1.group(1)] = m1.group(2).split()
			else:
				m2 = re.match(pattern2, line)
				if m2:
					deps[m2.group(1)] = m2.group(2).split()
					line = f.readline()
					m3 = re.match(pattern3, line)
					while m3:
						deps[m2.group(1)].extend(m3.group(1).split())
						line = f.readline()
						m3 = re.match(pattern3, line)
					# NOTE(review): if the final continuation line does
					# not match pattern4, m4 is None and the next line
					# raises AttributeError -- confirm depmod output
					# always ends a continuation run this way.
					m4 = re.match(pattern4, line)
					deps[m2.group(1)].extend(m4.group(1).split())
			line = f.readline()
		f.close()
		return deps

	# Map one module file to the package names of the modules it needs.
	def get_dependencies(file, pattern, format):
		# file no longer includes the full path to the module
		file = file.replace(bb.data.getVar('D', d, 1) or '', '', 1)

		if module_deps.has_key(file):
			import os.path, re
			dependencies = []
			for i in module_deps[file]:
				m = re.match(pattern, os.path.basename(i))
				if not m:
					continue
				on = legitimize_package_name(m.group(1))
				dependency_pkg = format % on
				v = bb.data.getVar("PARALLEL_INSTALL_MODULES", d, 1) or "0"
				if v == "1":
					kv = bb.data.getVar("KERNEL_MAJOR_VERSION", d, 1)
					dependency_pkg = "%s-%s" % (dependency_pkg, kv)
				dependencies.append(dependency_pkg)
			return dependencies
		return []

	# Per-package hook run by do_split_packages for every module file.
	def frob_metadata(file, pkg, pattern, format, basename):
		import re
		vals = extract_modinfo(file)

		dvar = bb.data.getVar('D', d, 1)

		# If autoloading is requested, output /etc/modutils/<name> and append
		# appropriate modprobe commands to the postinst
		autoload = bb.data.getVar('module_autoload_%s' % basename, d, 1)
		if autoload:
			name = '%s/etc/modutils/%s' % (dvar, basename)
			f = open(name, 'w')
			for m in autoload.split():
				f.write('%s\n' % m)
			f.close()
			postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1)
			if not postinst:
				bb.fatal("pkg_postinst_%s not defined" % pkg)
			postinst += bb.data.getVar('autoload_postinst_fragment', d, 1) % autoload
			bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)

		# Write out any modconf fragment
		modconf = bb.data.getVar('module_conf_%s' % basename, d, 1)
		if modconf:
			name = '%s/etc/modutils/%s.conf' % (dvar, basename)
			f = open(name, 'w')
			f.write("%s\n" % modconf)
			f.close()

		files = bb.data.getVar('FILES_%s' % pkg, d, 1)
		files = "%s /etc/modutils/%s /etc/modutils/%s.conf" % (files, basename, basename)
		bb.data.setVar('FILES_%s' % pkg, files, d)

		# Carry the module's own description into the package metadata.
		if vals.has_key("description"):
			old_desc = bb.data.getVar('DESCRIPTION_' + pkg, d, 1) or ""
			bb.data.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"], d)

		rdepends_str = bb.data.getVar('RDEPENDS_' + pkg, d, 1)
		if rdepends_str:
			rdepends = rdepends_str.split()
		else:
			rdepends = []
		rdepends.extend(get_dependencies(file, pattern, format))
		bb.data.setVar('RDEPENDS_' + pkg, ' '.join(rdepends), d)

	module_deps = parse_depmod()
	module_regex = '^(.*)\.k?o$'
	module_pattern = 'kernel-module-%s'

	postinst = bb.data.getVar('pkg_postinst_modules', d, 1)
	postrm = bb.data.getVar('pkg_postrm_modules', d, 1)
	do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-image-%s' % bb.data.getVar("KERNEL_VERSION", d, 1))

	# Build the "kernel-modules" meta package: it RDEPENDS on every
	# generated module package that something else does not depend on.
	import re, os
	metapkg = "kernel-modules"
	bb.data.setVar('ALLOW_EMPTY_' + metapkg, "1", d)
	bb.data.setVar('FILES_' + metapkg, "", d)
	blacklist = [ 'kernel-dev', 'kernel-image' ]
	# Modules already pulled in as dependencies need not be listed.
	for l in module_deps.values():
		for i in l:
			pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1))
			blacklist.append(pkg)
	metapkg_rdepends = []
	packages = bb.data.getVar('PACKAGES', d, 1).split()
	for pkg in packages[1:]:
		if not pkg in blacklist and not pkg in metapkg_rdepends:
			metapkg_rdepends.append(pkg)
	bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d)
	bb.data.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package', d)
	packages.append(metapkg)
	bb.data.setVar('PACKAGES', ' '.join(packages), d)

	# With PARALLEL_INSTALL_MODULES, rename every module package with a
	# -<major version> suffix (RPROVIDES keeps the unversioned name) and
	# add a versioned meta package alongside.
	v = bb.data.getVar("PARALLEL_INSTALL_MODULES", d, 1) or "0"
	if v == "1":
		kv = bb.data.getVar("KERNEL_MAJOR_VERSION", d, 1)
		packages = bb.data.getVar("PACKAGES", d, 1).split()
		module_re = re.compile("^kernel-module-")

		newmetapkg = "kernel-modules-%s" % kv
		bb.data.setVar('ALLOW_EMPTY_' + newmetapkg, "1", d)
		bb.data.setVar('FILES_' + newmetapkg, "", d)

		newmetapkg_rdepends = []

		for p in packages:
			if not module_re.match(p):
				continue
			pkg = bb.data.getVar("PKG_%s" % p, d, 1) or p
			newpkg = "%s-%s" % (pkg, kv)
			bb.data.setVar("PKG_%s" % p, newpkg, d)
			rprovides = bb.data.getVar("RPROVIDES_%s" % p, d, 1)
			if rprovides:
				rprovides = "%s %s" % (rprovides, pkg)
			else:
				rprovides = pkg
			bb.data.setVar("RPROVIDES_%s" % p, rprovides, d)
			newmetapkg_rdepends.append(newpkg)

		bb.data.setVar('RDEPENDS_' + newmetapkg, ' '.join(newmetapkg_rdepends), d)
		bb.data.setVar('DESCRIPTION_' + newmetapkg, 'Kernel modules meta package', d)
		packages.append(newmetapkg)
		bb.data.setVar('PACKAGES', ' '.join(packages), d)

}
diff --git a/meta/classes/lib_package.bbclass b/meta/classes/lib_package.bbclass new file mode 100644 index 0000000000..e29d2659b0 --- /dev/null +++ b/meta/classes/lib_package.bbclass | |||
@@ -0,0 +1,9 @@ | |||
# Package layout for typical library recipes: the main package carries
# runtime libraries, -dev headers/static libs, -bin any executables.
PACKAGES = "${PN} ${PN}-dev ${PN}-doc ${PN}-bin"

FILES_${PN} = "${libexecdir} ${libdir}/lib*.so.* \
		${sysconfdir} ${sharedstatedir} ${localstatedir} \
		/lib/*.so* ${datadir}/${PN} ${libdir}/${PN}"
FILES_${PN}-dev = "${includedir} ${libdir}/lib*.so ${libdir}/*.la \
		${libdir}/*.a ${libdir}/pkgconfig /lib/*.a /lib/*.o \
		${datadir}/aclocal ${bindir}/*-config"
FILES_${PN}-bin = "${bindir} ${sbindir} /bin /sbin"
diff --git a/meta/classes/linux_modules.bbclass b/meta/classes/linux_modules.bbclass new file mode 100644 index 0000000000..d5c4e74ca1 --- /dev/null +++ b/meta/classes/linux_modules.bbclass | |||
@@ -0,0 +1,19 @@ | |||
def get_kernelmajorversion(p):
	"""Return the leading "major.minor" of a kernel version string,
	or None when the string does not start with a dotted number pair."""
	import re
	found = re.match(r"([0-9]+\.[0-9]+)", p)
	if found:
		return found.group(1)
	return None
8 | |||
def linux_module_packages(s, d):
	"""Map a whitespace-separated list of module names to the matching
	kernel-module-* package names, adding a -<major version> suffix when
	PARALLEL_INSTALL_MODULES is enabled and the staged ABI is known."""
	import bb, os.path
	suffix = ""
	if bb.data.getVar("PARALLEL_INSTALL_MODULES", d, 1) == "1":
		abifile = bb.data.expand('${STAGING_KERNEL_DIR}/kernel-abiversion', d)
		if os.path.exists(abifile):
			suffix = "-%s" % (get_kernelmajorversion(base_read_file(abifile)))
	pkgs = []
	for module in s.split():
		# package names are lowercase with _ -> - and @ -> +
		name = module.lower().replace('_', '-').replace('@', '+')
		pkgs.append("kernel-module-%s%s" % (name, suffix))
	return " ".join(pkgs)
17 | |||
18 | # that's all | ||
19 | |||
diff --git a/meta/classes/manifest.bbclass b/meta/classes/manifest.bbclass new file mode 100644 index 0000000000..687f4b756e --- /dev/null +++ b/meta/classes/manifest.bbclass | |||
@@ -0,0 +1,80 @@ | |||
1 | |||
# Load ${MANIFEST} (if readable) and stash the parsed structure in the
# metadata store under 'manifest' for parse_manifest to emit.
python read_manifest () {
	# Fix: this function uses os.access but imported sys (unused)
	# instead of os.
	import os, bb.manifest
	mfn = bb.data.getVar("MANIFEST", d, 1)
	if os.access(mfn, os.R_OK):
		# we have a manifest, so emit do_stage and do_populate_pkgs,
		# and stuff some additional bits of data into the metadata store
		mfile = file(mfn, "r")
		manifest = bb.manifest.parse(mfile, d)
		if not manifest:
			return

		bb.data.setVar('manifest', manifest, d)
}
15 | |||
# Turn the parsed manifest into concrete shell task bodies
# (manifest_do_populate_staging / manifest_do_populate_pkgs) and derive
# PACKAGES from the manifest's pkg entries.
python parse_manifest () {
	manifest = bb.data.getVar("manifest", d)
	if not manifest:
		return
	for func in ("do_populate_staging", "do_populate_pkgs"):
		value = bb.manifest.emit(func, manifest, d)
		if value:
			bb.data.setVar("manifest_" + func, value, d)
			# emitted bodies are plain shell run outside fakeroot
			bb.data.delVarFlag("manifest_" + func, "python", d)
			bb.data.delVarFlag("manifest_" + func, "fakeroot", d)
			bb.data.setVarFlag("manifest_" + func, "func", 1, d)
	packages = []
	for l in manifest:
		if "pkg" in l and l["pkg"] is not None:
			packages.append(l["pkg"])
	bb.data.setVar("PACKAGES", " ".join(packages), d)
}
33 | |||
# Parse any existing manifest at recipe parse time.  Errors are reported
# but never abort parsing; interrupts still propagate.
python __anonymous () {
	try:
		bb.build.exec_func('read_manifest', d)
		bb.build.exec_func('parse_manifest', d)
	# Fix: the original caught exceptions.KeyboardInterrupt, but the
	# 'exceptions' module was never imported, so evaluating that name
	# raised NameError while handling the exception; the builtin
	# KeyboardInterrupt is the same class.
	except KeyboardInterrupt:
		raise
	except Exception, e:
		bb.error("anonymous function: %s" % e)
		pass
}
44 | |||
45 | #python do_populate_staging () { | ||
46 | # if not bb.data.getVar('manifest', d): | ||
47 | # bb.build.exec_func('do_emit_manifest', d) | ||
48 | # if bb.data.getVar('do_stage', d): | ||
49 | # bb.build.exec_func('do_stage', d) | ||
50 | # else: | ||
51 | # bb.build.exec_func('manifest_do_populate_staging', d) | ||
52 | #} | ||
53 | |||
54 | #addtask populate_pkgs after do_compile | ||
55 | #python do_populate_pkgs () { | ||
56 | # if not bb.data.getVar('manifest', d): | ||
57 | # bb.build.exec_func('do_emit_manifest', d) | ||
58 | # bb.build.exec_func('manifest_do_populate_pkgs', d) | ||
59 | # bb.build.exec_func('package_do_shlibs', d) | ||
60 | #} | ||
61 | |||
addtask emit_manifest
# Generate a manifest by running do_install through the wrapper scripts
# that record what gets installed, then re-read and re-parse the result.
python do_emit_manifest () {
	# FIXME: emit a manifest here
	# 1) adjust PATH to hit the wrapper scripts
	wrappers = bb.which(bb.data.getVar("BBPATH", d, 1), 'build/install', 0)
	path = (bb.data.getVar('PATH', d, 1) or '').split(':')
	path.insert(0, os.path.dirname(wrappers))
	bb.data.setVar('PATH', ':'.join(path), d)
	# 2) exec_func("do_install", d)
	bb.build.exec_func('do_install', d)
	# 3) read in data collected by the wrappers
	# (fix: removed leftover debug output 'Got here2 213')
	bb.build.exec_func('read_manifest', d)
	# 4) mangle the manifest we just generated, get paths back into
	#    our variable form
	# 5) write it back out
	# 6) re-parse it to ensure the generated functions are proper
	bb.build.exec_func('parse_manifest', d)
}
diff --git a/meta/classes/module-base.bbclass b/meta/classes/module-base.bbclass new file mode 100644 index 0000000000..da5bd01dae --- /dev/null +++ b/meta/classes/module-base.bbclass | |||
@@ -0,0 +1,25 @@ | |||
inherit module_strip

inherit kernel-arch

export OS = "${TARGET_OS}"
export CROSS_COMPILE = "${TARGET_PREFIX}"

# Kernel metadata recorded into the staging area by the kernel recipe.
export KERNEL_VERSION = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-abiversion')}"
export KERNEL_SOURCE = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-source')}"
# 2.6 kernels use the .ko module suffix instead of .o.  NOTE(review):
# this is a plain string comparison of version strings — confirm it is
# adequate for all kernel versions in use.
KERNEL_OBJECT_SUFFIX = "${@[".o", ".ko"][base_read_file('${STAGING_KERNEL_DIR}/kernel-abiversion') > "2.6.0"]}"
KERNEL_CCSUFFIX = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-ccsuffix')}"
KERNEL_LDSUFFIX = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-ldsuffix')}"

# Set TARGET_??_KERNEL_ARCH in the machine .conf to set architecture
# specific options necessary for building the kernel and modules.
TARGET_CC_KERNEL_ARCH ?= ""
HOST_CC_KERNEL_ARCH ?= "${TARGET_CC_KERNEL_ARCH}"
TARGET_LD_KERNEL_ARCH ?= ""
HOST_LD_KERNEL_ARCH ?= "${TARGET_LD_KERNEL_ARCH}"

# Compiler/linker invocations matched to the staged kernel's toolchain
# (via the recorded cc/ld suffixes above).
KERNEL_CC = "${CCACHE}${HOST_PREFIX}gcc${KERNEL_CCSUFFIX} ${HOST_CC_KERNEL_ARCH}"
KERNEL_LD = "${LD}${KERNEL_LDSUFFIX} ${HOST_LD_KERNEL_ARCH}"

# kernel modules are generally machine specific
PACKAGE_ARCH = "${MACHINE_ARCH}"
diff --git a/meta/classes/module.bbclass b/meta/classes/module.bbclass new file mode 100644 index 0000000000..8a13f1f858 --- /dev/null +++ b/meta/classes/module.bbclass | |||
@@ -0,0 +1,51 @@ | |||
# Modules only work with the kernel version they were built against.
RDEPENDS += "kernel (${KERNEL_VERSION})"
DEPENDS += "virtual/kernel"

inherit module-base
5 | |||
python populate_packages_prepend() {
	# With PARALLEL_INSTALL_MODULES enabled, suffix every output package
	# name with the kernel version so modules built against different
	# kernels can be installed side by side.  The original (unsuffixed)
	# name is preserved as an RPROVIDES alias.
	if (bb.data.getVar("PARALLEL_INSTALL_MODULES", d, 1) or "0") == "1":
		kernelver = bb.data.getVar("KERNEL_VERSION", d, 1)
		for pkgname in bb.data.getVar("PACKAGES", d, 1).split():
			oldname = bb.data.getVar("PKG_%s" % pkgname, d, 1) or pkgname
			bb.data.setVar("PKG_%s" % pkgname, "%s-%s" % (oldname, kernelver), d)
			provides = bb.data.getVar("RPROVIDES_%s" % pkgname, d, 1)
			if provides:
				provides = "%s %s" % (provides, oldname)
			else:
				provides = oldname
			bb.data.setVar("RPROVIDES_%s" % pkgname, provides, d)
}
22 | |||
# Build the out-of-tree module against the staged kernel sources.
module_do_compile() {
	# The kernel build system supplies its own flags; the target
	# userspace flags in the environment would conflict with it.
	unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
	oe_runmake KERNEL_PATH=${STAGING_KERNEL_DIR} \
		KERNEL_SRC=${STAGING_KERNEL_DIR} \
		KERNEL_VERSION=${KERNEL_VERSION} \
		CC="${KERNEL_CC}" LD="${KERNEL_LD}" \
		${MAKE_TARGETS}
}
31 | |||
# Install the modules into ${D}.  DEPMOD=echo disables depmod at build
# time; it is run on the target by the postinst instead.
module_do_install() {
	unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
	oe_runmake DEPMOD=echo INSTALL_MOD_PATH="${D}" CC="${KERNEL_CC}" LD="${KERNEL_LD}" modules_install
}
36 | |||
# Rebuild the module dependency database after install.  depmod and
# update-modules must run on the target, so when installing offline into
# a rootfs ($D set) the script fails — presumably so the packaging
# backend re-runs it at first boot; confirm against backend semantics.
pkg_postinst_append () {
	if [ -n "$D" ]; then
		exit 1
	fi
	depmod -A
	update-modules || true
}
44 | |||
# Refresh the modules configuration after a module package is removed.
pkg_postrm_append () {
	update-modules || true
}
48 | |||
49 | EXPORT_FUNCTIONS do_compile do_install | ||
50 | |||
51 | FILES_${PN} = "/etc /lib/modules" | ||
diff --git a/meta/classes/module_strip.bbclass b/meta/classes/module_strip.bbclass new file mode 100644 index 0000000000..116e8b902f --- /dev/null +++ b/meta/classes/module_strip.bbclass | |||
@@ -0,0 +1,18 @@ | |||
1 | #DEPENDS_append = " module-strip" | ||
2 | |||
# Strip debugging symbols from every kernel module found in each
# package's install tree ('strip -g' removes only debug symbols).
do_strip_modules () {
	for p in ${PACKAGES}; do
		if test -e ${WORKDIR}/install/$p/lib/modules; then
			modules="`find ${WORKDIR}/install/$p/lib/modules -name \*${KERNEL_OBJECT_SUFFIX}`"
			if [ -n "$modules" ]; then
				${STRIP} -v -g $modules
#				NM="${CROSS_DIR}/bin/${HOST_PREFIX}nm" OBJCOPY="${CROSS_DIR}/bin/${HOST_PREFIX}objcopy" strip_module $modules
			fi
		fi
	done
}
14 | |||
python do_package_append () {
	# Strip the packaged kernel modules unless stripping is inhibited.
	inhibit = bb.data.getVar('INHIBIT_PACKAGE_STRIP', d, 1)
	if inhibit != '1':
		bb.build.exec_func('do_strip_modules', d)
}
diff --git a/meta/classes/mozilla.bbclass b/meta/classes/mozilla.bbclass new file mode 100644 index 0000000000..629f2531da --- /dev/null +++ b/meta/classes/mozilla.bbclass | |||
@@ -0,0 +1,53 @@ | |||
SECTION = "x11/utils"
DEPENDS += "gnu-config-native virtual/libintl xt libxi \
	zip-native gtk+ orbit2 libidl-native"
LICENSE = "MPL NPL"
SRC_URI += "file://mozconfig"

inherit gettext

EXTRA_OECONF = "--target=${TARGET_SYS} --host=${BUILD_SYS} \
	--build=${BUILD_SYS} --prefix=${prefix}"
EXTRA_OEMAKE = "'HOST_LIBIDL_LIBS=${HOST_LIBIDL_LIBS}' \
	'HOST_LIBIDL_CFLAGS=${HOST_LIBIDL_CFLAGS}'"
SELECTED_OPTIMIZATION = "-Os -fsigned-char -fno-strict-aliasing"

# Variables read by Mozilla's client.mk build driver.
export CROSS_COMPILE = "1"
export MOZCONFIG = "${WORKDIR}/mozconfig"
export MOZ_OBJDIR = "${S}"

export CONFIGURE_ARGS = "${EXTRA_OECONF}"
# libIDL on the build host, located via pkg-config from staging.
export HOST_LIBIDL_CFLAGS = "`${HOST_LIBIDL_CONFIG} --cflags`"
export HOST_LIBIDL_LIBS = "`${HOST_LIBIDL_CONFIG} --libs`"
export HOST_LIBIDL_CONFIG = "PKG_CONFIG_PATH=${STAGING_BINDIR}/../share/pkgconfig pkg-config libIDL-2.0"
# Build-host toolchain for the parts of the build that run natively.
export HOST_CC = "${BUILD_CC}"
export HOST_CXX = "${BUILD_CXX}"
export HOST_CFLAGS = "${BUILD_CFLAGS}"
export HOST_CXXFLAGS = "${BUILD_CXXFLAGS}"
export HOST_LDFLAGS = "${BUILD_LDFLAGS}"
export HOST_RANLIB = "${BUILD_RANLIB}"
export HOST_AR = "${BUILD_AR}"
30 | |||
mozilla_do_configure() {
	# Replace every config.guess/config.sub in the tree with the staged
	# gnu-config copies so current target triplets are recognised.
	(
	set -e
	for cg in `find ${S} -name config.guess`; do
		install -m 0755 \
		${STAGING_BINDIR}/../share/gnu-config/config.guess \
		${STAGING_BINDIR}/../share/gnu-config/config.sub \
		`dirname $cg`/
	done
	)
	# client.mk drives Mozilla's own configure step, producing the
	# Makefile and config.status in ${MOZ_OBJDIR}.
	oe_runmake -f client.mk ${MOZ_OBJDIR}/Makefile \
		${MOZ_OBJDIR}/config.status
}
44 | |||
# Build everything through Mozilla's client.mk driver.
mozilla_do_compile() {
	oe_runmake -f client.mk build_all
}
48 | |||
# Both DESTDIR and destdir are passed; which one is honoured depends on
# the Mozilla version's install rules.
mozilla_do_install() {
	oe_runmake DESTDIR="${D}" destdir="${D}" install
}
52 | |||
53 | EXPORT_FUNCTIONS do_configure do_compile do_install | ||
diff --git a/meta/classes/multimachine.bbclass b/meta/classes/multimachine.bbclass new file mode 100644 index 0000000000..2248f326cc --- /dev/null +++ b/meta/classes/multimachine.bbclass | |||
@@ -0,0 +1,22 @@ | |||
# Key stamps, work directories and the kernel staging dir on
# MULTIMACH_ARCH (set by the anonymous function below) so machine
# specific and machine independent builds don't share state.
STAMP = "${TMPDIR}/stamps/${MULTIMACH_ARCH}-${HOST_OS}/${PF}"
WORKDIR = "${TMPDIR}/work/${MULTIMACH_ARCH}-${HOST_OS}/${PF}"
STAGING_KERNEL_DIR = "${STAGING_DIR}/${MULTIMACH_ARCH}-${HOST_OS}/kernel"
4 | |||
5 | # Find any machine specific sub packages and if present, mark the | ||
6 | # whole package as machine specific for multimachine purposes. | ||
python __anonymous () {
	# Runs at parse time: if any sub-package is tagged with the machine
	# architecture, set MULTIMACH_ARCH to MACHINE_ARCH for the whole
	# recipe; otherwise it stays at PACKAGE_ARCH.
	packages = bb.data.getVar('PACKAGES', d, 1).split()
	macharch = bb.data.getVar('MACHINE_ARCH', d, 1)
	multiarch = bb.data.getVar('PACKAGE_ARCH', d, 1)

	for pkg in packages:
		pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)

		# We could look for != PACKAGE_ARCH here but how to choose
		# if multiple differences are present?
		# Look through IPKG_ARCHS for the priority order?
		if pkgarch and pkgarch == macharch:
			multiarch = macharch

	bb.data.setVar('MULTIMACH_ARCH', multiarch, d)
}
diff --git a/meta/classes/native.bbclass b/meta/classes/native.bbclass new file mode 100644 index 0000000000..04ff7d92d1 --- /dev/null +++ b/meta/classes/native.bbclass | |||
@@ -0,0 +1,95 @@ | |||
inherit base

# Native packages are built indirectly via dependency,
# no need for them to be a direct target of 'world'
EXCLUDE_FROM_WORLD = "1"

PACKAGES = ""
PACKAGE_ARCH = "${BUILD_ARCH}"

# When this class has packaging enabled, setting
# RPROVIDES becomes unnecessary.
RPROVIDES = "${PN}"

# Need to resolve package RDEPENDS as well as DEPENDS
BUILD_ALL_DEPS = "1"

# Break the circular dependency as a result of DEPENDS
# in package.bbclass
PACKAGE_DEPENDS = ""

# Target and host are both the build machine for native recipes.
TARGET_ARCH = "${BUILD_ARCH}"
TARGET_OS = "${BUILD_OS}"
TARGET_VENDOR = "${BUILD_VENDOR}"
TARGET_PREFIX = "${BUILD_PREFIX}"
TARGET_CC_ARCH = "${BUILD_CC_ARCH}"

HOST_ARCH = "${BUILD_ARCH}"
HOST_OS = "${BUILD_OS}"
HOST_VENDOR = "${BUILD_VENDOR}"
HOST_PREFIX = "${BUILD_PREFIX}"
HOST_CC_ARCH = "${BUILD_CC_ARCH}"

CPPFLAGS = "${BUILD_CPPFLAGS}"
CFLAGS = "${BUILD_CFLAGS}"
CXXFLAGS = "${BUILD_CFLAGS}"
LDFLAGS = "${BUILD_LDFLAGS}"
LDFLAGS_build-darwin = "-L${STAGING_DIR}/${BUILD_SYS}/lib "


# set the compiler as well. It could have been set to something else
export CC = "${CCACHE}${HOST_PREFIX}gcc ${HOST_CC_ARCH}"
export CXX = "${CCACHE}${HOST_PREFIX}g++ ${HOST_CC_ARCH}"
export F77 = "${CCACHE}${HOST_PREFIX}g77 ${HOST_CC_ARCH}"
export CPP = "${HOST_PREFIX}gcc -E"
export LD = "${HOST_PREFIX}ld"
export CCLD = "${CC}"
export AR = "${HOST_PREFIX}ar"
export AS = "${HOST_PREFIX}as"
export RANLIB = "${HOST_PREFIX}ranlib"
export STRIP = "${HOST_PREFIX}strip"


# Path prefixes: native output installs straight into the staging area.
base_prefix = "${exec_prefix}"
prefix = "${STAGING_DIR}"
exec_prefix = "${STAGING_DIR}/${BUILD_ARCH}-${BUILD_OS}"

# Base paths
base_bindir = "${base_prefix}/bin"
base_sbindir = "${base_prefix}/bin"
base_libdir = "${base_prefix}/lib"

# Architecture independent paths
sysconfdir = "${prefix}/etc"
sharedstatedir = "${prefix}/com"
localstatedir = "${prefix}/var"
infodir = "${datadir}/info"
mandir = "${datadir}/man"
docdir = "${datadir}/doc"
servicedir = "${prefix}/srv"

# Architecture dependent paths
bindir = "${exec_prefix}/bin"
sbindir = "${exec_prefix}/bin"
libexecdir = "${exec_prefix}/libexec"
libdir = "${exec_prefix}/lib"
includedir = "${exec_prefix}/include"
oldincludedir = "${exec_prefix}/include"

# Datadir is made arch dependent here, primarily
# for autoconf macros, and other things that
# may be manipulated to handle crosscompilation
# issues.
datadir = "${exec_prefix}/share"
85 | |||
# Native recipes install directly into the staging area (prefix points
# there), so staging is just 'make install' unless explicitly inhibited.
do_stage () {
	if [ "${INHIBIT_NATIVE_STAGE_INSTALL}" != "1" ]
	then
		oe_runmake install
	fi
}
92 | |||
# Nothing to install for native packages; everything goes to the
# staging area via do_stage.
do_install () {
	true
}
diff --git a/meta/classes/nslu2-jffs2-image.bbclass b/meta/classes/nslu2-jffs2-image.bbclass new file mode 100644 index 0000000000..56ad0f0659 --- /dev/null +++ b/meta/classes/nslu2-jffs2-image.bbclass | |||
@@ -0,0 +1,18 @@ | |||
1 | NSLU2_SLUGIMAGE_ARGS ?= "" | ||
2 | |||
# Assemble a flashable NSLU2 firmware image: combine the RedBoot
# bootloader, SysConf partition, kernel and JFFS2 root filesystem with
# slugimage into ${IMAGE_NAME}.flashdisk.img.
nslu2_pack_image () {
	install -d ${DEPLOY_DIR_IMAGE}/slug
	install -m 0644 ${STAGING_LIBDIR}/nslu2-binaries/RedBoot \
		${STAGING_LIBDIR}/nslu2-binaries/Trailer \
		${STAGING_LIBDIR}/nslu2-binaries/SysConf \
		${DEPLOY_DIR_IMAGE}/slug/
	install -m 0644 ${DEPLOY_DIR_IMAGE}/zImage-${IMAGE_BASENAME} ${DEPLOY_DIR_IMAGE}/slug/vmlinuz
	install -m 0644 ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.jffs2 ${DEPLOY_DIR_IMAGE}/slug/flashdisk.jffs2
	cd ${DEPLOY_DIR_IMAGE}/slug
	slugimage -p -b RedBoot -s SysConf -r Ramdisk:1,Flashdisk:flashdisk.jffs2 -t Trailer \
		-o ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.flashdisk.img ${NSLU2_SLUGIMAGE_ARGS}
	# the scratch directory is no longer needed once the image is written
	rm -rf ${DEPLOY_DIR_IMAGE}/slug
}
16 | |||
17 | EXTRA_IMAGEDEPENDS += 'slugimage-native nslu2-linksys-firmware' | ||
18 | IMAGE_POSTPROCESS_COMMAND += "nslu2_pack_image; " | ||
diff --git a/meta/classes/nslu2-mirrors.bbclass b/meta/classes/nslu2-mirrors.bbclass new file mode 100644 index 0000000000..1181edc716 --- /dev/null +++ b/meta/classes/nslu2-mirrors.bbclass | |||
@@ -0,0 +1,4 @@ | |||
# Fall back to the NSLU2-Linux project's source archive when the
# original ftp/http download locations fail.
MIRRORS_append () {
ftp://.*/.*/ http://sources.nslu2-linux.org/sources/
http://.*/.*/ http://sources.nslu2-linux.org/sources/
}
diff --git a/meta/classes/nslu2-ramdisk-image.bbclass b/meta/classes/nslu2-ramdisk-image.bbclass new file mode 100644 index 0000000000..0b545854fd --- /dev/null +++ b/meta/classes/nslu2-ramdisk-image.bbclass | |||
@@ -0,0 +1,18 @@ | |||
1 | NSLU2_SLUGIMAGE_ARGS ?= "" | ||
2 | |||
# Assemble a ramdisk-based NSLU2 firmware image: combine the RedBoot
# bootloader, SysConf partition, kernel and ext2 ramdisk with slugimage
# into ${IMAGE_NAME}.ramdisk.img.
nslu2_pack_image () {
	install -d ${DEPLOY_DIR_IMAGE}/slug
	install -m 0644 ${STAGING_LIBDIR}/nslu2-binaries/RedBoot \
		${STAGING_LIBDIR}/nslu2-binaries/Trailer \
		${STAGING_LIBDIR}/nslu2-binaries/SysConf \
		${DEPLOY_DIR_IMAGE}/slug/
	install -m 0644 ${DEPLOY_DIR_IMAGE}/zImage-${IMAGE_BASENAME} ${DEPLOY_DIR_IMAGE}/slug/vmlinuz
	install -m 0644 ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.ext2.gz ${DEPLOY_DIR_IMAGE}/slug/ramdisk.ext2.gz
	cd ${DEPLOY_DIR_IMAGE}/slug
	slugimage -p -b RedBoot -s SysConf -r Ramdisk:ramdisk.ext2.gz -t Trailer \
		-o ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.ramdisk.img ${NSLU2_SLUGIMAGE_ARGS}
	# the scratch directory is no longer needed once the image is written
	rm -rf ${DEPLOY_DIR_IMAGE}/slug
}
16 | |||
17 | EXTRA_IMAGEDEPENDS += 'slugimage-native nslu2-linksys-firmware' | ||
18 | IMAGE_POSTPROCESS_COMMAND += "nslu2_pack_image; " | ||
diff --git a/meta/classes/nylon-mirrors.bbclass b/meta/classes/nylon-mirrors.bbclass new file mode 100644 index 0000000000..2986bd8f80 --- /dev/null +++ b/meta/classes/nylon-mirrors.bbclass | |||
@@ -0,0 +1,6 @@ | |||
# Fall back to the Nylon (meshcube.org) source mirrors — stable first,
# then unstable — when the original download locations fail.
MIRRORS_append () {
ftp://.*/.*/ http://meshcube.org/nylon/stable/sources/
http://.*/.*/ http://meshcube.org/nylon/stable/sources/
ftp://.*/.*/ http://meshcube.org/nylon/unstable/sources/
http://.*/.*/ http://meshcube.org/nylon/unstable/sources/
}
diff --git a/meta/classes/oebuildstamp.bbclass b/meta/classes/oebuildstamp.bbclass new file mode 100644 index 0000000000..1de1b95c2e --- /dev/null +++ b/meta/classes/oebuildstamp.bbclass | |||
@@ -0,0 +1,16 @@ | |||
#
# Because base.bbclass sets the variables
#
# do_fetch[nostamp] = "1"
# do_build[nostamp] = "1"
#
# every build re-runs all of the phases to check whether there is anything
# new to download. This class clears that nostamp flag, marking a package
# as "finished" once it has been completely built.
#
# This means that subsequent builds are faster, but when you change the
# behaviour of the package, e.g. by adding INHERIT="package_ipk", you won't
# get the ipk file unless you delete the build stamp manually, or delete all
# of them with oebuild clean <oe-file>.
15 | |||
16 | do_build[nostamp] = "" | ||
diff --git a/meta/classes/oelint.bbclass b/meta/classes/oelint.bbclass new file mode 100644 index 0000000000..baa1c630b4 --- /dev/null +++ b/meta/classes/oelint.bbclass | |||
@@ -0,0 +1,174 @@ | |||
addtask lint before do_fetch
do_lint[nostamp] = 1

# Lint the recipe metadata: verify that DESCRIPTION, HOMEPAGE, LICENSE,
# MAINTAINER, SECTION and PRIORITY are present and use recommended
# values.  Problems are reported through bb.error()/bb.note().
python do_lint() {
	def testVar(var, explain=None):
		# Return the content of 'var'; report an error (plus the
		# optional explanation) and return None when it is unset.
		try:
			s = d[var]
			return s["content"]
		except KeyError:
			bb.error("%s is not set" % var)
			if explain: bb.note(explain)
			return None


	##############################
	# Test that DESCRIPTION exists
	#
	testVar("DESCRIPTION")


	##############################
	# Test that HOMEPAGE exists
	#
	s = testVar("HOMEPAGE")
	if s=="unknown":
		bb.error("HOMEPAGE is not set")
	elif s and not s.startswith("http://"):
		# the 's and' guards the unset case, where testVar returned
		# None (previously this raised an AttributeError)
		bb.error("HOMEPAGE doesn't start with http://")


	##############################
	# Test for valid LICENSE
	#
	# True   = name is fine
	# False  = not recommended, no suggestion
	# string = not recommended, suggested replacement name
	#
	valid_licenses = {
		"GPL-2"		: "GPLv2",
		"GPL LGPL FDL"	: True,
		"GPL PSF"	: True,
		"GPL/QPL"	: True,
		"GPL"		: True,
		"GPLv2"		: True,
		"IBM"		: True,
		"LGPL GPL"	: True,
		"LGPL"		: True,
		"MIT"		: True,
		"OSL"		: True,
		"Perl"		: True,
		"Public Domain"	: True,
		"QPL"		: "GPL/QPL",
	}
	s = testVar("LICENSE")
	if s=="unknown":
		bb.error("LICENSE is not set")
	elif s and s.startswith("Vendor"):
		pass
	elif s:
		try:
			newlic = valid_licenses[s]
			if newlic == False:
				bb.note("LICENSE '%s' is not recommended" % s)
			elif newlic != True:
				# Bug fix: this message previously used the
				# undefined name 'newsect' (copy/paste from the
				# SECTION check below); the resulting NameError was
				# silently swallowed by the old bare 'except'.
				bb.note("LICENSE '%s' is not recommended, better use '%s'" % (s, newlic))
		except KeyError:
			bb.note("LICENSE '%s' is not recommended" % s)


	##############################
	# Test for valid MAINTAINER
	#
	s = testVar("MAINTAINER")
	if s=="OpenEmbedded Team <oe@handhelds.org>":
		bb.error("explicit MAINTAINER is missing, using default")
	elif s and s.find("@") == -1:
		bb.error("You forgot to put an e-mail address into MAINTAINER")


	##############################
	# Test for valid SECTION
	#
	# if Correct section: True    section name is valid
	#                     False   section name is invalid, no suggestion
	#                     string  section name is invalid, better name suggested
	#
	valid_sections = {
		# Current Section	Correct section
		"apps"			: True,
		"audio"			: True,
		"base"			: True,
		"console/games"		: True,
		"console/net"		: "console/network",
		"console/network"	: True,
		"console/utils"		: True,
		"devel"			: True,
		"developing"		: "devel",
		"devel/python"		: True,
		"fonts"			: True,
		"games"			: True,
		"games/libs"		: True,
		"gnome/base"		: True,
		"gnome/libs"		: True,
		"gpe"			: True,
		"gpe/libs"		: True,
		"gui"			: False,
		"libc"			: "libs",
		"libs"			: True,
		"libs/net"		: True,
		"multimedia"		: True,
		"net"			: "network",
		"NET"			: "network",
		"network"		: True,
		"opie/applets"		: True,
		"opie/applications"	: True,
		"opie/base"		: True,
		"opie/codecs"		: True,
		"opie/decorations"	: True,
		"opie/fontfactories"	: True,
		"opie/fonts"		: True,
		"opie/games"		: True,
		"opie/help"		: True,
		"opie/inputmethods"	: True,
		"opie/libs"		: True,
		"opie/multimedia"	: True,
		"opie/pim"		: True,
		"opie/setting"		: "opie/settings",
		"opie/settings"		: True,
		"opie/Shell"		: False,
		"opie/styles"		: True,
		"opie/today"		: True,
		"scientific"		: True,
		"utils"			: True,
		"x11"			: True,
		"x11/libs"		: True,
		"x11/wm"		: True,
	}
	s = testVar("SECTION")
	if s:
		try:
			newsect = valid_sections[s]
			if newsect == False:
				bb.note("SECTION '%s' is not recommended" % s)
			elif newsect != True:
				bb.note("SECTION '%s' is not recommended, better use '%s'" % (s, newsect))
		except KeyError:
			bb.note("SECTION '%s' is not recommended" % s)

		if not s.islower():
			bb.error("SECTION should only use lower case")


	##############################
	# Test for valid PRIORITY
	#
	valid_priorities = {
		"standard"	: True,
		"required"	: True,
		"optional"	: True,
		"extra"		: True,
	}
	s = testVar("PRIORITY")
	if s:
		try:
			newprio = valid_priorities[s]
			if newprio == False:
				bb.note("PRIORITY '%s' is not recommended" % s)
			elif newprio != True:
				bb.note("PRIORITY '%s' is not recommended, better use '%s'" % (s, newprio))
		except KeyError:
			bb.note("PRIORITY '%s' is not recommended" % s)

		if not s.islower():
			bb.error("PRIORITY should only use lower case")

}
diff --git a/meta/classes/opie.bbclass b/meta/classes/opie.bbclass new file mode 100644 index 0000000000..47f364a644 --- /dev/null +++ b/meta/classes/opie.bbclass | |||
@@ -0,0 +1,105 @@ | |||
1 | # | ||
2 | # This oeclass takes care about some of the itchy details of installing parts | ||
3 | # of Opie applications. Depending on quicklaunch or not, plugin or not, the | ||
4 | # TARGET is either a shared object, a shared object with a link to quicklauncher, | ||
5 | # or a usual binary. | ||
6 | # | ||
7 | # You have to provide two things: 1.) A proper SECTION field, and 2.) a proper APPNAME | ||
8 | # Then opie.oeclass will: | ||
9 | # * create the directory for the binary and install the binary file(s) | ||
10 | # * for applications: create the directory for the .desktop and install the .desktop file | ||
11 | # * for quicklauncher applications: create the startup symlink to the quicklauncher | ||
12 | # You can override the automatic detection of APPTYPE, valid values are 'quicklaunch', 'binary', 'plugin' | ||
13 | # You can override the default location of APPDESKTOP (<workdir>/apps/<section>/) | ||
14 | # | ||
15 | |||
16 | inherit palmtop | ||
17 | |||
18 | # Note that when CVS changes to 1.2.2, the dash | ||
19 | # should be removed from OPIE_CVS_PV to convert | ||
20 | # to the standardised version format | ||
21 | OPIE_CVS_PV = "1.2.1+cvs-${SRCDATE}" | ||
22 | |||
23 | DEPENDS_prepend = "${@["libopie2 ", ""][(bb.data.getVar('PN', d, 1) == 'libopie2')]}" | ||
24 | |||
25 | # to be consistent, put all targets into workdir | ||
26 | # NOTE: leave one space at the end, other files are expecting that | ||
27 | EXTRA_QMAKEVARS_POST += "DESTDIR=${S} " | ||
28 | |||
29 | # Opie standard TAG value | ||
30 | TAG = "${@'v' + bb.data.getVar('PV',d,1).replace('.', '_')}" | ||
31 | |||
32 | # plan for later: | ||
33 | # add common scopes for opie applications, see qmake-native/common.pro | ||
34 | # qmake should care about all the details then. qmake can do that, i know it :) | ||
35 | # | ||
36 | |||
python opie_do_opie_install() {
	import os, shutil
	# Derive the Opie subsection from SECTION.  NOTE(review): this
	# assumes SECTION has the form "<prefix>/<subsection>" — a SECTION
	# without a '/' would raise an IndexError here; confirm callers.
	section = bb.data.getVar( "SECTION", d ).split( '/' )[1] or "Applications"
	section = section.title()
	if section in ( "Base", "Libs" ):
		bb.note( "Section = Base or Libs. Target won't be installed automatically." )
		return

	# SECTION : BINDIR DESKTOPDIR
	# "<BINDIR>" is a placeholder resolved from APPTYPE below; a None
	# desktop dir means the section installs no .desktop file.
	dirmap = { "Applets"	: ( "/plugins/applets", None ),
		   "Applications" : ( "<BINDIR>", "/apps/Applications" ),
		   "Multimedia"	: ( "<BINDIR>", "/apps/Applications" ),
		   "Games"		: ( "<BINDIR>", "/apps/Games" ),
		   "Settings"	: ( "<BINDIR>", "/apps/Settings" ),
		   "Pim"		: ( "<BINDIR>", "/apps/1Pim" ),
		   "Examples"	: ( "<BINDIR>", "/apps/Examples" ),
		   "Shell"		: ( "/bin", "/apps/Opie-SH" ),
		   "Codecs"		: ( "/plugins/codecs", None ),
		   "Decorations"	: ( "/plugins/decorations", None ),
		   "Inputmethods"	: ( "/plugins/inputmethods", None ),
		   "Fontfactories"	: ( "/plugins/fontfactories", None ),
		   "Security"	: ( "/plugins/security", None ),
		   "Styles"		: ( "/plugins/styles", None ),
		   "Today"		: ( "/plugins/today", None ),
		   "Datebook"	: ( "/plugins/holidays", None ),
		   "Networksettings"	: ( "/plugins/networksettings", None ) }

	if section not in dirmap:
		raise ValueError, "Unknown section '%s'. Valid sections are: %s" % ( section, dirmap.keys() )

	bindir, desktopdir = dirmap[section]
	APPNAME = bb.data.getVar( "APPNAME", d, True ) or bb.data.getVar( "PN", d, True )
	APPTYPE = bb.data.getVar( "APPTYPE", d, True )
	if not APPTYPE:
		# No explicit APPTYPE: applications default to quicklaunch,
		# anything going into a plugins directory is a plugin.
		if bindir == "<BINDIR>":
			APPTYPE = "quicklaunch"
		else:
			APPTYPE = "plugin"

	appmap = { "binary":"/bin", "quicklaunch":"/plugins/application" }
	if bindir == "<BINDIR>": bindir = appmap[APPTYPE]

	bb.note( "Section='%s', bindir='%s', desktopdir='%s', name='%s', type='%s'" %
		( section, bindir, desktopdir, APPNAME, APPTYPE ) )

	S = bb.data.getVar( "S", d, 1 )
	D = "%s/image" % bb.data.getVar( "WORKDIR", d, True )
	WORKDIR = bb.data.getVar( "WORKDIR", d, True )
	palmtopdir = bb.data.getVar( "palmtopdir", d )
	APPDESKTOP = bb.data.getVar( "APPDESKTOP", d, True ) or "%s/%s" % ( WORKDIR, desktopdir )

	# Install the .desktop launcher entry, if this section has one.
	if desktopdir is not None:
		os.system( "install -d %s%s%s/" % ( D, palmtopdir, desktopdir ) )
		os.system( "install -m 0644 %s/%s.desktop %s%s%s/" % ( APPDESKTOP, APPNAME, D, palmtopdir, desktopdir ) )

	os.system( "install -d %s%s%s/" % ( D, palmtopdir, bindir ) )

	# Install the target itself: a plain binary, or a shared object —
	# quicklaunch apps additionally get a symlink to the quicklauncher.
	if APPTYPE == "binary":
		os.system( "install -m 0755 %s/%s %s%s%s/" % ( S, APPNAME, D, palmtopdir, bindir ) )
	elif APPTYPE == "quicklaunch":
		os.system( "install -m 0755 %s/lib%s.so %s%s%s/" % ( S, APPNAME, D, palmtopdir, bindir ) )
		os.system( "install -d %s%s/bin/" % ( D, palmtopdir ) )
		os.system( "ln -sf %s/bin/quicklauncher %s%s/bin/%s" % ( palmtopdir, D, palmtopdir, APPNAME ) )
	elif APPTYPE == "plugin":
		os.system( "install -m 0755 %s/lib%s.so %s%s%s/" % ( S, APPNAME, D, palmtopdir, bindir ) )
}
103 | |||
104 | EXPORT_FUNCTIONS do_opie_install | ||
105 | addtask opie_install after do_compile before do_populate_staging | ||
diff --git a/meta/classes/opie_i18n.bbclass b/meta/classes/opie_i18n.bbclass new file mode 100644 index 0000000000..cb3d07de75 --- /dev/null +++ b/meta/classes/opie_i18n.bbclass | |||
@@ -0,0 +1,163 @@ | |||
1 | # classes/opie_i18n.oeclass Matthias 'CoreDump' Hentges 16-10-2004 | ||
2 | # | ||
3 | # Automatically builds i18n ipks for opie packages. It downloads opie-i18n from opie CVS | ||
4 | # and tries to guess the name of the .ts file based on the package name: | ||
5 | # ${PN}.ts, lib${PN}.ts and opie-${PN}.ts are all valid. The .ts "guessing" can be | ||
6 | # disabled by setting I18N_FILES in the .oe file. | ||
7 | # | ||
8 | # Todo: | ||
9 | # | ||
10 | |||
11 | I18N_STATS = "1" | ||
12 | SRC_URI += "${HANDHELDS_CVS};module=opie/i18n" | ||
13 | DEPENDS += "opie-i18n" | ||
14 | |||
# Print an opie_i18n error message and abort the current task.
die () {
	# Fixed typo in the message prefix: it previously read "opie_18n".
	echo -e "opie_i18n: ERROR: $1"
	exit 1
}
19 | |||
python do_build_opie_i18n_data() {
	# Read back the PACKAGES.tmp / FILES.tmp files written by the shell
	# task do_build_opie_i18n and turn them into per-language PACKAGES,
	# FILES_*, SECTION_* and RDEPENDS_* settings (shell functions cannot
	# modify metadata variables directly).

	import os, bb, re
	workdir = bb.data.getVar("WORKDIR", d, 1)
	packages = bb.data.getVar("PACKAGES", d, 1)
	files = bb.data.getVar("FILES", d, 1)
	section = bb.data.getVar("SECTION", d, 1)
	pn = bb.data.getVar("PN", d, 1)
	rdepends = bb.data.getVar("RDEPENDS", d, 1)

	if os.path.exists(workdir + "/PACKAGES.tmp"):
		# Prepend the generated language packages to PACKAGES.
		fd = open(workdir + "/PACKAGES.tmp", 'r')
		lines = fd.readlines()
		fd.close()

		bb.data.setVar('PACKAGES', " ".join(lines).lower() + " " + packages, d)

		fd = open(workdir + "/FILES.tmp", 'r')
		lines = fd.readlines()
		fd.close()

		# Each FILES.tmp line has the form "<package>#<files>".
		for l in lines:
			x = re.split("\#", l)
			bb.data.setVar('FILES_%s' % x[0].lower(), " " + x[1].strip('\n'), d)
			bb.data.setVar('SECTION_%s' % x[0].lower(), "opie/translations", d)
			bb.data.setVar('RDEPENDS_%s' % x[0].lower(), pn, d)

		# Restore the main package's section/rdepends, which the loop
		# above may have overwritten via a "${PN}#..." line.
		bb.data.setVar('SECTION_%s' % pn, section, d)
		bb.data.setVar('RDEPENDS', rdepends, d)
	else:
		bb.note("No translations found for package " + pn)
}
52 | |||
# Compile the Opie .ts translation sources into .qm files, install them
# per language, and record the package/file lists in PACKAGES.tmp and
# FILES.tmp for do_build_opie_i18n_data() to pick up.
do_build_opie_i18n () {

	cd "${WORKDIR}/i18n" || die "ERROR:\nCouldn't find Opies i18n sources in ${PN}/i18n\nMake sure that <inherit opie_i18n> or <inherit opie> is *below* <SRC_URIS =>!"

	# Guess candidate .ts file names from PN unless I18N_FILES is set.
	if test -z "${I18N_FILES}"
	then
		package_name="`echo "${PN}"| sed "s/^opie\-//"`"
		package_name2="`echo "${PN}"| sed "s/^opie\-//;s/\-//"`"
		test "$package_name" != "$package_name2" && I18N_FILES="${package_name}.ts lib${package_name}.ts opie-${package_name}.ts ${package_name2}.ts lib${package_name2}.ts opie-${package_name2}.ts"
		test "$package_name" = "$package_name2" && I18N_FILES="${package_name}.ts lib${package_name}.ts opie-${package_name}.ts"
		echo -e "I18N Datafiles: ${I18N_FILES} (auto-detected)\nYou can overide the auto-detection by setting I18N_FILES in your .oe file"
	else
		echo "I18N Datafiles: ${I18N_FILES} (provided by .bb)"
	fi

	rm -f "${WORKDIR}/FILES.tmp" "${WORKDIR}/PACKAGES.tmp"

	echo -e "\nFILES is set to [${FILES}]\n"

	for file in ${I18N_FILES}
	do
		echo "Working on [$file]"
		for ts_file in `ls -1 */*.ts | egrep "/$file"`
		do
			echo -e "\tCompiling [$ts_file]"
			cd "${WORKDIR}/i18n/`dirname $ts_file`" || die "[${WORKDIR}/i18n/`dirname $ts_file`] not found"
			opie-lrelease "`basename $ts_file`" || die "lrelease failed! Make sure that <inherit opie_i18n> or <inherit opie> is *below* <DEPENDS =>!"

			# $lang is the language as in de_DE, $lang_sane replaces "_" with "-"
			# to allow packaging as "_" is not allowed in a package name
			lang="`echo "$ts_file" | sed -n "s#\(.*\)/\(.*\)#\1#p"`"
			lang_sane="`echo "$ts_file" | sed -n "s#\(.*\)/\(.*\)#\1#p"|sed s/\_/\-/`"
			echo -e "\tPackaging [`basename $ts_file`] for language [$lang]"

			install -d ${D}${palmtopdir}/i18n/$lang
			install -m 0644 ${WORKDIR}/i18n/$lang/.directory ${D}${palmtopdir}/i18n/$lang/
			install -m 0644 ${WORKDIR}/i18n/$lang/*.qm "${D}${palmtopdir}/i18n/$lang/"

			# As it is not possible to modify OE vars from within a _shell_ function,
			# some major hacking was needed. These two files will be read by the python
			# function do_build_opie_i18n_data() which sets the variables FILES_* and
			# PACKAGES as needed.
			echo -n "${PN}-${lang_sane} " >> "${WORKDIR}/PACKAGES.tmp"
			echo -e "${PN}-${lang_sane}#${palmtopdir}/i18n/$lang" >> "${WORKDIR}/FILES.tmp"

			ts_found_something=1
		done

		if test "$ts_found_something" != 1
		then
			echo -e "\tNo translations found"
		else
			ts_found_something=""
			ts_found="$ts_found $file"
		fi

		# Only used for debugging purposes
		test "${I18N_STATS}" = 1 && cd "${WORKDIR}/i18n"

		echo -e "Completed [$file]\n\n"
	done

	qt_dirs="apps bin etc lib pics plugins share sounds"

	# Build a space-separated list of the standard QtPalmtop dirs, used
	# below as a replacement for a bare ${palmtopdir} in FILES.
	for dir in $qt_dirs
	do
		dir_="$dir_ ${palmtopdir}/$dir "
	done


	# If we don't adjust FILES to exclude the i18n directory, we will end up with
	# _lots_ of empty i18n/$lang directories in the original .ipk.
	if (echo "${FILES}" | egrep "${palmtopdir}/? |${palmtopdir}/?$") &>/dev/null
	then
		echo "NOTE: FILES was set to ${palmtopdir} which would include the i18n directory"
		echo -e "\n\nI'll remove ${palmtopdir} from FILES and replace it with all directories"
		echo "below QtPalmtop, except i18n ($qt_dirs). See classes/opie_i18n.oeclass for details"

		# Removes /opt/QtPalmtop from FILES but keeps /opt/QtPalmtop/$some_dir
		FILES="`echo "$FILES"| sed "s#${palmtopdir}[/]\?\$\|${palmtopdir}[/]\? ##"`"

		echo "${PN}#$FILES $dir_" >> "${WORKDIR}/FILES.tmp"
	fi

	# This is the common case for OPIE apps which are installed by opie.oeclass magic
	if test -z "${FILES}"
	then
		echo "NOTE:"
		echo -e "Since FILES is empty, i'll add all directories below ${palmtopdir} to it,\nexcluding i18n: ( $qt_dirs )"
		echo "${PN}#$FILES $dir_" >> "${WORKDIR}/FILES.tmp"
	fi

	if ! test -e "${WORKDIR}/PACKAGES.tmp" -a "${I18N_STATS}" = 1
	then
		echo "No translations for package [${PN}]" >> /tmp/oe-i18n-missing.log
	else
		echo "Using [$ts_found ] for package [${PN}]" >> /tmp/oe-i18n.log
	fi

	# While this might not be very elegant, it safes a _ton_ of space (~30Mb) for
	# each opie package.
	for file in $(ls */*.ts | egrep -v "`echo "$ts_found"| sed "s/^\ //;s/\ /\|/"`")
	do
		rm "$file"
	done

	return 0
}
161 | |||
162 | addtask build_opie_i18n before do_compile | ||
163 | addtask build_opie_i18n_data after do_build_opie_i18n before do_compile | ||
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass new file mode 100644 index 0000000000..9edcc1e5ed --- /dev/null +++ b/meta/classes/package.bbclass | |||
@@ -0,0 +1,729 @@ | |||
def legitimize_package_name(s):
    """Mangle *s* into a legal package name: lowercased, with '_' and '/'
    becoming '-' and '@' and ',' becoming '+'."""
    mangled = s.lower()
    for old, new in (('_', '-'), ('@', '+'), (',', '+'), ('/', '-')):
        mangled = mangled.replace(old, new)
    return mangled
3 | |||
4 | STAGING_PKGMAPS_DIR ?= "${STAGING_DIR}/pkgmaps" | ||
5 | |||
def add_package_mapping (pkg, new_name, d):
    """Record that package *pkg* is renamed to *new_name* by writing a
    one-line mapping file into STAGING_PKGMAPS_DIR."""
    import bb, os

    def escape(text):
        # string_escape keeps the stored name strictly on one line.
        import codecs
        return codecs.getencoder("string_escape")(text)[0]

    map_dir = bb.data.getVar('STAGING_PKGMAPS_DIR', d, 1)
    bb.mkdirhier(map_dir)

    out = open(os.path.join(map_dir, pkg), 'w')
    out.write(escape(new_name) + "\n")
    out.close()
23 | |||
def get_package_mapping (pkg, d):
    """Return the renamed form of *pkg* recorded by add_package_mapping(),
    or *pkg* itself when no readable mapping file exists (or it is empty)."""
    import bb, os

    def decode(str):
        import codecs
        c = codecs.getdecoder("string_escape")
        return c(str)[0]

    data_file = bb.data.expand("${STAGING_PKGMAPS_DIR}/%s" % pkg, d)

    if os.access(data_file, os.R_OK):
        # open() rather than the Python-2-only file() builtin, for
        # consistency with add_package_mapping() above.
        f = open(data_file, 'r')
        lines = f.readlines()
        f.close()
        # Only the first line carries the mapping.
        for l in lines:
            return decode(l).strip()
    return pkg
41 | |||
def runtime_mapping_rename (varname, d):
    """Rewrite every dependency listed in d[varname] through
    get_package_mapping(), preserving version qualifiers like "(>= 1.0)"."""
    import bb, os

    renamed = []
    for entry in explode_deps(bb.data.getVar(varname, d, 1) or ""):
        # A dependency may carry a version constraint: "pkg (>= 1.0)".
        pieces = entry.split(' (')
        mapped = get_package_mapping(pieces[0].strip(), d)
        if len(pieces) > 1:
            renamed.append("%s (%s" % (mapped, pieces[1]))
        else:
            renamed.append(mapped)

    bb.data.setVar(varname, " ".join(renamed) or None, d)
60 | |||
python package_mapping_rename_hook () {
    # Propagate recorded package renames into every runtime relationship
    # variable before the package metadata is emitted.
    for relation in ['RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS',
                     'RPROVIDES', 'RREPLACES', 'RCONFLICTS']:
        runtime_mapping_rename(relation, d)
}
69 | |||
70 | |||
def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None):
    """Autogenerate packages from the files installed under ${D}<root>.

    Each file (or directory, with allow_dirs) whose name matches
    file_regex yields a package named output_pattern %% <mangled group 1>.
    New packages get FILES/DESCRIPTION/RDEPENDS (and optional
    postinst/postrm) variables; already-known packages just have the file
    appended to their FILES.

    d -- the bitbake data store
    root -- directory below ${D} to scan
    file_regex -- regex applied to each basename (or to the path relative
        to root when match_path is set); group 1 names the package
    output_pattern -- '%s' pattern producing the package name
    description -- '%s' pattern producing DESCRIPTION
    postinst/postrm -- optional script bodies (a shebang is prepended)
    recursive -- walk subdirectories instead of just listing root
    hook -- callable invoked as hook(file, pkg, file_regex, output_pattern,
        raw group 1) for every matched object
    extra_depends -- RDEPENDS to add to newly created packages; defaults
        to the main package, pass '' to add none
    aux_files_pattern / aux_files_pattern_verbatim -- extra FILES entries,
        '%s'-filled with the mangled (respectively verbatim) group 1
    allow_dirs -- also match directories
    prepend -- insert new packages at the head of PACKAGES
    match_path -- match the relative path instead of the basename
    """
    # re/stat hoisted here: they were previously re-imported on every
    # iteration of the object loop below.
    import os, os.path, re, stat, bb

    dvar = bb.data.getVar('D', d, 1)
    if not dvar:
        bb.error("D not defined")
        return

    packages = bb.data.getVar('PACKAGES', d, 1).split()
    if not packages:
        # nothing to do
        return

    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'
    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    if extra_depends is None:
        # Default to depending on the main (first) package.
        extra_depends = bb.data.getVar('PKG_' + packages[0], d, 1) or packages[0]

    for o in objs:
        if match_path:
            m = re.match(file_regex, o)
        else:
            m = re.match(file_regex, os.path.basename(o))

        if not m:
            continue
        f = os.path.join(dvar + root, o)
        mode = os.lstat(f).st_mode
        # lstat: symlinks are neither regular files nor directories here,
        # so they are skipped.
        if not (stat.S_ISREG(mode) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        if pkg not in packages:
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
            the_files = [os.path.join(root, o)]
            if aux_files_pattern:
                if type(aux_files_pattern) is list:
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            if aux_files_pattern_verbatim:
                if type(aux_files_pattern_verbatim) is list:
                    for fp in aux_files_pattern_verbatim:
                        the_files.append(fp % m.group(1))
                else:
                    the_files.append(aux_files_pattern_verbatim % m.group(1))
            bb.data.setVar('FILES_' + pkg, " ".join(the_files), d)
            if extra_depends != '':
                the_depends = bb.data.getVar('RDEPENDS_' + pkg, d, 1)
                if the_depends:
                    the_depends = '%s %s' % (the_depends, extra_depends)
                else:
                    the_depends = extra_depends
                bb.data.setVar('RDEPENDS_' + pkg, the_depends, d)
            bb.data.setVar('DESCRIPTION_' + pkg, description % on, d)
            if postinst:
                bb.data.setVar('pkg_postinst_' + pkg, postinst, d)
            if postrm:
                bb.data.setVar('pkg_postrm_' + pkg, postrm, d)
        else:
            oldfiles = bb.data.getVar('FILES_' + pkg, d, 1)
            if not oldfiles:
                bb.fatal("Package '%s' exists but has no files" % pkg)
            bb.data.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o), d)
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    bb.data.setVar('PACKAGES', ' '.join(packages), d)
156 | |||
# Function to strip a single file, called from RUNSTRIP below
# A working 'file' (one which works on the target architecture)
# is necessary for this stuff to work.
#PACKAGE_DEPENDS ?= "file-native"
#DEPENDS_prepend =+ "${PACKAGE_DEPENDS} "
#FIXME: this should be "" when any errors are gone!
# When "1", a failed strip is downgraded to a warning instead of failing
# the task.
IGNORE_STRIP_ERRORS ?= "1"
164 | |||
runstrip() {
	# Strip a single binary ($1) in place with ${STRIP}.
	# Only files that 'file' reports as "not stripped" are touched; if
	# 'file' itself fails, a strip is attempted anyway (forced).
	# Returns non-zero on strip failure unless IGNORE_STRIP_ERRORS is "1".
	local ro st
	st=0
	if { file "$1" || {
		oewarn "file $1: failed (forced strip)" >&2
		echo 'not stripped'
	}
	} | grep -q 'not stripped'
	then
		oenote "${STRIP} $1"
		# Temporarily make read-only files writable for the strip,
		# restoring the permission afterwards.
		ro=
		test -w "$1" || {
			ro=1
			chmod +w "$1"
		}
		'${STRIP}' "$1"
		st=$?
		test -n "$ro" && chmod -w "$1"
		if test $st -ne 0
		then
			oewarn "runstrip: ${STRIP} $1: strip failed" >&2
			# NOTE(review): '==' inside [ ] is a bashism; POSIX test
			# uses '=' — confirm the build shell is bash.
			if [ x${IGNORE_STRIP_ERRORS} == x1 ]
			then
				#FIXME: remove this, it's for error detection
				# Report loudly on the controlling terminal, then
				# swallow the error so the build continues.
				if file "$1" 2>/dev/null >&2
				then
					(oefatal "${STRIP} $1: command failed" >/dev/tty)
				else
					(oefatal "file $1: command failed" >/dev/tty)
				fi
				st=0
			fi
		fi
	else
		oenote "runstrip: skip $1"
	fi
	return $st
}
203 | |||
python populate_packages () {
    # Distribute the files installed in ${D} into per-package staging
    # trees (${WORKDIR}/install/<pkg>) according to each package's FILES,
    # strip executables, apply package renames, add dependencies for
    # cross-package symlink targets, and write the subpackage metadata.
    import glob, stat, errno, re

    workdir = bb.data.getVar('WORKDIR', d, 1)
    if not workdir:
        bb.error("WORKDIR not defined, unable to package")
        return

    import os # path manipulations
    outdir = bb.data.getVar('DEPLOY_DIR', d, 1)
    if not outdir:
        bb.error("DEPLOY_DIR not defined, unable to package")
        return
    bb.mkdirhier(outdir)

    dvar = bb.data.getVar('D', d, 1)
    if not dvar:
        bb.error("D not defined, unable to package")
        return
    bb.mkdirhier(dvar)

    packages = bb.data.getVar('PACKAGES', d, 1)
    if not packages:
        bb.debug(1, "PACKAGES not defined, nothing to package")
        return

    pn = bb.data.getVar('PN', d, 1)
    if not pn:
        bb.error("PN not defined")
        return

    os.chdir(dvar)

    def isexec(path):
        # True when the owner-execute bit is set; unstatable paths count
        # as non-executable.
        try:
            s = os.stat(path)
        except (os.error, AttributeError):
            return 0
        return (s[stat.ST_MODE] & stat.S_IEXEC)

    # Sanity check PACKAGES for duplicates - should be moved to
    # sanity.bbclass once we have the infrastructure
    pkgs = []
    for pkg in packages.split():
        if pkg in pkgs:
            bb.error("%s is listed in PACKAGES multiple times. Undefined behaviour will result." % pkg)
        # Fix: was 'pkgs += pkg', which extended the list with the
        # individual *characters* of the name and broke the duplicate check.
        pkgs.append(pkg)

    for pkg in packages.split():
        localdata = bb.data.createCopy(d)
        root = os.path.join(workdir, "install", pkg)

        os.system('rm -rf %s' % root)

        bb.data.setVar('ROOT', '', localdata)
        bb.data.setVar('ROOT_%s' % pkg, root, localdata)
        pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1)
        if not pkgname:
            pkgname = pkg
        bb.data.setVar('PKG', pkgname, localdata)

        # Add the package name to OVERRIDES so FILES etc. can be set
        # per package.
        overrides = bb.data.getVar('OVERRIDES', localdata, 1)
        if not overrides:
            raise bb.build.FuncFailed('OVERRIDES not defined')
        bb.data.setVar('OVERRIDES', overrides+':'+pkg, localdata)

        bb.data.update_data(localdata)

        root = bb.data.getVar('ROOT', localdata, 1)
        bb.mkdirhier(root)
        filesvar = bb.data.getVar('FILES', localdata, 1) or ""
        files = filesvar.split()
        stripfunc = ""
        for file in files:
            if os.path.isabs(file):
                file = '.' + file
            if not os.path.islink(file):
                if os.path.isdir(file):
                    # Expand a directory to its contents; the new entries
                    # are processed by later iterations of this loop.
                    newfiles = [ os.path.join(file,x) for x in os.listdir(file) ]
                    if newfiles:
                        files += newfiles
                        continue
            globbed = glob.glob(file)
            if globbed:
                if [ file ] != globbed:
                    files += globbed
                    continue
            if (not os.path.islink(file)) and (not os.path.exists(file)):
                continue
            fpath = os.path.join(root,file)
            dpath = os.path.dirname(fpath)
            bb.mkdirhier(dpath)
            # Queue executables for stripping once they are in place.
            if (bb.data.getVar('INHIBIT_PACKAGE_STRIP', d, 1) != '1') and not os.path.islink(file) and isexec(file):
                stripfunc += "\trunstrip %s || st=1\n" % fpath
            ret = bb.movefile(file,fpath)
            if ret is None or ret == 0:
                raise bb.build.FuncFailed("File population failed")
        if not stripfunc == "":
            from bb import build
            # Strip the collected executables via the runstrip helper.
            bb.data.setVar('RUNSTRIP', '\tlocal st\n\tst=0\n%s\treturn $st' % stripfunc, localdata)
            bb.data.setVarFlag('RUNSTRIP', 'func', 1, localdata)
            bb.build.exec_func('RUNSTRIP', localdata)
        del localdata
    os.chdir(workdir)

    # Anything still left under ${D} was not claimed by any package.
    unshipped = []
    for root, dirs, files in os.walk(dvar):
        for f in files:
            path = os.path.join(root[len(dvar):], f)
            unshipped.append(path)

    if unshipped != []:
        bb.note("the following files were installed but not shipped in any package:")
        for f in unshipped:
            bb.note("  " + f)

    bb.build.exec_func("package_name_hook", d)

    # Persist any renames so dependent recipes can remap their rdepends.
    for pkg in packages.split():
        pkgname = bb.data.getVar('PKG_%s' % pkg, d, 1)
        if pkgname is None:
            bb.data.setVar('PKG_%s' % pkg, pkg, d)
        else:
            add_package_mapping(pkg, pkgname, d)

    # Collect each package's file list and every symlink whose target
    # does not resolve inside the same package.
    dangling_links = {}
    pkg_files = {}
    for pkg in packages.split():
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(workdir, "install", pkg)
        for root, dirs, files in os.walk(inst_root):
            for f in files:
                path = os.path.join(root, f)
                rpath = path[len(inst_root):]
                pkg_files[pkg].append(rpath)
                try:
                    s = os.stat(path)
                except OSError, (err, strerror):
                    # stat() follows symlinks, so ENOENT here means a
                    # dangling link; record its normalized target.
                    if err != errno.ENOENT:
                        raise
                    target = os.readlink(path)
                    if target[0] != '/':
                        target = os.path.join(root[len(inst_root):], target)
                    dangling_links[pkg].append(os.path.normpath(target))

    # If a dangling link's target lives in a sibling package, make this
    # package runtime-depend on that package.
    for pkg in packages.split():
        rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "")
        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in packages.split():
                for f in pkg_files[p]:
                    if f == l:
                        found = True
                        bb.debug(1, "target found in %s" % p)
                        if p == pkg:
                            break
                        dp = bb.data.getVar('PKG_' + p, d, 1) or p
                        if not dp in rdepends:
                            rdepends.append(dp)
                        break
            if found == False:
                bb.note("%s contains dangling symlink to %s" % (pkg, l))
        bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d)

    def write_if_exists(f, pkg, var):
        # Emit "VAR_pkg: value", string-escaped to stay on one line.
        def encode(str):
            import codecs
            c = codecs.getencoder("string_escape")
            return c(str)[0]

        val = bb.data.getVar('%s_%s' % (var, pkg), d, 1)
        if val:
            f.write('%s_%s: %s\n' % (var, pkg, encode(val)))

    # Write the subpackage metadata consumed by read_subpackage_metadata.
    data_file = os.path.join(workdir, "install", pn + ".package")
    f = open(data_file, 'w')
    f.write("PACKAGES: %s\n" % packages)
    for pkg in packages.split():
        write_if_exists(f, pkg, 'DESCRIPTION')
        write_if_exists(f, pkg, 'RDEPENDS')
        write_if_exists(f, pkg, 'RPROVIDES')
        write_if_exists(f, pkg, 'PKG')
        write_if_exists(f, pkg, 'ALLOW_EMPTY')
        write_if_exists(f, pkg, 'FILES')
        write_if_exists(f, pkg, 'pkg_postinst')
        write_if_exists(f, pkg, 'pkg_postrm')
        write_if_exists(f, pkg, 'pkg_preinst')
        write_if_exists(f, pkg, 'pkg_prerm')
    f.close()
    bb.build.exec_func("read_subpackage_metadata", d)
}
398 | |||
ldconfig_postinst_fragment() {
	# Refresh the shared-library cache, but only for on-target installs:
	# $D is non-empty when installing into an offline root filesystem,
	# where running the host's ldconfig would be wrong.
	if [ -z "$D" ]; then
		ldconfig
	fi
}
404 | |||
python package_do_shlibs() {
    # Scan each package's staged files for ELF objects, record the SONAMEs
    # they provide (<pkg>.list / <pkg>.ver under the shlibs dir) and the
    # NEEDED entries they require, then resolve those requirements into
    # per-package .shlibdeps files consumed by the packaging backends.
    import os, re, os.path

    exclude_shlibs = bb.data.getVar('EXCLUDE_FROM_SHLIBS', d, 0)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    # Candidate library filenames, and ".../lib" install dirs (the latter
    # decide whether an ldconfig postinst is needed).
    lib_re = re.compile("^lib.*\.so")
    libdir_re = re.compile(".*/lib$")

    packages = bb.data.getVar('PACKAGES', d, 1)
    if not packages:
        bb.debug(1, "no packages to build; not calculating shlibs")
        return

    workdir = bb.data.getVar('WORKDIR', d, 1)
    if not workdir:
        bb.error("WORKDIR not defined")
        return

    staging = bb.data.getVar('STAGING_DIR', d, 1)
    if not staging:
        bb.error("STAGING_DIR not defined")
        return

    ver = bb.data.getVar('PV', d, 1)
    if not ver:
        bb.error("PV not defined")
        return

    target_sys = bb.data.getVar('TARGET_SYS', d, 1)
    if not target_sys:
        bb.error("TARGET_SYS not defined")
        return

    # Per-target shlibs database, plus the old flat location for
    # backwards compatibility.
    shlibs_dir = os.path.join(staging, target_sys, "shlibs")
    old_shlibs_dir = os.path.join(staging, "shlibs")
    bb.mkdirhier(shlibs_dir)

    needed = {}
    for pkg in packages.split():
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        pkgname = bb.data.getVar('PKG_%s' % pkg, d, 1)
        if not pkgname:
            pkgname = pkg

        needed[pkg] = []
        sonames = list()
        top = os.path.join(workdir, "install", pkg)
        for root, dirs, files in os.walk(top):
            for file in files:
                soname = None
                path = os.path.join(root, file)
                # Executables and lib*.so* files are inspected via objdump.
                if os.access(path, os.X_OK) or lib_re.match(file):
                    cmd = (bb.data.getVar('BUILD_PREFIX', d, 1) or "") + "objdump -p " + path + " 2>/dev/null"
                    fd = os.popen(cmd)
                    lines = fd.readlines()
                    fd.close()
                    for l in lines:
                        m = re.match("\s+NEEDED\s+([^\s]*)", l)
                        if m:
                            needed[pkg].append(m.group(1))
                        m = re.match("\s+SONAME\s+([^\s]*)", l)
                        if m and not m.group(1) in sonames:
                            sonames.append(m.group(1))
                        if m and libdir_re.match(root):
                            # A library installed in .../lib wants ldconfig
                            # run at install time.
                            needs_ldconfig = True
        # Rewrite this package's provider files from scratch.
        shlibs_file = os.path.join(shlibs_dir, pkgname + ".list")
        if os.path.exists(shlibs_file):
            os.remove(shlibs_file)
        shver_file = os.path.join(shlibs_dir, pkgname + ".ver")
        if os.path.exists(shver_file):
            os.remove(shver_file)
        if len(sonames):
            fd = open(shlibs_file, 'w')
            for s in sonames:
                fd.write(s + '\n')
            fd.close()
            fd = open(shver_file, 'w')
            fd.write(ver + '\n')
            fd.close()
        if needs_ldconfig:
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += bb.data.getVar('ldconfig_postinst_fragment', d, 1)
            bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)

    # Build SONAME -> (providing package, version) from every .list file
    # published by this and other recipes.
    shlib_provider = {}
    list_re = re.compile('^(.*)\.list$')
    for dir in [old_shlibs_dir, shlibs_dir]:
        if not os.path.exists(dir):
            continue
        for file in os.listdir(dir):
            m = list_re.match(file)
            if m:
                dep_pkg = m.group(1)
                fd = open(os.path.join(dir, file))
                lines = fd.readlines()
                fd.close()
                ver_file = os.path.join(dir, dep_pkg + '.ver')
                lib_ver = None
                if os.path.exists(ver_file):
                    fd = open(ver_file)
                    lib_ver = fd.readline().rstrip()
                    fd.close()
                for l in lines:
                    shlib_provider[l.rstrip()] = (dep_pkg, lib_ver)


    for pkg in packages.split():
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        p_pkg = bb.data.getVar("PKG_%s" % pkg, d, 1) or pkg

        deps = list()
        for n in needed[pkg]:
            if n in shlib_provider.keys():
                (dep_pkg, ver_needed) = shlib_provider[n]

                # Self-dependencies are not interesting.
                if dep_pkg == p_pkg:
                    continue

                if ver_needed:
                    dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                else:
                    dep = dep_pkg
                if not dep in deps:
                    deps.append(dep)
            else:
                bb.note("Couldn't find shared library provider for %s" % n)


        # One dependency per line, read back by the packaging backends.
        deps_file = os.path.join(workdir, "install", pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()
}
551 | |||
python package_do_pkgconfig () {
    # Record the pkg-config modules (.pc files) each package ships
    # (<pkg>.pclist in the shlibs dir) and resolve each package's
    # "Requires:" entries into per-package .pcdeps files.
    import re, os

    packages = bb.data.getVar('PACKAGES', d, 1)
    if not packages:
        bb.debug(1, "no packages to build; not calculating pkgconfig dependencies")
        return

    workdir = bb.data.getVar('WORKDIR', d, 1)
    if not workdir:
        bb.error("WORKDIR not defined")
        return

    staging = bb.data.getVar('STAGING_DIR', d, 1)
    if not staging:
        bb.error("STAGING_DIR not defined")
        return

    target_sys = bb.data.getVar('TARGET_SYS', d, 1)
    if not target_sys:
        bb.error("TARGET_SYS not defined")
        return

    # Same database locations as package_do_shlibs.
    shlibs_dir = os.path.join(staging, target_sys, "shlibs")
    old_shlibs_dir = os.path.join(staging, "shlibs")
    bb.mkdirhier(shlibs_dir)

    pc_re = re.compile('(.*)\.pc$')
    var_re = re.compile('(.*)=(.*)')
    field_re = re.compile('(.*): (.*)')

    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        top = os.path.join(workdir, "install", pkg)
        for root, dirs, files in os.walk(top):
            for file in files:
                m = pc_re.match(file)
                if m:
                    # Parse the .pc file with a private datastore so its
                    # "var=value" assignments can be expanded in fields.
                    pd = bb.data.init()
                    name = m.group(1)
                    pkgconfig_provided[pkg].append(name)
                    path = os.path.join(root, file)
                    if not os.access(path, os.R_OK):
                        continue
                    f = open(path, 'r')
                    lines = f.readlines()
                    f.close()
                    for l in lines:
                        m = var_re.match(l)
                        if m:
                            name = m.group(1)
                            val = m.group(2)
                            bb.data.setVar(name, bb.data.expand(val, pd), pd)
                            continue
                        m = field_re.match(l)
                        if m:
                            hdr = m.group(1)
                            exp = bb.data.expand(m.group(2), pd)
                            if hdr == 'Requires':
                                pkgconfig_needed[pkg] += exp.replace(',', ' ').split()

    # Publish this recipe's .pclist files (rewritten from scratch).
    for pkg in packages.split():
        ppkg = bb.data.getVar("PKG_" + pkg, d, 1) or pkg
        pkgs_file = os.path.join(shlibs_dir, ppkg + ".pclist")
        if os.path.exists(pkgs_file):
            os.remove(pkgs_file)
        if pkgconfig_provided[pkg] != []:
            f = open(pkgs_file, 'w')
            for p in pkgconfig_provided[pkg]:
                f.write('%s\n' % p)
            f.close()

    # Merge in the .pclist files published by other recipes.
    for dir in [old_shlibs_dir, shlibs_dir]:
        if not os.path.exists(dir):
            continue
        for file in os.listdir(dir):
            m = re.match('^(.*)\.pclist$', file)
            if m:
                pkg = m.group(1)
                fd = open(os.path.join(dir, file))
                lines = fd.readlines()
                fd.close()
                pkgconfig_provided[pkg] = []
                for l in lines:
                    pkgconfig_provided[pkg].append(l.rstrip())

    # Turn each required module into a dependency on whichever package
    # provides it, written one per line to <pkg>.pcdeps.
    for pkg in packages.split():
        deps = []
        for n in pkgconfig_needed[pkg]:
            found = False
            for k in pkgconfig_provided.keys():
                if n in pkgconfig_provided[k]:
                    if k != pkg and not (k in deps):
                        deps.append(k)
                    found = True
            if found == False:
                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
        deps_file = os.path.join(workdir, "install", pkg + ".pcdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()
}
661 | |||
python package_do_split_locales() {
    # Split ${datadir}/locale/<lang> out of the main package into
    # per-language ${PN}-locale-<lang> packages.
    import os

    if (bb.data.getVar('PACKAGE_NO_LOCALE', d, 1) == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
    if not packages:
        bb.debug(1, "no packages to build; not splitting locales")
        return

    datadir = bb.data.getVar('datadir', d, 1)
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = bb.data.getVar('D', d, 1)
    if not dvar:
        bb.error("D not defined")
        return

    pn = bb.data.getVar('PN', d, 1)
    if not pn:
        bb.error("PN not defined")
        return

    # The catch-all locale package is superseded by the per-language ones.
    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not os.path.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    mainpkg = packages[0]

    for l in locales:
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        bb.data.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l), d)
        bb.data.setVar('RDEPENDS_' + pkg, '${PKG_%s} virtual-locale-%s' % (mainpkg, ln), d)
        bb.data.setVar('RPROVIDES_' + pkg, '%s-locale %s-translation' % (pn, ln), d)
        bb.data.setVar('DESCRIPTION_' + pkg, '%s translation for %s' % (l, pn), d)

    bb.data.setVar('PACKAGES', ' '.join(packages), d)

    # The main package pulls in all locale packages via a wildcard.
    rdep = (bb.data.getVar('RDEPENDS_%s' % mainpkg, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "").split()
    rdep.append('%s-locale*' % pn)
    bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d)
}
717 | |||
# Ordered list of sub-tasks executed by do_package; packaging backends
# (e.g. package_ipk.bbclass) append their own functions to this.
PACKAGEFUNCS = "do_install package_do_split_locales \
		populate_packages package_do_shlibs \
		package_do_pkgconfig read_shlibdeps"
python package_do_package () {
    # Run each PACKAGEFUNCS entry in order.
    for f in (bb.data.getVar('PACKAGEFUNCS', d, 1) or '').split():
        bb.build.exec_func(f, d)
}
725 | |||
# Packaging runs with ${D} as the working directory.
do_package[dirs] = "${D}"
populate_packages[dirs] = "${D}"
EXPORT_FUNCTIONS do_package do_shlibs do_split_locales mapping_rename_hook
addtask package before do_build after do_populate_staging
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass new file mode 100644 index 0000000000..9ae526bb3b --- /dev/null +++ b/meta/classes/package_ipk.bbclass | |||
@@ -0,0 +1,234 @@ | |||
inherit package
# Pull in ipkg-utils-native, unless this recipe produces no packages.
DEPENDS_prepend="${@["ipkg-utils-native ", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}"
BOOTSTRAP_EXTRA_RDEPENDS += "ipkg-collateral ipkg ipkg-link"
PACKAGEFUNCS += "do_package_ipk"
5 | |||
python package_ipk_fn () {
    # The ipk filename stem is just the (possibly renamed) package name.
    from bb import data
    bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d)
}
10 | |||
python package_ipk_install () {
    # Install the .ipk for PKG into IMAGE_ROOTFS with ipkg-cl, generating
    # ipkg.conf and the local Packages index on demand.
    import os, sys
    pkg = bb.data.getVar('PKG', d, 1)
    pkgfn = bb.data.getVar('PKGFN', d, 1)
    rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
    ipkdir = bb.data.getVar('DEPLOY_DIR_IPK', d, 1)
    stagingdir = bb.data.getVar('STAGING_DIR', d, 1)
    tmpdir = bb.data.getVar('TMPDIR', d, 1)

    if None in (pkg,pkgfn,rootfs):
        raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)")
    try:
        bb.mkdirhier(rootfs)
        os.chdir(rootfs)
    except OSError:
        (type, value, traceback) = sys.exc_info()
        print value
        raise bb.build.FuncFailed

    # Generate ipkg.conf if it or the stamp doesn't exist
    conffile = os.path.join(stagingdir,"ipkg.conf")
    if not os.access(conffile, os.R_OK):
        ipkg_archs = bb.data.getVar('IPKG_ARCHS',d)
        if ipkg_archs is None:
            bb.error("IPKG_ARCHS missing")
            # Fix: was 'raise FuncFailed' — FuncFailed is not in scope
            # unqualified, so that raised a NameError instead.
            raise bb.build.FuncFailed
        ipkg_archs = ipkg_archs.split()
        arch_priority = 1

        # Higher-priority (more specific) architectures come later
        # in IPKG_ARCHS.
        f = open(conffile,"w")
        for arch in ipkg_archs:
            f.write("arch %s %s\n" % ( arch, arch_priority ))
            arch_priority += 1
        f.write("src local file:%s" % ipkdir)
        f.close()


    # Regenerate the Packages index when it is missing or packaging
    # re-ran (the do_packages stamp is removed by do_package_ipk).
    if (not os.access(os.path.join(ipkdir,"Packages"), os.R_OK) or
        not os.access(os.path.join(os.path.join(tmpdir, "stamps"),"do_packages"),os.R_OK)):
        ret = os.system('ipkg-make-index -p %s %s ' % (os.path.join(ipkdir, "Packages"), ipkdir))
        if (ret != 0 ):
            raise bb.build.FuncFailed
        f=open(os.path.join(os.path.join(tmpdir, "stamps"),"do_packages"),"w")
        f.close()

    ret = os.system('ipkg-cl -o %s -f %s update' % (rootfs, conffile))
    ret = os.system('ipkg-cl -o %s -f %s install %s' % (rootfs, conffile, pkgfn))
    if (ret != 0 ):
        raise bb.build.FuncFailed
}
61 | |||
62 | python do_package_ipk () { | ||
63 | import copy # to back up env data | ||
64 | import sys | ||
65 | import re | ||
66 | |||
67 | workdir = bb.data.getVar('WORKDIR', d, 1) | ||
68 | if not workdir: | ||
69 | bb.error("WORKDIR not defined, unable to package") | ||
70 | return | ||
71 | |||
72 | import os # path manipulations | ||
73 | outdir = bb.data.getVar('DEPLOY_DIR_IPK', d, 1) | ||
74 | if not outdir: | ||
75 | bb.error("DEPLOY_DIR_IPK not defined, unable to package") | ||
76 | return | ||
77 | bb.mkdirhier(outdir) | ||
78 | |||
79 | dvar = bb.data.getVar('D', d, 1) | ||
80 | if not dvar: | ||
81 | bb.error("D not defined, unable to package") | ||
82 | return | ||
83 | bb.mkdirhier(dvar) | ||
84 | |||
85 | packages = bb.data.getVar('PACKAGES', d, 1) | ||
86 | if not packages: | ||
87 | bb.debug(1, "PACKAGES not defined, nothing to package") | ||
88 | return | ||
89 | |||
90 | tmpdir = bb.data.getVar('TMPDIR', d, 1) | ||
91 | # Invalidate the packages file | ||
92 | if os.access(os.path.join(os.path.join(tmpdir, "stamps"),"do_packages"),os.R_OK): | ||
93 | os.unlink(os.path.join(os.path.join(tmpdir, "stamps"),"do_packages")) | ||
94 | |||
95 | if packages == []: | ||
96 | bb.debug(1, "No packages; nothing to do") | ||
97 | return | ||
98 | |||
99 | for pkg in packages.split(): | ||
100 | localdata = bb.data.createCopy(d) | ||
101 | root = "%s/install/%s" % (workdir, pkg) | ||
102 | |||
103 | bb.data.setVar('ROOT', '', localdata) | ||
104 | bb.data.setVar('ROOT_%s' % pkg, root, localdata) | ||
105 | pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1) | ||
106 | if not pkgname: | ||
107 | pkgname = pkg | ||
108 | bb.data.setVar('PKG', pkgname, localdata) | ||
109 | |||
110 | overrides = bb.data.getVar('OVERRIDES', localdata) | ||
111 | if not overrides: | ||
112 | raise bb.build.FuncFailed('OVERRIDES not defined') | ||
113 | overrides = bb.data.expand(overrides, localdata) | ||
114 | bb.data.setVar('OVERRIDES', overrides + ':' + pkg, localdata) | ||
115 | |||
116 | bb.data.update_data(localdata) | ||
117 | basedir = os.path.join(os.path.dirname(root)) | ||
118 | pkgoutdir = outdir | ||
119 | bb.mkdirhier(pkgoutdir) | ||
120 | os.chdir(root) | ||
121 | from glob import glob | ||
122 | g = glob('*') | ||
123 | try: | ||
124 | del g[g.index('CONTROL')] | ||
125 | del g[g.index('./CONTROL')] | ||
126 | except ValueError: | ||
127 | pass | ||
128 | if not g and not bb.data.getVar('ALLOW_EMPTY', localdata): | ||
129 | from bb import note | ||
130 | note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1))) | ||
131 | continue | ||
132 | controldir = os.path.join(root, 'CONTROL') | ||
133 | bb.mkdirhier(controldir) | ||
134 | try: | ||
135 | ctrlfile = file(os.path.join(controldir, 'control'), 'w') | ||
136 | except OSError: | ||
137 | raise bb.build.FuncFailed("unable to open control file for writing.") | ||
138 | |||
139 | fields = [] | ||
140 | fields.append(["Version: %s-%s\n", ['PV', 'PR']]) | ||
141 | fields.append(["Description: %s\n", ['DESCRIPTION']]) | ||
142 | fields.append(["Section: %s\n", ['SECTION']]) | ||
143 | fields.append(["Priority: %s\n", ['PRIORITY']]) | ||
144 | fields.append(["Maintainer: %s\n", ['MAINTAINER']]) | ||
145 | fields.append(["Architecture: %s\n", ['PACKAGE_ARCH']]) | ||
146 | fields.append(["OE: %s\n", ['P']]) | ||
147 | fields.append(["Homepage: %s\n", ['HOMEPAGE']]) | ||
148 | |||
149 | def pullData(l, d): | ||
150 | l2 = [] | ||
151 | for i in l: | ||
152 | l2.append(bb.data.getVar(i, d, 1)) | ||
153 | return l2 | ||
154 | |||
155 | ctrlfile.write("Package: %s\n" % pkgname) | ||
156 | # check for required fields | ||
157 | try: | ||
158 | for (c, fs) in fields: | ||
159 | for f in fs: | ||
160 | if bb.data.getVar(f, localdata) is None: | ||
161 | raise KeyError(f) | ||
162 | ctrlfile.write(c % tuple(pullData(fs, localdata))) | ||
163 | except KeyError: | ||
164 | (type, value, traceback) = sys.exc_info() | ||
165 | ctrlfile.close() | ||
166 | raise bb.build.FuncFailed("Missing field for ipk generation: %s" % value) | ||
167 | # more fields | ||
168 | |||
169 | bb.build.exec_func("mapping_rename_hook", localdata) | ||
170 | |||
171 | rdepends = explode_deps(bb.data.getVar("RDEPENDS", localdata, 1) or "") | ||
172 | rrecommends = explode_deps(bb.data.getVar("RRECOMMENDS", localdata, 1) or "") | ||
173 | rsuggests = (bb.data.getVar("RSUGGESTS", localdata, 1) or "").split() | ||
174 | rprovides = (bb.data.getVar("RPROVIDES", localdata, 1) or "").split() | ||
175 | rreplaces = (bb.data.getVar("RREPLACES", localdata, 1) or "").split() | ||
176 | rconflicts = (bb.data.getVar("RCONFLICTS", localdata, 1) or "").split() | ||
177 | if rdepends: | ||
178 | ctrlfile.write("Depends: %s\n" % ", ".join(rdepends)) | ||
179 | if rsuggests: | ||
180 | ctrlfile.write("Suggests: %s\n" % ", ".join(rsuggests)) | ||
181 | if rrecommends: | ||
182 | ctrlfile.write("Recommends: %s\n" % ", ".join(rrecommends)) | ||
183 | if rprovides: | ||
184 | ctrlfile.write("Provides: %s\n" % ", ".join(rprovides)) | ||
185 | if rreplaces: | ||
186 | ctrlfile.write("Replaces: %s\n" % ", ".join(rreplaces)) | ||
187 | if rconflicts: | ||
188 | ctrlfile.write("Conflicts: %s\n" % ", ".join(rconflicts)) | ||
189 | src_uri = bb.data.getVar("SRC_URI", localdata, 1) | ||
190 | if src_uri: | ||
191 | src_uri = re.sub("\s+", " ", src_uri) | ||
192 | ctrlfile.write("Source: %s\n" % " ".join(src_uri.split())) | ||
193 | ctrlfile.close() | ||
194 | |||
195 | for script in ["preinst", "postinst", "prerm", "postrm"]: | ||
196 | scriptvar = bb.data.getVar('pkg_%s' % script, localdata, 1) | ||
197 | if not scriptvar: | ||
198 | continue | ||
199 | try: | ||
200 | scriptfile = file(os.path.join(controldir, script), 'w') | ||
201 | except OSError: | ||
202 | raise bb.build.FuncFailed("unable to open %s script file for writing." % script) | ||
203 | scriptfile.write(scriptvar) | ||
204 | scriptfile.close() | ||
205 | os.chmod(os.path.join(controldir, script), 0755) | ||
206 | |||
207 | conffiles_str = bb.data.getVar("CONFFILES", localdata, 1) | ||
208 | if conffiles_str: | ||
209 | try: | ||
210 | conffiles = file(os.path.join(controldir, 'conffiles'), 'w') | ||
211 | except OSError: | ||
212 | raise bb.build.FuncFailed("unable to open conffiles for writing.") | ||
213 | for f in conffiles_str.split(): | ||
214 | conffiles.write('%s\n' % f) | ||
215 | conffiles.close() | ||
216 | |||
217 | os.chdir(basedir) | ||
218 | ret = os.system("PATH=\"%s\" %s %s %s" % (bb.data.getVar("PATH", localdata, 1), | ||
219 | bb.data.getVar("IPKGBUILDCMD",d,1), pkg, pkgoutdir)) | ||
220 | if ret != 0: | ||
221 | raise bb.build.FuncFailed("ipkg-build execution failed") | ||
222 | |||
223 | for script in ["preinst", "postinst", "prerm", "postrm", "control" ]: | ||
224 | scriptfile = os.path.join(controldir, script) | ||
225 | try: | ||
226 | os.remove(scriptfile) | ||
227 | except OSError: | ||
228 | pass | ||
229 | try: | ||
230 | os.rmdir(controldir) | ||
231 | except OSError: | ||
232 | pass | ||
233 | del localdata | ||
234 | } | ||
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass new file mode 100644 index 0000000000..c29ab5f423 --- /dev/null +++ b/meta/classes/package_rpm.bbclass | |||
@@ -0,0 +1,133 @@ | |||
1 | inherit package | ||
2 | inherit rpm_core | ||
3 | |||
4 | RPMBUILD="rpmbuild --short-circuit ${RPMOPTS}" | ||
5 | PACKAGEFUNCS += "do_package_rpm" | ||
6 | |||
# Emit an RPM .spec file for the current package from BitBake metadata,
# run rpmbuild over it, and move the resulting .rpm into DEPLOY_DIR_RPM.
# Expects ROOT, FILES and OUTSPECFILE to be set in the datastore 'd'
# (see do_package_rpm below, which drives this per package).
python write_specfile() {
	from bb import data, build
	import sys

	# BitBake variable -> spec tag; values starting with '%' are written
	# as whole spec sections rather than "Tag: value" header lines.
	out_vartranslate = {
		"PKG": "Name",
		"PV": "Version",
		"PR": "Release",
		"DESCRIPTION": "%description",
		"ROOT": "BuildRoot",
		"LICENSE": "License",
		"SECTION": "Group",
	}

	root = bb.data.getVar('ROOT', d)

	# Build the %files list: keep only FILES entries that actually match
	# something under ROOT (leading '.' is stripped first).
	filesvar = bb.data.expand(bb.data.getVar('FILES', d), d) or ""
	from glob import glob
	files = filesvar.split()
	todelete = []
	for file in files:
		if file[0] == '.':
			newfile = file[1:]
			files[files.index(file)] = newfile
			file = newfile
		else:
			newfile = file
		realfile = os.path.join(root, './'+file)
		if not glob(realfile):
			todelete.append(files[files.index(newfile)])
	for r in todelete:
		try:
			del files[files.index(r)]
		except ValueError:
			pass
	if not files:
		from bb import note
		note("Not creating empty archive for %s-%s-%s" % (bb.data.getVar('PKG',d, 1), bb.data.getVar('PV', d, 1), bb.data.getVar('PR', d, 1)))
		return

	# Output the .spec using this metadata store.  The loop above shadowed
	# the builtin 'file', so re-import it explicitly; an unset OUTSPECFILE
	# is signalled with the OSError sentinel below.
	try:
		from __builtin__ import file
		if not bb.data.getVar('OUTSPECFILE', d):
			raise OSError('eek!')
		specfile = file(bb.data.getVar('OUTSPECFILE', d), 'w')
	except (OSError, IOError):
		# file() raises IOError on open failure in Python 2; the plain
		# 'except OSError' here previously let that escape uncaught.
		raise bb.build.FuncFailed("unable to open spec file for writing.")

	fd = specfile
	# Header tags first ...
	for var in out_vartranslate.keys():
		if out_vartranslate[var][0] == "%":
			continue
		fd.write("%s\t: %s\n" % (out_vartranslate[var], bb.data.getVar(var, d)))
	fd.write("Summary\t: .\n")

	# ... then the '%' sections.
	for var in out_vartranslate.keys():
		if out_vartranslate[var][0] != "%":
			continue
		fd.write(out_vartranslate[var] + "\n")
		fd.write(bb.data.getVar(var, d) + "\n\n")

	fd.write("%files\n")
	for file in files:
		fd.write("%s\n" % file)

	fd.close()

	# call out rpmbuild on the .spec, thereby creating an rpm
	bb.data.setVar('BUILDSPEC', "${RPMBUILD} -bb ${OUTSPECFILE}\n", d)
	bb.data.setVarFlag('BUILDSPEC', 'func', '1', d)
	bb.build.exec_func('BUILDSPEC', d)

	# move the rpm into the pkgoutdir
	rpm = bb.data.expand('${RPMBUILDPATH}/RPMS/${TARGET_ARCH}/${PKG}-${PV}-${PR}.${TARGET_ARCH}.rpm', d)
	outrpm = bb.data.expand('${DEPLOY_DIR_RPM}/${PKG}-${PV}-${PR}.${TARGET_ARCH}.rpm', d)
	bb.movefile(rpm, outrpm)
}
87 | |||
# Create per-package spec files and RPMs for every entry in PACKAGES.
# When no PACKAGES split is defined, everything installed into ${D} is
# packaged as a single ${PN} package.
python do_package_rpm () {
	workdir = bb.data.getVar('WORKDIR', d)
	if not workdir:
		raise bb.build.FuncFailed("WORKDIR not defined")
	workdir = bb.data.expand(workdir, d)

	import os # path manipulations
	outdir = bb.data.getVar('DEPLOY_DIR_RPM', d)
	if not outdir:
		raise bb.build.FuncFailed("DEPLOY_DIR_RPM not defined")
	outdir = bb.data.expand(outdir, d)
	bb.mkdirhier(outdir)

	packages = bb.data.getVar('PACKAGES', d)
	if not packages:
		packages = "${PN}"
		bb.data.setVar('FILES', '', d)
		ddir = bb.data.expand(bb.data.getVar('D', d), d)
		bb.mkdirhier(ddir)
		# FILES is whitespace-split by write_specfile, so the entries
		# must be joined with spaces -- ''.join ran them together into
		# a single token that could never glob-match.
		bb.data.setVar(bb.data.expand('FILES_${PN}', d), ' '.join([ "./%s" % x for x in os.listdir(ddir)]), d)
	packages = bb.data.expand(packages, d)

	for pkg in packages.split():
		localdata = bb.data.createCopy(d)
		root = "%s/install/%s" % (workdir, pkg)

		bb.data.setVar('ROOT', '', localdata)
		bb.data.setVar('ROOT_%s' % pkg, root, localdata)
		bb.data.setVar('PKG', pkg, localdata)

		# Adding the package name to OVERRIDES makes ROOT_<pkg> win
		# when the datastore is re-evaluated below.
		overrides = bb.data.getVar('OVERRIDES', localdata)
		if not overrides:
			raise bb.build.FuncFailed('OVERRIDES not defined')
		overrides = bb.data.expand(overrides, localdata)
		bb.data.setVar('OVERRIDES', '%s:%s' % (overrides, pkg), localdata)

		bb.data.update_data(localdata)
		root = bb.data.getVar('ROOT', localdata)
		basedir = os.path.dirname(root)
		pkgoutdir = outdir
		bb.mkdirhier(pkgoutdir)
		bb.data.setVar('OUTSPECFILE', os.path.join(workdir, "%s.spec" % pkg), localdata)
		bb.build.exec_func('write_specfile', localdata)
		del localdata
}
diff --git a/meta/classes/package_tar.bbclass b/meta/classes/package_tar.bbclass new file mode 100644 index 0000000000..359e35f113 --- /dev/null +++ b/meta/classes/package_tar.bbclass | |||
@@ -0,0 +1,99 @@ | |||
1 | inherit package | ||
2 | |||
3 | PACKAGEFUNCS += "do_package_tar" | ||
4 | |||
python package_tar_fn () {
	# Compute the output tarball path for the current package and
	# publish it in the datastore as PKGFN.
	import os
	from bb import data
	deploy = bb.data.getVar('DEPLOY_DIR_TAR', d)
	tarname = "%s-%s-%s.tar.gz" % (bb.data.getVar('PKG', d),
	                               bb.data.getVar('PV', d),
	                               bb.data.getVar('PR', d))
	bb.data.setVar('PKGFN', bb.data.expand(os.path.join(deploy, tarname), d), d)
}
12 | |||
# Unpack a previously built tarball package (PKGFN) into IMAGE_ROOTFS.
python package_tar_install () {
	import os, sys
	pkg = bb.data.getVar('PKG', d, 1)
	pkgfn = bb.data.getVar('PKGFN', d, 1)
	rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)

	if None in (pkg,pkgfn,rootfs):
		# message now names the real variable, IMAGE_ROOTFS
		bb.error("missing variables (one or more of PKG, PKGFN, IMAGE_ROOTFS)")
		raise bb.build.FuncFailed
	try:
		bb.mkdirhier(rootfs)
		os.chdir(rootfs)
	except OSError:
		(type, value, traceback) = sys.exc_info()
		print value
		raise bb.build.FuncFailed

	if not os.access(pkgfn, os.R_OK):
		bb.debug(1, "%s does not exist, skipping" % pkgfn)
		raise bb.build.FuncFailed

	# zcat | tar rather than tar -z so any tar implementation works
	ret = os.system('zcat %s | tar -xf -' % pkgfn)
	if ret != 0:
		raise bb.build.FuncFailed
}
38 | |||
# Create one .tar.gz per entry in PACKAGES from the per-package install
# trees under ${WORKDIR}/install/<pkg> and drop them into DEPLOY_DIR_TAR.
python do_package_tar () {
	workdir = bb.data.getVar('WORKDIR', d, 1)
	if not workdir:
		bb.error("WORKDIR not defined, unable to package")
		return

	import os # path manipulations
	outdir = bb.data.getVar('DEPLOY_DIR_TAR', d, 1)
	if not outdir:
		bb.error("DEPLOY_DIR_TAR not defined, unable to package")
		return
	bb.mkdirhier(outdir)

	dvar = bb.data.getVar('D', d, 1)
	if not dvar:
		bb.error("D not defined, unable to package")
		return
	bb.mkdirhier(dvar)

	packages = bb.data.getVar('PACKAGES', d, 1)
	if not packages:
		bb.debug(1, "PACKAGES not defined, nothing to package")
		return

	for pkg in packages.split():
		localdata = bb.data.createCopy(d)
		root = "%s/install/%s" % (workdir, pkg)

		bb.data.setVar('ROOT', '', localdata)
		bb.data.setVar('ROOT_%s' % pkg, root, localdata)
		bb.data.setVar('PKG', pkg, localdata)

		# Append the package name to OVERRIDES so that after
		# update_data below, ROOT resolves via ROOT_<pkg>.
		overrides = bb.data.getVar('OVERRIDES', localdata)
		if not overrides:
			raise bb.build.FuncFailed('OVERRIDES not defined')
		overrides = bb.data.expand(overrides, localdata)
		bb.data.setVar('OVERRIDES', '%s:%s' % (overrides, pkg), localdata)

		bb.data.update_data(localdata)
		# stuff
		root = bb.data.getVar('ROOT', localdata)
		bb.mkdirhier(root)
		basedir = os.path.dirname(root)
		pkgoutdir = outdir
		bb.mkdirhier(pkgoutdir)
		# package_tar_fn sets PKGFN (the output tarball path)
		bb.build.exec_func('package_tar_fn', localdata)
		tarfn = bb.data.getVar('PKGFN', localdata, 1)
#		if os.path.exists(tarfn):
#			del localdata
#			continue
		os.chdir(root)
		from glob import glob
		if not glob('*'):
			bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
			continue
		ret = os.system("tar -czvf %s %s" % (tarfn, '.'))
		if ret != 0:
			bb.error("Creation of tar %s failed." % tarfn)
		# end stuff
		del localdata
}
diff --git a/meta/classes/palmtop.bbclass b/meta/classes/palmtop.bbclass new file mode 100644 index 0000000000..9d54de8748 --- /dev/null +++ b/meta/classes/palmtop.bbclass | |||
@@ -0,0 +1,20 @@ | |||
1 | # this build class sets up qmake variables to | ||
2 | # * build using the Qt Windowing System (QWS) | ||
3 | # * use qt | ||
4 | # * link against supc++ instead of stdc++ | ||
5 | # * use threads, if requested via PALMTOP_USE_MULTITHREADED_QT = "yes" | ||
6 | # inherit this class to build programs against libqpe | ||
7 | # inherit opie if you want to build programs against libopie2 | ||
8 | # don't override EXTRA_QMAKEVARS_POST if you inherit this class | ||
9 | |||
10 | inherit qmake | ||
11 | |||
12 | # special case for DISTRO = sharprom | ||
13 | CPP_SUPPORT_LIB = "LIBS-=-lstdc++ LIBS+=-lsupc++" | ||
14 | CPP_SUPPORT_LIB_sharprom = "LIBS-=-lstdc++" | ||
15 | EXTRA_QMAKEVARS_POST += "DEFINES+=QWS CONFIG+=qt ${CPP_SUPPORT_LIB}" | ||
16 | EXTRA_QMAKEVARS_POST += '${@base_conditional("PALMTOP_USE_MULTITHREADED_QT", "yes", "CONFIG+=thread", "CONFIG-=thread",d)}' | ||
17 | EXTRA_QMAKEVARS_POST += "${@["LIBS+=-lqpe ", ""][(bb.data.getVar('PN', d, 1) == 'libqpe-opie')]}" | ||
18 | DEPENDS_prepend = "${@["virtual/libqpe1 uicmoc-native ", ""][(bb.data.getVar('PN', d, 1) == 'libqpe-opie')]}" | ||
19 | |||
20 | FILES_${PN} = "${palmtopdir}" | ||
diff --git a/meta/classes/patcher.bbclass b/meta/classes/patcher.bbclass new file mode 100644 index 0000000000..c8a1b0350f --- /dev/null +++ b/meta/classes/patcher.bbclass | |||
@@ -0,0 +1,7 @@ | |||
1 | # Now that BitBake/OpenEmbedded uses Quilt by default, you can simply add an | ||
2 | # inherit patcher | ||
3 | # to one of your config files to let BB/OE use patcher again. | ||
4 | |||
5 | PATCHCLEANCMD = "patcher -B" | ||
6 | PATCHCMD = "patcher -R -p '%s' -n '%s' -i '%s'" | ||
7 | PATCH_DEPENDS = "${@["patcher-native", ""][(bb.data.getVar('PN', d, 1) == 'patcher-native')]}" | ||
diff --git a/meta/classes/pkg_distribute.bbclass b/meta/classes/pkg_distribute.bbclass new file mode 100644 index 0000000000..81978e3e3b --- /dev/null +++ b/meta/classes/pkg_distribute.bbclass | |||
@@ -0,0 +1,29 @@ | |||
1 | PKG_DISTRIBUTECOMMAND[func] = "1" | ||
python do_distribute_packages () {
	# Run the user-supplied PKG_DISTRIBUTECOMMAND to publish this
	# recipe's sources; fail loudly when it is not configured.
	if not bb.data.getVar('PKG_DISTRIBUTECOMMAND', d, 1):
		raise bb.build.FuncFailed("Unable to distribute packages, PKG_DISTRIBUTECOMMAND not defined")
	bb.build.exec_func('PKG_DISTRIBUTECOMMAND', d)
}
8 | |||
9 | addtask distribute_packages before do_build after do_fetch | ||
10 | |||
11 | PKG_DIST_LOCAL ?= "symlink" | ||
12 | PKG_DISTRIBUTEDIR ?= "${DEPLOY_DIR}/packages" | ||
13 | |||
PKG_DISTRIBUTECOMMAND () {
	# Publish the recipe's directory (the dir containing ${FILE}) into
	# PKG_DISTRIBUTEDIR, either copied or symlinked per PKG_DIST_LOCAL.
	p=`dirname ${FILE}`
	d=`basename $p`
	mkdir -p ${PKG_DISTRIBUTEDIR}
	case "${PKG_DIST_LOCAL}" in
	copy)
		# use this weird tar command to copy because we want to
		# exclude the BitKeeper directories
		test -e ${PKG_DISTRIBUTEDIR}/${d} || mkdir ${PKG_DISTRIBUTEDIR}/${d};
		(cd ${p}; tar -c --exclude SCCS -f - . ) | tar -C ${PKG_DISTRIBUTEDIR}/${d} -xpf -
		;;
	symlink)
		ln -sf $p ${PKG_DISTRIBUTEDIR}/
		;;
	esac
}
diff --git a/meta/classes/pkg_metainfo.bbclass b/meta/classes/pkg_metainfo.bbclass new file mode 100644 index 0000000000..ac4f73c77b --- /dev/null +++ b/meta/classes/pkg_metainfo.bbclass | |||
@@ -0,0 +1,22 @@ | |||
# Append a one-line wiki-style table row describing this package
# (name, version, description, homepage, licence) to
# ${DEPLOY_DIR}/package-metainfo.
python do_pkg_write_metainfo () {
	deploydir = bb.data.getVar('DEPLOY_DIR', d, 1)
	if not deploydir:
		bb.error("DEPLOY_DIR not defined, unable to write package info")
		return

	try:
		infofile = file(os.path.join(deploydir, 'package-metainfo'), 'a')
	except (OSError, IOError):
		# file() raises IOError on open failure in Python 2; the plain
		# 'except OSError' previously let that escape.  Message now
		# names the actual file being opened.
		raise bb.build.FuncFailed("unable to open package-metainfo file for writing.")

	name = bb.data.getVar('PN', d, 1)
	version = bb.data.getVar('PV', d, 1)
	desc = bb.data.getVar('DESCRIPTION', d, 1)
	page = bb.data.getVar('HOMEPAGE', d, 1)
	lic = bb.data.getVar('LICENSE', d, 1)

	infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" )
	infofile.close()
}
21 | |||
22 | addtask pkg_write_metainfo after do_package before do_build \ No newline at end of file | ||
diff --git a/meta/classes/pkgconfig.bbclass b/meta/classes/pkgconfig.bbclass new file mode 100644 index 0000000000..62f15f312d --- /dev/null +++ b/meta/classes/pkgconfig.bbclass | |||
@@ -0,0 +1,28 @@ | |||
1 | inherit base | ||
2 | |||
3 | DEPENDS_prepend = "pkgconfig-native " | ||
4 | |||
5 | # The namespaces can clash here hence the two step replace | ||
def get_pkgconfig_mangle(d):
	# Build the sed expression list that rewrites build-tree paths in
	# .pc files to staging paths.  The rename goes through neutral
	# OE* placeholders in two steps because the namespaces can clash.
	# Native builds get only the no-op "-e ''" expression.
	import bb.data
	s = "-e ''"
	if not bb.data.inherits_class('native', d):
		for expr in (
			"-e 's:=${libdir}:=OELIBDIR:;'",
			"-e 's:=${includedir}:=OEINCDIR:;'",
			"-e 's:=${datadir}:=OEDATADIR:'",
			"-e 's:=${prefix}:=OEPREFIX:'",
			"-e 's:=${exec_prefix}:=OEEXECPREFIX:'",
			"-e 's:OELIBDIR:${STAGING_LIBDIR}:;'",
			"-e 's:OEINCDIR:${STAGING_INCDIR}:;'",
			"-e 's:OEDATADIR:${STAGING_DATADIR}:'",
			"-e 's:OEPREFIX:${STAGING_LIBDIR}/..:'",
			"-e 's:OEEXECPREFIX:${STAGING_LIBDIR}/..:'",
		):
			s += " " + expr
	return s
21 | |||
do_stage_append () {
	# Copy every pkg-config file from the source tree into
	# PKG_CONFIG_PATH, rewriting build paths to staging paths via
	# get_pkgconfig_mangle(); *-uninstalled.pc files are skipped.
	for pc in `find ${S} -name '*.pc' | grep -v -- '-uninstalled.pc$'`; do
		pcname=`basename $pc`
		install -d ${PKG_CONFIG_PATH}
		cat $pc | sed ${@get_pkgconfig_mangle(d)} > ${PKG_CONFIG_PATH}/$pcname
	done
}
diff --git a/meta/classes/poky.bbclass b/meta/classes/poky.bbclass new file mode 100644 index 0000000000..885fb77441 --- /dev/null +++ b/meta/classes/poky.bbclass | |||
@@ -0,0 +1,4 @@ | |||
1 | MIRRORS_append () { | ||
2 | ftp://.*/.*/ http://www.o-hand.com/~richard/poky/sources/ | ||
3 | http://.*/.*/ http://www.o-hand.com/~richard/poky/sources/ | ||
4 | } | ||
diff --git a/meta/classes/qmake-base.bbclass b/meta/classes/qmake-base.bbclass new file mode 100644 index 0000000000..36ecfb622f --- /dev/null +++ b/meta/classes/qmake-base.bbclass | |||
@@ -0,0 +1,44 @@ | |||
1 | DEPENDS_prepend = "qmake-native " | ||
2 | |||
3 | OE_QMAKE_PLATFORM = "${TARGET_OS}-oe-g++" | ||
4 | QMAKESPEC := "${QMAKE_MKSPEC_PATH}/${OE_QMAKE_PLATFORM}" | ||
5 | |||
6 | # We override this completely to eliminate the -e normally passed in | ||
7 | EXTRA_OEMAKE = ' MAKEFLAGS= ' | ||
8 | |||
9 | export OE_QMAKE_CC="${CC}" | ||
10 | export OE_QMAKE_CFLAGS="${CFLAGS}" | ||
11 | export OE_QMAKE_CXX="${CXX}" | ||
12 | export OE_QMAKE_CXXFLAGS="-fno-exceptions -fno-rtti ${CXXFLAGS}" | ||
13 | export OE_QMAKE_LDFLAGS="${LDFLAGS}" | ||
14 | export OE_QMAKE_LINK="${CCLD}" | ||
15 | export OE_QMAKE_AR="${AR}" | ||
16 | export OE_QMAKE_STRIP="${STRIP}" | ||
17 | export OE_QMAKE_UIC="${STAGING_BINDIR}/uic" | ||
18 | export OE_QMAKE_MOC="${STAGING_BINDIR}/moc" | ||
19 | export OE_QMAKE_RCC="non-existant" | ||
20 | export OE_QMAKE_QMAKE="${STAGING_BINDIR}/qmake" | ||
21 | export OE_QMAKE_RPATH="-Wl,-rpath-link," | ||
22 | |||
23 | # default to qte2 via bb.conf, inherit qt3x11 to configure for qt3x11 | ||
24 | export OE_QMAKE_INCDIR_QT="${QTDIR}/include" | ||
25 | export OE_QMAKE_LIBDIR_QT="${QTDIR}/lib" | ||
26 | export OE_QMAKE_LIBS_QT="qte" | ||
27 | export OE_QMAKE_LIBS_X11="" | ||
28 | |||
oe_qmake_mkspecs () {
	# Make a local mkspecs/${OE_QMAKE_PLATFORM} copy of the staged
	# mkspec, recreating intra-directory symlinks whose target was
	# already copied and plain-copying everything else.
	mkdir -p mkspecs/${OE_QMAKE_PLATFORM}
	for f in ${QMAKE_MKSPEC_PATH}/${OE_QMAKE_PLATFORM}/*; do
		if [ -L $f ]; then
			lnk=`readlink $f`
			if [ -f mkspecs/${OE_QMAKE_PLATFORM}/$lnk ]; then
				# link target exists locally: keep it a symlink
				ln -s $lnk mkspecs/${OE_QMAKE_PLATFORM}/`basename $f`
			else
				cp $f mkspecs/${OE_QMAKE_PLATFORM}/
			fi
		else
			cp $f mkspecs/${OE_QMAKE_PLATFORM}/
		fi
	done
}
44 | |||
diff --git a/meta/classes/qmake.bbclass b/meta/classes/qmake.bbclass new file mode 100644 index 0000000000..4f2fceff35 --- /dev/null +++ b/meta/classes/qmake.bbclass | |||
@@ -0,0 +1,57 @@ | |||
1 | inherit qmake-base | ||
2 | |||
# Select a qmake spec for the target and run qmake over the project's
# .pro files to generate Makefiles.
qmake_do_configure() {
	case ${QMAKESPEC} in
	*linux-oe-g++|*linux-uclibc-oe-g++|*linux-gnueabi-oe-g++)
		# known-good oe-g++ specs: nothing to do
		;;
	*-oe-g++)
		die Unsupported target ${TARGET_OS} for oe-g++ qmake spec
		;;
	*)
		# no oe-g++ spec: search the stock mkspecs for a usable one
		oenote Searching for qmake spec file
		paths="${QMAKE_MKSPEC_PATH}/qws/${TARGET_OS}-${TARGET_ARCH}-g++"
		paths="${QMAKE_MKSPEC_PATH}/${TARGET_OS}-g++ $paths"

		if (echo "${TARGET_ARCH}"|grep -q 'i.86'); then
			paths="${QMAKE_MKSPEC_PATH}/qws/${TARGET_OS}-x86-g++ $paths"
		fi
		for i in $paths; do
			if test -e $i; then
				export QMAKESPEC=$i
				break
			fi
		done
		;;
	esac

	oenote "using qmake spec in ${QMAKESPEC}, using profiles '${QMAKE_PROFILES}'"

	if [ -z "${QMAKE_PROFILES}" ]; then
		PROFILES="`ls *.pro`"
	else
		PROFILES="${QMAKE_PROFILES}"
	fi

	if [ -z "$PROFILES" ]; then
		die "QMAKE_PROFILES not set and no profiles found in $PWD"
	fi

	if [ ! -z "${EXTRA_QMAKEVARS_POST}" ]; then
		AFTER="-after"
		QMAKE_VARSUBST_POST="${EXTRA_QMAKEVARS_POST}"
		oenote "qmake postvar substitution: ${EXTRA_QMAKEVARS_POST}"
	fi

	if [ ! -z "${EXTRA_QMAKEVARS_PRE}" ]; then
		QMAKE_VARSUBST_PRE="${EXTRA_QMAKEVARS_PRE}"
		oenote "qmake prevar substitution: ${EXTRA_QMAKEVARS_PRE}"
	fi

#	oenote "Calling '${OE_QMAKE_QMAKE} -makefile -spec ${QMAKESPEC} -o Makefile $QMAKE_VARSUBST_PRE $AFTER $PROFILES $QMAKE_VARSUBST_POST'"
	# NOTE(review): ${QMAKESPEC} on the next line looks like it is
	# expanded by BitBake when the run script is written, so the
	# 'export'/'unset' of the shell variable would only affect what
	# qmake itself reads from the environment -- confirm this
	# interplay is intended.
	unset QMAKESPEC || true
	${OE_QMAKE_QMAKE} -makefile -spec ${QMAKESPEC} -o Makefile $QMAKE_VARSUBST_PRE $AFTER $PROFILES $QMAKE_VARSUBST_POST || die "Error calling ${OE_QMAKE_QMAKE} on $PROFILES"
}
54 | |||
55 | EXPORT_FUNCTIONS do_configure | ||
56 | |||
57 | addtask configure after do_unpack do_patch before do_compile | ||
diff --git a/meta/classes/qpf.bbclass b/meta/classes/qpf.bbclass new file mode 100644 index 0000000000..d6e58871d5 --- /dev/null +++ b/meta/classes/qpf.bbclass | |||
@@ -0,0 +1,36 @@ | |||
1 | PACKAGE_ARCH = "all" | ||
2 | |||
# Fonts are shipped prebuilt: configure and compile are deliberate no-ops.
do_configure() {
	:
}

do_compile() {
	:
}
10 | |||
11 | pkg_postinst_fonts() { | ||
12 | #!/bin/sh | ||
13 | set -e | ||
14 | . /etc/profile | ||
15 | ${sbindir}/update-qtfontdir | ||
16 | } | ||
17 | |||
18 | pkg_postrm_fonts() { | ||
19 | #!/bin/sh | ||
20 | set -e | ||
21 | . /etc/profile | ||
22 | ${sbindir}/update-qtfontdir -f | ||
23 | } | ||
24 | |||
python populate_packages_prepend() {
	# Split the QPF font files under ${palmtopdir}/lib/fonts into one
	# package per font (name_size), wiring in the postinst/postrm
	# scripts above so the Qt font dir is regenerated on (un)install.
	postinst = bb.data.getVar('pkg_postinst_fonts', d, 1)
	postrm = bb.data.getVar('pkg_postrm_fonts', d, 1)
	fontdir = bb.data.getVar('palmtopdir', d, 1) + '/lib/fonts'
	# NOTE(review): the '.' before 'qpf' is an unescaped regex dot and
	# matches any character, not just a literal '.' -- confirm intended.
	pkgregex = "^([a-z-]*_[0-9]*).*.qpf$"
	pkgpattern = bb.data.getVar('QPF_PKGPATTERN', d, 1) or 'qpf-%s'
	pkgdescription = bb.data.getVar('QPF_DESCRIPTION', d, 1) or 'QPF font %s'

	do_split_packages(d, root=fontdir, file_regex=pkgregex, output_pattern=pkgpattern,
	                  description=pkgdescription, postinst=postinst, postrm=postrm, recursive=True, hook=None,
	                  extra_depends='qpf-font-common')
}
diff --git a/meta/classes/qt3e.bbclass b/meta/classes/qt3e.bbclass new file mode 100644 index 0000000000..c34d7c04f5 --- /dev/null +++ b/meta/classes/qt3e.bbclass | |||
@@ -0,0 +1,11 @@ | |||
1 | # | ||
2 | # override variables set by qmake-base to compile Qt/X11 apps | ||
3 | # | ||
4 | export QTDIR="${STAGING_DIR}/${HOST_SYS}/qte3" | ||
5 | export QTEDIR="${STAGING_DIR}/${HOST_SYS}/qte3" | ||
6 | export OE_QMAKE_UIC="${STAGING_BINDIR}/uic3" | ||
7 | export OE_QMAKE_MOC="${STAGING_BINDIR}/moc3" | ||
8 | export OE_QMAKE_CXXFLAGS="${CXXFLAGS} " | ||
9 | export OE_QMAKE_INCDIR_QT="${QTEDIR}/include" | ||
10 | export OE_QMAKE_LIBDIR_QT="${QTEDIR}/lib" | ||
11 | export OE_QMAKE_LIBS_QT="qte" | ||
diff --git a/meta/classes/qt3x11.bbclass b/meta/classes/qt3x11.bbclass new file mode 100644 index 0000000000..6e3d5f8ba2 --- /dev/null +++ b/meta/classes/qt3x11.bbclass | |||
@@ -0,0 +1,15 @@ | |||
1 | DEPENDS_prepend = "${@["qt3x11 ", ""][(bb.data.getVar('PN', d, 1) == 'qt-x11-free')]}" | ||
2 | EXTRA_QMAKEVARS_POST += "CONFIG+=thread" | ||
3 | # | ||
4 | # override variables set by qmake-base to compile Qt/X11 apps | ||
5 | # | ||
6 | export QTDIR = "${STAGING_DIR}/${HOST_SYS}/qt3" | ||
7 | export OE_QMAKE_UIC = "${STAGING_BINDIR}/uic3" | ||
8 | export OE_QMAKE_MOC = "${STAGING_BINDIR}/moc3" | ||
9 | export OE_QMAKE_CXXFLAGS = "${CXXFLAGS} -DQT_NO_XIM" | ||
10 | export OE_QMAKE_INCDIR_QT = "${QTDIR}/include" | ||
11 | export OE_QMAKE_LIBDIR_QT = "${QTDIR}/lib" | ||
12 | export OE_QMAKE_LIBS_QT = "qt" | ||
13 | export OE_QMAKE_LIBS_X11 = "-lXext -lX11 -lm" | ||
14 | |||
15 | |||
diff --git a/meta/classes/qt4x11.bbclass b/meta/classes/qt4x11.bbclass new file mode 100644 index 0000000000..635fc67694 --- /dev/null +++ b/meta/classes/qt4x11.bbclass | |||
@@ -0,0 +1,17 @@ | |||
1 | DEPENDS_prepend = "qmake2-native " | ||
2 | DEPENDS_prepend = "${@["qt4x11 ", ""][(bb.data.getVar('PN', d, 1) == 'qt4-x11-free')]}" | ||
3 | # | ||
4 | # override variables set by qmake-base to compile Qt4/X11 apps | ||
5 | # | ||
6 | export QTDIR = "${STAGING_DIR}/${HOST_SYS}/qt4" | ||
7 | export QMAKESPEC = "${QTDIR}/mkspecs/${TARGET_OS}-oe-g++" | ||
8 | export OE_QMAKE_UIC = "${STAGING_BINDIR}/uic4" | ||
9 | export OE_QMAKE_MOC = "${STAGING_BINDIR}/moc4" | ||
10 | export OE_QMAKE_RCC = "${STAGING_BINDIR}/rcc4" | ||
11 | export OE_QMAKE_QMAKE = "${STAGING_BINDIR}/qmake2" | ||
12 | export OE_QMAKE_LINK = "${CXX}" | ||
13 | export OE_QMAKE_CXXFLAGS = "${CXXFLAGS}" | ||
14 | export OE_QMAKE_INCDIR_QT = "${QTDIR}/include" | ||
15 | export OE_QMAKE_LIBDIR_QT = "${QTDIR}/lib" | ||
16 | export OE_QMAKE_LIBS_QT = "qt" | ||
17 | export OE_QMAKE_LIBS_X11 = "-lXext -lX11 -lm" | ||
diff --git a/meta/classes/rm_work.bbclass b/meta/classes/rm_work.bbclass new file mode 100644 index 0000000000..340446917e --- /dev/null +++ b/meta/classes/rm_work.bbclass | |||
@@ -0,0 +1,22 @@ | |||
1 | # | ||
2 | # Removes source after build | ||
3 | # | ||
4 | # To use it add that line to conf/local.conf: | ||
5 | # | ||
6 | # INHERIT += "rm_work" | ||
7 | # | ||
8 | |||
# Delete everything in ${WORKDIR} except the logs in temp/; the source
# directory itself is kept (emptied) so references to its name survive.
do_rm_work () {
	cd ${WORKDIR}
	for dir in *
	do
		# POSIX test(1) string equality is '='; '==' is a bashism
		# that breaks when /bin/sh is not bash.
		if [ "`basename ${S}`" = "$dir" ]; then
			rm -rf $dir/*
		elif [ "$dir" != 'temp' ]; then
			rm -rf $dir
		fi
	done
}
20 | |||
21 | addtask rm_work before do_build | ||
22 | addtask rm_work after do_package | ||
diff --git a/meta/classes/rootfs_ipk.bbclass b/meta/classes/rootfs_ipk.bbclass new file mode 100644 index 0000000000..2729503507 --- /dev/null +++ b/meta/classes/rootfs_ipk.bbclass | |||
@@ -0,0 +1,145 @@ | |||
1 | # | ||
2 | # Creates a root filesystem out of IPKs | ||
3 | # | ||
4 | # This rootfs can be mounted via root-nfs or it can be put into an cramfs/jffs etc. | ||
5 | # See image_ipk.oeclass for a usage of this. | ||
6 | # | ||
7 | |||
8 | DEPENDS_prepend="ipkg-native ipkg-utils-native fakeroot-native " | ||
9 | DEPENDS_append=" ${EXTRA_IMAGEDEPENDS}" | ||
10 | |||
11 | PACKAGES = "" | ||
12 | |||
13 | do_rootfs[nostamp] = 1 | ||
14 | do_rootfs[dirs] = ${TOPDIR} | ||
15 | do_build[nostamp] = 1 | ||
16 | |||
17 | IPKG_ARGS = "-f ${T}/ipkg.conf -o ${IMAGE_ROOTFS}" | ||
18 | |||
19 | ROOTFS_POSTPROCESS_COMMAND ?= "" | ||
20 | |||
21 | PID = "${@os.getpid()}" | ||
22 | |||
23 | # some default locales | ||
24 | IMAGE_LINGUAS ?= "de-de fr-fr en-gb" | ||
25 | |||
26 | LINGUAS_INSTALL = "${@" ".join(map(lambda s: "locale-base-%s" % s, bb.data.getVar('IMAGE_LINGUAS', d, 1).split()))}" | ||
27 | |||
# Assemble the root filesystem in IMAGE_ROOTFS by installing ipk
# packages from the local feed with ipkg-cl.
real_do_rootfs () {
	set -x

	mkdir -p ${IMAGE_ROOTFS}/dev

	# Regenerate the feed index unless the caller asked to keep it.
	if [ -z "${DEPLOY_KEEP_PACKAGES}" ]; then
		rm -f ${DEPLOY_DIR_IPK}/Packages
		touch ${DEPLOY_DIR_IPK}/Packages
		ipkg-make-index -r ${DEPLOY_DIR_IPK}/Packages -p ${DEPLOY_DIR_IPK}/Packages -l ${DEPLOY_DIR_IPK}/Packages.filelist -m ${DEPLOY_DIR_IPK}
	fi
	# Write an ipkg.conf with the local feed and one "arch" line per
	# entry in IPKG_ARCHS; later entries get a higher priority number.
	mkdir -p ${T}
	echo "src oe file:${DEPLOY_DIR_IPK}" > ${T}/ipkg.conf
	ipkgarchs="${IPKG_ARCHS}"
	priority=1
	for arch in $ipkgarchs; do
		echo "arch $arch $priority" >> ${T}/ipkg.conf
		priority=$(expr $priority + 5)
	done
	ipkg-cl ${IPKG_ARGS} update
	# Locale packages first, then the image's package set.
	if [ ! -z "${LINGUAS_INSTALL}" ]; then
		ipkg-cl ${IPKG_ARGS} install glibc-localedata-i18n
		for i in ${LINGUAS_INSTALL}; do
			ipkg-cl ${IPKG_ARGS} install $i
		done
	fi
	if [ ! -z "${IPKG_INSTALL}" ]; then
		ipkg-cl ${IPKG_ARGS} install ${IPKG_INSTALL}
	fi

	# Run the maintainer scripts offline; any script that fails is
	# re-flagged "unpacked" so it can be retried on the target.
	export D=${IMAGE_ROOTFS}
	export IPKG_OFFLINE_ROOT=${IMAGE_ROOTFS}
	mkdir -p ${IMAGE_ROOTFS}/etc/ipkg/
	grep "^arch" ${T}/ipkg.conf >${IMAGE_ROOTFS}/etc/ipkg/arch.conf

	for i in ${IMAGE_ROOTFS}${libdir}/ipkg/info/*.preinst; do
		if [ -f $i ] && ! sh $i; then
			ipkg-cl ${IPKG_ARGS} flag unpacked `basename $i .preinst`
		fi
	done
	for i in ${IMAGE_ROOTFS}${libdir}/ipkg/info/*.postinst; do
		if [ -f $i ] && ! sh $i configure; then
			ipkg-cl ${IPKG_ARGS} flag unpacked `basename $i .postinst`
		fi
	done

	install -d ${IMAGE_ROOTFS}/${sysconfdir}
	echo ${BUILDNAME} > ${IMAGE_ROOTFS}/${sysconfdir}/version

	# Image-specific hooks, then scan our own log for errors.
	${ROOTFS_POSTPROCESS_COMMAND}

	log_check rootfs
}
80 | |||
# Scan the do_<target> logfiles for known failure keywords and abort
# the build if any are found.
log_check() {
	set +x
	for target in $*
	do
		lf_path="${WORKDIR}/temp/log.do_$target.${PID}"

		echo "log_check: Using $lf_path as logfile"

		if test -e "$lf_path"
		then
			lf_txt="`cat $lf_path`"

			for keyword_die in "Cannot find package" "exit 1" ERR Fail
			do
				# POSIX redirection; '&>' is a bashism
				if (echo "$lf_txt" | grep -v log_check | grep "$keyword_die") >/dev/null 2>&1
				then
					echo "log_check: There were error messages in the logfile"
					echo -e "log_check: Matched keyword: [$keyword_die]\n"
					echo "$lf_txt" | grep -v log_check | grep -i "$keyword_die"
					echo ""
					do_exit=1
				fi
			done
			test "$do_exit" = 1 && exit 1
			# only claim a clean logfile when one was actually inspected
			echo "Logfile is clean"
		else
			echo "Cannot find logfile [$lf_path]"
		fi
	done

	set -x
}
115 | |||
# Build the rootfs from scratch under fakeroot so ownership and device
# nodes can be created without real root privileges.
fakeroot do_rootfs () {
	rm -rf ${IMAGE_ROOTFS}
	real_do_rootfs
}
120 | |||
# set '*' as the root password so the images
# can decide if they want it or not

zap_root_password () {
	# replace root's password hash field in /etc/passwd with '*'
	sed 's%^root:[^:]*:%root:*:%' < ${IMAGE_ROOTFS}/etc/passwd >${IMAGE_ROOTFS}/etc/passwd.new
	mv ${IMAGE_ROOTFS}/etc/passwd.new ${IMAGE_ROOTFS}/etc/passwd
}

# Record the build time (MMDDhhmmYYYY) so the target can set a sane
# clock from /etc/timestamp on boot.
create_etc_timestamp() {
	date +%2m%2d%2H%2M%Y >${IMAGE_ROOTFS}/etc/timestamp
}

# Turn any symbolic /sbin/init link into a file
remove_init_link () {
	if [ -h ${IMAGE_ROOTFS}/sbin/init ]; then
		LINKFILE=${IMAGE_ROOTFS}`readlink ${IMAGE_ROOTFS}/sbin/init`
		rm ${IMAGE_ROOTFS}/sbin/init
		cp $LINKFILE ${IMAGE_ROOTFS}/sbin/init
	fi
}
141 | |||
142 | # export the zap_root_password, create_etc_timestamp and remove_init_link | ||
143 | EXPORT_FUNCTIONS zap_root_password create_etc_timestamp remove_init_link | ||
144 | |||
145 | addtask rootfs before do_build after do_install | ||
diff --git a/meta/classes/rpm_core.bbclass b/meta/classes/rpm_core.bbclass new file mode 100644 index 0000000000..f28abbb1c3 --- /dev/null +++ b/meta/classes/rpm_core.bbclass | |||
@@ -0,0 +1,16 @@ | |||
RPMBUILDPATH="${WORKDIR}/rpm"

# Always use our private rpmrc and build for the target architecture.
# (The original assigned RPMOPTS twice; the first, --rcfile-only value
# was dead code immediately overwritten by this one.)
RPMOPTS="--rcfile=${WORKDIR}/rpmrc --target ${TARGET_SYS}"
RPM="rpm ${RPMOPTS}"
RPMBUILD="rpmbuild --buildroot ${D} --short-circuit ${RPMOPTS}"
7 | |||
rpm_core_do_preprpm() {
	# Create the rpmbuild directory layout under ${RPMBUILDPATH} and a
	# private rpmrc/macros pair pointing rpm's _topdir there.
	# NOTE(review): the {a,b{c,d}} brace expansion below requires a shell
	# with brace expansion (bash); POSIX sh would create one literal
	# directory -- confirm the task shell used here.
	mkdir -p ${RPMBUILDPATH}/{SPECS,RPMS/{i386,i586,i686,noarch,ppc,mips,mipsel,arm},SRPMS,SOURCES,BUILD}
	echo 'macrofiles:/usr/lib/rpm/macros:${WORKDIR}/macros' > ${WORKDIR}/rpmrc
	echo '%_topdir ${RPMBUILDPATH}' > ${WORKDIR}/macros
	echo '%_repackage_dir ${WORKDIR}' >> ${WORKDIR}/macros
}
14 | |||
15 | EXPORT_FUNCTIONS do_preprpm | ||
16 | addtask preprpm before do_fetch | ||
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass new file mode 100644 index 0000000000..a626162ffb --- /dev/null +++ b/meta/classes/sanity.bbclass | |||
@@ -0,0 +1,112 @@ | |||
1 | # | ||
2 | # Sanity check the users setup for common misconfigurations | ||
3 | # | ||
4 | |||
def raise_sanity_error(msg):
	"""
	Abort the build (bb.fatal) with the standard sanity-check banner,
	appending the caller-supplied problem description.
	"""
	import bb
	bb.fatal(""" Openembedded's config sanity checker detected a potential misconfiguration.
	Either fix the cause of this error or at your own risk disable the checker (see sanity.conf).
	Following is the list of potential problems / advisories:
	
	%s""" % msg)
12 | |||
def check_conf_exists(fn, data):
	"""
	Return True when the (expanded) configuration file fn is readable
	from some directory on BBPATH, False otherwise.
	"""
	import bb, os

	fn = bb.data.expand(fn, data)
	search_dirs = []
	vbbpath = bb.data.getVar("BBPATH", data)
	if vbbpath:
		search_dirs = vbbpath.split(":")
	for dirname in search_dirs:
		candidate = os.path.join(bb.data.expand(dirname, data), fn)
		if os.access(candidate, os.R_OK):
			return True
	return False
26 | |||
def check_app_exists(app, d):
	"""
	Return True when the (expanded) program name can be located on the
	configured PATH.
	"""
	from bb import which, data

	app = data.expand(app, d)
	search_path = data.getVar('PATH', d)
	return bool(which(search_path, app))
33 | |||
34 | |||
35 | def check_sanity(e): | ||
36 | from bb import note, error, data, __version__ | ||
37 | from bb.event import Handled, NotHandled, getName | ||
38 | try: | ||
39 | from distutils.version import LooseVersion | ||
40 | except ImportError: | ||
41 | def LooseVersion(v): print "WARNING: sanity.bbclass can't compare versions without python-distutils"; return 1 | ||
42 | import os | ||
43 | |||
44 | # Check the bitbake version meets minimum requirements | ||
45 | minversion = data.getVar('BB_MIN_VERSION', e.data , True) | ||
46 | if not minversion: | ||
47 | # Hack: BB_MIN_VERSION hasn't been parsed yet so return | ||
48 | # and wait for the next call | ||
49 | print "Foo %s" % minversion | ||
50 | return | ||
51 | |||
52 | if (LooseVersion(__version__) < LooseVersion(minversion)): | ||
53 | raise_sanity_error('Bitbake version %s is required and version %s was found' % (minversion, __version__)) | ||
54 | |||
55 | # Check TARGET_ARCH is set | ||
56 | if data.getVar('TARGET_ARCH', e.data, True) == 'INVALID': | ||
57 | raise_sanity_error('Please set TARGET_ARCH directly, or choose a MACHINE or DISTRO that does so.') | ||
58 | |||
59 | # Check TARGET_OS is set | ||
60 | if data.getVar('TARGET_OS', e.data, True) == 'INVALID': | ||
61 | raise_sanity_error('Please set TARGET_OS directly, or choose a MACHINE or DISTRO that does so.') | ||
62 | |||
63 | # Check user doesn't have ASSUME_PROVIDED = instead of += in local.conf | ||
64 | if "diffstat-native" not in data.getVar('ASSUME_PROVIDED', e.data, True).split(): | ||
65 | raise_sanity_error('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf') | ||
66 | |||
67 | # Check the MACHINE is valid | ||
68 | if not check_conf_exists("conf/machine/${MACHINE}.conf", e.data): | ||
69 | raise_sanity_error('Please set a valid MACHINE in your local.conf') | ||
70 | |||
71 | # Check the distro is valid | ||
72 | if not check_conf_exists("conf/distro/${DISTRO}.conf", e.data): | ||
73 | raise_sanity_error('Please set a valid DISTRO in your local.conf') | ||
74 | |||
75 | if not check_app_exists("${MAKE}", e.data): | ||
76 | raise_sanity_error('GNU make missing. Please install GNU make') | ||
77 | |||
78 | if not check_app_exists('${BUILD_PREFIX}gcc', e.data): | ||
79 | raise_sanity_error('C Host-Compiler is missing, please install one' ) | ||
80 | |||
81 | if not check_app_exists('${BUILD_PREFIX}g++', e.data): | ||
82 | raise_sanity_error('C++ Host-Compiler is missing, please install one' ) | ||
83 | |||
84 | if not check_app_exists('patch', e.data): | ||
85 | raise_sanity_error('Please install the patch utility, preferable GNU patch.') | ||
86 | |||
87 | if not check_app_exists('diffstat', e.data): | ||
88 | raise_sanity_error('Please install the diffstat utility') | ||
89 | |||
90 | if not check_app_exists('texi2html', e.data): | ||
91 | raise_sanity_error('Please install the texi2html binary') | ||
92 | |||
93 | if not check_app_exists('cvs', e.data): | ||
94 | raise_sanity_error('Please install the cvs utility') | ||
95 | |||
96 | if not check_app_exists('svn', e.data): | ||
97 | raise_sanity_error('Please install the svn utility') | ||
98 | |||
99 | oes_bb_conf = data.getVar( 'OES_BITBAKE_CONF', e.data, True ) | ||
100 | if not oes_bb_conf: | ||
101 | raise_sanity_error('You do not include OpenEmbeddeds version of conf/bitbake.conf') | ||
102 | |||
103 | addhandler check_sanity_eventhandler | ||
104 | python check_sanity_eventhandler() { | ||
105 | from bb import note, error, data, __version__ | ||
106 | from bb.event import getName | ||
107 | |||
108 | if getName(e) == "BuildStarted": | ||
109 | check_sanity(e) | ||
110 | |||
111 | return NotHandled | ||
112 | } | ||
diff --git a/meta/classes/scons.bbclass b/meta/classes/scons.bbclass new file mode 100644 index 0000000000..3160eca69a --- /dev/null +++ b/meta/classes/scons.bbclass | |||
@@ -0,0 +1,13 @@ | |||
1 | DEPENDS += "python-scons-native" | ||
2 | |||
scons_do_compile() {
	# Run the staged scons to build; abort the task on failure.
	if ! ${STAGING_BINDIR}/scons; then
		oefatal "scons build execution failed."
	fi
}
7 | |||
scons_do_install() {
	# Run the staged scons 'install' target; abort the task on failure.
	if ! ${STAGING_BINDIR}/scons install; then
		oefatal "scons install execution failed."
	fi
}
12 | |||
13 | EXPORT_FUNCTIONS do_compile do_install | ||
diff --git a/meta/classes/sdk.bbclass b/meta/classes/sdk.bbclass new file mode 100644 index 0000000000..bcabbc79bd --- /dev/null +++ b/meta/classes/sdk.bbclass | |||
@@ -0,0 +1,26 @@ | |||
1 | # SDK packages are built either explicitly by the user, | ||
2 | # or indirectly via dependency. No need to be in 'world'. | ||
3 | EXCLUDE_FROM_WORLD = "1" | ||
4 | |||
5 | SDK_NAME = "${TARGET_ARCH}/oe" | ||
6 | PACKAGE_ARCH = "${BUILD_ARCH}" | ||
7 | |||
8 | HOST_ARCH = "${BUILD_ARCH}" | ||
9 | HOST_VENDOR = "${BUILD_VENDOR}" | ||
10 | HOST_OS = "${BUILD_OS}" | ||
11 | HOST_PREFIX = "${BUILD_PREFIX}" | ||
12 | HOST_CC_ARCH = "${BUILD_CC_ARCH}" | ||
13 | |||
CPPFLAGS = "${BUILD_CPPFLAGS}"
CFLAGS = "${BUILD_CFLAGS}"
# NOTE(review): CXXFLAGS is taken from BUILD_CFLAGS, not BUILD_CXXFLAGS --
# confirm this is intentional and not a typo.
CXXFLAGS = "${BUILD_CFLAGS}"
LDFLAGS = "${BUILD_LDFLAGS}"
18 | |||
19 | prefix = "/usr/local/${SDK_NAME}" | ||
20 | exec_prefix = "${prefix}" | ||
21 | base_prefix = "${exec_prefix}" | ||
22 | |||
23 | FILES_${PN} = "${prefix}" | ||
24 | |||
25 | |||
26 | |||
diff --git a/meta/classes/sdl.bbclass b/meta/classes/sdl.bbclass new file mode 100644 index 0000000000..c0b21427a4 --- /dev/null +++ b/meta/classes/sdl.bbclass | |||
@@ -0,0 +1,44 @@ | |||
1 | # | ||
2 | # (C) Michael 'Mickey' Lauer <mickey@Vanille.de> | ||
3 | # | ||
4 | |||
5 | DEPENDS += "virtual/libsdl libsdl-mixer libsdl-image" | ||
6 | |||
7 | APPDESKTOP ?= "${PN}.desktop" | ||
8 | APPNAME ?= "${PN}" | ||
9 | APPIMAGE ?= "${PN}.png" | ||
10 | |||
sdl_do_sdl_install() {
	# Install the SDL app into the palmtop (Opie/Qtopia) layout:
	# a launcher symlink under bin/, the icon under pics/, and a
	# .desktop entry under apps/Games.
	install -d ${D}${palmtopdir}/bin
	install -d ${D}${palmtopdir}/pics
	install -d ${D}${palmtopdir}/apps/Games
	ln -sf ${bindir}/${APPNAME} ${D}${palmtopdir}/bin/${APPNAME}
	install -m 0644 ${APPIMAGE} ${D}${palmtopdir}/pics/${PN}.png

	# Use the recipe-provided .desktop file when present, otherwise
	# generate a minimal one on the fly.
	if [ -e "${APPDESKTOP}" ]
	then
		echo ${APPDESKTOP} present, installing to palmtopdir...
		install -m 0644 ${APPDESKTOP} ${D}${palmtopdir}/apps/Games/${PN}.desktop
	else
		echo ${APPDESKTOP} not present, creating one on-the-fly...
		cat >${D}${palmtopdir}/apps/Games/${PN}.desktop <<EOF
[Desktop Entry]
Note=Auto Generated... this may be not what you want
Comment=${DESCRIPTION}
Exec=${APPNAME}
Icon=${APPIMAGE}
Type=Application
Name=${PN}
EOF
	fi
}
35 | |||
36 | EXPORT_FUNCTIONS do_sdl_install | ||
37 | addtask sdl_install after do_compile before do_populate_staging | ||
38 | |||
39 | SECTION = "x11/games" | ||
40 | SECTION_${PN}-opie = "opie/games" | ||
41 | |||
42 | PACKAGES += "${PN}-opie" | ||
43 | RDEPENDS_${PN}-opie += "${PN}" | ||
44 | FILES_${PN}-opie = "${palmtopdir}" | ||
diff --git a/meta/classes/sip.bbclass b/meta/classes/sip.bbclass new file mode 100644 index 0000000000..adf179b130 --- /dev/null +++ b/meta/classes/sip.bbclass | |||
@@ -0,0 +1,58 @@ | |||
1 | # Build Class for Sip based Python Bindings | ||
2 | # (C) Michael 'Mickey' Lauer <mickey@Vanille.de> | ||
3 | # | ||
4 | |||
5 | DEPENDS =+ "sip-native python-sip" | ||
6 | |||
7 | # default stuff, do not uncomment | ||
8 | # EXTRA_SIPTAGS = "-tWS_QWS -tQtPE_1_6_0 -tQt_2_3_1" | ||
9 | |||
sip_do_generate() {
	# Generate C++ bindings and qmake .pro files for each sip module:
	# either the explicit SIP_MODULES list, or every sip/*mod.sip found.
	if [ -z "${SIP_MODULES}" ]; then
		MODULES="`ls sip/*mod.sip`"
	else
		MODULES="${SIP_MODULES}"
	fi

	if [ -z "$MODULES" ]; then
		die "SIP_MODULES not set and no modules found in $PWD"
	else
		# Fix: report the modules actually used ($MODULES); the old
		# message printed SIP_MODULES, which is empty whenever the
		# modules were auto-detected from sip/*mod.sip.
		oenote "using modules '$MODULES' and tags '${EXTRA_SIPTAGS}'"
	fi

	# The sip tags select the platform/library variants and are mandatory.
	if [ -z "${EXTRA_SIPTAGS}" ]; then
		die "EXTRA_SIPTAGS needs to be set!"
	else
		SIPTAGS="${EXTRA_SIPTAGS}"
	fi

	# Optional sip feature file (-z).
	if [ ! -z "${SIP_FEATURES}" ]; then
		FEATURES="-z ${SIP_FEATURES}"
		oenote "sip feature file: ${SIP_FEATURES}"
	fi

	for module in $MODULES
	do
		install -d ${module}/
		# Fix: log the exact command that is executed below; the old
		# note showed a different -I ordering and a wrong -b argument
		# (.pro.in instead of .sbf).
		oenote "calling 'sip -I ${STAGING_SIPDIR} -I sip ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.sbf sip/${module}/${module}mod.sip'"
		sip -I ${STAGING_SIPDIR} -I sip ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.sbf sip/${module}/${module}mod.sip \
			|| die "Error calling sip on ${module}"
		# Convert the generated .sbf build file into a .pro project file.
		cat ${module}/${module}.sbf | sed s,target,TARGET, \
			| sed s,sources,SOURCES, \
			| sed s,headers,HEADERS, \
			| sed s,"moc_HEADERS =","HEADERS +=", \
			>${module}/${module}.pro
		echo "TEMPLATE=lib" >>${module}/${module}.pro
		[ "${module}" = "qt" ] && echo "" >>${module}/${module}.pro
		[ "${module}" = "qtcanvas" ] && echo "" >>${module}/${module}.pro
		[ "${module}" = "qttable" ] && echo "" >>${module}/${module}.pro
		[ "${module}" = "qwt" ] && echo "" >>${module}/${module}.pro
		[ "${module}" = "qtpe" ] && echo "" >>${module}/${module}.pro
		[ "${module}" = "qtpe" ] && echo "LIBS+=-lqpe" >>${module}/${module}.pro
		# keep the loop's exit status 0 even when no [ ] test above matched
		true
	done
}
55 | |||
56 | EXPORT_FUNCTIONS do_generate | ||
57 | |||
58 | addtask generate after do_unpack do_patch before do_configure | ||
diff --git a/meta/classes/sourcepkg.bbclass b/meta/classes/sourcepkg.bbclass new file mode 100644 index 0000000000..390d3684d4 --- /dev/null +++ b/meta/classes/sourcepkg.bbclass | |||
@@ -0,0 +1,111 @@ | |||
1 | DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/source" | ||
2 | EXCLUDE_FROM ?= ".pc autom4te.cache" | ||
3 | |||
4 | # used as part of a path. make sure it's set | ||
5 | DISTRO ?= "openembedded" | ||
6 | |||
def get_src_tree(d):
	"""
	Return the name (relative to WORKDIR) of the unpacked source tree,
	derived from the unexpanded ${S}. Returns None when WORKDIR or S is
	unset; aborts the build when the derived directory does not exist.
	"""
	import bb
	import os, os.path

	workdir = bb.data.getVar('WORKDIR', d, 1)
	if not workdir:
		bb.error("WORKDIR not defined, unable to find source tree.")
		return

	# S is read unexpanded (flag 0) so its path components can be
	# inspected before variable expansion.
	s = bb.data.getVar('S', d, 0)
	if not s:
		bb.error("S not defined, unable to find source tree.")
		return

	# assumes S has the form ${WORKDIR}/<tree>[/...]: component [1] is
	# the tree name -- TODO confirm this holds for all users of this class
	s_tree_raw = s.split('/')[1]
	s_tree = bb.data.expand(s_tree_raw, d)

	src_tree_path = os.path.join(workdir, s_tree)
	try:
		os.listdir(src_tree_path)
	except OSError:
		bb.fatal("Expected to find source tree in '%s' which doesn't exist." % src_tree_path)
	# NOTE(review): bb.debug usually takes (level, msg); confirm this
	# single-argument call logs as intended with the bb version in use.
	bb.debug("Assuming source tree is '%s'" % src_tree_path)

	return s_tree
32 | |||
sourcepkg_do_create_orig_tgz(){
	# Pack the pristine (just-unpacked) source tree into
	# ${DEPLOY_DIR_SRC}/${P}.orig.tar.gz and keep a .orig copy of the
	# tree for the later diff against the patched sources.

	mkdir -p ${DEPLOY_DIR_SRC}
	cd ${WORKDIR}
	# Build tar's exclusion list from EXCLUDE_FROM (.pc, autom4te.cache, ...)
	for i in ${EXCLUDE_FROM}; do
		echo $i >> temp/exclude-from-file
	done

	src_tree=${@get_src_tree(d)}

	echo $src_tree
	oenote "Creating .orig.tar.gz in ${DEPLOY_DIR_SRC}/${P}.orig.tar.gz"
	tar cvzf ${DEPLOY_DIR_SRC}/${P}.orig.tar.gz --exclude-from temp/exclude-from-file $src_tree
	cp -pPR $src_tree $src_tree.orig
}
48 | |||
sourcepkg_do_archive_bb() {
	# Copy this recipe's .bb file (${FILE}) into the source tree's
	# ${DISTRO} subdirectory so it ships with the source package.
	src_tree=${@get_src_tree(d)}
	recipe_dest=${WORKDIR}/$src_tree/${DISTRO}
	mkdir -p $recipe_dest
	cp ${FILE} $recipe_dest
}
57 | |||
python sourcepkg_do_dumpdata() {
	# Dump the fully-expanded metadata (environment variables, shell
	# functions, and python functions) of this recipe into
	# <srctree>/<distro>/${P}-${PR}.showdata.dump.
	import os
	import os.path

	workdir = bb.data.getVar('WORKDIR', d, 1)
	distro = bb.data.getVar('DISTRO', d, 1)
	s_tree = get_src_tree(d)
	openembeddeddir = os.path.join(workdir, s_tree, distro)
	dumpfile = os.path.join(openembeddeddir, bb.data.expand("${P}-${PR}.showdata.dump",d))

	try:
		os.mkdir(openembeddeddir)
	except OSError:
		# dir exists
		pass

	bb.note("Dumping metadata into '%s'" % dumpfile)
	f = open(dumpfile, "w")
	# emit variables and shell functions
	bb.data.emit_env(f, d, True)
	# emit the metadata which isnt valid shell
	for e in d.keys():
		if bb.data.getVarFlag(e, 'python', d):
			f.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1)))
	f.close()
}
84 | |||
sourcepkg_do_create_diff_gz(){
	# Diff the patched source tree against the pristine .orig copy made
	# by do_create_orig_tgz and write ${P}-${PR}.diff.gz to DEPLOY_DIR_SRC.

	cd ${WORKDIR}
	# same exclusion list as used for the orig tarball
	for i in ${EXCLUDE_FROM}; do
		echo $i >> temp/exclude-from-file
	done


	src_tree=${@get_src_tree(d)}

	# Copy loose top-level WORKDIR files (patches etc.) into the tree so
	# they appear in the diff as well.
	for i in `find . -maxdepth 1 -type f`; do
		mkdir -p $src_tree/${DISTRO}/files
		cp $i $src_tree/${DISTRO}/files
	done

	oenote "Creating .diff.gz in ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.gz"
	# LC_ALL/TZ pinned so the diff output is reproducible
	LC_ALL=C TZ=UTC0 diff --exclude-from=temp/exclude-from-file -Naur $src_tree.orig $src_tree | gzip -c > ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.gz
	rm -rf $src_tree.orig
}
104 | |||
EXPORT_FUNCTIONS do_create_orig_tgz do_archive_bb do_dumpdata do_create_diff_gz

addtask create_orig_tgz after do_unpack before do_patch
addtask archive_bb after do_patch before do_dumpdata
# fix: the after/before clauses must use the real task names with their
# 'do_' prefix -- 'archive_bb' and 'do_dump_data' matched nothing, so
# the dumpdata/create_diff_gz ordering was silently broken.
addtask dumpdata after do_archive_bb before do_create_diff_gz
addtask create_diff_gz after do_dumpdata before do_configure
111 | |||
diff --git a/meta/classes/src_distribute.bbclass b/meta/classes/src_distribute.bbclass new file mode 100644 index 0000000000..5daf526018 --- /dev/null +++ b/meta/classes/src_distribute.bbclass | |||
@@ -0,0 +1,40 @@ | |||
1 | include conf/licenses.conf | ||
2 | |||
3 | SRC_DISTRIBUTECOMMAND[func] = "1" | ||
python do_distribute_sources () {
	# Distribute each fetched source file via SRC_DISTRIBUTECOMMAND, but
	# only when every LICENSE component is distributable (see below).
	l = bb.data.createCopy(d)
	bb.data.update_data(l)
	# NOTE(review): the updated copy 'l' is never used afterwards, and the
	# name is immediately reused as the loop variable below -- confirm
	# whether the copy is still needed at all.
	licenses = (bb.data.getVar('LICENSE', d, 1) or "").split()
	if not licenses:
		bb.note("LICENSE not defined")
	src_distribute_licenses = (bb.data.getVar('SRC_DISTRIBUTE_LICENSES', d, 1) or "").split()
	# Explanation:
	# Space seperated items in LICENSE must *all* be distributable
	# Each space seperated item may be used under any number of | seperated licenses.
	# If any of those | seperated licenses are distributable, then that component is.
	# i.e. LICENSE = "GPL LGPL"
	# In this case, both components are distributable.
	# LICENSE = "GPL|QPL|Proprietary"
	# In this case, GPL is distributable, so the component is.
	valid = 1
	for l in licenses:
		lvalid = 0
		for i in l.split("|"):
			if i in src_distribute_licenses:
				lvalid = 1
		if lvalid != 1:
			valid = 0
	if valid == 0:
		bb.note("Licenses (%s) are not all listed in SRC_DISTRIBUTE_LICENSES, skipping source distribution" % licenses)
		return
	import re
	for s in (bb.data.getVar('A', d, 1) or "").split():
		# strip any ';key=value' fetcher suffix from the entry
		s = re.sub(';.*$', '', s)
		cmd = bb.data.getVar('SRC_DISTRIBUTECOMMAND', d, 1)
		if not cmd:
			raise bb.build.FuncFailed("Unable to distribute sources, SRC_DISTRIBUTECOMMAND not defined")
		# pass the file to the distribute command via ${SRC}
		bb.data.setVar('SRC', s, d)
		bb.build.exec_func('SRC_DISTRIBUTECOMMAND', d)
}
39 | |||
40 | addtask distribute_sources before do_build after do_fetch | ||
diff --git a/meta/classes/src_distribute_local.bbclass b/meta/classes/src_distribute_local.bbclass new file mode 100644 index 0000000000..5f0cef5bec --- /dev/null +++ b/meta/classes/src_distribute_local.bbclass | |||
@@ -0,0 +1,31 @@ | |||
1 | inherit src_distribute | ||
2 | |||
3 | # SRC_DIST_LOCAL possible values: | ||
4 | # copy copies the files from ${A} to the distributedir | ||
5 | # symlink symlinks the files from ${A} to the distributedir | ||
6 | # move+symlink moves the files into distributedir, and symlinks them back | ||
7 | SRC_DIST_LOCAL ?= "move+symlink" | ||
8 | SRC_DISTRIBUTEDIR ?= "${DEPLOY_DIR}/sources" | ||
SRC_DISTRIBUTECOMMAND () {
	# Distribute one fetched file (${SRC}, set by do_distribute_sources)
	# into SRC_DISTRIBUTEDIR using the SRC_DIST_LOCAL policy.
	s="${SRC}"
	# Only handle regular files living under DL_DIR; anything else
	# (symlinks, files from other locations) is skipped silently.
	if [ ! -L "$s" ] && (echo "$s"|grep "^${DL_DIR}"); then
		:
	else
		exit 0;
	fi
	mkdir -p ${SRC_DISTRIBUTEDIR}
	case "${SRC_DIST_LOCAL}" in
		copy)
			# carry the .md5 checksum file along when present
			test -e $s.md5 && cp -f $s.md5 ${SRC_DISTRIBUTEDIR}/
			cp -f $s ${SRC_DISTRIBUTEDIR}/
			;;
		symlink)
			test -e $s.md5 && ln -sf $s.md5 ${SRC_DISTRIBUTEDIR}/
			ln -sf $s ${SRC_DISTRIBUTEDIR}/
			;;
		move+symlink)
			# move the file out of DL_DIR and leave a symlink behind
			mv $s ${SRC_DISTRIBUTEDIR}/
			ln -sf ${SRC_DISTRIBUTEDIR}/`basename $s` $s
			;;
	esac
}
diff --git a/meta/classes/srec.bbclass b/meta/classes/srec.bbclass new file mode 100644 index 0000000000..e7bdc6c75d --- /dev/null +++ b/meta/classes/srec.bbclass | |||
@@ -0,0 +1,28 @@ | |||
1 | # | ||
2 | # Creates .srec files from images. | ||
3 | # | ||
4 | # Useful for loading with Yamon. | ||
5 | |||
6 | # Define SREC_VMAADDR in your machine.conf. | ||
7 | |||
8 | SREC_CMD = "${TARGET_PREFIX}objcopy -O srec -I binary --adjust-vma ${SREC_VMAADDR} ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.${type} ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.${type}.srec" | ||
9 | |||
10 | # Do not build srec files for these types of images: | ||
11 | SREC_SKIP = "tar" | ||
12 | |||
13 | do_srec[nostamp] = 1 | ||
14 | |||
do_srec () {
	# Convert each built rootfs image into an S-record file for Yamon.
	# SREC_VMAADDR must come from the machine configuration.
	# Fix: quote the expansion -- the old unquoted test expanded to
	# '[ = "" ]' (a shell syntax error) when SREC_VMAADDR was empty,
	# instead of producing the intended fatal diagnostic.
	if [ "${SREC_VMAADDR}" = "" ] ; then
		oefatal Cannot do_srec without SREC_VMAADDR defined.
	fi
	for type in ${IMAGE_FSTYPES}; do
		for skiptype in ${SREC_SKIP}; do
			# 'continue 2' skips to the next image type
			if [ $type = $skiptype ] ; then continue 2 ; fi
		done
		${SREC_CMD}
	done
	return 0
}
27 | |||
28 | addtask srec after do_rootfs before do_build | ||
diff --git a/meta/classes/tinderclient.bbclass b/meta/classes/tinderclient.bbclass new file mode 100644 index 0000000000..6e10d0f34b --- /dev/null +++ b/meta/classes/tinderclient.bbclass | |||
@@ -0,0 +1,332 @@ | |||
def tinder_http_post(server, selector, content_type, body):
    """
    POST body to selector on server, retrying up to five times.
    Returns (errcode, errmsg, headers, file) on success; falls off the
    end (returning None) when every attempt fails, which callers see as
    a TypeError when they unpack the tuple.
    """
    import httplib
    # now post it
    for i in range(0,5):
        try:
            h = httplib.HTTP(server)
            h.putrequest('POST', selector)
            h.putheader('content-type', content_type)
            h.putheader('content-length', str(len(body)))
            h.endheaders()
            h.send(body)
            errcode, errmsg, headers = h.getreply()
            #print errcode, errmsg, headers
            return (errcode,errmsg, headers, h.file)
        except Exception:
            # Fix: narrowed from a bare 'except:' so KeyboardInterrupt
            # and SystemExit still abort instead of being retried away.
            pass
18 | |||
def tinder_form_data(bound, dict, log):
    """
    Build a multipart/form-data body: one part per entry in dict, plus
    an optional 'log' file part, joined with CRLF and closed with the
    terminating boundary.
    """
    sep = "--" + bound
    parts = []
    # one form-data part per key/value pair
    for name in dict:
        parts.extend([sep,
                      'Content-Disposition: form-data; name="%s"' % name,
                      "",
                      dict[name]])
    # optional log attachment
    if log:
        parts.extend([sep,
                      'Content-Disposition: form-data; name="log"; filename="log.txt"',
                      '',
                      log])
    # closing boundary and trailing CRLF
    parts.extend([sep + '--', ''])
    return "\r\n".join(parts)
37 | |||
def tinder_time_string():
    """
    Return the time as GMT.
    Placeholder implementation: currently always the empty string.
    """
    return ""
43 | |||
def tinder_format_http_post(d,status,log):
    """
    Format the Tinderbox HTTP post with the data needed
    for the tinderbox to be happy.
    Returns a (content_type, body) pair ready for tinder_http_post().
    """

    from bb import data, build
    import os,random

    # the variables we will need to send on this form post
    variables = {
        "tree" : data.getVar('TINDER_TREE', d, True),
        "machine_name" : data.getVar('TINDER_MACHINE', d, True),
        "os" : os.uname()[0],
        "os_version" : os.uname()[2],
        "compiler" : "gcc",
        "clobber" : data.getVar('TINDER_CLOBBER', d, True)
    }

    # optionally add the status
    if status:
        variables["status"] = str(status)

    # try to load the machine id
    # we only need on build_status.pl but sending it
    # always does not hurt
    try:
        f = file(data.getVar('TMPDIR',d,True)+'/tinder-machine.id', 'r')
        id = f.read()
        variables['machine_id'] = id
    except:
        pass

    # the boundary we will need (random so concurrent posts don't clash)
    boundary = "----------------------------------%d" % int(random.random()*1000000000000)

    # now format the body
    body = tinder_form_data( boundary, variables, log )

    return ("multipart/form-data; boundary=%s" % boundary),body
84 | |||
85 | |||
def tinder_build_start(d):
    """
    Inform the tinderbox that a build is starting. We do this
    by posting our name and tree to the build_start.pl script
    on the server, then saving the machine id the server assigns
    us into ${TMPDIR}/tinder-machine.id.
    """
    from bb import data

    # get the body and type
    content_type, body = tinder_format_http_post(d,None,None)
    server = data.getVar('TINDER_HOST', d, True )
    url = data.getVar('TINDER_URL', d, True )

    selector = url + "/xml/build_start.pl"

    #print "selector %s and url %s" % (selector, url)

    # now post it
    errcode, errmsg, headers, h_file = tinder_http_post(server,selector,content_type, body)
    #print errcode, errmsg, headers
    report = h_file.read()

    # now let us find the machine id that was assigned to us
    search = "<machine id='"
    report = report[report.find(search)+len(search):]
    report = report[0:report.find("'")]

    import bb
    bb.note("Machine ID assigned by tinderbox: %s" % report )

    # now we will need to save the machine number
    # we will override any previous numbers
    f = file(data.getVar('TMPDIR', d, True)+"/tinder-machine.id", 'w')
    f.write(report)
    # Fix: close the id file so the write is flushed before the file is
    # read back by later tinder_format_http_post() calls.
    f.close()
120 | |||
121 | |||
def tinder_send_http(d, status, log):
    """
    Send this log (with the given status) to the tinderbox server's
    build_status.pl script.
    """
    from bb import data

    server = data.getVar('TINDER_HOST', d, True )
    url = data.getVar('TINDER_URL', d, True )
    selector = url + "/xml/build_status.pl"

    # format the multipart body, then post it; the server's reply is
    # not inspected here
    content_type, body = tinder_format_http_post(d,status,log)
    tinder_http_post(server,selector,content_type, body)
140 | |||
141 | |||
def tinder_print_info(d):
    """
    Print the TinderBox Info
    Including informations of the BaseSystem and the Tree
    we use.
    Returns the formatted info block as a single string.
    """

    from bb import data
    import os
    # get the local vars

    time = tinder_time_string()
    ops = os.uname()[0]
    version = os.uname()[2]
    url = data.getVar( 'TINDER_URL' , d, True )
    tree = data.getVar( 'TINDER_TREE', d, True )
    branch = data.getVar( 'TINDER_BRANCH', d, True )
    srcdate = data.getVar( 'SRCDATE', d, True )
    machine = data.getVar( 'MACHINE', d, True )
    distro = data.getVar( 'DISTRO', d, True )
    bbfiles = data.getVar( 'BBFILES', d, True )
    tarch = data.getVar( 'TARGET_ARCH', d, True )
    fpu = data.getVar( 'TARGET_FPU', d, True )
    oerev = data.getVar( 'OE_REVISION', d, True ) or "unknown"

    # there is a bug with tipple quoted strings
    # i will work around but will fix the original
    # bug as well
    #
    # The %(name)s placeholders below are substituted from the local
    # variables gathered above via the '% vars()' at the end.
    output = []
    output.append("== Tinderbox Info" )
    output.append("Time: %(time)s" )
    output.append("OS: %(ops)s" )
    output.append("%(version)s" )
    output.append("Compiler: gcc" )
    output.append("Tinderbox Client: 0.1" )
    output.append("Tinderbox Client Last Modified: yesterday" )
    output.append("Tinderbox Protocol: 0.1" )
    output.append("URL: %(url)s" )
    output.append("Tree: %(tree)s" )
    output.append("Config:" )
    output.append("branch = '%(branch)s'" )
    output.append("TARGET_ARCH = '%(tarch)s'" )
    output.append("TARGET_FPU = '%(fpu)s'" )
    output.append("SRCDATE = '%(srcdate)s'" )
    output.append("MACHINE = '%(machine)s'" )
    output.append("DISTRO = '%(distro)s'" )
    output.append("BBFILES = '%(bbfiles)s'" )
    output.append("OEREV = '%(oerev)s'" )
    output.append("== End Tinderbox Client Info" )

    # now create the real output
    return "\n".join(output) % vars()
194 | |||
195 | |||
def tinder_print_env():
    """
    Print the environment variables of this build.
    Returns the tinderbox-formatted environment dump as a string.
    """
    import os

    time_start = tinder_time_string()
    time_end = tinder_time_string()

    # build the environment
    env = ""
    for var in os.environ:
        env += "%s=%s\n" % (var, os.environ[var])

    # Fix: interpolate the timestamps per-line instead of applying '%'
    # to the joined string -- the old '"\n".join(output) % vars()' also
    # ran the env dump through %-formatting, so any environment value
    # containing a literal '%' raised a formatting error.
    output = []
    output.append( "---> TINDERBOX RUNNING env %s" % time_start )
    output.append( env )
    output.append( "<--- TINDERBOX FINISHED (SUCCESS) %s" % time_end )

    return "\n".join(output)
217 | |||
def tinder_tinder_start(d, event):
    """
    PRINT the configuration of this build
    Returns the banner (config block + package list) that opens the
    tinderbox log; 'event' is the BuildStarted event whose getPkgs()
    supplies the package list.
    """

    time_start = tinder_time_string()
    config = tinder_print_info(d)
    #env = tinder_print_env()
    time_end = tinder_time_string()
    packages = " ".join( event.getPkgs() )

    # %(name)s placeholders are filled from the locals via '% vars()'
    output = []
    output.append( "---> TINDERBOX PRINTING CONFIGURATION %(time_start)s" )
    output.append( config )
    #output.append( env )
    output.append( "<--- TINDERBOX FINISHED PRINTING CONFIGURATION %(time_end)s" )
    output.append( "---> TINDERBOX BUILDING '%(packages)s'" )
    output.append( "<--- TINDERBOX STARTING BUILD NOW" )

    output.append( "" )

    # NOTE(review): 'config' is part of the string being %-formatted; a
    # literal '%' in any config value would break the substitution.
    return "\n".join(output) % vars()
240 | |||
def tinder_do_tinder_report(event):
    """
    Report to the tinderbox:
    On the BuildStart we will inform the box directly
    On the other events we will write to the TINDER_LOG and
    when the Task is finished we will send the report.

    The above is not yet fully implemented. Currently we send
    information immediately. The caching/queuing needs to be
    implemented. Also sending more or less information is not
    implemented yet.
    """
    from bb.event import getName
    from bb import data, mkdirhier, build
    import os, glob

    # variables
    name = getName(event)
    log = ""
    # status starts at 1 and is bumped to 100 (completed) or 200
    # (failure) below -- presumably the tinderbox protocol's codes;
    # confirm against the server scripts.
    status = 1
    #print asd
    # Check what we need to do Build* shows we start or are done
    if name == "BuildStarted":
        tinder_build_start(event.data)
        log = tinder_tinder_start(event.data,event)

        try:
            # truncate the tinder log file
            f = file(data.getVar('TINDER_LOG', event.data, True), 'rw+')
            f.truncate(0)
            f.close()
        except IOError:
            pass

    # Append the Task-Log (compile,configure...) to the log file
    # we will send to the server
    if name == "TaskSucceeded" or name == "TaskFailed":
        log_file = glob.glob("%s/log.%s.*" % (data.getVar('T', event.data, True), event.task))

        if len(log_file) != 0:
            # NOTE(review): 'to_file' is computed but never used -- the
            # task log is only appended to 'log', not written to TINDER_LOG
            to_file = data.getVar('TINDER_LOG', event.data, True)
            log += "".join(open(log_file[0], 'r').readlines())

    # set the right 'HEADER'/Summary for the TinderBox
    if name == "TaskStarted":
        log += "---> TINDERBOX Task %s started\n" % event.task
    elif name == "TaskSucceeded":
        log += "<--- TINDERBOX Task %s done (SUCCESS)\n" % event.task
    elif name == "TaskFailed":
        log += "<--- TINDERBOX Task %s failed (FAILURE)\n" % event.task
    elif name == "PkgStarted":
        log += "---> TINDERBOX Package %s started\n" % data.getVar('P', event.data, True)
    elif name == "PkgSucceeded":
        log += "<--- TINDERBOX Package %s done (SUCCESS)\n" % data.getVar('P', event.data, True)
    elif name == "PkgFailed":
        # clean the failed package before reporting the failure
        build.exec_task('do_clean', event.data)
        log += "<--- TINDERBOX Package %s failed (FAILURE)\n" % data.getVar('P', event.data, True)
        status = 200
    elif name == "BuildCompleted":
        log += "Build Completed\n"
        status = 100
    elif name == "MultipleProviders":
        log += "---> TINDERBOX Multiple Providers\n"
        log += "multiple providers are available (%s);\n" % ", ".join(event.getCandidates())
        log += "consider defining PREFERRED_PROVIDER_%s\n" % event.getItem()
        log += "is runtime: %d\n" % event.isRuntime()
        log += "<--- TINDERBOX Multiple Providers\n"
    elif name == "NoProvider":
        log += "Error: No Provider for: %s\n" % event.getItem()
        log += "Error:Was Runtime: %d\n" % event.isRuntime()
        status = 200

    # now post the log (nothing to report for unhandled event types)
    if len(log) == 0:
        return

    # for now we will use the http post method as it is the only one
    log_post_method = tinder_send_http
    log_post_method(event.data, status, log)
320 | |||
321 | |||
322 | # we want to be an event handler | ||
# we want to be an event handler
addhandler tinderclient_eventhandler
python tinderclient_eventhandler() {
    # Forward every bitbake event to the tinderbox reporter when
    # TINDER_REPORT == "1". 'e' is the event, injected by bitbake.
    from bb import note, error, data
    from bb.event import NotHandled
    do_tinder_report = data.getVar('TINDER_REPORT', e.data, True)
    if do_tinder_report and do_tinder_report == "1":
        tinder_do_tinder_report(e)

    return NotHandled
}
diff --git a/meta/classes/tmake.bbclass b/meta/classes/tmake.bbclass new file mode 100644 index 0000000000..05b82e496d --- /dev/null +++ b/meta/classes/tmake.bbclass | |||
@@ -0,0 +1,77 @@ | |||
1 | DEPENDS_prepend="tmake " | ||
2 | |||
python tmake_do_createpro() {
    # Generate a ${PN}.pro project file from the TMAKE_* metadata
    # variables, unless the source tree already ships usable .pro files
    # (in which case tmake_do_configure will consume those directly).
    import glob, sys
    from bb import note
    # Map our metadata variable names onto tmake project-file keys.
    out_vartranslate = {
        "TMAKE_HEADERS": "HEADERS",
        "TMAKE_INTERFACES": "INTERFACES",
        "TMAKE_TEMPLATE": "TEMPLATE",
        "TMAKE_CONFIG": "CONFIG",
        "TMAKE_DESTDIR": "DESTDIR",
        "TMAKE_SOURCES": "SOURCES",
        "TMAKE_DEPENDPATH": "DEPENDPATH",
        "TMAKE_INCLUDEPATH": "INCLUDEPATH",
        "TMAKE_TARGET": "TARGET",
        "TMAKE_LIBS": "LIBS",
    }
    s = data.getVar('S', d, 1) or ""
    os.chdir(s)

    # Expand each TMAKE_PROFILES entry (default: *.pro) as a glob
    # relative to ${S}, keeping only patterns that matched real files.
    # The previous code indexed an empty glob result (IndexError for any
    # non-matching pattern, since str.find() == -1 is truthy anyway) and
    # mutated the list while iterating it; building a fresh list fixes
    # both problems without changing which files are kept.
    patterns = (data.getVar('TMAKE_PROFILES', d, 1) or "").split() or ["*.pro"]
    profiles = []
    for pattern in patterns:
        profiles += glob.glob(pattern)

    # Existing profiles found: nothing to generate.
    if len(profiles) != 0:
        return

    # output .pro using this metadata store
    try:
        from __builtin__ import file
        profile = file(data.expand('${PN}.pro', d), 'w')
    except OSError:
        raise FuncFailed("unable to open pro file for writing.")

    # Emit one "KEY\t: value" line per set TMAKE_* variable.
    for var in out_vartranslate.keys():
        val = data.getVar(var, d, 1)
        if val:
            profile.write("%s\t: %s\n" % (out_vartranslate[var], val))

    profile.close()
}
55 | |||
tmake_do_configure() {
	# Find a tmake configuration directory for the target triple and
	# export it through TMAKEPATH so tmake picks the right spec.
	# NOTE(review): $OS in the second candidate path appears to be unset
	# in this class -- confirm against the including recipes.
	paths="${STAGING_DATADIR}/tmake/qws/${TARGET_OS}-${TARGET_ARCH}-g++ $STAGING_DIR/share/tmake/$OS-g++"
	# Any i?86 target uses the generic x86 spec in preference.
	if (echo "${TARGET_ARCH}"|grep -q 'i.86'); then
		paths="${STAGING_DATADIR}/tmake/qws/${TARGET_OS}-x86-g++ $paths"
	fi
	# First existing candidate wins.
	for i in $paths; do
		if test -e $i; then
			export TMAKEPATH=$i
			break
		fi
	done

	# Default to every .pro file in the source directory (including the
	# one tmake_do_createpro may have just generated).
	if [ -z "${TMAKE_PROFILES}" ]; then
		TMAKE_PROFILES="`ls *.pro`"
	fi
	tmake -o Makefile $TMAKE_PROFILES || die "Error calling tmake on ${TMAKE_PROFILES}"
}
73 | |||
# Let recipes override do_configure/do_createpro while defaulting to the
# tmake_* implementations above.
EXPORT_FUNCTIONS do_configure do_createpro

# Hook both tasks into the build graph; createpro must precede configure.
addtask configure after do_unpack do_patch before do_compile
addtask createpro before do_configure after do_unpack do_patch
diff --git a/meta/classes/update-alternatives.bbclass b/meta/classes/update-alternatives.bbclass new file mode 100644 index 0000000000..6b2b547d5f --- /dev/null +++ b/meta/classes/update-alternatives.bbclass | |||
@@ -0,0 +1,33 @@ | |||
# defaults: recipes normally only set ALTERNATIVE_NAME and
# ALTERNATIVE_PATH; the link location defaults to ${bindir}/<name>.
ALTERNATIVE_PRIORITY = "10"
ALTERNATIVE_LINK = "${bindir}/${ALTERNATIVE_NAME}"
4 | |||
# On package install, register this provider with update-alternatives.
update_alternatives_postinst() {
	update-alternatives --install ${ALTERNATIVE_LINK} ${ALTERNATIVE_NAME} ${ALTERNATIVE_PATH} ${ALTERNATIVE_PRIORITY}
}
8 | |||
# On package removal, withdraw this provider from update-alternatives.
update_alternatives_postrm() {
	update-alternatives --remove ${ALTERNATIVE_NAME} ${ALTERNATIVE_PATH}
}
12 | |||
python __anonymous() {
    # Fail at parse time if a recipe inherits this class without the
    # mandatory alternatives metadata (py2-style raise kept as in file).
    if bb.data.getVar('ALTERNATIVE_NAME', d) == None:
        raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % bb.data.getVar('FILE', d)
    if bb.data.getVar('ALTERNATIVE_PATH', d) == None:
        raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % bb.data.getVar('FILE', d)
}
19 | |||
python populate_packages_prepend () {
    # Append the update-alternatives maintainer-script fragments to the
    # main package's postinst and postrm, creating each script with a
    # shebang line if the recipe did not define one.
    pkg = bb.data.getVar('PN', d, 1)
    bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg)
    for phase in ('postinst', 'postrm'):
        # Prefer the package-specific script, fall back to the generic one.
        script = bb.data.getVar('pkg_%s_%s' % (phase, pkg), d, 1) or \
                 bb.data.getVar('pkg_%s' % phase, d, 1)
        if not script:
            script = '#!/bin/sh\n'
        script += bb.data.getVar('update_alternatives_%s' % phase, d, 1)
        bb.data.setVar('pkg_%s_%s' % (phase, pkg), script, d)
}
diff --git a/meta/classes/update-rc.d.bbclass b/meta/classes/update-rc.d.bbclass new file mode 100644 index 0000000000..0bfba467c1 --- /dev/null +++ b/meta/classes/update-rc.d.bbclass | |||
@@ -0,0 +1,69 @@ | |||
# update-rc.d is needed both to build (DEPENDS) and on the target at
# package install/remove time (RDEPENDS).
DEPENDS_append = " update-rc.d"
RDEPENDS_append = " update-rc.d"

# Default start/stop configuration passed to update-rc.d.
INITSCRIPT_PARAMS ?= "defaults"

INIT_D_DIR = "${sysconfdir}/init.d"
7 | |||
updatercd_postinst() {
	# Installing into an offline root ($D set): pass it with -r.
	# Live system: -s makes update-rc.d also start the service now.
	if test "x$D" != "x"; then
		D="-r $D"
	else
		D="-s"
	fi
	update-rc.d $D ${INITSCRIPT_NAME} ${INITSCRIPT_PARAMS}
}
16 | |||
updatercd_prerm() {
	# Stop the running service before removal, but only on a live system.
	# NOTE(review): the offline branch rewrites D and then does nothing
	# with it -- presumably intentional (nothing to stop when installing
	# into an image); confirm.
	if test "x$D" != "x"; then
		D="-r $D"
	else
		${INIT_D_DIR}/${INITSCRIPT_NAME} stop
	fi
}
24 | |||
updatercd_postrm() {
	# Remove the rc?.d symlinks for the deleted init script.
	# NOTE(review): unlike postinst, $D is used as-is here -- confirm it
	# is empty (live system) or already in "-r <root>" form at this point.
	update-rc.d $D ${INITSCRIPT_NAME} remove
}
28 | |||
python __anonymous() {
    # Parse-time sanity check for the single-package case; recipes that
    # set INITSCRIPT_PACKAGES are expected to provide per-package values
    # via OVERRIDES instead.
    if bb.data.getVar('INITSCRIPT_PACKAGES', d) == None:
        if bb.data.getVar('INITSCRIPT_NAME', d) == None:
            raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_NAME" % bb.data.getVar('FILE', d)
        # NOTE(review): INITSCRIPT_PARAMS has a '?=' default above, so
        # this check can never fire -- confirm whether it is vestigial.
        if bb.data.getVar('INITSCRIPT_PARAMS', d) == None:
            raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_PARAMS" % bb.data.getVar('FILE', d)
}
36 | |||
python populate_packages_prepend () {
    # For every package carrying an init script, append the updatercd_*
    # fragments to its postinst/prerm/postrm maintainer scripts.
    def update_rcd_package(pkg):
        bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg)
        # Expand against a per-package copy of the datastore with the
        # package name prepended to OVERRIDES, so per-package
        # INITSCRIPT_NAME/PARAMS overrides take effect.
        localdata = bb.data.createCopy(d)
        overrides = bb.data.getVar("OVERRIDES", localdata, 1)
        bb.data.setVar("OVERRIDES", "%s:%s" % (pkg, overrides), localdata)
        bb.data.update_data(localdata)

        # Read from localdata (override-expanded), but write the final
        # per-package scripts back to the real datastore 'd'.
        postinst = bb.data.getVar('pkg_postinst', localdata, 1)
        if not postinst:
            postinst = '#!/bin/sh\n'
        postinst += bb.data.getVar('updatercd_postinst', localdata, 1)
        bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
        prerm = bb.data.getVar('pkg_prerm', localdata, 1)
        if not prerm:
            prerm = '#!/bin/sh\n'
        prerm += bb.data.getVar('updatercd_prerm', localdata, 1)
        bb.data.setVar('pkg_prerm_%s' % pkg, prerm, d)
        postrm = bb.data.getVar('pkg_postrm', localdata, 1)
        if not postrm:
            postrm = '#!/bin/sh\n'
        postrm += bb.data.getVar('updatercd_postrm', localdata, 1)
        bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d)

    # Default to PN; if PN is not itself an output package, fall back to
    # the first entry of PACKAGES.
    pkgs = bb.data.getVar('INITSCRIPT_PACKAGES', d, 1)
    if pkgs == None:
        pkgs = bb.data.getVar('PN', d, 1)
        packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
        if not pkgs in packages and packages != []:
            pkgs = packages[0]
    for pkg in pkgs.split():
        update_rcd_package(pkg)
}
diff --git a/meta/classes/wrt-image.bbclass b/meta/classes/wrt-image.bbclass new file mode 100644 index 0000000000..ba1163a719 --- /dev/null +++ b/meta/classes/wrt-image.bbclass | |||
@@ -0,0 +1,33 @@ | |||
# We don't need the kernel inside the rootfs image; strip it out after
# the rootfs is assembled (the images below carry the kernel separately).
ROOTFS_POSTPROCESS_COMMAND += "rm -f ${IMAGE_ROOTFS}/boot/zImage*"
3 | |||
def wrt_get_kernel_version(d):
    """Return the staged kernel ABI version for image_ipk recipes.

    For anything else a placeholder string is returned.
    """
    import bb
    if not bb.data.inherits_class('image_ipk', d):
        return "-no kernel version for available-"
    staging = bb.data.getVar('STAGING_KERNEL_DIR', d, 1)
    # kernel-abiversion is written into STAGING_KERNEL_DIR by the kernel
    # staging step; base_read_file comes from base.bbclass.
    return base_read_file(staging + '/kernel-abiversion')
10 | |||
wrt_create_images() {
	# Assemble flashable firmware images for WRT54-family routers for
	# every requested rootfs type.
	I=${DEPLOY_DIR}/images
	KERNEL_VERSION="${@wrt_get_kernel_version(d)}"

	for type in ${IMAGE_FSTYPES}; do
		# generic: loader + lzma kernel + rootfs in a TRX container
		trx -o ${I}/wrt-generic-${type}.trx ${I}/loader.gz \
		${I}/wrt-kernel-${KERNEL_VERSION}.lzma -a 0x10000 ${I}/${IMAGE_NAME}.rootfs.${type}

		# WRT54GS
		addpattern -2 -i ${I}/wrt-generic-${type}.trx -o ${I}/wrt54gs-${type}.trx -g

		# WRT54G: same image with the product code in the first line
		# rewritten from W54S to W54G
		sed "1s,^W54S,W54G," ${I}/wrt54gs-${type}.trx > ${I}/wrt54g-${type}.trx

		# motorola
		motorola-bin ${I}/wrt-generic-${type}.trx ${I}/motorola-${type}.bin
	done;
}
30 | |||
# Run the firmware wrapping step after the rootfs image is produced.
IMAGE_POSTPROCESS_COMMAND += "wrt_create_images;"

# Pull in the native image tools only when PACKAGES is non-empty
# (the [x, y][cond] form is an old-style inline conditional).
DEPENDS_prepend = "${@["wrt-imagetools-native ", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}"
diff --git a/meta/classes/xfce.bbclass b/meta/classes/xfce.bbclass new file mode 100644 index 0000000000..793348597f --- /dev/null +++ b/meta/classes/xfce.bbclass | |||
@@ -0,0 +1,19 @@ | |||
# xfce.oeclass
# Copyright (C) 2004, Advanced Micro Devices, Inc. All Rights Reserved
# Released under the MIT license (see packages/COPYING)

# Global class to make it easier to maintain XFCE packages

HOMEPAGE = "http://www.xfce.org"
LICENSE = "LGPL-2"

# All XFCE releases share this download layout, keyed on ${PN}/${PV}.
SRC_URI = "http://www.us.xfce.org/archive/xfce-${PV}/src/${PN}-${PV}.tar.gz"

inherit autotools

# Panel plugins share a common plugin directory under libdir.
EXTRA_OECONF += "--with-pluginsdir=${libdir}/xfce4/panel-plugins/"

# FIXME: Put icons in their own package too?

# Ship icons, .desktop files and panel modules in the main package.
FILES_${PN} += "${datadir}/icons/* ${datadir}/applications/* ${libdir}/xfce4/modules/*.so*"
FILES_${PN}-doc += "${datadir}/xfce4/doc"
diff --git a/meta/classes/xlibs.bbclass b/meta/classes/xlibs.bbclass new file mode 100644 index 0000000000..e797748770 --- /dev/null +++ b/meta/classes/xlibs.bbclass | |||
@@ -0,0 +1,15 @@ | |||
# Common settings for X11 library recipes hosted in freedesktop.org CVS.
LICENSE = "BSD-X"
SECTION = "x11/libs"

# Base CVS location shared by all xlibs modules.
XLIBS_CVS = "${FREEDESKTOP_CVS}/xlibs"

inherit autotools pkgconfig
7 | |||
do_stage() {
	# Install headers and libraries into the staging area by overriding
	# the install prefixes on the make command line rather than using
	# DESTDIR (this build predates autotools staging support here).
	oe_runmake install prefix=${STAGING_DIR} \
		bindir=${STAGING_BINDIR} \
		includedir=${STAGING_INCDIR} \
		libdir=${STAGING_LIBDIR} \
		datadir=${STAGING_DATADIR} \
		mandir=${STAGING_DATADIR}/man
}