Diffstat (limited to 'meta')
-rw-r--r--  meta/classes/base.bbclass           729
-rw-r--r--  meta/classes/metadata_scm.bbclass    77
-rw-r--r--  meta/classes/mirrors.bbclass         58
-rw-r--r--  meta/classes/staging.bbclass        146
-rw-r--r--  meta/classes/utility-tasks.bbclass   97
-rw-r--r--  meta/classes/utils.bbclass          340
-rw-r--r--  meta/conf/bitbake.conf               15
7 files changed, 740 insertions, 722 deletions
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index 256f89954c..51a514570b 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -1,87 +1,6 @@
1BB_DEFAULT_TASK ?= "build" 1BB_DEFAULT_TASK ?= "build"
2 2
3# like os.path.join but doesn't treat absolute RHS specially
3inherit utils
4def base_path_join(a, *p):
5 path = a
6 for b in p:
7 if path == '' or path.endswith('/'):
8 path += b
9 else:
10 path += '/' + b
11 return path
12
13# for MD5/SHA handling
14def base_chk_load_parser(config_path):
15 import ConfigParser
16 parser = ConfigParser.ConfigParser()
17 if not len(parser.read(config_path)) == 1:
18 bb.note("Can not open the '%s' ini file" % config_path)
19 raise Exception("Can not open the '%s'" % config_path)
20
21 return parser
22
23def base_chk_file(parser, pn, pv, src_uri, localpath, data):
24 no_checksum = False
25 # Try PN-PV-SRC_URI first and then try PN-SRC_URI
26 # we rely on the get method to create errors
27 pn_pv_src = "%s-%s-%s" % (pn,pv,src_uri)
28 pn_src = "%s-%s" % (pn,src_uri)
29 if parser.has_section(pn_pv_src):
30 md5 = parser.get(pn_pv_src, "md5")
31 sha256 = parser.get(pn_pv_src, "sha256")
32 elif parser.has_section(pn_src):
33 md5 = parser.get(pn_src, "md5")
34 sha256 = parser.get(pn_src, "sha256")
35 elif parser.has_section(src_uri):
36 md5 = parser.get(src_uri, "md5")
37 sha256 = parser.get(src_uri, "sha256")
38 else:
39 no_checksum = True
40
41 # md5 and sha256 should be valid now
42 if not os.path.exists(localpath):
43 bb.note("The localpath does not exist '%s'" % localpath)
44 raise Exception("The path does not exist '%s'" % localpath)
45
46
47 # Calculate the MD5 and 256-bit SHA checksums
48 md5data = bb.utils.md5_file(localpath)
49 shadata = bb.utils.sha256_file(localpath)
50
51 # sha256_file() can return None if we are running on Python 2.4 (hashlib is
52 # 2.5 onwards, sha in 2.4 is 160-bit only), so check for this and call the
53 # standalone shasum binary if required.
54 if shadata is None:
55 try:
56 shapipe = os.popen('PATH=%s oe_sha256sum %s' % (bb.data.getVar('PATH', data, True), localpath))
57 shadata = (shapipe.readline().split() or [ "" ])[0]
58 shapipe.close()
59 except OSError:
60 raise Exception("Executing shasum failed, please build shasum-native")
61
62 if no_checksum == True: # we do not have conf/checksums.ini entry
63 try:
64 file = open("%s/checksums.ini" % bb.data.getVar("TMPDIR", data, 1), "a")
65 except:
66 return False
67
68 if not file:
69 raise Exception("Creating checksums.ini failed")
70
71 file.write("[%s]\nmd5=%s\nsha256=%s\n\n" % (src_uri, md5data, shadata))
72 file.close()
73 return False
74
75 if not md5 == md5data:
76 bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (md5,md5data))
77 raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (md5, md5data))
78
79 if not sha256 == shadata:
80 bb.note("The SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256,shadata))
81 raise Exception("SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256, shadata))
82
83 return True
84
85 4
86def base_dep_prepend(d): 5def base_dep_prepend(d):
87 # 6 #
@@ -112,62 +31,11 @@ def base_dep_prepend(d):
112 deps += " virtual/${TARGET_PREFIX}gcc virtual/libc " 31 deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
113 return deps 32 return deps
114 33
115def base_read_file(filename):
116 try:
117 f = file( filename, "r" )
118 except IOError, reason:
119 return "" # WARNING: can't raise an error now because of the new RDEPENDS handling. This is a bit ugly. :M:
120 else:
121 return f.read().strip()
122 return None
123
124def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
125 if bb.data.getVar(variable,d,1) == checkvalue:
126 return truevalue
127 else:
128 return falsevalue
129
130def base_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
131 if float(bb.data.getVar(variable,d,1)) <= float(checkvalue):
132 return truevalue
133 else:
134 return falsevalue
135
136def base_version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
137 result = bb.vercmp(bb.data.getVar(variable,d,True), checkvalue)
138 if result <= 0:
139 return truevalue
140 else:
141 return falsevalue
142
143def base_contains(variable, checkvalues, truevalue, falsevalue, d):
144 matches = 0
145 if type(checkvalues).__name__ == "str":
146 checkvalues = [checkvalues]
147 for value in checkvalues:
148 if bb.data.getVar(variable,d,1).find(value) != -1:
149 matches = matches + 1
150 if matches == len(checkvalues):
151 return truevalue
152 return falsevalue
153
154def base_both_contain(variable1, variable2, checkvalue, d):
155 if bb.data.getVar(variable1,d,1).find(checkvalue) != -1 and bb.data.getVar(variable2,d,1).find(checkvalue) != -1:
156 return checkvalue
157 else:
158 return ""
159 34
160DEPENDS_prepend="${@base_dep_prepend(d)} " 35DEPENDS_prepend="${@base_dep_prepend(d)} "
161DEPENDS_virtclass-native_prepend="${@base_dep_prepend(d)} " 36DEPENDS_virtclass-native_prepend="${@base_dep_prepend(d)} "
162DEPENDS_virtclass-nativesdk_prepend="${@base_dep_prepend(d)} " 37DEPENDS_virtclass-nativesdk_prepend="${@base_dep_prepend(d)} "
163 38
164def base_prune_suffix(var, suffixes, d):
165 # See if var ends with any of the suffixes listed and
166 # remove it if found
167 for suffix in suffixes:
168 if var.endswith(suffix):
169 return var.replace(suffix, "")
170 return var
171 39
172def base_set_filespath(path, d): 40def base_set_filespath(path, d):
173 filespath = [] 41 filespath = []
@@ -180,13 +48,6 @@ def base_set_filespath(path, d):
180 48
181FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}" 49FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"
182 50
183def oe_filter(f, str, d):
184 from re import match
185 return " ".join(filter(lambda x: match(f, x, 0), str.split()))
186
187def oe_filter_out(f, str, d):
188 from re import match
189 return " ".join(filter(lambda x: not match(f, x, 0), str.split()))
190 51
191die() { 52die() {
192 oefatal "$*" 53 oefatal "$*"
@@ -223,173 +84,6 @@ oe_runmake() {
223 ${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed" 84 ${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
224} 85}
225 86
226oe_soinstall() {
227 # Purpose: Install shared library file and
228 # create the necessary links
229 # Example:
230 #
231 # oe_
232 #
233 #oenote installing shared library $1 to $2
234 #
235 libname=`basename $1`
236 install -m 755 $1 $2/$libname
237 sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
238 solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
239 ln -sf $libname $2/$sonamelink
240 ln -sf $libname $2/$solink
241}
242
243oe_libinstall() {
244 # Purpose: Install a library, in all its forms
245 # Example
246 #
247 # oe_libinstall libltdl ${STAGING_LIBDIR}/
248 # oe_libinstall -C src/libblah libblah ${D}/${libdir}/
249 dir=""
250 libtool=""
251 silent=""
252 require_static=""
253 require_shared=""
254 staging_install=""
255 while [ "$#" -gt 0 ]; do
256 case "$1" in
257 -C)
258 shift
259 dir="$1"
260 ;;
261 -s)
262 silent=1
263 ;;
264 -a)
265 require_static=1
266 ;;
267 -so)
268 require_shared=1
269 ;;
270 -*)
271 oefatal "oe_libinstall: unknown option: $1"
272 ;;
273 *)
274 break;
275 ;;
276 esac
277 shift
278 done
279
280 libname="$1"
281 shift
282 destpath="$1"
283 if [ -z "$destpath" ]; then
284 oefatal "oe_libinstall: no destination path specified"
285 fi
286 if echo "$destpath/" | egrep '^${STAGING_LIBDIR}/' >/dev/null
287 then
288 staging_install=1
289 fi
290
291 __runcmd () {
292 if [ -z "$silent" ]; then
293 echo >&2 "oe_libinstall: $*"
294 fi
295 $*
296 }
297
298 if [ -z "$dir" ]; then
299 dir=`pwd`
300 fi
301
302 dotlai=$libname.lai
303
304 # Sanity check that the libname.lai is unique
305 number_of_files=`(cd $dir; find . -name "$dotlai") | wc -l`
306 if [ $number_of_files -gt 1 ]; then
307 oefatal "oe_libinstall: $dotlai is not unique in $dir"
308 fi
309
310
311 dir=$dir`(cd $dir;find . -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
312 olddir=`pwd`
313 __runcmd cd $dir
314
315 lafile=$libname.la
316
317 # If such file doesn't exist, try to cut version suffix
318 if [ ! -f "$lafile" ]; then
319 libname1=`echo "$libname" | sed 's/-[0-9.]*$//'`
320 lafile1=$libname.la
321 if [ -f "$lafile1" ]; then
322 libname=$libname1
323 lafile=$lafile1
324 fi
325 fi
326
327 if [ -f "$lafile" ]; then
328 # libtool archive
329 eval `cat $lafile|grep "^library_names="`
330 libtool=1
331 else
332 library_names="$libname.so* $libname.dll.a"
333 fi
334
335 __runcmd install -d $destpath/
336 dota=$libname.a
337 if [ -f "$dota" -o -n "$require_static" ]; then
338 rm -f $destpath/$dota
339 __runcmd install -m 0644 $dota $destpath/
340 fi
341 if [ -f "$dotlai" -a -n "$libtool" ]; then
342 if test -n "$staging_install"
343 then
344 # stop libtool using the final directory name for libraries
345 # in staging:
346 __runcmd rm -f $destpath/$libname.la
347 __runcmd sed -e 's/^installed=yes$/installed=no/' \
348 -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
349 -e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
350 $dotlai >$destpath/$libname.la
351 else
352 rm -f $destpath/$libname.la
353 __runcmd install -m 0644 $dotlai $destpath/$libname.la
354 fi
355 fi
356
357 for name in $library_names; do
358 files=`eval echo $name`
359 for f in $files; do
360 if [ ! -e "$f" ]; then
361 if [ -n "$libtool" ]; then
362 oefatal "oe_libinstall: $dir/$f not found."
363 fi
364 elif [ -L "$f" ]; then
365 __runcmd cp -P "$f" $destpath/
366 elif [ ! -L "$f" ]; then
367 libfile="$f"
368 rm -f $destpath/$libfile
369 __runcmd install -m 0755 $libfile $destpath/
370 fi
371 done
372 done
373
374 if [ -z "$libfile" ]; then
375 if [ -n "$require_shared" ]; then
376 oefatal "oe_libinstall: unable to locate shared library"
377 fi
378 elif [ -z "$libtool" ]; then
379 # special case hack for non-libtool .so.#.#.# links
380 baselibfile=`basename "$libfile"`
381 if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
382 sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
383 solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
384 if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
385 __runcmd ln -sf $baselibfile $destpath/$sonamelink
386 fi
387 __runcmd ln -sf $baselibfile $destpath/$solink
388 fi
389 fi
390
391 __runcmd cd "$olddir"
392}
393 87
394def package_stagefile(file, d): 88def package_stagefile(file, d):
395 89
@@ -409,81 +103,7 @@ package_stagefile_shell() {
409 fi 103 fi
410} 104}
411 105
412oe_machinstall() {
106inherit utility-tasks
413 # Purpose: Install machine dependent files, if available
414 # If not available, check if there is a default
415 # If no default, just touch the destination
416 # Example:
417 # $1 $2 $3 $4
418 # oe_machinstall -m 0644 fstab ${D}/etc/fstab
419 #
420 # TODO: Check argument number?
421 #
422 filename=`basename $3`
423 dirname=`dirname $3`
424
425 for o in `echo ${OVERRIDES} | tr ':' ' '`; do
426 if [ -e $dirname/$o/$filename ]; then
427 oenote $dirname/$o/$filename present, installing to $4
428 install $1 $2 $dirname/$o/$filename $4
429 return
430 fi
431 done
432# oenote overrides specific file NOT present, trying default=$3...
433 if [ -e $3 ]; then
434 oenote $3 present, installing to $4
435 install $1 $2 $3 $4
436 else
437 oenote $3 NOT present, touching empty $4
438 touch $4
439 fi
440}
441
442addtask listtasks
443do_listtasks[nostamp] = "1"
444python do_listtasks() {
445 import sys
446 # emit variables and shell functions
447 #bb.data.emit_env(sys.__stdout__, d)
448 # emit the metadata which isnt valid shell
449 for e in d.keys():
450 if bb.data.getVarFlag(e, 'task', d):
451 sys.__stdout__.write("%s\n" % e)
452}
453
454addtask clean
455do_clean[dirs] = "${TOPDIR}"
456do_clean[nostamp] = "1"
457python base_do_clean() {
458 """clear the build and temp directories"""
459 dir = bb.data.expand("${WORKDIR}", d)
460 if dir == '//': raise bb.build.FuncFailed("wrong DATADIR")
461 bb.note("removing " + dir)
462 os.system('rm -rf ' + dir)
463
464 dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
465 bb.note("removing " + dir)
466 os.system('rm -f '+ dir)
467}
468
469addtask rebuild after do_${BB_DEFAULT_TASK}
470do_rebuild[dirs] = "${TOPDIR}"
471do_rebuild[nostamp] = "1"
472python base_do_rebuild() {
473 """rebuild a package"""
474}
475
476#addtask mrproper
477#do_mrproper[dirs] = "${TOPDIR}"
478#do_mrproper[nostamp] = "1"
479#python base_do_mrproper() {
480# """clear downloaded sources, build and temp directories"""
481# dir = bb.data.expand("${DL_DIR}", d)
482# if dir == '/': bb.build.FuncFailed("wrong DATADIR")
483# bb.debug(2, "removing " + dir)
484# os.system('rm -rf ' + dir)
485# bb.build.exec_func('do_clean', d)
486#}
487 107
488SCENEFUNCS += "base_scenefunction" 108SCENEFUNCS += "base_scenefunction"
489 109
@@ -566,57 +186,6 @@ python base_do_fetch() {
566 raise bb.build.FuncFailed("Checksum of '%s' failed" % uri) 186 raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
567} 187}
568 188
569addtask fetchall after do_fetch
570do_fetchall[recrdeptask] = "do_fetch"
571base_do_fetchall() {
572 :
573}
574
575addtask checkuri
576do_checkuri[nostamp] = "1"
577python do_checkuri() {
578 import sys
579
580 localdata = bb.data.createCopy(d)
581 bb.data.update_data(localdata)
582
583 src_uri = bb.data.getVar('SRC_URI', localdata, 1)
584
585 try:
586 bb.fetch.init(src_uri.split(),d)
587 except bb.fetch.NoMethodError:
588 (type, value, traceback) = sys.exc_info()
589 raise bb.build.FuncFailed("No method: %s" % value)
590
591 try:
592 bb.fetch.checkstatus(localdata)
593 except bb.fetch.MissingParameterError:
594 (type, value, traceback) = sys.exc_info()
595 raise bb.build.FuncFailed("Missing parameters: %s" % value)
596 except bb.fetch.FetchError:
597 (type, value, traceback) = sys.exc_info()
598 raise bb.build.FuncFailed("Fetch failed: %s" % value)
599 except bb.fetch.MD5SumError:
600 (type, value, traceback) = sys.exc_info()
601 raise bb.build.FuncFailed("MD5 failed: %s" % value)
602 except:
603 (type, value, traceback) = sys.exc_info()
604 raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
605}
606
607addtask checkuriall after do_checkuri
608do_checkuriall[recrdeptask] = "do_checkuri"
609do_checkuriall[nostamp] = "1"
610base_do_checkuriall() {
611 :
612}
613
614addtask buildall after do_build
615do_buildall[recrdeptask] = "do_build"
616base_do_buildall() {
617 :
618}
619
620def subprocess_setup(): 189def subprocess_setup():
621 import signal 190 import signal
622 # Python installs a SIGPIPE handler by default. This is usually not what 191 # Python installs a SIGPIPE handler by default. This is usually not what
@@ -720,82 +289,7 @@ python base_do_unpack() {
720 raise bb.build.FuncFailed() 289 raise bb.build.FuncFailed()
721} 290}
722 291
723METADATA_BRANCH ?= "${@base_detect_branch(d)}"
292inherit metadata_scm
724METADATA_REVISION ?= "${@base_detect_revision(d)}"
725
726def base_detect_revision(d):
727 path = base_get_scmbasepath(d)
728
729 scms = [base_get_metadata_git_revision, \
730 base_get_metadata_svn_revision]
731
732 for scm in scms:
733 rev = scm(path, d)
734 if rev <> "<unknown>":
735 return rev
736
737 return "<unknown>"
738
739def base_detect_branch(d):
740 path = base_get_scmbasepath(d)
741
742 scms = [base_get_metadata_git_branch]
743
744 for scm in scms:
745 rev = scm(path, d)
746 if rev <> "<unknown>":
747 return rev.strip()
748
749 return "<unknown>"
750
751
752
753def base_get_scmbasepath(d):
754 path_to_bbfiles = bb.data.getVar( 'BBFILES', d, 1 ).split()
755 return path_to_bbfiles[0][:path_to_bbfiles[0].rindex( "packages" )]
756
757def base_get_metadata_monotone_branch(path, d):
758 monotone_branch = "<unknown>"
759 try:
760 monotone_branch = file( "%s/_MTN/options" % path ).read().strip()
761 if monotone_branch.startswith( "database" ):
762 monotone_branch_words = monotone_branch.split()
763 monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1]
764 except:
765 pass
766 return monotone_branch
767
768def base_get_metadata_monotone_revision(path, d):
769 monotone_revision = "<unknown>"
770 try:
771 monotone_revision = file( "%s/_MTN/revision" % path ).read().strip()
772 if monotone_revision.startswith( "format_version" ):
773 monotone_revision_words = monotone_revision.split()
774 monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
775 except IOError:
776 pass
777 return monotone_revision
778
779def base_get_metadata_svn_revision(path, d):
780 revision = "<unknown>"
781 try:
782 revision = file( "%s/.svn/entries" % path ).readlines()[3].strip()
783 except IOError:
784 pass
785 return revision
786
787def base_get_metadata_git_branch(path, d):
788 branch = os.popen('cd %s; git branch | grep "^* " | tr -d "* "' % path).read()
789
790 if len(branch) != 0:
791 return branch
792 return "<unknown>"
793
794def base_get_metadata_git_revision(path, d):
795 rev = os.popen("cd %s; git log -n 1 --pretty=oneline --" % path).read().split(" ")[0]
796 if len(rev) != 0:
797 return rev
798 return "<unknown>"
799 293
800GIT_CONFIG = "${STAGING_DIR_NATIVE}/usr/etc/gitconfig" 294GIT_CONFIG = "${STAGING_DIR_NATIVE}/usr/etc/gitconfig"
801 295
@@ -909,145 +403,7 @@ base_do_compile() {
909 fi 403 fi
910} 404}
911 405
912
406inherit staging
913sysroot_stage_dir() {
914 src="$1"
915 dest="$2"
916 # This will remove empty directories so we can ignore them
917 rmdir "$src" 2> /dev/null || true
918 if [ -d "$src" ]; then
919 mkdir -p "$dest"
920 cp -fpPR "$src"/* "$dest"
921 fi
922}
923
924sysroot_stage_libdir() {
925 src="$1"
926 dest="$2"
927
928 olddir=`pwd`
929 cd $src
930 las=$(find . -name \*.la -type f)
931 cd $olddir
932 echo "Found la files: $las"
933 for i in $las
934 do
935 sed -e 's/^installed=yes$/installed=no/' \
936 -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
937 -e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
938 -i $src/$i
939 done
940 sysroot_stage_dir $src $dest
941}
942
943sysroot_stage_dirs() {
944 from="$1"
945 to="$2"
946
947 sysroot_stage_dir $from${includedir} $to${STAGING_INCDIR}
948 if [ "${BUILD_SYS}" = "${HOST_SYS}" ]; then
949 sysroot_stage_dir $from${bindir} $to${STAGING_DIR_HOST}${bindir}
950 sysroot_stage_dir $from${sbindir} $to${STAGING_DIR_HOST}${sbindir}
951 sysroot_stage_dir $from${base_bindir} $to${STAGING_DIR_HOST}${base_bindir}
952 sysroot_stage_dir $from${base_sbindir} $to${STAGING_DIR_HOST}${base_sbindir}
953 sysroot_stage_dir $from${libexecdir} $to${STAGING_DIR_HOST}${libexecdir}
954 sysroot_stage_dir $from${sysconfdir} $to${STAGING_DIR_HOST}${sysconfdir}
955 fi
956 if [ -d $from${libdir} ]
957 then
958 sysroot_stage_libdir $from/${libdir} $to${STAGING_LIBDIR}
959 fi
960 if [ -d $from${base_libdir} ]
961 then
962 sysroot_stage_libdir $from${base_libdir} $to${STAGING_DIR_HOST}${base_libdir}
963 fi
964 sysroot_stage_dir $from${datadir} $to${STAGING_DATADIR}
965}
966
967sysroot_stage_all() {
968 sysroot_stage_dirs ${D} ${SYSROOT_DESTDIR}
969}
970
971def is_legacy_staging(d):
972 stagefunc = bb.data.getVar('do_stage', d, True)
973 legacy = True
974 if stagefunc is None:
975 legacy = False
976 elif stagefunc.strip() == "use_do_install_for_stage":
977 legacy = False
978 elif stagefunc.strip() == "autotools_stage_all":
979 legacy = False
980 elif stagefunc.strip() == "do_stage_native" and bb.data.getVar('AUTOTOOLS_NATIVE_STAGE_INSTALL', d, 1) == "1":
981 legacy = False
982 elif bb.data.getVar('NATIVE_INSTALL_WORKS', d, 1) == "1":
983 legacy = False
984 return legacy
985
986do_populate_sysroot[dirs] = "${STAGING_DIR_TARGET}/${bindir} ${STAGING_DIR_TARGET}/${libdir} \
987 ${STAGING_DIR_TARGET}/${includedir} \
988 ${STAGING_BINDIR_NATIVE} ${STAGING_LIBDIR_NATIVE} \
989 ${STAGING_INCDIR_NATIVE} \
990 ${STAGING_DATADIR} \
991 ${S} ${B}"
992
993# Could be compile but populate_sysroot and do_install shouldn't run at the same time
994addtask populate_sysroot after do_install
995
996PSTAGING_ACTIVE = "0"
997SYSROOT_PREPROCESS_FUNCS ?= ""
998SYSROOT_DESTDIR = "${WORKDIR}/sysroot-destdir/"
999SYSROOT_LOCK = "${STAGING_DIR}/staging.lock"
1000
1001python populate_sysroot_prehook () {
1002 return
1003}
1004
1005python populate_sysroot_posthook () {
1006 return
1007}
1008
1009packagedstaging_fastpath () {
1010 :
1011}
1012
1013python do_populate_sysroot () {
1014 #
1015 # if do_stage exists, we're legacy. In that case run the do_stage,
1016 # modify the SYSROOT_DESTDIR variable and then run the staging preprocess
1017 # functions against staging directly.
1018 #
1019 # Otherwise setup a destdir, copy the results from do_install
1020 # and run the staging preprocess against that
1021 #
1022 pstageactive = (bb.data.getVar("PSTAGING_ACTIVE", d, True) == "1")
1023 lockfile = bb.data.getVar("SYSROOT_LOCK", d, True)
1024 stagefunc = bb.data.getVar('do_stage', d, True)
1025 legacy = is_legacy_staging(d)
1026 if legacy:
1027 bb.data.setVar("SYSROOT_DESTDIR", "", d)
1028 bb.note("Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
1029 lock = bb.utils.lockfile(lockfile)
1030 bb.build.exec_func('populate_sysroot_prehook', d)
1031 bb.build.exec_func('do_stage', d)
1032 for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
1033 bb.build.exec_func(f, d)
1034 bb.build.exec_func('populate_sysroot_posthook', d)
1035 bb.utils.unlockfile(lock)
1036 else:
1037 dest = bb.data.getVar('D', d, True)
1038 sysrootdest = bb.data.expand('${SYSROOT_DESTDIR}${STAGING_DIR_TARGET}', d)
1039 bb.mkdirhier(sysrootdest)
1040
1041 bb.build.exec_func("sysroot_stage_all", d)
1042 #os.system('cp -pPR %s/* %s/' % (dest, sysrootdest))
1043 for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
1044 bb.build.exec_func(f, d)
1045 bb.build.exec_func("packagedstaging_fastpath", d)
1046
1047 lock = bb.utils.lockfile(lockfile)
1048 os.system(bb.data.expand('cp -pPR ${SYSROOT_DESTDIR}${TMPDIR}/* ${TMPDIR}/', d))
1049 bb.utils.unlockfile(lock)
1050}
1051 407
1052addtask install after do_compile 408addtask install after do_compile
1053do_install[dirs] = "${D} ${S} ${B}" 409do_install[dirs] = "${D} ${S} ${B}"
@@ -1066,19 +422,6 @@ addtask build after do_populate_sysroot
1066do_build = "" 422do_build = ""
1067do_build[func] = "1" 423do_build[func] = "1"
1068 424
1069# Make sure MACHINE isn't exported
1070# (breaks binutils at least)
1071MACHINE[unexport] = "1"
1072
1073# Make sure TARGET_ARCH isn't exported
1074# (breaks Makefiles using implicit rules, e.g. quilt, as GNU make has this
1075# in them, undocumented)
1076TARGET_ARCH[unexport] = "1"
1077
1078# Make sure DISTRO isn't exported
1079# (breaks sysvinit at least)
1080DISTRO[unexport] = "1"
1081
1082 425
1083def base_after_parse(d): 426def base_after_parse(d):
1084 import exceptions 427 import exceptions
@@ -1176,7 +519,7 @@ def base_after_parse(d):
1176 for pkg in packages: 519 for pkg in packages:
1177 pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1) 520 pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)
1178 521
1179 # We could look for != PACKAGE_ARCH here but how to choose 522 # We could look for != PACKAGE_ARCH here but how to choose
1180 # if multiple differences are present? 523 # if multiple differences are present?
1181 # Look through PACKAGE_ARCHS for the priority order? 524 # Look through PACKAGE_ARCHS for the priority order?
1182 if pkgarch and pkgarch == mach_arch: 525 if pkgarch and pkgarch == mach_arch:
@@ -1187,8 +530,6 @@ def base_after_parse(d):
1187 530
1188python () { 531python () {
1189 base_after_parse(d) 532 base_after_parse(d)
1190 if is_legacy_staging(d):
1191 bb.note("Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
1192} 533}
1193 534
1194def check_app_exists(app, d): 535def check_app_exists(app, d):
@@ -1211,63 +552,7 @@ def check_gcc3(data):
1211# Patch handling 552# Patch handling
1212inherit patch 553inherit patch
1213 554
1214EXPORT_FUNCTIONS do_setscene do_clean do_fetch do_unpack do_configure do_compile do_install do_package do_populate_pkgs do_rebuild do_fetchall
555EXPORT_FUNCTIONS do_setscene do_fetch do_unpack do_configure do_compile do_install do_package
1215
1216MIRRORS[func] = "0"
1217MIRRORS () {
1218${DEBIAN_MIRROR}/main http://snapshot.debian.net/archive/pool
1219${DEBIAN_MIRROR} ftp://ftp.de.debian.org/debian/pool
1220${DEBIAN_MIRROR} ftp://ftp.au.debian.org/debian/pool
1221${DEBIAN_MIRROR} ftp://ftp.cl.debian.org/debian/pool
1222${DEBIAN_MIRROR} ftp://ftp.hr.debian.org/debian/pool
1223${DEBIAN_MIRROR} ftp://ftp.fi.debian.org/debian/pool
1224${DEBIAN_MIRROR} ftp://ftp.hk.debian.org/debian/pool
1225${DEBIAN_MIRROR} ftp://ftp.hu.debian.org/debian/pool
1226${DEBIAN_MIRROR} ftp://ftp.ie.debian.org/debian/pool
1227${DEBIAN_MIRROR} ftp://ftp.it.debian.org/debian/pool
1228${DEBIAN_MIRROR} ftp://ftp.jp.debian.org/debian/pool
1229${DEBIAN_MIRROR} ftp://ftp.no.debian.org/debian/pool
1230${DEBIAN_MIRROR} ftp://ftp.pl.debian.org/debian/pool
1231${DEBIAN_MIRROR} ftp://ftp.ro.debian.org/debian/pool
1232${DEBIAN_MIRROR} ftp://ftp.si.debian.org/debian/pool
1233${DEBIAN_MIRROR} ftp://ftp.es.debian.org/debian/pool
1234${DEBIAN_MIRROR} ftp://ftp.se.debian.org/debian/pool
1235${DEBIAN_MIRROR} ftp://ftp.tr.debian.org/debian/pool
1236${GNU_MIRROR} ftp://mirrors.kernel.org/gnu
1237${GNU_MIRROR} ftp://ftp.matrix.com.br/pub/gnu
1238${GNU_MIRROR} ftp://ftp.cs.ubc.ca/mirror2/gnu
1239${GNU_MIRROR} ftp://sunsite.ust.hk/pub/gnu
1240${GNU_MIRROR} ftp://ftp.ayamura.org/pub/gnu
1241${KERNELORG_MIRROR} http://www.kernel.org/pub
1242${KERNELORG_MIRROR} ftp://ftp.us.kernel.org/pub
1243${KERNELORG_MIRROR} ftp://ftp.uk.kernel.org/pub
1244${KERNELORG_MIRROR} ftp://ftp.hk.kernel.org/pub
1245${KERNELORG_MIRROR} ftp://ftp.au.kernel.org/pub
1246${KERNELORG_MIRROR} ftp://ftp.jp.kernel.org/pub
1247ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
1248ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.surfnet.nl/pub/security/gnupg/
1249ftp://ftp.gnupg.org/gcrypt/ http://gulus.USherbrooke.ca/pub/appl/GnuPG/
1250ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN
1251ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/
1252ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/
1253ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
1254ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
1255ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
1256ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
1257ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/
1258http://ftp.info-zip.org/pub/infozip/src/ http://mirror.switch.ch/ftp/mirror/infozip/src/
1259http://ftp.info-zip.org/pub/infozip/src/ ftp://sunsite.icm.edu.pl/pub/unix/archiving/info-zip/src/
1260ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cerias.purdue.edu/pub/tools/unix/sysutils/lsof/
1261ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tau.ac.il/pub/unix/admin/
1262ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cert.dfn.de/pub/tools/admin/lsof/
1263ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.fu-berlin.de/pub/unix/tools/lsof/
1264ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.kaizo.org/pub/lsof/
1265ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tu-darmstadt.de/pub/sysadmin/lsof/
1266ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tux.org/pub/sites/vic.cc.purdue.edu/tools/unix/lsof/
1267ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://gd.tuwien.ac.at/utils/admin-tools/lsof/
1268ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://sunsite.ualberta.ca/pub/Mirror/lsof/
1269ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://the.wiretapped.net/pub/security/host-security/lsof/
1270http://www.apache.org/dist http://archive.apache.org/dist
1271 556
1272}
557inherit mirrors
1273 558
diff --git a/meta/classes/metadata_scm.bbclass b/meta/classes/metadata_scm.bbclass
new file mode 100644
index 0000000000..4f66011b0d
--- /dev/null
+++ b/meta/classes/metadata_scm.bbclass
@@ -0,0 +1,77 @@
1METADATA_BRANCH ?= "${@base_detect_branch(d)}"
2METADATA_REVISION ?= "${@base_detect_revision(d)}"
3
4def base_detect_revision(d):
5 path = base_get_scmbasepath(d)
6
7 scms = [base_get_metadata_git_revision, \
8 base_get_metadata_svn_revision]
9
10 for scm in scms:
11 rev = scm(path, d)
12 if rev <> "<unknown>":
13 return rev
14
15 return "<unknown>"
16
17def base_detect_branch(d):
18 path = base_get_scmbasepath(d)
19
20 scms = [base_get_metadata_git_branch]
21
22 for scm in scms:
23 rev = scm(path, d)
24 if rev <> "<unknown>":
25 return rev.strip()
26
27 return "<unknown>"
28
29
30
31def base_get_scmbasepath(d):
32 path_to_bbfiles = bb.data.getVar( 'BBFILES', d, 1 ).split()
33 return path_to_bbfiles[0][:path_to_bbfiles[0].rindex( "packages" )]
34
35def base_get_metadata_monotone_branch(path, d):
36 monotone_branch = "<unknown>"
37 try:
38 monotone_branch = file( "%s/_MTN/options" % path ).read().strip()
39 if monotone_branch.startswith( "database" ):
40 monotone_branch_words = monotone_branch.split()
41 monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1]
42 except:
43 pass
44 return monotone_branch
45
46def base_get_metadata_monotone_revision(path, d):
47 monotone_revision = "<unknown>"
48 try:
49 monotone_revision = file( "%s/_MTN/revision" % path ).read().strip()
50 if monotone_revision.startswith( "format_version" ):
51 monotone_revision_words = monotone_revision.split()
52 monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
53 except IOError:
54 pass
55 return monotone_revision
56
57def base_get_metadata_svn_revision(path, d):
58 revision = "<unknown>"
59 try:
60 revision = file( "%s/.svn/entries" % path ).readlines()[3].strip()
61 except IOError:
62 pass
63 return revision
64
65def base_get_metadata_git_branch(path, d):
66 branch = os.popen('cd %s; git branch | grep "^* " | tr -d "* "' % path).read()
67
68 if len(branch) != 0:
69 return branch
70 return "<unknown>"
71
72def base_get_metadata_git_revision(path, d):
73 rev = os.popen("cd %s; git log -n 1 --pretty=oneline --" % path).read().split(" ")[0]
74 if len(rev) != 0:
75 return rev
76 return "<unknown>"
77
diff --git a/meta/classes/mirrors.bbclass b/meta/classes/mirrors.bbclass
new file mode 100644
index 0000000000..9b430abcee
--- /dev/null
+++ b/meta/classes/mirrors.bbclass
@@ -0,0 +1,58 @@
1MIRRORS[func] = "0"
2MIRRORS () {
3${DEBIAN_MIRROR}/main http://snapshot.debian.net/archive/pool
4${DEBIAN_MIRROR} ftp://ftp.de.debian.org/debian/pool
5${DEBIAN_MIRROR} ftp://ftp.au.debian.org/debian/pool
6${DEBIAN_MIRROR} ftp://ftp.cl.debian.org/debian/pool
7${DEBIAN_MIRROR} ftp://ftp.hr.debian.org/debian/pool
8${DEBIAN_MIRROR} ftp://ftp.fi.debian.org/debian/pool
9${DEBIAN_MIRROR} ftp://ftp.hk.debian.org/debian/pool
10${DEBIAN_MIRROR} ftp://ftp.hu.debian.org/debian/pool
11${DEBIAN_MIRROR} ftp://ftp.ie.debian.org/debian/pool
12${DEBIAN_MIRROR} ftp://ftp.it.debian.org/debian/pool
13${DEBIAN_MIRROR} ftp://ftp.jp.debian.org/debian/pool
14${DEBIAN_MIRROR} ftp://ftp.no.debian.org/debian/pool
15${DEBIAN_MIRROR} ftp://ftp.pl.debian.org/debian/pool
16${DEBIAN_MIRROR} ftp://ftp.ro.debian.org/debian/pool
17${DEBIAN_MIRROR} ftp://ftp.si.debian.org/debian/pool
18${DEBIAN_MIRROR} ftp://ftp.es.debian.org/debian/pool
19${DEBIAN_MIRROR} ftp://ftp.se.debian.org/debian/pool
20${DEBIAN_MIRROR} ftp://ftp.tr.debian.org/debian/pool
21${GNU_MIRROR} ftp://mirrors.kernel.org/gnu
22${GNU_MIRROR} ftp://ftp.matrix.com.br/pub/gnu
23${GNU_MIRROR} ftp://ftp.cs.ubc.ca/mirror2/gnu
24${GNU_MIRROR} ftp://sunsite.ust.hk/pub/gnu
25${GNU_MIRROR} ftp://ftp.ayamura.org/pub/gnu
26${KERNELORG_MIRROR} http://www.kernel.org/pub
27${KERNELORG_MIRROR} ftp://ftp.us.kernel.org/pub
28${KERNELORG_MIRROR} ftp://ftp.uk.kernel.org/pub
29${KERNELORG_MIRROR} ftp://ftp.hk.kernel.org/pub
30${KERNELORG_MIRROR} ftp://ftp.au.kernel.org/pub
31${KERNELORG_MIRROR} ftp://ftp.jp.kernel.org/pub
32ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
33ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.surfnet.nl/pub/security/gnupg/
34ftp://ftp.gnupg.org/gcrypt/ http://gulus.USherbrooke.ca/pub/appl/GnuPG/
35ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN
36ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/
37ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/
38ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
39ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
40ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
41ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
42ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/
43http://ftp.info-zip.org/pub/infozip/src/ http://mirror.switch.ch/ftp/mirror/infozip/src/
44http://ftp.info-zip.org/pub/infozip/src/ ftp://sunsite.icm.edu.pl/pub/unix/archiving/info-zip/src/
45ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cerias.purdue.edu/pub/tools/unix/sysutils/lsof/
46ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tau.ac.il/pub/unix/admin/
47ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cert.dfn.de/pub/tools/admin/lsof/
48ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.fu-berlin.de/pub/unix/tools/lsof/
49ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.kaizo.org/pub/lsof/
50ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tu-darmstadt.de/pub/sysadmin/lsof/
51ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tux.org/pub/sites/vic.cc.purdue.edu/tools/unix/lsof/
52ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://gd.tuwien.ac.at/utils/admin-tools/lsof/
53ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://sunsite.ualberta.ca/pub/Mirror/lsof/
54ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://the.wiretapped.net/pub/security/host-security/lsof/
55http://www.apache.org/dist http://archive.apache.org/dist
56
57}
58
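Note on the MIRRORS table above: each line pairs a source URI prefix with an alternate download location, and BitBake's fetcher falls back to the matching mirrors when the primary download fails. A minimal sketch of a recipe hitting the ${GNU_MIRROR} entries; the gawk tarball path is hypothetical, only the mirror mapping itself comes from this class:

    SRC_URI = "${GNU_MIRROR}/gawk/gawk-3.1.5.tar.bz2"
    # on a failed fetch, the same path is retried under each listed mirror prefix,
    # e.g. ftp://mirrors.kernel.org/gnu/gawk/gawk-3.1.5.tar.bz2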
diff --git a/meta/classes/staging.bbclass b/meta/classes/staging.bbclass
new file mode 100644
index 0000000000..e03ed2a5ef
--- /dev/null
+++ b/meta/classes/staging.bbclass
@@ -0,0 +1,146 @@
1python populate_sysroot_prehook () {
2 return
3}
4
5python populate_sysroot_posthook () {
6 return
7}
8
9packagedstaging_fastpath () {
10 :
11}
12
13sysroot_stage_dir() {
14 src="$1"
15 dest="$2"
16 # This will remove empty directories so we can ignore them
17 rmdir "$src" 2> /dev/null || true
18 if [ -d "$src" ]; then
19 mkdir -p "$dest"
20 cp -fpPR "$src"/* "$dest"
21 fi
22}
23
24sysroot_stage_libdir() {
25 src="$1"
26 dest="$2"
27
28 olddir=`pwd`
29 cd $src
30 las=$(find . -name \*.la -type f)
31 cd $olddir
32 echo "Found la files: $las"
33 for i in $las
34 do
35 sed -e 's/^installed=yes$/installed=no/' \
36 -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
37 -e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
38 -i $src/$i
39 done
40 sysroot_stage_dir $src $dest
41}
42
43sysroot_stage_dirs() {
44 from="$1"
45 to="$2"
46
47 sysroot_stage_dir $from${includedir} $to${STAGING_INCDIR}
48 if [ "${BUILD_SYS}" = "${HOST_SYS}" ]; then
49 sysroot_stage_dir $from${bindir} $to${STAGING_DIR_HOST}${bindir}
50 sysroot_stage_dir $from${sbindir} $to${STAGING_DIR_HOST}${sbindir}
51 sysroot_stage_dir $from${base_bindir} $to${STAGING_DIR_HOST}${base_bindir}
52 sysroot_stage_dir $from${base_sbindir} $to${STAGING_DIR_HOST}${base_sbindir}
53 sysroot_stage_dir $from${libexecdir} $to${STAGING_DIR_HOST}${libexecdir}
54 sysroot_stage_dir $from${sysconfdir} $to${STAGING_DIR_HOST}${sysconfdir}
55 fi
56 if [ -d $from${libdir} ]
57 then
58 sysroot_stage_libdir $from/${libdir} $to${STAGING_LIBDIR}
59 fi
60 if [ -d $from${base_libdir} ]
61 then
62 sysroot_stage_libdir $from${base_libdir} $to${STAGING_DIR_HOST}${base_libdir}
63 fi
64 sysroot_stage_dir $from${datadir} $to${STAGING_DATADIR}
65}
66
67sysroot_stage_all() {
68 sysroot_stage_dirs ${D} ${SYSROOT_DESTDIR}
69}
70
71def is_legacy_staging(d):
72 stagefunc = bb.data.getVar('do_stage', d, True)
73 legacy = True
74 if stagefunc is None:
75 legacy = False
76 elif stagefunc.strip() == "use_do_install_for_stage":
77 legacy = False
78 elif stagefunc.strip() == "autotools_stage_all":
79 legacy = False
80 elif stagefunc.strip() == "do_stage_native" and bb.data.getVar('AUTOTOOLS_NATIVE_STAGE_INSTALL', d, 1) == "1":
81 legacy = False
82 elif bb.data.getVar('NATIVE_INSTALL_WORKS', d, 1) == "1":
83 legacy = False
84 return legacy
85
86do_populate_sysroot[dirs] = "${STAGING_DIR_TARGET}/${bindir} ${STAGING_DIR_TARGET}/${libdir} \
87 ${STAGING_DIR_TARGET}/${includedir} \
88 ${STAGING_BINDIR_NATIVE} ${STAGING_LIBDIR_NATIVE} \
89 ${STAGING_INCDIR_NATIVE} \
90 ${STAGING_DATADIR} \
91 ${S} ${B}"
92
93# Could be compile but populate_sysroot and do_install shouldn't run at the same time
94addtask populate_sysroot after do_install
95
96PSTAGING_ACTIVE = "0"
97SYSROOT_PREPROCESS_FUNCS ?= ""
98SYSROOT_DESTDIR = "${WORKDIR}/sysroot-destdir/"
99SYSROOT_LOCK = "${STAGING_DIR}/staging.lock"
100
101
102python do_populate_sysroot () {
103 #
104 # if do_stage exists, we're legacy. In that case run the do_stage,
105 # modify the SYSROOT_DESTDIR variable and then run the staging preprocess
106 # functions against staging directly.
107 #
108 # Otherwise setup a destdir, copy the results from do_install
109 # and run the staging preprocess against that
110 #
111 pstageactive = (bb.data.getVar("PSTAGING_ACTIVE", d, True) == "1")
112 lockfile = bb.data.getVar("SYSROOT_LOCK", d, True)
113 stagefunc = bb.data.getVar('do_stage', d, True)
114 legacy = is_legacy_staging(d)
115 if legacy:
116 bb.data.setVar("SYSROOT_DESTDIR", "", d)
117 bb.note("Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
118 lock = bb.utils.lockfile(lockfile)
119 bb.build.exec_func('populate_sysroot_prehook', d)
120 bb.build.exec_func('do_stage', d)
121 for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
122 bb.build.exec_func(f, d)
123 bb.build.exec_func('populate_sysroot_posthook', d)
124 bb.utils.unlockfile(lock)
125 else:
126 dest = bb.data.getVar('D', d, True)
127 sysrootdest = bb.data.expand('${SYSROOT_DESTDIR}${STAGING_DIR_TARGET}', d)
128 bb.mkdirhier(sysrootdest)
129
130 bb.build.exec_func("sysroot_stage_all", d)
131 #os.system('cp -pPR %s/* %s/' % (dest, sysrootdest))
132 for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
133 bb.build.exec_func(f, d)
134 bb.build.exec_func("packagedstaging_fastpath", d)
135
136 lock = bb.utils.lockfile(lockfile)
137 os.system(bb.data.expand('cp -pPR ${SYSROOT_DESTDIR}${TMPDIR}/* ${TMPDIR}/', d))
138 bb.utils.unlockfile(lock)
139}
140
141python () {
142 if is_legacy_staging(d):
143 bb.note("Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
144}
145
146
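The is_legacy_staging() check above decides how do_populate_sysroot behaves: recipes that still define do_stage go through the legacy path under the staging lock, while recipes without do_stage (or that set the opt-in variables checked above) have their installed files copied from ${D} into ${SYSROOT_DESTDIR} by sysroot_stage_all. A minimal sketch of a new-style recipe fragment, with hypothetical names; only the do_install-driven flow and the NATIVE_INSTALL_WORKS flag come from this class:

    # no do_stage is defined, so is_legacy_staging() returns False and
    # do_populate_sysroot stages from ${D} via sysroot_stage_all
    do_install() {
        install -d ${D}${bindir}
        install -m 0755 mytool ${D}${bindir}/mytool
    }
    # native recipes that still carry a do_stage can opt in with:
    # NATIVE_INSTALL_WORKS = "1"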
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes/utility-tasks.bbclass
new file mode 100644
index 0000000000..d4d47bfa42
--- /dev/null
+++ b/meta/classes/utility-tasks.bbclass
@@ -0,0 +1,97 @@
1addtask listtasks
2do_listtasks[nostamp] = "1"
3python do_listtasks() {
4 import sys
5 # emit variables and shell functions
6 #bb.data.emit_env(sys.__stdout__, d)
7 # emit the metadata which isnt valid shell
8 for e in d.keys():
9 if bb.data.getVarFlag(e, 'task', d):
10 sys.__stdout__.write("%s\n" % e)
11}
12
13addtask clean
14do_clean[dirs] = "${TOPDIR}"
15do_clean[nostamp] = "1"
16python base_do_clean() {
17 """clear the build and temp directories"""
18 dir = bb.data.expand("${WORKDIR}", d)
19 if dir == '//': raise bb.build.FuncFailed("wrong DATADIR")
20 bb.note("removing " + dir)
21 os.system('rm -rf ' + dir)
22
23 dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
24 bb.note("removing " + dir)
25 os.system('rm -f '+ dir)
26}
27
28addtask rebuild after do_${BB_DEFAULT_TASK}
29do_rebuild[dirs] = "${TOPDIR}"
30do_rebuild[nostamp] = "1"
31python base_do_rebuild() {
32 """rebuild a package"""
33}
34
35#addtask mrproper
36#do_mrproper[dirs] = "${TOPDIR}"
37#do_mrproper[nostamp] = "1"
38#python base_do_mrproper() {
39# """clear downloaded sources, build and temp directories"""
40# dir = bb.data.expand("${DL_DIR}", d)
41# if dir == '/': bb.build.FuncFailed("wrong DATADIR")
42# bb.debug(2, "removing " + dir)
43# os.system('rm -rf ' + dir)
44# bb.build.exec_func('do_clean', d)
45#}
46
47addtask checkuri
48do_checkuri[nostamp] = "1"
49python do_checkuri() {
50 import sys
51
52 localdata = bb.data.createCopy(d)
53 bb.data.update_data(localdata)
54
55 src_uri = bb.data.getVar('SRC_URI', localdata, 1)
56
57 try:
58 bb.fetch.init(src_uri.split(),d)
59 except bb.fetch.NoMethodError:
60 (type, value, traceback) = sys.exc_info()
61 raise bb.build.FuncFailed("No method: %s" % value)
62
63 try:
64 bb.fetch.checkstatus(localdata)
65 except bb.fetch.MissingParameterError:
66 (type, value, traceback) = sys.exc_info()
67 raise bb.build.FuncFailed("Missing parameters: %s" % value)
68 except bb.fetch.FetchError:
69 (type, value, traceback) = sys.exc_info()
70 raise bb.build.FuncFailed("Fetch failed: %s" % value)
71 except bb.fetch.MD5SumError:
72 (type, value, traceback) = sys.exc_info()
73 raise bb.build.FuncFailed("MD5 failed: %s" % value)
74 except:
75 (type, value, traceback) = sys.exc_info()
76 raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
77}
78
79addtask checkuriall after do_checkuri
80do_checkuriall[recrdeptask] = "do_checkuri"
81do_checkuriall[nostamp] = "1"
82base_do_checkuriall() {
83 :
84}
85
86addtask fetchall after do_fetch
87do_fetchall[recrdeptask] = "do_fetch"
88base_do_fetchall() {
89 :
90}
91
92addtask buildall after do_build
93do_buildall[recrdeptask] = "do_build"
94base_do_buildall() {
95 :
96}
97
diff --git a/meta/classes/utils.bbclass b/meta/classes/utils.bbclass
new file mode 100644
index 0000000000..c2d323235b
--- /dev/null
+++ b/meta/classes/utils.bbclass
@@ -0,0 +1,340 @@
1# like os.path.join but doesn't treat absolute RHS specially
2def base_path_join(a, *p):
3 path = a
4 for b in p:
5 if path == '' or path.endswith('/'):
6 path += b
7 else:
8 path += '/' + b
9 return path
10
11# for MD5/SHA handling
12def base_chk_load_parser(config_path):
13 import ConfigParser
14 parser = ConfigParser.ConfigParser()
15 if not len(parser.read(config_path)) == 1:
16 bb.note("Can not open the '%s' ini file" % config_path)
17 raise Exception("Can not open the '%s'" % config_path)
18
19 return parser
20
21def base_chk_file(parser, pn, pv, src_uri, localpath, data):
22 no_checksum = False
23 # Try PN-PV-SRC_URI first and then try PN-SRC_URI
24 # we rely on the get method to create errors
25 pn_pv_src = "%s-%s-%s" % (pn,pv,src_uri)
26 pn_src = "%s-%s" % (pn,src_uri)
27 if parser.has_section(pn_pv_src):
28 md5 = parser.get(pn_pv_src, "md5")
29 sha256 = parser.get(pn_pv_src, "sha256")
30 elif parser.has_section(pn_src):
31 md5 = parser.get(pn_src, "md5")
32 sha256 = parser.get(pn_src, "sha256")
33 elif parser.has_section(src_uri):
34 md5 = parser.get(src_uri, "md5")
35 sha256 = parser.get(src_uri, "sha256")
36 else:
37 no_checksum = True
38
39 # md5 and sha256 should be valid now
40 if not os.path.exists(localpath):
41 bb.note("The localpath does not exist '%s'" % localpath)
42 raise Exception("The path does not exist '%s'" % localpath)
43
44
45 # Calculate the MD5 and 256-bit SHA checksums
46 md5data = bb.utils.md5_file(localpath)
47 shadata = bb.utils.sha256_file(localpath)
48
49 # sha256_file() can return None if we are running on Python 2.4 (hashlib is
50 # 2.5 onwards, sha in 2.4 is 160-bit only), so check for this and call the
51 # standalone shasum binary if required.
52 if shadata is None:
53 try:
54 shapipe = os.popen('PATH=%s oe_sha256sum %s' % (bb.data.getVar('PATH', data, True), localpath))
55 shadata = (shapipe.readline().split() or [ "" ])[0]
56 shapipe.close()
57 except OSError:
58 raise Exception("Executing shasum failed, please build shasum-native")
59
60 if no_checksum == True: # we do not have conf/checksums.ini entry
61 try:
62 file = open("%s/checksums.ini" % bb.data.getVar("TMPDIR", data, 1), "a")
63 except:
64 return False
65
66 if not file:
67 raise Exception("Creating checksums.ini failed")
68
69 file.write("[%s]\nmd5=%s\nsha256=%s\n\n" % (src_uri, md5data, shadata))
70 file.close()
71 return False
72
73 if not md5 == md5data:
74 bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (md5,md5data))
75 raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (md5, md5data))
76
77 if not sha256 == shadata:
78 bb.note("The SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256,shadata))
79 raise Exception("SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256, shadata))
80
81 return True
82
83def base_read_file(filename):
84 try:
85 f = file( filename, "r" )
86 except IOError, reason:
87 return "" # WARNING: can't raise an error now because of the new RDEPENDS handling. This is a bit ugly. :M:
88 else:
89 return f.read().strip()
90 return None
91
92def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
93 if bb.data.getVar(variable,d,1) == checkvalue:
94 return truevalue
95 else:
96 return falsevalue
97
98def base_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
99 if float(bb.data.getVar(variable,d,1)) <= float(checkvalue):
100 return truevalue
101 else:
102 return falsevalue
103
104def base_version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
105 result = bb.vercmp(bb.data.getVar(variable,d,True), checkvalue)
106 if result <= 0:
107 return truevalue
108 else:
109 return falsevalue
110
111def base_contains(variable, checkvalues, truevalue, falsevalue, d):
112 matches = 0
113 if type(checkvalues).__name__ == "str":
114 checkvalues = [checkvalues]
115 for value in checkvalues:
116 if bb.data.getVar(variable,d,1).find(value) != -1:
117 matches = matches + 1
118 if matches == len(checkvalues):
119 return truevalue
120 return falsevalue
121
122def base_both_contain(variable1, variable2, checkvalue, d):
123 if bb.data.getVar(variable1,d,1).find(checkvalue) != -1 and bb.data.getVar(variable2,d,1).find(checkvalue) != -1:
124 return checkvalue
125 else:
126 return ""
127
128def base_prune_suffix(var, suffixes, d):
129 # See if var ends with any of the suffixes listed and
130 # remove it if found
131 for suffix in suffixes:
132 if var.endswith(suffix):
133 return var.replace(suffix, "")
134 return var
135
136def oe_filter(f, str, d):
137 from re import match
138 return " ".join(filter(lambda x: match(f, x, 0), str.split()))
139
140def oe_filter_out(f, str, d):
141 from re import match
142 return " ".join(filter(lambda x: not match(f, x, 0), str.split()))
143
144oe_soinstall() {
145 # Purpose: Install shared library file and
146 # create the necessary links
147 # Example:
148 #
149 # oe_
150 #
151 #oenote installing shared library $1 to $2
152 #
153 libname=`basename $1`
154 install -m 755 $1 $2/$libname
155 sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
156 solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
157 ln -sf $libname $2/$sonamelink
158 ln -sf $libname $2/$solink
159}
160
161oe_libinstall() {
162 # Purpose: Install a library, in all its forms
163 # Example
164 #
165 # oe_libinstall libltdl ${STAGING_LIBDIR}/
166 # oe_libinstall -C src/libblah libblah ${D}/${libdir}/
167 dir=""
168 libtool=""
169 silent=""
170 require_static=""
171 require_shared=""
172 staging_install=""
173 while [ "$#" -gt 0 ]; do
174 case "$1" in
175 -C)
176 shift
177 dir="$1"
178 ;;
179 -s)
180 silent=1
181 ;;
182 -a)
183 require_static=1
184 ;;
185 -so)
186 require_shared=1
187 ;;
188 -*)
189 oefatal "oe_libinstall: unknown option: $1"
190 ;;
191 *)
192 break;
193 ;;
194 esac
195 shift
196 done
197
198 libname="$1"
199 shift
200 destpath="$1"
201 if [ -z "$destpath" ]; then
202 oefatal "oe_libinstall: no destination path specified"
203 fi
204 if echo "$destpath/" | egrep '^${STAGING_LIBDIR}/' >/dev/null
205 then
206 staging_install=1
207 fi
208
209 __runcmd () {
210 if [ -z "$silent" ]; then
211 echo >&2 "oe_libinstall: $*"
212 fi
213 $*
214 }
215
216 if [ -z "$dir" ]; then
217 dir=`pwd`
218 fi
219
220 dotlai=$libname.lai
221
222 # Sanity check that the libname.lai is unique
223 number_of_files=`(cd $dir; find . -name "$dotlai") | wc -l`
224 if [ $number_of_files -gt 1 ]; then
225 oefatal "oe_libinstall: $dotlai is not unique in $dir"
226 fi
227
228
229 dir=$dir`(cd $dir;find . -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
230 olddir=`pwd`
231 __runcmd cd $dir
232
233 lafile=$libname.la
234
235 # If such file doesn't exist, try to cut version suffix
236 if [ ! -f "$lafile" ]; then
237 libname1=`echo "$libname" | sed 's/-[0-9.]*$//'`
238 lafile1=$libname.la
239 if [ -f "$lafile1" ]; then
240 libname=$libname1
241 lafile=$lafile1
242 fi
243 fi
244
245 if [ -f "$lafile" ]; then
246 # libtool archive
247 eval `cat $lafile|grep "^library_names="`
248 libtool=1
249 else
250 library_names="$libname.so* $libname.dll.a"
251 fi
252
253 __runcmd install -d $destpath/
254 dota=$libname.a
255 if [ -f "$dota" -o -n "$require_static" ]; then
256 rm -f $destpath/$dota
257 __runcmd install -m 0644 $dota $destpath/
258 fi
259 if [ -f "$dotlai" -a -n "$libtool" ]; then
260 if test -n "$staging_install"
261 then
262 # stop libtool using the final directory name for libraries
263 # in staging:
264 __runcmd rm -f $destpath/$libname.la
265 __runcmd sed -e 's/^installed=yes$/installed=no/' \
266 -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
267 -e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
268 $dotlai >$destpath/$libname.la
269 else
270 rm -f $destpath/$libname.la
271 __runcmd install -m 0644 $dotlai $destpath/$libname.la
272 fi
273 fi
274
275 for name in $library_names; do
276 files=`eval echo $name`
277 for f in $files; do
278 if [ ! -e "$f" ]; then
279 if [ -n "$libtool" ]; then
280 oefatal "oe_libinstall: $dir/$f not found."
281 fi
282 elif [ -L "$f" ]; then
283 __runcmd cp -P "$f" $destpath/
284 elif [ ! -L "$f" ]; then
285 libfile="$f"
286 rm -f $destpath/$libfile
287 __runcmd install -m 0755 $libfile $destpath/
288 fi
289 done
290 done
291
292 if [ -z "$libfile" ]; then
293 if [ -n "$require_shared" ]; then
294 oefatal "oe_libinstall: unable to locate shared library"
295 fi
296 elif [ -z "$libtool" ]; then
297 # special case hack for non-libtool .so.#.#.# links
298 baselibfile=`basename "$libfile"`
299 if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
300 sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
301 solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
302 if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
303 __runcmd ln -sf $baselibfile $destpath/$sonamelink
304 fi
305 __runcmd ln -sf $baselibfile $destpath/$solink
306 fi
307 fi
308
309 __runcmd cd "$olddir"
310}
311
312oe_machinstall() {
313 # Purpose: Install machine dependent files, if available
314 # If not available, check if there is a default
315 # If no default, just touch the destination
316 # Example:
317 # $1 $2 $3 $4
318 # oe_machinstall -m 0644 fstab ${D}/etc/fstab
319 #
320 # TODO: Check argument number?
321 #
322 filename=`basename $3`
323 dirname=`dirname $3`
324
325 for o in `echo ${OVERRIDES} | tr ':' ' '`; do
326 if [ -e $dirname/$o/$filename ]; then
327 oenote $dirname/$o/$filename present, installing to $4
328 install $1 $2 $dirname/$o/$filename $4
329 return
330 fi
331 done
332# oenote overrides specific file NOT present, trying default=$3...
333 if [ -e $3 ]; then
334 oenote $3 present, installing to $4
335 install $1 $2 $3 $4
336 else
337 oenote $3 NOT present, touching empty $4
338 touch $4
339 fi
340}
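The base_chk_load_parser()/base_chk_file() pair above implements the checksums.ini lookup used by do_fetch: sections are tried as "${PN}-${PV}-<src_uri>", then "${PN}-<src_uri>", then the bare URI, and each section is expected to carry md5= and sha256= keys; when no section matches, an entry in the bare-URI form is appended to ${TMPDIR}/checksums.ini. A sketch of what such an entry looks like; the URI is hypothetical and the digests are placeholders:

    [http://www.example.org/zlib-1.2.3.tar.bz2]
    md5=<md5sum of the downloaded file>
    sha256=<sha256sum of the downloaded file>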
diff --git a/meta/conf/bitbake.conf b/meta/conf/bitbake.conf
index 1341349c4a..1745688fa8 100644
--- a/meta/conf/bitbake.conf
+++ b/meta/conf/bitbake.conf
@@ -676,3 +676,18 @@ COMBINED_FEATURES = "\
676 ${@base_both_contain("DISTRO_FEATURES", "MACHINE_FEATURES", "usbgadget", d)} \ 676 ${@base_both_contain("DISTRO_FEATURES", "MACHINE_FEATURES", "usbgadget", d)} \
677 ${@base_both_contain("DISTRO_FEATURES", "MACHINE_FEATURES", "usbhost", d)} \ 677 ${@base_both_contain("DISTRO_FEATURES", "MACHINE_FEATURES", "usbhost", d)} \
678 ${@base_both_contain("DISTRO_FEATURES", "MACHINE_FEATURES", "wifi", d)}" 678 ${@base_both_contain("DISTRO_FEATURES", "MACHINE_FEATURES", "wifi", d)}"
679
680
681# Make sure MACHINE isn't exported
682# (breaks binutils at least)
683MACHINE[unexport] = "1"
684
685# Make sure TARGET_ARCH isn't exported
686# (breaks Makefiles using implicit rules, e.g. quilt, as GNU make has this
687# in them, undocumented)
688TARGET_ARCH[unexport] = "1"
689
690# Make sure DISTRO isn't exported
691# (breaks sysvinit at least)
692DISTRO[unexport] = "1"
693
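The COMBINED_FEATURES expression in the context above relies on base_both_contain() from the new utils.bbclass: it returns the feature name only when it occurs in both variables and an empty string otherwise, so COMBINED_FEATURES ends up holding the features common to DISTRO_FEATURES and MACHINE_FEATURES. A worked example with hypothetical feature lists:

    DISTRO_FEATURES  = "usbhost wifi bluetooth"
    MACHINE_FEATURES = "usbhost screen"
    # ${@base_both_contain("DISTRO_FEATURES", "MACHINE_FEATURES", "usbhost", d)} -> "usbhost"
    # ${@base_both_contain("DISTRO_FEATURES", "MACHINE_FEATURES", "wifi", d)}    -> "" (only in DISTRO_FEATURES)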