Diffstat (limited to 'meta')
-rw-r--r--  meta/classes/base.bbclass          |  20
-rw-r--r--  meta/classes/cpan.bbclass          |   3
-rw-r--r--  meta/classes/efl.bbclass           |   2
-rw-r--r--  meta/classes/icecc.bbclass         | 120
-rw-r--r--  meta/classes/kernel-arch.bbclass   |   1
-rw-r--r--  meta/classes/kernel.bbclass        |  21
-rw-r--r--  meta/classes/module.bbclass        |   2
-rw-r--r--  meta/classes/opie.bbclass          |   2
-rw-r--r--  meta/classes/pkgconfig.bbclass     |   2
-rw-r--r--  meta/classes/sanity.bbclass        |   9
-rw-r--r--  meta/classes/sdl.bbclass           |   2
-rw-r--r--  meta/classes/tinderclient.bbclass  |  48
12 files changed, 155 insertions, 77 deletions
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index 0081a52d53..546992e71f 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -64,7 +64,7 @@ def base_set_filespath(path, d):
         overrides = overrides + ":"
         for o in overrides.split(":"):
             filespath.append(os.path.join(p, o))
-    bb.data.setVar("FILESPATH", ":".join(filespath), d)
+    return ":".join(filespath)
 
 FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"
 
@@ -187,7 +187,7 @@ oe_libinstall() {
         dir=`pwd`
     fi
     dotlai=$libname.lai
-    dir=$dir`(cd $dir; find -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
+    dir=$dir`(cd $dir;find . -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
     olddir=`pwd`
     __runcmd cd $dir
 
@@ -413,10 +413,10 @@ def oe_unpack_file(file, data, url = None):
             destdir = "."
         bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
         cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
+
     if not cmd:
         return True
 
-
     dest = os.path.join(os.getcwd(), os.path.basename(file))
     if os.path.exists(dest):
         if os.path.samefile(file, dest):
@@ -478,7 +478,8 @@ python base_eventhandler() {
         msg += messages.get(name[5:]) or name[5:]
     elif name == "UnsatisfiedDep":
         msg += "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
-    note(msg)
+    if msg:
+        note(msg)
 
     if name.startswith("BuildStarted"):
         bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
@@ -486,7 +487,7 @@ python base_eventhandler() {
         path_to_packages = path_to_bbfiles[:path_to_bbfiles.rindex( "packages" )]
         monotone_revision = "<unknown>"
         try:
-            monotone_revision = file( "%s/MT/revision" % path_to_packages ).read().strip()
+            monotone_revision = file( "%s/_MTN/revision" % path_to_packages ).read().strip()
         except IOError:
             pass
         bb.data.setVar( 'OE_REVISION', monotone_revision, e.data )
@@ -519,6 +520,7 @@ python base_eventhandler() {
 addtask configure after do_unpack do_patch
 do_configure[dirs] = "${S} ${B}"
 do_configure[bbdepcmd] = "do_populate_staging"
+do_configure[deptask] = "do_populate_staging"
 base_do_configure() {
     :
 }
@@ -645,7 +647,7 @@ python __anonymous () {
     if need_machine:
         import re
         this_machine = bb.data.getVar('MACHINE', d, 1)
-        if not re.match(need_machine, this_machine):
+        if this_machine and not re.match(need_machine, this_machine):
             raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
 
     pn = bb.data.getVar('PN', d, 1)
@@ -719,12 +721,18 @@ ftp://ftp.kernel.org/pub ftp://ftp.jp.kernel.org/pub
 ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
 ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.surfnet.nl/pub/security/gnupg/
 ftp://ftp.gnupg.org/gcrypt/ http://gulus.USherbrooke.ca/pub/appl/GnuPG/
+ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN
+ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/
+ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/
 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
 ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
 ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/
 
+
+
 ftp://.*/.*/ http://www.oesources.org/source/current/
 http://.*/.*/ http://www.oesources.org/source/current/
 }
+
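Note on the base.bbclass hunk above: base_set_filespath() now returns the joined search path instead of writing FILESPATH through bb.data.setVar(), and the inline-Python FILESPATH assignment picks the value up on expansion. A self-contained sketch of the same construction, with plain arguments standing in for the OVERRIDES and FILE_DIRNAME values the class reads from the datastore (the argument names here are illustrative, not the class API):

    # Sketch (not the class itself): build the FILESPATH search list the way
    # the return-based helper does. "overrides" stands in for OVERRIDES,
    # "path" for the FILE_DIRNAME-derived directories the class passes in.
    import os

    def base_set_filespath(path, overrides=""):
        filespath = []
        overrides = overrides + ":"   # trailing ":" also yields the bare directory
        for p in path:
            for o in overrides.split(":"):
                filespath.append(os.path.join(p, o))
        return ":".join(filespath)

    # e.g. a recipe "foo" with OVERRIDES = "arm:angstrom"
    print(base_set_filespath(["foo-1.0", "foo", "files", "."], "arm:angstrom"))
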
diff --git a/meta/classes/cpan.bbclass b/meta/classes/cpan.bbclass
index 853abfd1b3..78b902f85d 100644
--- a/meta/classes/cpan.bbclass
+++ b/meta/classes/cpan.bbclass
@@ -1,7 +1,8 @@
 FILES_${PN} += '${libdir}/perl5'
+EXTRA_CPANFLAGS = ""
 
 cpan_do_configure () {
-    perl Makefile.PL
+    perl Makefile.PL ${EXTRA_CPANFLAGS}
     if [ "${BUILD_SYS}" != "${HOST_SYS}" ]; then
         . ${STAGING_DIR}/${TARGET_SYS}/perl/config.sh
         sed -e "s:\(SITELIBEXP = \).*:\1${sitelibexp}:; s:\(SITEARCHEXP = \).*:\1${sitearchexp}:; s:\(INSTALLVENDORLIB = \).*:\1${D}${libdir}/perl5:; s:\(INSTALLVENDORARCH = \).*:\1${D}${libdir}/perl5:" < Makefile > Makefile.new
diff --git a/meta/classes/efl.bbclass b/meta/classes/efl.bbclass
index 9c490284c2..c258758d30 100644
--- a/meta/classes/efl.bbclass
+++ b/meta/classes/efl.bbclass
@@ -44,6 +44,6 @@ do_stage_append () {
 }
 
 PACKAGES = "${PN} ${PN}-themes ${PN}-dev ${PN}-examples"
-FILES_${PN}-dev = "${bindir}/${PN}-config ${libdir}/pkgconfig ${libdir}/lib*.?a ${libdir}/lib*.a"
+FILES_${PN}-dev = "${bindir}/${PN}-config ${libdir}/pkgconfig ${libdir}/lib*.?a ${libdir}/lib*.a ${includedir}"
 FILES_${PN}-examples = "${bindir} ${datadir}"
 
diff --git a/meta/classes/icecc.bbclass b/meta/classes/icecc.bbclass
index 7dfcfc29a4..66a5bf79e3 100644
--- a/meta/classes/icecc.bbclass
+++ b/meta/classes/icecc.bbclass
@@ -1,9 +1,17 @@
 # IceCream distributed compiling support
 #
 # We need to create a tar.bz2 of our toolchain and set
 # ICECC_VERSION, ICECC_CXX and ICEC_CC
 #
 
+def icc_determine_gcc_version(gcc):
+    """
+    Hack to determine the version of GCC
+
+    'i686-apple-darwin8-gcc-4.0.1 (GCC) 4.0.1 (Apple Computer, Inc. build 5363)'
+    """
+    return os.popen("%s --version" % gcc ).readline()[2]
+
 def create_env(bb,d):
     """
     Create a tar.bz of the current toolchain
@@ -13,7 +21,7 @@ def create_env(bb,d):
     # host prefix is empty (let us duplicate the query for ease)
     prefix = bb.data.expand('${HOST_PREFIX}', d)
     if len(prefix) == 0:
         return ""
 
     import tarfile
     import socket
@@ -23,51 +31,66 @@ def create_env(bb,d):
     prefix = bb.data.expand('${HOST_PREFIX}' , d)
     distro = bb.data.expand('${DISTRO}', d)
     target_sys = bb.data.expand('${TARGET_SYS}', d)
-    #float = bb.data.getVar('${TARGET_FPU}', d)
-    float = "anyfloat"
+    float = bb.data.getVar('${TARGET_FPU}', d) or "hard"
     name = socket.gethostname()
 
+    # Stupid check to determine if we have built a libc and a cross
+    # compiler.
     try:
-        os.stat(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2')
-        os.stat(ice_dir + '/' + target_sys + '/bin/g++')
+        os.stat(os.path.join(ice_dir, target_sys, 'lib', 'ld-linux.so.2'))
+        os.stat(os.path.join(ice_dir, target_sys, 'bin', 'g++'))
     except:
         return ""
 
-    VERSION = '3.4.3'
+    VERSION = icc_determine_gcc_version( os.path.join(ice_dir,target_sys,"bin","g++") )
     cross_name = prefix + distro + target_sys + float +VERSION+ name
-    tar_file = ice_dir + '/ice/' + cross_name + '.tar.bz2'
+    tar_file = os.path.join(ice_dir, 'ice', cross_name + '.tar.bz2')
 
     try:
         os.stat(tar_file)
         return tar_file
     except:
         try:
-            os.makedirs(ice_dir+'/ice')
+            os.makedirs(os.path.join(ice_dir,'ice'))
         except:
             pass
 
     # FIXME find out the version of the compiler
+    # Consider using -print-prog-name={cc1,cc1plus}
+    # and -print-file-name=specs
+
+    # We will use the GCC to tell us which tools to use
+    # What we need is:
+    # -gcc
+    # -g++
+    # -as
+    # -cc1
+    # -cc1plus
+    # and we add them to /usr/bin
+
     tar = tarfile.open(tar_file, 'w:bz2')
-    tar.add(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2',
-        target_sys + 'cross/lib/ld-linux.so.2')
-    tar.add(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2',
-        target_sys + 'cross/lib/ld-2.3.3.so')
-    tar.add(ice_dir + '/' + target_sys + '/lib/libc-2.3.3.so',
-        target_sys + 'cross/lib/libc-2.3.3.so')
-    tar.add(ice_dir + '/' + target_sys + '/lib/libc.so.6',
-        target_sys + 'cross/lib/libc.so.6')
-    tar.add(ice_dir + '/' + target_sys + '/bin/gcc',
-        target_sys + 'cross/usr/bin/gcc')
-    tar.add(ice_dir + '/' + target_sys + '/bin/g++',
-        target_sys + 'cross/usr/bin/g++')
-    tar.add(ice_dir + '/' + target_sys + '/bin/as',
-        target_sys + 'cross/usr/bin/as')
-    tar.add(ice_dir + '/lib/gcc/' + target_sys +'/'+ VERSION + '/specs',
-        target_sys+'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/specs')
-    tar.add(ice_dir + '/libexec/gcc/'+target_sys+'/' + VERSION + '/cc1',
-        target_sys + 'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/cc1')
-    tar.add(ice_dir + '/libexec/gcc/arm-linux/' + VERSION + '/cc1plus',
-        target_sys+'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/cc1plus')
+
+    # Now add the required files
+    tar.add(os.path.join(ice_dir,target_sys,'bin','gcc'),
+        os.path.join("usr","bin","gcc") )
+    tar.add(os.path.join(ice_dir,target_sys,'bin','g++'),
+        os.path.join("usr","bin","g++") )
+    tar.add(os.path.join(ice_dir,target_sys,'bin','as'),
+        os.path.join("usr","bin","as") )
+
+    # Now let us find cc1 and cc1plus
+    cc1 = os.popen("%s -print-prog-name=cc1" % data.getVar('CC', d, True)).read()[:-1]
+    cc1plus = os.popen("%s -print-prog-name=cc1plus" % data.getVar('CC', d, True)).read()[:-1]
+    spec = os.popen("%s -print-file-name=specs" % data.getVar('CC', d, True)).read()[:-1]
+
+    # CC1 and CC1PLUS should be there...
+    tar.add(cc1, os.path.join('usr', 'bin', 'cc1'))
+    tar.add(cc1plus, os.path.join('usr', 'bin', 'cc1plus'))
+
+    # spec - if it exists
+    if os.path.exists(spec):
+        tar.add(spec)
+
     tar.close()
     return tar_file
 
@@ -78,7 +101,7 @@ def create_path(compilers, type, bb, d):
78 """ 101 """
79 import os 102 import os
80 103
81 staging = bb.data.expand('${STAGING_DIR}', d) + "/ice/" + type 104 staging = os.path.join(bb.data.expand('${STAGING_DIR}', d), "ice", type)
82 icecc = bb.data.getVar('ICECC_PATH', d) 105 icecc = bb.data.getVar('ICECC_PATH', d)
83 106
84 # Create the dir if necessary 107 # Create the dir if necessary
@@ -89,7 +112,7 @@ def create_path(compilers, type, bb, d):
 
 
     for compiler in compilers:
-        gcc_path = staging + "/" + compiler
+        gcc_path = os.path.join(staging, compiler)
         try:
             os.stat(gcc_path)
         except:
@@ -102,15 +125,14 @@ def use_icc_version(bb,d):
     # Constin native native
     prefix = bb.data.expand('${HOST_PREFIX}', d)
     if len(prefix) == 0:
         return "no"
 
 
-    native = bb.data.expand('${PN}', d)
-    blacklist = [ "-cross", "-native" ]
+    blacklist = [ "cross", "native" ]
 
     for black in blacklist:
-        if black in native:
+        if bb.data.inherits_class(black, d):
             return "no"
 
     return "yes"
 
@@ -118,13 +140,13 @@ def icc_path(bb,d,compile):
     native = bb.data.expand('${PN}', d)
     blacklist = [ "ulibc", "glibc", "ncurses" ]
     for black in blacklist:
         if black in native:
             return ""
 
-    if "-native" in native:
-        compile = False
-    if "-cross" in native:
-        compile = False
+    blacklist = [ "cross", "native" ]
+    for black in blacklist:
+        if bb.data.inherits_class(black, d):
+            compile = False
 
     prefix = bb.data.expand('${HOST_PREFIX}', d)
     if compile and len(prefix) != 0:
@@ -151,6 +173,6 @@ do_compile_prepend() {
     export ICECC_CXX="${HOST_PREFIX}g++"
 
     if [ "${@use_icc_version(bb,d)}" = "yes" ]; then
         export ICECC_VERSION="${@icc_version(bb,d)}"
     fi
 }
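Note on the icecc.bbclass rewrite above: instead of hardcoding a GCC 3.4.3 layout, create_env() now asks the compiler driver itself where cc1, cc1plus and the specs file live, then packs them into usr/bin of the toolchain tarball. A rough standalone sketch of that discovery step, assuming only a gcc-like driver on PATH (subprocess stands in for the class's os.popen calls, and ${CC} is replaced by a plain "gcc"):

    # Sketch: query the compiler driver for its internal tools, as the new
    # create_env() does with ${CC}. Paths are whatever the local gcc reports.
    import os
    import subprocess

    def query(compiler, flag):
        # flag is e.g. "-print-prog-name=cc1" or "-print-file-name=specs"
        return subprocess.run([compiler, flag], capture_output=True,
                              text=True).stdout.strip()

    cc = "gcc"                                   # the class uses ${CC}
    cc1 = query(cc, "-print-prog-name=cc1")
    cc1plus = query(cc, "-print-prog-name=cc1plus")
    spec = query(cc, "-print-file-name=specs")

    for name, path in (("cc1", cc1), ("cc1plus", cc1plus), ("specs", spec)):
        # newer compilers may ship no specs file at all, hence the existence check
        print(name, path, os.path.exists(path))
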
diff --git a/meta/classes/kernel-arch.bbclass b/meta/classes/kernel-arch.bbclass
index 92a6c982fb..b331d25614 100644
--- a/meta/classes/kernel-arch.bbclass
+++ b/meta/classes/kernel-arch.bbclass
@@ -19,6 +19,7 @@ def map_kernel_arch(a, d):
     elif re.match('armeb$', a):   return 'arm'
     elif re.match('powerpc$', a): return 'ppc'
     elif re.match('mipsel$', a):  return 'mips'
+    elif re.match('sh(3|4)$', a): return 'sh'
     elif a in valid_archs:        return a
     else:
         bb.error("cannot map '%s' to a linux kernel architecture" % a)
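Note on the kernel-arch.bbclass hunk above: the added case folds both SH variants onto the kernel's sh architecture directory, next to the existing armeb/powerpc/mipsel mappings. A trimmed sketch of the mapping for reference (valid_archs is reduced to a few example entries; the real class derives it elsewhere, and only the cases visible in this hunk are shown):

    # Sketch of the TARGET_ARCH -> kernel ARCH mapping with the new sh case.
    import re

    valid_archs = ["arm", "ppc", "mips", "sparc"]   # example subset only

    def map_kernel_arch(a):
        if re.match('armeb$', a):     return 'arm'
        elif re.match('powerpc$', a): return 'ppc'
        elif re.match('mipsel$', a):  return 'mips'
        elif re.match('sh(3|4)$', a): return 'sh'
        elif a in valid_archs:        return a
        else:
            raise ValueError("cannot map '%s' to a linux kernel architecture" % a)

    print(map_kernel_arch('sh4'))     # -> sh
    print(map_kernel_arch('armeb'))   # -> arm
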
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass
index ad51c4e035..c81112ede7 100644
--- a/meta/classes/kernel.bbclass
+++ b/meta/classes/kernel.bbclass
@@ -109,6 +109,21 @@ kernel_do_stage() {
     mkdir -p ${STAGING_KERNEL_DIR}/include/pcmcia
     cp -fR include/pcmcia/* ${STAGING_KERNEL_DIR}/include/pcmcia/
 
+    if [ -d drivers/crypto ]; then
+        mkdir -p ${STAGING_KERNEL_DIR}/drivers/crypto
+        cp -fR drivers/crypto/* ${STAGING_KERNEL_DIR}/drivers/crypto/
+    fi
+
+    if [ -d include/media ]; then
+        mkdir -p ${STAGING_KERNEL_DIR}/include/media
+        cp -fR include/media/* ${STAGING_KERNEL_DIR}/include/media/
+    fi
+
+    if [ -d include/acpi ]; then
+        mkdir -p ${STAGING_KERNEL_DIR}/include/acpi
+        cp -fR include/acpi/* ${STAGING_KERNEL_DIR}/include/acpi/
+    fi
+
     if [ -d include/sound ]; then
         mkdir -p ${STAGING_KERNEL_DIR}/include/sound
         cp -fR include/sound/* ${STAGING_KERNEL_DIR}/include/sound/
@@ -133,7 +148,7 @@ kernel_do_stage() {
     # Check if arch/${ARCH}/Makefile exists and install it
     if [ -e arch/${ARCH}/Makefile ]; then
         install -d ${STAGING_KERNEL_DIR}/arch/${ARCH}
-        install -m 0644 arch/${ARCH}/Makefile ${STAGING_KERNEL_DIR}/arch/${ARCH}
+        install -m 0644 arch/${ARCH}/Makefile* ${STAGING_KERNEL_DIR}/arch/${ARCH}
     fi
     cp -fR include/config* ${STAGING_KERNEL_DIR}/include/
     install -m 0644 ${KERNEL_OUTPUT} ${STAGING_KERNEL_DIR}/${KERNEL_IMAGETYPE}
@@ -199,7 +214,7 @@ fi
 if [ -n "$D" ]; then
     ${HOST_PREFIX}depmod-${KERNEL_MAJOR_VERSION} -A -b $D -F ${STAGING_KERNEL_DIR}/System.map-${KERNEL_RELEASE} ${KERNEL_VERSION}
 else
-    depmod -A
+    depmod -a
 fi
 }
 
@@ -207,7 +222,7 @@ pkg_postinst_modules () {
 if [ -n "$D" ]; then
     ${HOST_PREFIX}depmod-${KERNEL_MAJOR_VERSION} -A -b $D -F ${STAGING_KERNEL_DIR}/System.map-${KERNEL_RELEASE} ${KERNEL_VERSION}
 else
-    depmod -A
+    depmod -a
     update-modules || true
 fi
 }
diff --git a/meta/classes/module.bbclass b/meta/classes/module.bbclass
index 8a13f1f858..6089f90462 100644
--- a/meta/classes/module.bbclass
+++ b/meta/classes/module.bbclass
@@ -38,7 +38,7 @@ pkg_postinst_append () {
     if [ -n "$D" ]; then
         exit 1
     fi
-    depmod -A
+    depmod -a
     update-modules || true
 }
 
diff --git a/meta/classes/opie.bbclass b/meta/classes/opie.bbclass
index 47f364a644..6430d46d6a 100644
--- a/meta/classes/opie.bbclass
+++ b/meta/classes/opie.bbclass
@@ -18,7 +18,7 @@ inherit palmtop
 # Note that when CVS changes to 1.2.2, the dash
 # should be removed from OPIE_CVS_PV to convert
 # to the standardised version format
-OPIE_CVS_PV = "1.2.1+cvs-${SRCDATE}"
+OPIE_CVS_PV = "1.2.2+cvs-${SRCDATE}"
 
 DEPENDS_prepend = "${@["libopie2 ", ""][(bb.data.getVar('PN', d, 1) == 'libopie2')]}"
 
diff --git a/meta/classes/pkgconfig.bbclass b/meta/classes/pkgconfig.bbclass
index 62f15f312d..f2054b0b07 100644
--- a/meta/classes/pkgconfig.bbclass
+++ b/meta/classes/pkgconfig.bbclass
@@ -20,7 +20,7 @@ def get_pkgconfig_mangle(d):
     return s
 
 do_stage_append () {
-    for pc in `find ${S} -name '*.pc' | grep -v -- '-uninstalled.pc$'`; do
+    for pc in `find ${S} -name '*.pc' -type f | grep -v -- '-uninstalled.pc$'`; do
         pcname=`basename $pc`
         install -d ${PKG_CONFIG_PATH}
         cat $pc | sed ${@get_pkgconfig_mangle(d)} > ${PKG_CONFIG_PATH}/$pcname
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass
index a626162ffb..91ca9865fd 100644
--- a/meta/classes/sanity.bbclass
+++ b/meta/classes/sanity.bbclass
@@ -64,13 +64,14 @@ def check_sanity(e):
64 if "diffstat-native" not in data.getVar('ASSUME_PROVIDED', e.data, True).split(): 64 if "diffstat-native" not in data.getVar('ASSUME_PROVIDED', e.data, True).split():
65 raise_sanity_error('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf') 65 raise_sanity_error('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf')
66 66
67 # Check the MACHINE is valid 67 # Check that the MACHINE is valid
68 if not check_conf_exists("conf/machine/${MACHINE}.conf", e.data): 68 if not check_conf_exists("conf/machine/${MACHINE}.conf", e.data):
69 raise_sanity_error('Please set a valid MACHINE in your local.conf') 69 raise_sanity_error('Please set a valid MACHINE in your local.conf')
70 70
71 # Check the distro is valid 71 # Check that the DISTRO is valid
72 if not check_conf_exists("conf/distro/${DISTRO}.conf", e.data): 72 # need to take into account DISTRO renaming DISTRO
73 raise_sanity_error('Please set a valid DISTRO in your local.conf') 73 if not ( check_conf_exists("conf/distro/${DISTRO}.conf", e.data) or check_conf_exists("conf/distro/include/${DISTRO}.inc", e.data) ):
74 raise_sanity_error("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf" % data.getVar("DISTRO", e.data, True ))
74 75
75 if not check_app_exists("${MAKE}", e.data): 76 if not check_app_exists("${MAKE}", e.data):
76 raise_sanity_error('GNU make missing. Please install GNU make') 77 raise_sanity_error('GNU make missing. Please install GNU make')
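Note on the sanity.bbclass hunk above: a DISTRO now passes the check if either conf/distro/${DISTRO}.conf or conf/distro/include/${DISTRO}.inc can be found, so renamed distro configurations that only ship an include file no longer abort the build. A minimal sketch of that lookup; check_conf_exists() is not part of this patch, so the stand-in below simply walks a list of layer directories instead of using the datastore:

    # Sketch: accept a distro if its .conf or its include/.inc exists under
    # any of the given search directories (stand-in for check_conf_exists).
    import os

    def check_conf_exists(relpath, search_dirs):
        return any(os.path.exists(os.path.join(d, relpath)) for d in search_dirs)

    def distro_is_valid(distro, search_dirs):
        return (check_conf_exists("conf/distro/%s.conf" % distro, search_dirs) or
                check_conf_exists("conf/distro/include/%s.inc" % distro, search_dirs))

    print(distro_is_valid("some-distro", ["/path/to/openembedded"]))
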
diff --git a/meta/classes/sdl.bbclass b/meta/classes/sdl.bbclass
index c0b21427a4..d478d97f18 100644
--- a/meta/classes/sdl.bbclass
+++ b/meta/classes/sdl.bbclass
@@ -26,7 +26,7 @@ sdl_do_sdl_install() {
 Note=Auto Generated... this may be not what you want
 Comment=${DESCRIPTION}
 Exec=${APPNAME}
-Icon=${APPIMAGE}
+Icon=${PN}.png
 Type=Application
 Name=${PN}
 EOF
diff --git a/meta/classes/tinderclient.bbclass b/meta/classes/tinderclient.bbclass
index f544c203fe..d36ef0b343 100644
--- a/meta/classes/tinderclient.bbclass
+++ b/meta/classes/tinderclient.bbclass
@@ -240,8 +240,8 @@ def tinder_tinder_start(d, event):
     output.append( "---> TINDERBOX BUILDING '%(packages)s'" )
     output.append( "<--- TINDERBOX STARTING BUILD NOW" )
 
     output.append( "" )
 
     return "\n".join(output) % vars()
 
 def tinder_do_tinder_report(event):
@@ -255,6 +255,14 @@ def tinder_do_tinder_report(event):
     information immediately. The caching/queuing needs to be
     implemented. Also sending more or less information is not
     implemented yet.
+
+    We have two temporary files stored in the TMP directory. One file
+    contains the assigned machine id for the tinderclient. This id gets
+    assigned when we connect the box and start the build process the second
+    file is used to workaround an EventHandler limitation. If BitBake is ran
+    with the continue option we want the Build to fail even if we get the
+    BuildCompleted Event. In this case we have to look up the status and
+    send it instead of 100/success.
     """
     from bb.event import getName
     from bb import data, mkdirhier, build
@@ -264,7 +272,6 @@ def tinder_do_tinder_report(event):
     name = getName(event)
     log = ""
     status = 1
-    #print asd
     # Check what we need to do Build* shows we start or are done
     if name == "BuildStarted":
         tinder_build_start(event.data)
@@ -272,9 +279,18 @@ def tinder_do_tinder_report(event):
 
     try:
         # truncate the tinder log file
-        f = file(data.getVar('TINDER_LOG', event.data, True), 'rw+')
-        f.truncate(0)
+        f = file(data.getVar('TINDER_LOG', event.data, True), 'w')
+        f.write("")
         f.close()
+    except:
+        pass
+
+    try:
+        # write a status to the file. This is needed for the -k option
+        # of BitBake
+        g = file(data.getVar('TMPDIR', event.data, True)+"/tinder-status", 'w')
+        g.write("")
+        g.close()
     except IOError:
         pass
 
@@ -295,16 +311,27 @@ def tinder_do_tinder_report(event):
     elif name == "TaskFailed":
         log += "<--- TINDERBOX Task %s failed (FAILURE)\n" % event.task
     elif name == "PkgStarted":
-        log += "---> TINDERBOX Package %s started\n" % data.getVar('P', event.data, True)
+        log += "---> TINDERBOX Package %s started\n" % data.getVar('PF', event.data, True)
     elif name == "PkgSucceeded":
-        log += "<--- TINDERBOX Package %s done (SUCCESS)\n" % data.getVar('P', event.data, True)
+        log += "<--- TINDERBOX Package %s done (SUCCESS)\n" % data.getVar('PF', event.data, True)
     elif name == "PkgFailed":
-        build.exec_task('do_clean', event.data)
-        log += "<--- TINDERBOX Package %s failed (FAILURE)\n" % data.getVar('P', event.data, True)
+        if not data.getVar('TINDER_AUTOBUILD', event.data, True) == "0":
+            build.exec_task('do_clean', event.data)
+        log += "<--- TINDERBOX Package %s failed (FAILURE)\n" % data.getVar('PF', event.data, True)
         status = 200
+        # remember the failure for the -k case
+        h = file(data.getVar('TMPDIR', event.data, True)+"/tinder-status", 'w')
+        h.write("200")
     elif name == "BuildCompleted":
         log += "Build Completed\n"
         status = 100
+        # Check if we have a old status...
+        try:
+            h = file(data.getVar('TMPDIR',event.data,True)+'/tinder-status', 'r')
+            status = int(h.read())
+        except:
+            pass
+
     elif name == "MultipleProviders":
         log += "---> TINDERBOX Multiple Providers\n"
         log += "multiple providers are available (%s);\n" % ", ".join(event.getCandidates())
@@ -315,6 +342,9 @@ def tinder_do_tinder_report(event):
315 log += "Error: No Provider for: %s\n" % event.getItem() 342 log += "Error: No Provider for: %s\n" % event.getItem()
316 log += "Error:Was Runtime: %d\n" % event.isRuntime() 343 log += "Error:Was Runtime: %d\n" % event.isRuntime()
317 status = 200 344 status = 200
345 # remember the failure for the -k case
346 h = file(data.getVar('TMPDIR', event.data, True)+"/tinder-status", 'w')
347 h.write("200")
318 348
319 # now post the log 349 # now post the log
320 if len(log) == 0: 350 if len(log) == 0:
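Note on the tinderclient.bbclass changes above: the tinder-status file is the -k workaround the new docstring describes. Every failure path writes 200 into ${TMPDIR}/tinder-status, and BuildCompleted reads the file back so a bitbake -k run still reports a failing build instead of the default 100/success. A stripped-down sketch of that status round trip (tmpdir stands in for the TMPDIR value; the function names are illustrative, not the class's event handlers):

    # Sketch of the -k status workaround: persist the failure status on disk,
    # then prefer it over 100/success when the build completes.
    import os
    import tempfile

    def reset_status(tmpdir):
        # BuildStarted: start from an empty status file
        open(os.path.join(tmpdir, "tinder-status"), "w").write("")

    def record_failure(tmpdir):
        # PkgFailed / NoProvider: remember the failure for the -k case
        open(os.path.join(tmpdir, "tinder-status"), "w").write("200")

    def final_status(tmpdir):
        # BuildCompleted: fall back to 100/success if nothing was recorded
        try:
            return int(open(os.path.join(tmpdir, "tinder-status")).read())
        except (IOError, ValueError):
            return 100

    tmpdir = tempfile.mkdtemp()
    reset_status(tmpdir)
    record_failure(tmpdir)
    print(final_status(tmpdir))      # -> 200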