diff options
author | Richard Purdie <richard.purdie@linuxfoundation.org> | 2012-08-20 16:52:21 +0000 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2012-08-21 12:15:30 +0100 |
commit | 48619958d53cbec888a0a6806c5e494291e9e227 (patch) | |
tree | 0618ff69a32566be156b0f7e5d7a06cce350e033 | |
parent | 7c40daab587306da25c8d8f566392ee1f57bbef4 (diff) | |
download | poky-48619958d53cbec888a0a6806c5e494291e9e227.tar.gz |
meta/classes: Various python whitespace fixes
It was pointed out we have a number of weird indentations in the python functions.
This patch cleans up 3-space, 7-space and other odd indentations for the core bbclass files.
It also fixes some weird (odd) shell function indentation which my searches picked up.
(From OE-Core rev: 8385d6d74624000d68814f4e3266d47bc8885942)
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-rw-r--r-- | meta/classes/base.bbclass | 18 | ||||
-rw-r--r-- | meta/classes/bugzilla.bbclass | 4 | ||||
-rw-r--r-- | meta/classes/buildstats.bbclass | 6 | ||||
-rw-r--r-- | meta/classes/grub-efi.bbclass | 6 | ||||
-rw-r--r-- | meta/classes/image-swab.bbclass | 38 | ||||
-rw-r--r-- | meta/classes/insane.bbclass | 28 | ||||
-rw-r--r-- | meta/classes/package.bbclass | 74 | ||||
-rw-r--r-- | meta/classes/package_ipk.bbclass | 34 | ||||
-rw-r--r-- | meta/classes/package_rpm.bbclass | 34 | ||||
-rw-r--r-- | meta/classes/recipe_sanity.bbclass | 4 | ||||
-rw-r--r-- | meta/classes/sanity.bbclass | 4 | ||||
-rw-r--r-- | meta/classes/sstate.bbclass | 26 | ||||
-rw-r--r-- | meta/classes/staging.bbclass | 4 | ||||
-rw-r--r-- | meta/classes/tinderclient.bbclass | 28 | ||||
-rw-r--r-- | meta/classes/update-alternatives.bbclass | 54 | ||||
-rw-r--r-- | meta/classes/utils.bbclass | 52 |
16 files changed, 207 insertions, 207 deletions
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass index 1a094ad384..59febd1022 100644 --- a/meta/classes/base.bbclass +++ b/meta/classes/base.bbclass | |||
@@ -205,8 +205,8 @@ def preferred_ml_updates(d): | |||
205 | continue | 205 | continue |
206 | virt = "" | 206 | virt = "" |
207 | if pkg.startswith("virtual/"): | 207 | if pkg.startswith("virtual/"): |
208 | pkg = pkg.replace("virtual/", "") | 208 | pkg = pkg.replace("virtual/", "") |
209 | virt = "virtual/" | 209 | virt = "virtual/" |
210 | for p in prefixes: | 210 | for p in prefixes: |
211 | newname = "PREFERRED_PROVIDER_" + virt + p + "-" + pkg | 211 | newname = "PREFERRED_PROVIDER_" + virt + p + "-" + pkg |
212 | if pkg != "kernel": | 212 | if pkg != "kernel": |
@@ -353,13 +353,13 @@ python () { | |||
353 | appends = bb.utils.explode_deps(d.expand(" ".join(appends))) | 353 | appends = bb.utils.explode_deps(d.expand(" ".join(appends))) |
354 | newappends = [] | 354 | newappends = [] |
355 | for a in appends: | 355 | for a in appends: |
356 | if a.endswith("-native") or a.endswith("-cross"): | 356 | if a.endswith("-native") or a.endswith("-cross"): |
357 | newappends.append(a) | 357 | newappends.append(a) |
358 | elif a.startswith("virtual/"): | 358 | elif a.startswith("virtual/"): |
359 | subs = a.split("/", 1)[1] | 359 | subs = a.split("/", 1)[1] |
360 | newappends.append("virtual/" + prefix + subs + extension) | 360 | newappends.append("virtual/" + prefix + subs + extension) |
361 | else: | 361 | else: |
362 | newappends.append(prefix + a + extension) | 362 | newappends.append(prefix + a + extension) |
363 | return newappends | 363 | return newappends |
364 | 364 | ||
365 | def appendVar(varname, appends): | 365 | def appendVar(varname, appends): |
diff --git a/meta/classes/bugzilla.bbclass b/meta/classes/bugzilla.bbclass index 6806dcd682..006763d2c7 100644 --- a/meta/classes/bugzilla.bbclass +++ b/meta/classes/bugzilla.bbclass | |||
@@ -105,8 +105,8 @@ python bugzilla_eventhandler() { | |||
105 | data = e.data | 105 | data = e.data |
106 | name = bb.event.getName(event) | 106 | name = bb.event.getName(event) |
107 | if name == "MsgNote": | 107 | if name == "MsgNote": |
108 | # avoid recursion | 108 | # avoid recursion |
109 | return | 109 | return |
110 | 110 | ||
111 | if name == "TaskFailed": | 111 | if name == "TaskFailed": |
112 | xmlrpc = data.getVar("BUGZILLA_XMLRPC", True) | 112 | xmlrpc = data.getVar("BUGZILLA_XMLRPC", True) |
diff --git a/meta/classes/buildstats.bbclass b/meta/classes/buildstats.bbclass index dc9afb101b..6fd13a8602 100644 --- a/meta/classes/buildstats.bbclass +++ b/meta/classes/buildstats.bbclass | |||
@@ -65,7 +65,7 @@ def set_device(e): | |||
65 | try: | 65 | try: |
66 | for line in open("/proc/diskstats", "r"): | 66 | for line in open("/proc/diskstats", "r"): |
67 | if majordev == int(line.split()[0]) and minordev == int(line.split()[1]): | 67 | if majordev == int(line.split()[0]) and minordev == int(line.split()[1]): |
68 | rdev=line.split()[2] | 68 | rdev=line.split()[2] |
69 | except: | 69 | except: |
70 | pass | 70 | pass |
71 | file = open(e.data.getVar('DEVFILE', True), "w") | 71 | file = open(e.data.getVar('DEVFILE', True), "w") |
@@ -100,10 +100,10 @@ def get_diskdata(var, dev, data): | |||
100 | olddiskdata = data.getVar(var, False) | 100 | olddiskdata = data.getVar(var, False) |
101 | diskdata = {} | 101 | diskdata = {} |
102 | if olddiskdata is None: | 102 | if olddiskdata is None: |
103 | return | 103 | return |
104 | newdiskdata = get_diskstats(dev) | 104 | newdiskdata = get_diskstats(dev) |
105 | for key in olddiskdata.iterkeys(): | 105 | for key in olddiskdata.iterkeys(): |
106 | diskdata["Start"+key] = str(int(olddiskdata[key])) | 106 | diskdata["Start"+key] = str(int(olddiskdata[key])) |
107 | diskdata["End"+key] = str(int(newdiskdata[key])) | 107 | diskdata["End"+key] = str(int(newdiskdata[key])) |
108 | return diskdata | 108 | return diskdata |
109 | 109 | ||
diff --git a/meta/classes/grub-efi.bbclass b/meta/classes/grub-efi.bbclass index 147accc895..a093628455 100644 --- a/meta/classes/grub-efi.bbclass +++ b/meta/classes/grub-efi.bbclass | |||
@@ -54,12 +54,12 @@ python build_grub_cfg() { | |||
54 | if not workdir: | 54 | if not workdir: |
55 | bb.error("WORKDIR not defined, unable to package") | 55 | bb.error("WORKDIR not defined, unable to package") |
56 | return | 56 | return |
57 | 57 | ||
58 | labels = d.getVar('LABELS', True) | 58 | labels = d.getVar('LABELS', True) |
59 | if not labels: | 59 | if not labels: |
60 | bb.debug(1, "LABELS not defined, nothing to do") | 60 | bb.debug(1, "LABELS not defined, nothing to do") |
61 | return | 61 | return |
62 | 62 | ||
63 | if labels == []: | 63 | if labels == []: |
64 | bb.debug(1, "No labels, nothing to do") | 64 | bb.debug(1, "No labels, nothing to do") |
65 | return | 65 | return |
@@ -109,7 +109,7 @@ python build_grub_cfg() { | |||
109 | if append: | 109 | if append: |
110 | cfgfile.write('%s' % (append)) | 110 | cfgfile.write('%s' % (append)) |
111 | cfgfile.write('\n') | 111 | cfgfile.write('\n') |
112 | 112 | ||
113 | if initrd: | 113 | if initrd: |
114 | cfgfile.write('initrd /initrd') | 114 | cfgfile.write('initrd /initrd') |
115 | cfgfile.write('\n}\n') | 115 | cfgfile.write('\n}\n') |
diff --git a/meta/classes/image-swab.bbclass b/meta/classes/image-swab.bbclass index 0414653f72..37d75355bf 100644 --- a/meta/classes/image-swab.bbclass +++ b/meta/classes/image-swab.bbclass | |||
@@ -51,13 +51,13 @@ python() { | |||
51 | # and cross packages which aren't swabber-native or one of its dependencies | 51 | # and cross packages which aren't swabber-native or one of its dependencies |
52 | # I have ignored them for now... | 52 | # I have ignored them for now... |
53 | if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('nativesdk', d) and not bb.data.inherits_class('cross', d): | 53 | if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('nativesdk', d) and not bb.data.inherits_class('cross', d): |
54 | deps = (d.getVarFlag('do_setscene', 'depends') or "").split() | 54 | deps = (d.getVarFlag('do_setscene', 'depends') or "").split() |
55 | deps.append('strace-native:do_populate_sysroot') | 55 | deps.append('strace-native:do_populate_sysroot') |
56 | d.setVarFlag('do_setscene', 'depends', " ".join(deps)) | 56 | d.setVarFlag('do_setscene', 'depends', " ".join(deps)) |
57 | logdir = d.expand("${TRACE_LOGDIR}") | 57 | logdir = d.expand("${TRACE_LOGDIR}") |
58 | bb.utils.mkdirhier(logdir) | 58 | bb.utils.mkdirhier(logdir) |
59 | else: | 59 | else: |
60 | d.setVar('STRACEFUNC', '') | 60 | d.setVar('STRACEFUNC', '') |
61 | } | 61 | } |
62 | 62 | ||
63 | STRACEPID = "${@os.getpid()}" | 63 | STRACEPID = "${@os.getpid()}" |
@@ -76,23 +76,23 @@ imageswab_attachstrace () { | |||
76 | 76 | ||
77 | do_generate_swabber_report () { | 77 | do_generate_swabber_report () { |
78 | 78 | ||
79 | update_distro ${HOST_DATA} | 79 | update_distro ${HOST_DATA} |
80 | 80 | ||
81 | # Swabber can't create the directory for us | 81 | # Swabber can't create the directory for us |
82 | mkdir -p ${SWABBER_REPORT} | 82 | mkdir -p ${SWABBER_REPORT} |
83 | 83 | ||
84 | REPORTSTAMP=${SWAB_ORIG_TASK}-`date +%2m%2d%2H%2M%Y` | 84 | REPORTSTAMP=${SWAB_ORIG_TASK}-`date +%2m%2d%2H%2M%Y` |
85 | 85 | ||
86 | if [ `which ccache` ] ; then | 86 | if [ `which ccache` ] ; then |
87 | CCACHE_DIR=`( ccache -s | grep "cache directory" | grep -o '[^ ]*$' 2> /dev/null )` | 87 | CCACHE_DIR=`( ccache -s | grep "cache directory" | grep -o '[^ ]*$' 2> /dev/null )` |
88 | fi | 88 | fi |
89 | 89 | ||
90 | if [ "$(ls -A ${HOST_DATA})" ]; then | 90 | if [ "$(ls -A ${HOST_DATA})" ]; then |
91 | echo "Generating swabber report" | 91 | echo "Generating swabber report" |
92 | swabber -d ${HOST_DATA} -l ${SWABBER_LOGS} -o ${SWABBER_REPORT}/report-${REPORTSTAMP}.txt -r ${SWABBER_REPORT}/extra_report-${REPORTSTAMP}.txt -c all -p ${TOPDIR} -f ${OEROOT}/meta/conf/swabber ${TOPDIR} ${OEROOT} ${CCACHE_DIR} | 92 | swabber -d ${HOST_DATA} -l ${SWABBER_LOGS} -o ${SWABBER_REPORT}/report-${REPORTSTAMP}.txt -r ${SWABBER_REPORT}/extra_report-${REPORTSTAMP}.txt -c all -p ${TOPDIR} -f ${OEROOT}/meta/conf/swabber ${TOPDIR} ${OEROOT} ${CCACHE_DIR} |
93 | else | 93 | else |
94 | echo "No host data, cannot generate swabber report." | 94 | echo "No host data, cannot generate swabber report." |
95 | fi | 95 | fi |
96 | } | 96 | } |
97 | addtask generate_swabber_report after do_${SWAB_ORIG_TASK} | 97 | addtask generate_swabber_report after do_${SWAB_ORIG_TASK} |
98 | do_generate_swabber_report[depends] = "swabber-native:do_populate_sysroot" | 98 | do_generate_swabber_report[depends] = "swabber-native:do_populate_sysroot" |
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass index 556a17684f..0f3f1cd082 100644 --- a/meta/classes/insane.bbclass +++ b/meta/classes/insane.bbclass | |||
@@ -109,7 +109,7 @@ def package_qa_get_machine_dict(): | |||
109 | "linux-gnux32" : { | 109 | "linux-gnux32" : { |
110 | "x86_64": (62, 0, 0, True, 32), | 110 | "x86_64": (62, 0, 0, True, 32), |
111 | }, | 111 | }, |
112 | } | 112 | } |
113 | 113 | ||
114 | 114 | ||
115 | # Currently not being used by default "desktop" | 115 | # Currently not being used by default "desktop" |
@@ -719,19 +719,19 @@ Rerun configure task after fixing this. The path was '%s'""" % root) | |||
719 | 719 | ||
720 | cnf = d.getVar('EXTRA_OECONF', True) or "" | 720 | cnf = d.getVar('EXTRA_OECONF', True) or "" |
721 | if "gettext" not in d.getVar('P', True) and "gcc-runtime" not in d.getVar('P', True) and "--disable-nls" not in cnf: | 721 | if "gettext" not in d.getVar('P', True) and "gcc-runtime" not in d.getVar('P', True) and "--disable-nls" not in cnf: |
722 | ml = d.getVar("MLPREFIX", True) or "" | 722 | ml = d.getVar("MLPREFIX", True) or "" |
723 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d): | 723 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d): |
724 | gt = "gettext-native" | 724 | gt = "gettext-native" |
725 | elif bb.data.inherits_class('cross-canadian', d): | 725 | elif bb.data.inherits_class('cross-canadian', d): |
726 | gt = "gettext-nativesdk" | 726 | gt = "gettext-nativesdk" |
727 | else: | 727 | else: |
728 | gt = "virtual/" + ml + "gettext" | 728 | gt = "virtual/" + ml + "gettext" |
729 | deps = bb.utils.explode_deps(d.getVar('DEPENDS', True) or "") | 729 | deps = bb.utils.explode_deps(d.getVar('DEPENDS', True) or "") |
730 | if gt not in deps: | 730 | if gt not in deps: |
731 | for config in configs: | 731 | for config in configs: |
732 | gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config | 732 | gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config |
733 | if subprocess.call(gnu, shell=True) == 0: | 733 | if subprocess.call(gnu, shell=True) == 0: |
734 | bb.fatal("""%s required but not in DEPENDS for file %s. | 734 | bb.fatal("""%s required but not in DEPENDS for file %s. |
735 | Missing inherit gettext?""" % (gt, config)) | 735 | Missing inherit gettext?""" % (gt, config)) |
736 | 736 | ||
737 | if not package_qa_check_license(workdir, d): | 737 | if not package_qa_check_license(workdir, d): |
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass index 278f0f0ea9..b17fa08da1 100644 --- a/meta/classes/package.bbclass +++ b/meta/classes/package.bbclass | |||
@@ -230,42 +230,42 @@ def splitfile2(debugsrcdir, d): | |||
230 | 230 | ||
231 | sourcefile = d.expand("${WORKDIR}/debugsources.list") | 231 | sourcefile = d.expand("${WORKDIR}/debugsources.list") |
232 | if debugsrcdir and os.path.isfile(sourcefile): | 232 | if debugsrcdir and os.path.isfile(sourcefile): |
233 | dvar = d.getVar('PKGD', True) | 233 | dvar = d.getVar('PKGD', True) |
234 | pathprefix = "export PATH=%s; " % d.getVar('PATH', True) | 234 | pathprefix = "export PATH=%s; " % d.getVar('PATH', True) |
235 | strip = d.getVar("STRIP", True) | 235 | strip = d.getVar("STRIP", True) |
236 | objcopy = d.getVar("OBJCOPY", True) | 236 | objcopy = d.getVar("OBJCOPY", True) |
237 | debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit") | 237 | debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit") |
238 | workdir = d.getVar("WORKDIR", True) | 238 | workdir = d.getVar("WORKDIR", True) |
239 | workparentdir = os.path.dirname(workdir) | 239 | workparentdir = os.path.dirname(workdir) |
240 | workbasedir = os.path.basename(workdir) | 240 | workbasedir = os.path.basename(workdir) |
241 | 241 | ||
242 | nosuchdir = [] | 242 | nosuchdir = [] |
243 | basepath = dvar | 243 | basepath = dvar |
244 | for p in debugsrcdir.split("/"): | 244 | for p in debugsrcdir.split("/"): |
245 | basepath = basepath + "/" + p | 245 | basepath = basepath + "/" + p |
246 | if not os.path.exists(basepath): | 246 | if not os.path.exists(basepath): |
247 | nosuchdir.append(basepath) | 247 | nosuchdir.append(basepath) |
248 | bb.mkdirhier(basepath) | 248 | bb.mkdirhier(basepath) |
249 | 249 | ||
250 | processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '(<internal>|<built-in>)$' | " | 250 | processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '(<internal>|<built-in>)$' | " |
251 | # We need to ignore files that are not actually ours | 251 | # We need to ignore files that are not actually ours |
252 | # we do this by only paying attention to items from this package | 252 | # we do this by only paying attention to items from this package |
253 | processdebugsrc += "fgrep -z '%s' | " | 253 | processdebugsrc += "fgrep -z '%s' | " |
254 | processdebugsrc += "(cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s' 2>/dev/null)" | 254 | processdebugsrc += "(cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s' 2>/dev/null)" |
255 | 255 | ||
256 | subprocess.call(processdebugsrc % (sourcefile, workbasedir, workparentdir, dvar, debugsrcdir), shell=True) | 256 | subprocess.call(processdebugsrc % (sourcefile, workbasedir, workparentdir, dvar, debugsrcdir), shell=True) |
257 | 257 | ||
258 | # The copy by cpio may have resulted in some empty directories! Remove these | 258 | # The copy by cpio may have resulted in some empty directories! Remove these |
259 | for root, dirs, files in os.walk("%s%s" % (dvar, debugsrcdir)): | 259 | for root, dirs, files in os.walk("%s%s" % (dvar, debugsrcdir)): |
260 | for d in dirs: | 260 | for d in dirs: |
261 | dir = os.path.join(root, d) | 261 | dir = os.path.join(root, d) |
262 | #bb.note("rmdir -p %s" % dir) | 262 | #bb.note("rmdir -p %s" % dir) |
263 | subprocess.call("rmdir -p %s 2>/dev/null" % dir, shell=True) | 263 | subprocess.call("rmdir -p %s 2>/dev/null" % dir, shell=True) |
264 | 264 | ||
265 | # Also remove debugsrcdir if its empty | 265 | # Also remove debugsrcdir if its empty |
266 | for p in nosuchdir[::-1]: | 266 | for p in nosuchdir[::-1]: |
267 | if os.path.exists(p) and not os.listdir(p): | 267 | if os.path.exists(p) and not os.listdir(p): |
268 | os.rmdir(p) | 268 | os.rmdir(p) |
269 | 269 | ||
270 | def runstrip(file, elftype, d): | 270 | def runstrip(file, elftype, d): |
271 | # Function to strip a single file, called from split_and_strip_files below | 271 | # Function to strip a single file, called from split_and_strip_files below |
@@ -735,7 +735,7 @@ python split_and_strip_files () { | |||
735 | file_list = {} | 735 | file_list = {} |
736 | file_links = {} | 736 | file_links = {} |
737 | if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1') and \ | 737 | if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1') and \ |
738 | (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'): | 738 | (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'): |
739 | for root, dirs, files in os.walk(dvar): | 739 | for root, dirs, files in os.walk(dvar): |
740 | for f in files: | 740 | for f in files: |
741 | file = os.path.join(root, f) | 741 | file = os.path.join(root, f) |
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass index a297a1f9d4..e94586e6ca 100644 --- a/meta/classes/package_ipk.bbclass +++ b/meta/classes/package_ipk.bbclass | |||
@@ -172,23 +172,23 @@ package_install_internal_ipk() { | |||
172 | } | 172 | } |
173 | 173 | ||
174 | ipk_log_check() { | 174 | ipk_log_check() { |
175 | target="$1" | 175 | target="$1" |
176 | lf_path="$2" | 176 | lf_path="$2" |
177 | 177 | ||
178 | lf_txt="`cat $lf_path`" | 178 | lf_txt="`cat $lf_path`" |
179 | for keyword_die in "exit 1" "Collected errors" ERR Fail | 179 | for keyword_die in "exit 1" "Collected errors" ERR Fail |
180 | do | 180 | do |
181 | if (echo "$lf_txt" | grep -v log_check | grep "$keyword_die") >/dev/null 2>&1 | 181 | if (echo "$lf_txt" | grep -v log_check | grep "$keyword_die") >/dev/null 2>&1 |
182 | then | 182 | then |
183 | echo "log_check: There were error messages in the logfile" | 183 | echo "log_check: There were error messages in the logfile" |
184 | echo -e "log_check: Matched keyword: [$keyword_die]\n" | 184 | echo -e "log_check: Matched keyword: [$keyword_die]\n" |
185 | echo "$lf_txt" | grep -v log_check | grep -C 5 -i "$keyword_die" | 185 | echo "$lf_txt" | grep -v log_check | grep -C 5 -i "$keyword_die" |
186 | echo "" | 186 | echo "" |
187 | do_exit=1 | 187 | do_exit=1 |
188 | fi | 188 | fi |
189 | done | 189 | done |
190 | test "$do_exit" = 1 && exit 1 | 190 | test "$do_exit" = 1 && exit 1 |
191 | true | 191 | true |
192 | } | 192 | } |
193 | 193 | ||
194 | # | 194 | # |
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass index 4b18cc6701..d0f69bf40f 100644 --- a/meta/classes/package_rpm.bbclass +++ b/meta/classes/package_rpm.bbclass | |||
@@ -101,23 +101,23 @@ package_generate_rpm_conf_common() { | |||
101 | } | 101 | } |
102 | 102 | ||
103 | rpm_log_check() { | 103 | rpm_log_check() { |
104 | target="$1" | 104 | target="$1" |
105 | lf_path="$2" | 105 | lf_path="$2" |
106 | 106 | ||
107 | lf_txt="`cat $lf_path`" | 107 | lf_txt="`cat $lf_path`" |
108 | for keyword_die in "Cannot find package" "exit 1" ERR Fail | 108 | for keyword_die in "Cannot find package" "exit 1" ERR Fail |
109 | do | 109 | do |
110 | if (echo "$lf_txt" | grep -v log_check | grep "$keyword_die") >/dev/null 2>&1 | 110 | if (echo "$lf_txt" | grep -v log_check | grep "$keyword_die") >/dev/null 2>&1 |
111 | then | 111 | then |
112 | echo "log_check: There were error messages in the logfile" | 112 | echo "log_check: There were error messages in the logfile" |
113 | echo -e "log_check: Matched keyword: [$keyword_die]\n" | 113 | echo -e "log_check: Matched keyword: [$keyword_die]\n" |
114 | echo "$lf_txt" | grep -v log_check | grep -C 5 -i "$keyword_die" | 114 | echo "$lf_txt" | grep -v log_check | grep -C 5 -i "$keyword_die" |
115 | echo "" | 115 | echo "" |
116 | do_exit=1 | 116 | do_exit=1 |
117 | fi | 117 | fi |
118 | done | 118 | done |
119 | test "$do_exit" = 1 && exit 1 | 119 | test "$do_exit" = 1 && exit 1 |
120 | true | 120 | true |
121 | } | 121 | } |
122 | 122 | ||
123 | 123 | ||
diff --git a/meta/classes/recipe_sanity.bbclass b/meta/classes/recipe_sanity.bbclass index 63e9e7ff23..c3bd0d2a4c 100644 --- a/meta/classes/recipe_sanity.bbclass +++ b/meta/classes/recipe_sanity.bbclass | |||
@@ -4,7 +4,7 @@ def __note(msg, d): | |||
4 | __recipe_sanity_badruntimevars = "RDEPENDS RPROVIDES RRECOMMENDS RCONFLICTS" | 4 | __recipe_sanity_badruntimevars = "RDEPENDS RPROVIDES RRECOMMENDS RCONFLICTS" |
5 | def bad_runtime_vars(cfgdata, d): | 5 | def bad_runtime_vars(cfgdata, d): |
6 | if bb.data.inherits_class("native", d) or \ | 6 | if bb.data.inherits_class("native", d) or \ |
7 | bb.data.inherits_class("cross", d): | 7 | bb.data.inherits_class("cross", d): |
8 | return | 8 | return |
9 | 9 | ||
10 | for var in d.getVar("__recipe_sanity_badruntimevars", True).split(): | 10 | for var in d.getVar("__recipe_sanity_badruntimevars", True).split(): |
@@ -42,7 +42,7 @@ def var_renames_overwrite(cfgdata, d): | |||
42 | 42 | ||
43 | def incorrect_nonempty_PACKAGES(cfgdata, d): | 43 | def incorrect_nonempty_PACKAGES(cfgdata, d): |
44 | if bb.data.inherits_class("native", d) or \ | 44 | if bb.data.inherits_class("native", d) or \ |
45 | bb.data.inherits_class("cross", d): | 45 | bb.data.inherits_class("cross", d): |
46 | if d.getVar("PACKAGES", True): | 46 | if d.getVar("PACKAGES", True): |
47 | return True | 47 | return True |
48 | 48 | ||
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass index ff7c73bb45..40d8211aa1 100644 --- a/meta/classes/sanity.bbclass +++ b/meta/classes/sanity.bbclass | |||
@@ -280,10 +280,10 @@ def check_sanity_validmachine(sanity_data): | |||
280 | tunefound = True | 280 | tunefound = True |
281 | 281 | ||
282 | if len(dups): | 282 | if len(dups): |
283 | messages = messages + "Error, the PACKAGE_ARCHS variable contains duplicates. The following archs are listed more than once: %s" % " ".join(dups) | 283 | messages = messages + "Error, the PACKAGE_ARCHS variable contains duplicates. The following archs are listed more than once: %s" % " ".join(dups) |
284 | 284 | ||
285 | if tunefound == False: | 285 | if tunefound == False: |
286 | messages = messages + "Error, the PACKAGE_ARCHS variable does not contain TUNE_PKGARCH (%s)." % tunepkg | 286 | messages = messages + "Error, the PACKAGE_ARCHS variable does not contain TUNE_PKGARCH (%s)." % tunepkg |
287 | 287 | ||
288 | return messages | 288 | return messages |
289 | 289 | ||
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass index e87f3c05ab..6762e32cfb 100644 --- a/meta/classes/sstate.bbclass +++ b/meta/classes/sstate.bbclass | |||
@@ -174,7 +174,7 @@ def sstate_installpkg(ss, d): | |||
174 | sstatepkg = d.getVar('SSTATE_PKG', True) + '_' + ss['name'] + ".tgz" | 174 | sstatepkg = d.getVar('SSTATE_PKG', True) + '_' + ss['name'] + ".tgz" |
175 | 175 | ||
176 | if not os.path.exists(sstatepkg): | 176 | if not os.path.exists(sstatepkg): |
177 | pstaging_fetch(sstatefetch, sstatepkg, d) | 177 | pstaging_fetch(sstatefetch, sstatepkg, d) |
178 | 178 | ||
179 | if not os.path.isfile(sstatepkg): | 179 | if not os.path.isfile(sstatepkg): |
180 | bb.note("Staging package %s does not exist" % sstatepkg) | 180 | bb.note("Staging package %s does not exist" % sstatepkg) |
@@ -259,10 +259,10 @@ def sstate_clean_manifest(manifest, d): | |||
259 | # so we ignore errors here. | 259 | # so we ignore errors here. |
260 | try: | 260 | try: |
261 | if entry.endswith("/"): | 261 | if entry.endswith("/"): |
262 | if os.path.islink(entry[:-1]): | 262 | if os.path.islink(entry[:-1]): |
263 | os.remove(entry[:-1]) | 263 | os.remove(entry[:-1]) |
264 | elif os.path.exists(entry) and len(os.listdir(entry)) == 0: | 264 | elif os.path.exists(entry) and len(os.listdir(entry)) == 0: |
265 | os.rmdir(entry[:-1]) | 265 | os.rmdir(entry[:-1]) |
266 | else: | 266 | else: |
267 | oe.path.remove(entry) | 267 | oe.path.remove(entry) |
268 | except OSError: | 268 | except OSError: |
@@ -314,14 +314,14 @@ python sstate_cleanall() { | |||
314 | 314 | ||
315 | for manifest in (os.listdir(manifest_dir)): | 315 | for manifest in (os.listdir(manifest_dir)): |
316 | if fnmatch.fnmatch(manifest, manifest_pattern): | 316 | if fnmatch.fnmatch(manifest, manifest_pattern): |
317 | name = manifest.replace(manifest_pattern[:-1], "") | 317 | name = manifest.replace(manifest_pattern[:-1], "") |
318 | namemap = d.getVar('SSTATETASKNAMES', True).split() | 318 | namemap = d.getVar('SSTATETASKNAMES', True).split() |
319 | tasks = d.getVar('SSTATETASKS', True).split() | 319 | tasks = d.getVar('SSTATETASKS', True).split() |
320 | if name not in namemap: | 320 | if name not in namemap: |
321 | continue | 321 | continue |
322 | taskname = tasks[namemap.index(name)] | 322 | taskname = tasks[namemap.index(name)] |
323 | shared_state = sstate_state_fromvars(d, taskname[3:]) | 323 | shared_state = sstate_state_fromvars(d, taskname[3:]) |
324 | sstate_clean(shared_state, d) | 324 | sstate_clean(shared_state, d) |
325 | } | 325 | } |
326 | 326 | ||
327 | def sstate_hardcode_path(d): | 327 | def sstate_hardcode_path(d): |
diff --git a/meta/classes/staging.bbclass b/meta/classes/staging.bbclass index eda415e480..8b3dcb4d58 100644 --- a/meta/classes/staging.bbclass +++ b/meta/classes/staging.bbclass | |||
@@ -70,8 +70,8 @@ SYSROOT_LOCK = "${STAGING_DIR}/staging.lock" | |||
70 | 70 | ||
71 | # We clean out any existing sstate from the sysroot if we rerun configure | 71 | # We clean out any existing sstate from the sysroot if we rerun configure |
72 | python sysroot_cleansstate () { | 72 | python sysroot_cleansstate () { |
73 | ss = sstate_state_fromvars(d, "populate_sysroot") | 73 | ss = sstate_state_fromvars(d, "populate_sysroot") |
74 | sstate_clean(ss, d) | 74 | sstate_clean(ss, d) |
75 | } | 75 | } |
76 | do_configure[prefuncs] += "sysroot_cleansstate" | 76 | do_configure[prefuncs] += "sysroot_cleansstate" |
77 | 77 | ||
diff --git a/meta/classes/tinderclient.bbclass b/meta/classes/tinderclient.bbclass index e57bc48c3b..6984efd1be 100644 --- a/meta/classes/tinderclient.bbclass +++ b/meta/classes/tinderclient.bbclass | |||
@@ -2,20 +2,20 @@ def tinder_http_post(server, selector, content_type, body): | |||
2 | import httplib | 2 | import httplib |
3 | # now post it | 3 | # now post it |
4 | for i in range(0,5): | 4 | for i in range(0,5): |
5 | try: | 5 | try: |
6 | h = httplib.HTTP(server) | 6 | h = httplib.HTTP(server) |
7 | h.putrequest('POST', selector) | 7 | h.putrequest('POST', selector) |
8 | h.putheader('content-type', content_type) | 8 | h.putheader('content-type', content_type) |
9 | h.putheader('content-length', str(len(body))) | 9 | h.putheader('content-length', str(len(body))) |
10 | h.endheaders() | 10 | h.endheaders() |
11 | h.send(body) | 11 | h.send(body) |
12 | errcode, errmsg, headers = h.getreply() | 12 | errcode, errmsg, headers = h.getreply() |
13 | #print errcode, errmsg, headers | 13 | #print errcode, errmsg, headers |
14 | return (errcode,errmsg, headers, h.file) | 14 | return (errcode,errmsg, headers, h.file) |
15 | except: | 15 | except: |
16 | print "Error sending the report!" | 16 | print "Error sending the report!" |
17 | # try again | 17 | # try again |
18 | pass | 18 | pass |
19 | 19 | ||
20 | # return some garbage | 20 | # return some garbage |
21 | return (-1, "unknown", "unknown", None) | 21 | return (-1, "unknown", "unknown", None) |
diff --git a/meta/classes/update-alternatives.bbclass b/meta/classes/update-alternatives.bbclass index 47215add1e..4e1ff27052 100644 --- a/meta/classes/update-alternatives.bbclass +++ b/meta/classes/update-alternatives.bbclass | |||
@@ -150,22 +150,22 @@ def update_alternatives_after_parse(d): | |||
150 | UPDALTVARS = "ALTERNATIVE ALTERNATIVE_LINK_NAME ALTERNATIVE_TARGET ALTERNATIVE_PRIORITY" | 150 | UPDALTVARS = "ALTERNATIVE ALTERNATIVE_LINK_NAME ALTERNATIVE_TARGET ALTERNATIVE_PRIORITY" |
151 | 151 | ||
152 | def gen_updatealternativesvardeps(d): | 152 | def gen_updatealternativesvardeps(d): |
153 | pkgs = (d.getVar("PACKAGES", True) or "").split() | 153 | pkgs = (d.getVar("PACKAGES", True) or "").split() |
154 | vars = (d.getVar("UPDALTVARS", True) or "").split() | 154 | vars = (d.getVar("UPDALTVARS", True) or "").split() |
155 | 155 | ||
156 | # First compute them for non_pkg versions | 156 | # First compute them for non_pkg versions |
157 | for v in vars: | 157 | for v in vars: |
158 | for flag in (d.getVarFlags(v) or {}): | 158 | for flag in (d.getVarFlags(v) or {}): |
159 | if flag == "doc" or flag == "vardeps" or flag == "vardepsexp": | ||
160 | continue | ||
161 | d.appendVar('%s_VARDEPS' % (v), ' %s:%s' % (flag, d.getVarFlag(v, flag, False))) | ||
162 | |||
163 | for p in pkgs: | ||
164 | for v in vars: | ||
165 | for flag in (d.getVarFlags("%s_%s" % (v,p)) or {}): | ||
166 | if flag == "doc" or flag == "vardeps" or flag == "vardepsexp": | 159 | if flag == "doc" or flag == "vardeps" or flag == "vardepsexp": |
167 | continue | 160 | continue |
168 | d.appendVar('%s_VARDEPS_%s' % (v,p), ' %s:%s' % (flag, d.getVarFlag('%s_%s' % (v,p), flag, False))) | 161 | d.appendVar('%s_VARDEPS' % (v), ' %s:%s' % (flag, d.getVarFlag(v, flag, False))) |
162 | |||
163 | for p in pkgs: | ||
164 | for v in vars: | ||
165 | for flag in (d.getVarFlags("%s_%s" % (v,p)) or {}): | ||
166 | if flag == "doc" or flag == "vardeps" or flag == "vardepsexp": | ||
167 | continue | ||
168 | d.appendVar('%s_VARDEPS_%s' % (v,p), ' %s:%s' % (flag, d.getVarFlag('%s_%s' % (v,p), flag, False))) | ||
169 | 169 | ||
170 | python __anonymous() { | 170 | python __anonymous() { |
171 | # deprecated stuff... | 171 | # deprecated stuff... |
@@ -176,18 +176,18 @@ python __anonymous() { | |||
176 | } | 176 | } |
177 | 177 | ||
178 | def gen_updatealternativesvars(d): | 178 | def gen_updatealternativesvars(d): |
179 | ret = [] | 179 | ret = [] |
180 | pkgs = (d.getVar("PACKAGES", True) or "").split() | 180 | pkgs = (d.getVar("PACKAGES", True) or "").split() |
181 | vars = (d.getVar("UPDALTVARS", True) or "").split() | 181 | vars = (d.getVar("UPDALTVARS", True) or "").split() |
182 | 182 | ||
183 | for v in vars: | 183 | for v in vars: |
184 | ret.append(v + "_VARDEPS") | 184 | ret.append(v + "_VARDEPS") |
185 | 185 | ||
186 | for p in pkgs: | 186 | for p in pkgs: |
187 | for v in vars: | 187 | for v in vars: |
188 | ret.append(v + "_" + p) | 188 | ret.append(v + "_" + p) |
189 | ret.append(v + "_VARDEPS_" + p) | 189 | ret.append(v + "_VARDEPS_" + p) |
190 | return " ".join(ret) | 190 | return " ".join(ret) |
191 | 191 | ||
192 | # First the deprecated items... | 192 | # First the deprecated items... |
193 | populate_packages[vardeps] += "ALTERNATIVE_LINKS ALTERNATIVE_NAME ALTERNATIVE_PATH" | 193 | populate_packages[vardeps] += "ALTERNATIVE_LINKS ALTERNATIVE_NAME ALTERNATIVE_PATH" |
diff --git a/meta/classes/utils.bbclass b/meta/classes/utils.bbclass index 57406109de..c47ad6976d 100644 --- a/meta/classes/utils.bbclass +++ b/meta/classes/utils.bbclass | |||
@@ -246,49 +246,49 @@ oe_machinstall() { | |||
246 | } | 246 | } |
247 | 247 | ||
248 | create_cmdline_wrapper () { | 248 | create_cmdline_wrapper () { |
249 | # Create a wrapper script | 249 | # Create a wrapper script |
250 | # | 250 | # |
251 | # These are useful to work around relocation issues, by setting environment | 251 | # These are useful to work around relocation issues, by setting environment |
252 | # variables which point to paths in the filesystem. | 252 | # variables which point to paths in the filesystem. |
253 | # | 253 | # |
254 | # Usage: create_wrapper FILENAME [[VAR=VALUE]..] | 254 | # Usage: create_wrapper FILENAME [[VAR=VALUE]..] |
255 | 255 | ||
256 | cmd=$1 | 256 | cmd=$1 |
257 | shift | 257 | shift |
258 | 258 | ||
259 | echo "Generating wrapper script for $cmd" | 259 | echo "Generating wrapper script for $cmd" |
260 | 260 | ||
261 | mv $cmd $cmd.real | 261 | mv $cmd $cmd.real |
262 | cmdname=`basename $cmd`.real | 262 | cmdname=`basename $cmd`.real |
263 | cat <<END >$cmd | 263 | cat <<END >$cmd |
264 | #!/bin/sh | 264 | #!/bin/sh |
265 | realpath=\`readlink -fn \$0\` | 265 | realpath=\`readlink -fn \$0\` |
266 | exec \`dirname \$realpath\`/$cmdname $@ "\$@" | 266 | exec \`dirname \$realpath\`/$cmdname $@ "\$@" |
267 | END | 267 | END |
268 | chmod +x $cmd | 268 | chmod +x $cmd |
269 | } | 269 | } |
270 | 270 | ||
271 | create_wrapper () { | 271 | create_wrapper () { |
272 | # Create a wrapper script | 272 | # Create a wrapper script |
273 | # | 273 | # |
274 | # These are useful to work around relocation issues, by setting environment | 274 | # These are useful to work around relocation issues, by setting environment |
275 | # variables which point to paths in the filesystem. | 275 | # variables which point to paths in the filesystem. |
276 | # | 276 | # |
277 | # Usage: create_wrapper FILENAME [[VAR=VALUE]..] | 277 | # Usage: create_wrapper FILENAME [[VAR=VALUE]..] |
278 | 278 | ||
279 | cmd=$1 | 279 | cmd=$1 |
280 | shift | 280 | shift |
281 | 281 | ||
282 | echo "Generating wrapper script for $cmd" | 282 | echo "Generating wrapper script for $cmd" |
283 | 283 | ||
284 | mv $cmd $cmd.real | 284 | mv $cmd $cmd.real |
285 | cmdname=`basename $cmd`.real | 285 | cmdname=`basename $cmd`.real |
286 | cat <<END >$cmd | 286 | cat <<END >$cmd |
287 | #!/bin/sh | 287 | #!/bin/sh |
288 | realpath=\`readlink -fn \$0\` | 288 | realpath=\`readlink -fn \$0\` |
289 | exec env $@ \`dirname \$realpath\`/$cmdname "\$@" | 289 | exec env $@ \`dirname \$realpath\`/$cmdname "\$@" |
290 | END | 290 | END |
291 | chmod +x $cmd | 291 | chmod +x $cmd |
292 | } | 292 | } |
293 | 293 | ||
294 | def check_app_exists(app, d): | 294 | def check_app_exists(app, d): |