author     Richard Purdie <richard.purdie@linuxfoundation.org>    2012-07-11 17:33:43 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>    2012-07-19 10:24:50 +0100
commit     bfd279de3275abbfaf3e630383ec244131e0375f (patch)
tree       0d1c90461a890d21444f5d2afb13c52b302427f1 /meta/classes/base.bbclass
parent     99203edda6f0b09d817454d656c100b7a8806b18 (diff)
download   poky-bfd279de3275abbfaf3e630383ec244131e0375f.tar.gz
Convert tab indentation in python functions into four-space indentation
(From OE-Core rev: 604d46c686d06d62d5a07b9c7f4fa170f99307d8)
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta/classes/base.bbclass')
-rw-r--r--   meta/classes/base.bbclass   227
1 file changed, 113 insertions, 114 deletions
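
The conversion itself is mechanical: each level of tab indentation inside the class's python/def functions becomes four spaces. A minimal sketch of that kind of re-indentation follows (illustrative only; the helper name and the whole-file scope are assumptions, and the actual commit only touched the Python function bodies):

# retab.py -- illustrative sketch, not the tool used for this commit.
# Expands leading tabs to four spaces on every line of a file; the real
# change was restricted to python/def function bodies in the .bbclass files.
import sys

def retab_file(path, tabsize=4):
    with open(path) as f:
        lines = f.readlines()
    converted = []
    for line in lines:
        body = line.lstrip('\t')
        ntabs = len(line) - len(body)          # count of leading tabs
        converted.append(' ' * (tabsize * ntabs) + body)
    with open(path, 'w') as f:
        f.writelines(converted)

if __name__ == '__main__':
    retab_file(sys.argv[1])

After such a pass, git diff -w (which ignores whitespace changes) shows only the genuine code edits, such as the joined condition in base_dep_prepend() below.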
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index f69179943b..f3587bcbef 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -33,7 +33,7 @@ def oe_import(d):

python oe_import_eh () {
    if isinstance(e, bb.event.ConfigParsed):
        oe_import(e.data)
}

addhandler oe_import_eh
@@ -50,21 +50,20 @@ oe_runmake() {


def base_dep_prepend(d):
    #
    # Ideally this will check a flag so we will operate properly in
    # the case where host == build == target, for now we don't work in
    # that case though.
    #

    deps = ""
    # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
    # we need that built is the responsibility of the patch function / class, not
    # the application.
    if not d.getVar('INHIBIT_DEFAULT_DEPS'):
-        if (d.getVar('HOST_SYS', True) !=
-            d.getVar('BUILD_SYS', True)):
+        if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)):
            deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
    return deps

BASEDEPENDS = "${@base_dep_prepend(d)}"

@@ -80,61 +79,61 @@ do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
python base_do_fetch() {

    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.download()
    except bb.fetch2.BBFetchException, e:
        raise bb.build.FuncFailed(e)
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
do_unpack[cleandirs] = "${S}/patches"
python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    rootdir = localdata.getVar('WORKDIR', True)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.unpack(rootdir)
    except bb.fetch2.BBFetchException, e:
        raise bb.build.FuncFailed(e)
}

GIT_CONFIG_PATH = "${STAGING_DIR_NATIVE}/etc"
GIT_CONFIG = "${GIT_CONFIG_PATH}/gitconfig"

def generate_git_config(e):
    from bb import data

    if data.getVar('GIT_CORE_CONFIG', e.data, True):
        gitconfig_path = e.data.getVar('GIT_CONFIG', True)
        proxy_command = " gitProxy = %s\n" % data.getVar('OE_GIT_PROXY_COMMAND', e.data, True)

        bb.mkdirhier(e.data.expand("${GIT_CONFIG_PATH}"))
        if (os.path.exists(gitconfig_path)):
            os.remove(gitconfig_path)

        f = open(gitconfig_path, 'w')
        f.write("[core]\n")
        ignore_hosts = data.getVar('GIT_PROXY_IGNORE', e.data, True).split()
        for ignore_host in ignore_hosts:
            f.write(" gitProxy = none for %s\n" % ignore_host)
        f.write(proxy_command)
        f.close

def pkgarch_mapping(d):
    # Compatibility mappings of TUNE_PKGARCH (opt in)
@@ -205,69 +204,69 @@ def preferred_ml_updates(d):


def get_layers_branch_rev(d):
    layers = (d.getVar("BBLAYERS", True) or "").split()
    layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2= layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1= layers_branch_rev[i][p1:]
    return layers_branch_rev


BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var, True)
        if value is not None:
            yield '%-17s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v, True)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

addhandler base_eventhandler
python base_eventhandler() {
    if isinstance(e, bb.event.ConfigParsed):
        e.data.setVar('BB_VERSION', bb.__version__)
        generate_git_config(e)
        pkgarch_mapping(e.data)
        preferred_ml_updates(e.data)

    if isinstance(e, bb.event.BuildStarted):
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', e.data):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](e.data)
                if flines:
                    statuslines.extend(flines)

        statusheader = e.data.getVar('BUILDCFG_HEADER', True)
        bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
}

addtask configure after do_patch
@@ -546,18 +545,18 @@ python do_cleansstate() {

addtask cleanall after do_cleansstate
python do_cleanall() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.clean()
    except bb.fetch2.BBFetchException, e:
        raise bb.build.FuncFailed(e)
}
do_cleanall[nostamp] = "1"
