diff options
author | Richard Purdie <richard.purdie@linuxfoundation.org> | 2012-07-11 17:33:43 +0000 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2012-07-19 10:24:50 +0100 |
commit | bfd279de3275abbfaf3e630383ec244131e0375f (patch) | |
tree | 0d1c90461a890d21444f5d2afb13c52b302427f1 /meta/classes | |
parent | 99203edda6f0b09d817454d656c100b7a8806b18 (diff) | |
download | poky-bfd279de3275abbfaf3e630383ec244131e0375f.tar.gz |
Convert tab indentation in python functions into four-space
(From OE-Core rev: 604d46c686d06d62d5a07b9c7f4fa170f99307d8)
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta/classes')
35 files changed, 3647 insertions, 3648 deletions
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass index 02b984db63..4c4bf8775d 100644 --- a/meta/classes/autotools.bbclass +++ b/meta/classes/autotools.bbclass | |||
@@ -1,23 +1,23 @@ | |||
1 | def autotools_dep_prepend(d): | 1 | def autotools_dep_prepend(d): |
2 | if d.getVar('INHIBIT_AUTOTOOLS_DEPS', True): | 2 | if d.getVar('INHIBIT_AUTOTOOLS_DEPS', True): |
3 | return '' | 3 | return '' |
4 | 4 | ||
5 | pn = d.getVar('PN', True) | 5 | pn = d.getVar('PN', True) |
6 | deps = '' | 6 | deps = '' |
7 | 7 | ||
8 | if pn in ['autoconf-native', 'automake-native', 'help2man-native']: | 8 | if pn in ['autoconf-native', 'automake-native', 'help2man-native']: |
9 | return deps | 9 | return deps |
10 | deps += 'autoconf-native automake-native ' | 10 | deps += 'autoconf-native automake-native ' |
11 | 11 | ||
12 | if not pn in ['libtool', 'libtool-native'] and not pn.endswith("libtool-cross"): | 12 | if not pn in ['libtool', 'libtool-native'] and not pn.endswith("libtool-cross"): |
13 | deps += 'libtool-native ' | 13 | deps += 'libtool-native ' |
14 | if not bb.data.inherits_class('native', d) \ | 14 | if not bb.data.inherits_class('native', d) \ |
15 | and not bb.data.inherits_class('nativesdk', d) \ | 15 | and not bb.data.inherits_class('nativesdk', d) \ |
16 | and not bb.data.inherits_class('cross', d) \ | 16 | and not bb.data.inherits_class('cross', d) \ |
17 | and not d.getVar('INHIBIT_DEFAULT_DEPS', True): | 17 | and not d.getVar('INHIBIT_DEFAULT_DEPS', True): |
18 | deps += 'libtool-cross ' | 18 | deps += 'libtool-cross ' |
19 | 19 | ||
20 | return deps + 'gnu-config-native ' | 20 | return deps + 'gnu-config-native ' |
21 | 21 | ||
22 | EXTRA_OEMAKE = "" | 22 | EXTRA_OEMAKE = "" |
23 | 23 | ||
@@ -35,15 +35,15 @@ EXTRA_AUTORECONF = "--exclude=autopoint" | |||
35 | export lt_cv_sys_lib_dlsearch_path_spec = "${libdir} ${base_libdir}" | 35 | export lt_cv_sys_lib_dlsearch_path_spec = "${libdir} ${base_libdir}" |
36 | 36 | ||
37 | def autotools_set_crosscompiling(d): | 37 | def autotools_set_crosscompiling(d): |
38 | if not bb.data.inherits_class('native', d): | 38 | if not bb.data.inherits_class('native', d): |
39 | return " cross_compiling=yes" | 39 | return " cross_compiling=yes" |
40 | return "" | 40 | return "" |
41 | 41 | ||
42 | def append_libtool_sysroot(d): | 42 | def append_libtool_sysroot(d): |
43 | # Only supply libtool sysroot option for non-native packages | 43 | # Only supply libtool sysroot option for non-native packages |
44 | if not bb.data.inherits_class('native', d): | 44 | if not bb.data.inherits_class('native', d): |
45 | return '--with-libtool-sysroot=${STAGING_DIR_HOST}' | 45 | return '--with-libtool-sysroot=${STAGING_DIR_HOST}' |
46 | return "" | 46 | return "" |
47 | 47 | ||
48 | # EXTRA_OECONF_append = "${@autotools_set_crosscompiling(d)}" | 48 | # EXTRA_OECONF_append = "${@autotools_set_crosscompiling(d)}" |
49 | 49 | ||
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass index f69179943b..f3587bcbef 100644 --- a/meta/classes/base.bbclass +++ b/meta/classes/base.bbclass | |||
@@ -33,7 +33,7 @@ def oe_import(d): | |||
33 | 33 | ||
34 | python oe_import_eh () { | 34 | python oe_import_eh () { |
35 | if isinstance(e, bb.event.ConfigParsed): | 35 | if isinstance(e, bb.event.ConfigParsed): |
36 | oe_import(e.data) | 36 | oe_import(e.data) |
37 | } | 37 | } |
38 | 38 | ||
39 | addhandler oe_import_eh | 39 | addhandler oe_import_eh |
@@ -50,21 +50,20 @@ oe_runmake() { | |||
50 | 50 | ||
51 | 51 | ||
52 | def base_dep_prepend(d): | 52 | def base_dep_prepend(d): |
53 | # | 53 | # |
54 | # Ideally this will check a flag so we will operate properly in | 54 | # Ideally this will check a flag so we will operate properly in |
55 | # the case where host == build == target, for now we don't work in | 55 | # the case where host == build == target, for now we don't work in |
56 | # that case though. | 56 | # that case though. |
57 | # | 57 | # |
58 | 58 | ||
59 | deps = "" | 59 | deps = "" |
60 | # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not | 60 | # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not |
61 | # we need that built is the responsibility of the patch function / class, not | 61 | # we need that built is the responsibility of the patch function / class, not |
62 | # the application. | 62 | # the application. |
63 | if not d.getVar('INHIBIT_DEFAULT_DEPS'): | 63 | if not d.getVar('INHIBIT_DEFAULT_DEPS'): |
64 | if (d.getVar('HOST_SYS', True) != | 64 | if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)): |
65 | d.getVar('BUILD_SYS', True)): | 65 | deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc " |
66 | deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc " | 66 | return deps |
67 | return deps | ||
68 | 67 | ||
69 | BASEDEPENDS = "${@base_dep_prepend(d)}" | 68 | BASEDEPENDS = "${@base_dep_prepend(d)}" |
70 | 69 | ||
@@ -80,61 +79,61 @@ do_fetch[dirs] = "${DL_DIR}" | |||
80 | do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}" | 79 | do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}" |
81 | python base_do_fetch() { | 80 | python base_do_fetch() { |
82 | 81 | ||
83 | src_uri = (d.getVar('SRC_URI', True) or "").split() | 82 | src_uri = (d.getVar('SRC_URI', True) or "").split() |
84 | if len(src_uri) == 0: | 83 | if len(src_uri) == 0: |
85 | return | 84 | return |
86 | 85 | ||
87 | localdata = bb.data.createCopy(d) | 86 | localdata = bb.data.createCopy(d) |
88 | bb.data.update_data(localdata) | 87 | bb.data.update_data(localdata) |
89 | 88 | ||
90 | try: | 89 | try: |
91 | fetcher = bb.fetch2.Fetch(src_uri, localdata) | 90 | fetcher = bb.fetch2.Fetch(src_uri, localdata) |
92 | fetcher.download() | 91 | fetcher.download() |
93 | except bb.fetch2.BBFetchException, e: | 92 | except bb.fetch2.BBFetchException, e: |
94 | raise bb.build.FuncFailed(e) | 93 | raise bb.build.FuncFailed(e) |
95 | } | 94 | } |
96 | 95 | ||
97 | addtask unpack after do_fetch | 96 | addtask unpack after do_fetch |
98 | do_unpack[dirs] = "${WORKDIR}" | 97 | do_unpack[dirs] = "${WORKDIR}" |
99 | do_unpack[cleandirs] = "${S}/patches" | 98 | do_unpack[cleandirs] = "${S}/patches" |
100 | python base_do_unpack() { | 99 | python base_do_unpack() { |
101 | src_uri = (d.getVar('SRC_URI', True) or "").split() | 100 | src_uri = (d.getVar('SRC_URI', True) or "").split() |
102 | if len(src_uri) == 0: | 101 | if len(src_uri) == 0: |
103 | return | 102 | return |
104 | 103 | ||
105 | localdata = bb.data.createCopy(d) | 104 | localdata = bb.data.createCopy(d) |
106 | bb.data.update_data(localdata) | 105 | bb.data.update_data(localdata) |
107 | 106 | ||
108 | rootdir = localdata.getVar('WORKDIR', True) | 107 | rootdir = localdata.getVar('WORKDIR', True) |
109 | 108 | ||
110 | try: | 109 | try: |
111 | fetcher = bb.fetch2.Fetch(src_uri, localdata) | 110 | fetcher = bb.fetch2.Fetch(src_uri, localdata) |
112 | fetcher.unpack(rootdir) | 111 | fetcher.unpack(rootdir) |
113 | except bb.fetch2.BBFetchException, e: | 112 | except bb.fetch2.BBFetchException, e: |
114 | raise bb.build.FuncFailed(e) | 113 | raise bb.build.FuncFailed(e) |
115 | } | 114 | } |
116 | 115 | ||
117 | GIT_CONFIG_PATH = "${STAGING_DIR_NATIVE}/etc" | 116 | GIT_CONFIG_PATH = "${STAGING_DIR_NATIVE}/etc" |
118 | GIT_CONFIG = "${GIT_CONFIG_PATH}/gitconfig" | 117 | GIT_CONFIG = "${GIT_CONFIG_PATH}/gitconfig" |
119 | 118 | ||
120 | def generate_git_config(e): | 119 | def generate_git_config(e): |
121 | from bb import data | 120 | from bb import data |
122 | 121 | ||
123 | if data.getVar('GIT_CORE_CONFIG', e.data, True): | 122 | if data.getVar('GIT_CORE_CONFIG', e.data, True): |
124 | gitconfig_path = e.data.getVar('GIT_CONFIG', True) | 123 | gitconfig_path = e.data.getVar('GIT_CONFIG', True) |
125 | proxy_command = " gitProxy = %s\n" % data.getVar('OE_GIT_PROXY_COMMAND', e.data, True) | 124 | proxy_command = " gitProxy = %s\n" % data.getVar('OE_GIT_PROXY_COMMAND', e.data, True) |
126 | 125 | ||
127 | bb.mkdirhier(e.data.expand("${GIT_CONFIG_PATH}")) | 126 | bb.mkdirhier(e.data.expand("${GIT_CONFIG_PATH}")) |
128 | if (os.path.exists(gitconfig_path)): | 127 | if (os.path.exists(gitconfig_path)): |
129 | os.remove(gitconfig_path) | 128 | os.remove(gitconfig_path) |
130 | 129 | ||
131 | f = open(gitconfig_path, 'w') | 130 | f = open(gitconfig_path, 'w') |
132 | f.write("[core]\n") | 131 | f.write("[core]\n") |
133 | ignore_hosts = data.getVar('GIT_PROXY_IGNORE', e.data, True).split() | 132 | ignore_hosts = data.getVar('GIT_PROXY_IGNORE', e.data, True).split() |
134 | for ignore_host in ignore_hosts: | 133 | for ignore_host in ignore_hosts: |
135 | f.write(" gitProxy = none for %s\n" % ignore_host) | 134 | f.write(" gitProxy = none for %s\n" % ignore_host) |
136 | f.write(proxy_command) | 135 | f.write(proxy_command) |
137 | f.close | 136 | f.close |
138 | 137 | ||
139 | def pkgarch_mapping(d): | 138 | def pkgarch_mapping(d): |
140 | # Compatibility mappings of TUNE_PKGARCH (opt in) | 139 | # Compatibility mappings of TUNE_PKGARCH (opt in) |
@@ -205,69 +204,69 @@ def preferred_ml_updates(d): | |||
205 | 204 | ||
206 | 205 | ||
207 | def get_layers_branch_rev(d): | 206 | def get_layers_branch_rev(d): |
208 | layers = (d.getVar("BBLAYERS", True) or "").split() | 207 | layers = (d.getVar("BBLAYERS", True) or "").split() |
209 | layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \ | 208 | layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \ |
210 | base_get_metadata_git_branch(i, None).strip(), \ | 209 | base_get_metadata_git_branch(i, None).strip(), \ |
211 | base_get_metadata_git_revision(i, None)) \ | 210 | base_get_metadata_git_revision(i, None)) \ |
212 | for i in layers] | 211 | for i in layers] |
213 | i = len(layers_branch_rev)-1 | 212 | i = len(layers_branch_rev)-1 |
214 | p1 = layers_branch_rev[i].find("=") | 213 | p1 = layers_branch_rev[i].find("=") |
215 | s1 = layers_branch_rev[i][p1:] | 214 | s1 = layers_branch_rev[i][p1:] |
216 | while i > 0: | 215 | while i > 0: |
217 | p2 = layers_branch_rev[i-1].find("=") | 216 | p2 = layers_branch_rev[i-1].find("=") |
218 | s2= layers_branch_rev[i-1][p2:] | 217 | s2= layers_branch_rev[i-1][p2:] |
219 | if s1 == s2: | 218 | if s1 == s2: |
220 | layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2] | 219 | layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2] |
221 | i -= 1 | 220 | i -= 1 |
222 | else: | 221 | else: |
223 | i -= 1 | 222 | i -= 1 |
224 | p1 = layers_branch_rev[i].find("=") | 223 | p1 = layers_branch_rev[i].find("=") |
225 | s1= layers_branch_rev[i][p1:] | 224 | s1= layers_branch_rev[i][p1:] |
226 | return layers_branch_rev | 225 | return layers_branch_rev |
227 | 226 | ||
228 | 227 | ||
229 | BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars" | 228 | BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars" |
230 | BUILDCFG_FUNCS[type] = "list" | 229 | BUILDCFG_FUNCS[type] = "list" |
231 | 230 | ||
232 | def buildcfg_vars(d): | 231 | def buildcfg_vars(d): |
233 | statusvars = oe.data.typed_value('BUILDCFG_VARS', d) | 232 | statusvars = oe.data.typed_value('BUILDCFG_VARS', d) |
234 | for var in statusvars: | 233 | for var in statusvars: |
235 | value = d.getVar(var, True) | 234 | value = d.getVar(var, True) |
236 | if value is not None: | 235 | if value is not None: |
237 | yield '%-17s = "%s"' % (var, value) | 236 | yield '%-17s = "%s"' % (var, value) |
238 | 237 | ||
239 | def buildcfg_neededvars(d): | 238 | def buildcfg_neededvars(d): |
240 | needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d) | 239 | needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d) |
241 | pesteruser = [] | 240 | pesteruser = [] |
242 | for v in needed_vars: | 241 | for v in needed_vars: |
243 | val = d.getVar(v, True) | 242 | val = d.getVar(v, True) |
244 | if not val or val == 'INVALID': | 243 | if not val or val == 'INVALID': |
245 | pesteruser.append(v) | 244 | pesteruser.append(v) |
246 | 245 | ||
247 | if pesteruser: | 246 | if pesteruser: |
248 | bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser)) | 247 | bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser)) |
249 | 248 | ||
250 | addhandler base_eventhandler | 249 | addhandler base_eventhandler |
251 | python base_eventhandler() { | 250 | python base_eventhandler() { |
252 | if isinstance(e, bb.event.ConfigParsed): | 251 | if isinstance(e, bb.event.ConfigParsed): |
253 | e.data.setVar('BB_VERSION', bb.__version__) | 252 | e.data.setVar('BB_VERSION', bb.__version__) |
254 | generate_git_config(e) | 253 | generate_git_config(e) |
255 | pkgarch_mapping(e.data) | 254 | pkgarch_mapping(e.data) |
256 | preferred_ml_updates(e.data) | 255 | preferred_ml_updates(e.data) |
257 | 256 | ||
258 | if isinstance(e, bb.event.BuildStarted): | 257 | if isinstance(e, bb.event.BuildStarted): |
259 | statuslines = [] | 258 | statuslines = [] |
260 | for func in oe.data.typed_value('BUILDCFG_FUNCS', e.data): | 259 | for func in oe.data.typed_value('BUILDCFG_FUNCS', e.data): |
261 | g = globals() | 260 | g = globals() |
262 | if func not in g: | 261 | if func not in g: |
263 | bb.warn("Build configuration function '%s' does not exist" % func) | 262 | bb.warn("Build configuration function '%s' does not exist" % func) |
264 | else: | 263 | else: |
265 | flines = g[func](e.data) | 264 | flines = g[func](e.data) |
266 | if flines: | 265 | if flines: |
267 | statuslines.extend(flines) | 266 | statuslines.extend(flines) |
268 | 267 | ||
269 | statusheader = e.data.getVar('BUILDCFG_HEADER', True) | 268 | statusheader = e.data.getVar('BUILDCFG_HEADER', True) |
270 | bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines))) | 269 | bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines))) |
271 | } | 270 | } |
272 | 271 | ||
273 | addtask configure after do_patch | 272 | addtask configure after do_patch |
@@ -546,18 +545,18 @@ python do_cleansstate() { | |||
546 | 545 | ||
547 | addtask cleanall after do_cleansstate | 546 | addtask cleanall after do_cleansstate |
548 | python do_cleanall() { | 547 | python do_cleanall() { |
549 | src_uri = (d.getVar('SRC_URI', True) or "").split() | 548 | src_uri = (d.getVar('SRC_URI', True) or "").split() |
550 | if len(src_uri) == 0: | 549 | if len(src_uri) == 0: |
551 | return | 550 | return |
552 | 551 | ||
553 | localdata = bb.data.createCopy(d) | 552 | localdata = bb.data.createCopy(d) |
554 | bb.data.update_data(localdata) | 553 | bb.data.update_data(localdata) |
555 | 554 | ||
556 | try: | 555 | try: |
557 | fetcher = bb.fetch2.Fetch(src_uri, localdata) | 556 | fetcher = bb.fetch2.Fetch(src_uri, localdata) |
558 | fetcher.clean() | 557 | fetcher.clean() |
559 | except bb.fetch2.BBFetchException, e: | 558 | except bb.fetch2.BBFetchException, e: |
560 | raise bb.build.FuncFailed(e) | 559 | raise bb.build.FuncFailed(e) |
561 | } | 560 | } |
562 | do_cleanall[nostamp] = "1" | 561 | do_cleanall[nostamp] = "1" |
563 | 562 | ||
diff --git a/meta/classes/boot-directdisk.bbclass b/meta/classes/boot-directdisk.bbclass index 1c601c60c3..d265485bb7 100644 --- a/meta/classes/boot-directdisk.bbclass +++ b/meta/classes/boot-directdisk.bbclass | |||
@@ -92,8 +92,8 @@ build_boot_dd() { | |||
92 | } | 92 | } |
93 | 93 | ||
94 | python do_bootdirectdisk() { | 94 | python do_bootdirectdisk() { |
95 | bb.build.exec_func('build_syslinux_cfg', d) | 95 | bb.build.exec_func('build_syslinux_cfg', d) |
96 | bb.build.exec_func('build_boot_dd', d) | 96 | bb.build.exec_func('build_boot_dd', d) |
97 | } | 97 | } |
98 | 98 | ||
99 | addtask bootdirectdisk before do_build | 99 | addtask bootdirectdisk before do_build |
diff --git a/meta/classes/bootimg.bbclass b/meta/classes/bootimg.bbclass index a4c0e8d931..11a29cdf2b 100644 --- a/meta/classes/bootimg.bbclass +++ b/meta/classes/bootimg.bbclass | |||
@@ -42,15 +42,15 @@ EFI_CLASS = "${@base_contains("MACHINE_FEATURES", "efi", "grub-efi", "dummy", d) | |||
42 | # contain "efi". This way legacy is supported by default if neither is | 42 | # contain "efi". This way legacy is supported by default if neither is |
43 | # specified, maintaining the original behavior. | 43 | # specified, maintaining the original behavior. |
44 | def pcbios(d): | 44 | def pcbios(d): |
45 | pcbios = base_contains("MACHINE_FEATURES", "pcbios", "1", "0", d) | 45 | pcbios = base_contains("MACHINE_FEATURES", "pcbios", "1", "0", d) |
46 | if pcbios == "0": | 46 | if pcbios == "0": |
47 | pcbios = base_contains("MACHINE_FEATURES", "efi", "0", "1", d) | 47 | pcbios = base_contains("MACHINE_FEATURES", "efi", "0", "1", d) |
48 | return pcbios | 48 | return pcbios |
49 | 49 | ||
50 | def pcbios_class(d): | 50 | def pcbios_class(d): |
51 | if d.getVar("PCBIOS", True) == "1": | 51 | if d.getVar("PCBIOS", True) == "1": |
52 | return "syslinux" | 52 | return "syslinux" |
53 | return "dummy" | 53 | return "dummy" |
54 | 54 | ||
55 | PCBIOS = "${@pcbios(d)}" | 55 | PCBIOS = "${@pcbios(d)}" |
56 | PCBIOS_CLASS = "${@pcbios_class(d)}" | 56 | PCBIOS_CLASS = "${@pcbios_class(d)}" |
@@ -181,12 +181,12 @@ build_hddimg() { | |||
181 | } | 181 | } |
182 | 182 | ||
183 | python do_bootimg() { | 183 | python do_bootimg() { |
184 | if d.getVar("PCBIOS", True) == "1": | 184 | if d.getVar("PCBIOS", True) == "1": |
185 | bb.build.exec_func('build_syslinux_cfg', d) | 185 | bb.build.exec_func('build_syslinux_cfg', d) |
186 | if d.getVar("EFI", True) == "1": | 186 | if d.getVar("EFI", True) == "1": |
187 | bb.build.exec_func('build_grub_cfg', d) | 187 | bb.build.exec_func('build_grub_cfg', d) |
188 | bb.build.exec_func('build_hddimg', d) | 188 | bb.build.exec_func('build_hddimg', d) |
189 | bb.build.exec_func('build_iso', d) | 189 | bb.build.exec_func('build_iso', d) |
190 | } | 190 | } |
191 | 191 | ||
192 | addtask bootimg before do_build | 192 | addtask bootimg before do_build |
diff --git a/meta/classes/cpan-base.bbclass b/meta/classes/cpan-base.bbclass index b4b7b81d8d..660c15f549 100644 --- a/meta/classes/cpan-base.bbclass +++ b/meta/classes/cpan-base.bbclass | |||
@@ -11,28 +11,28 @@ PERL_OWN_DIR = "${@["", "/perl-native"][(bb.data.inherits_class('native', d))]}" | |||
11 | 11 | ||
12 | # Determine the staged version of perl from the perl configuration file | 12 | # Determine the staged version of perl from the perl configuration file |
13 | def get_perl_version(d): | 13 | def get_perl_version(d): |
14 | import re | 14 | import re |
15 | cfg = d.expand('${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/config.sh') | 15 | cfg = d.expand('${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/config.sh') |
16 | try: | 16 | try: |
17 | f = open(cfg, 'r') | 17 | f = open(cfg, 'r') |
18 | except IOError: | 18 | except IOError: |
19 | return None | 19 | return None |
20 | l = f.readlines(); | 20 | l = f.readlines(); |
21 | f.close(); | 21 | f.close(); |
22 | r = re.compile("^version='(\d*\.\d*\.\d*)'") | 22 | r = re.compile("^version='(\d*\.\d*\.\d*)'") |
23 | for s in l: | 23 | for s in l: |
24 | m = r.match(s) | 24 | m = r.match(s) |
25 | if m: | 25 | if m: |
26 | return m.group(1) | 26 | return m.group(1) |
27 | return None | 27 | return None |
28 | 28 | ||
29 | # Determine where the library directories are | 29 | # Determine where the library directories are |
30 | def perl_get_libdirs(d): | 30 | def perl_get_libdirs(d): |
31 | libdir = d.getVar('libdir', True) | 31 | libdir = d.getVar('libdir', True) |
32 | if is_target(d) == "no": | 32 | if is_target(d) == "no": |
33 | libdir += '/perl-native' | 33 | libdir += '/perl-native' |
34 | libdir += '/perl' | 34 | libdir += '/perl' |
35 | return libdir | 35 | return libdir |
36 | 36 | ||
37 | def is_target(d): | 37 | def is_target(d): |
38 | if not bb.data.inherits_class('native', d): | 38 | if not bb.data.inherits_class('native', d): |
diff --git a/meta/classes/debian.bbclass b/meta/classes/debian.bbclass index bb4ae11eca..2484003e37 100644 --- a/meta/classes/debian.bbclass +++ b/meta/classes/debian.bbclass | |||
@@ -20,105 +20,105 @@ python () { | |||
20 | } | 20 | } |
21 | 21 | ||
22 | python debian_package_name_hook () { | 22 | python debian_package_name_hook () { |
23 | import glob, copy, stat, errno, re | 23 | import glob, copy, stat, errno, re |
24 | 24 | ||
25 | pkgdest = d.getVar('PKGDEST', True) | 25 | pkgdest = d.getVar('PKGDEST', True) |
26 | packages = d.getVar('PACKAGES', True) | 26 | packages = d.getVar('PACKAGES', True) |
27 | bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$") | 27 | bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$") |
28 | lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$") | 28 | lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$") |
29 | so_re = re.compile("lib.*\.so") | 29 | so_re = re.compile("lib.*\.so") |
30 | 30 | ||
31 | def socrunch(s): | 31 | def socrunch(s): |
32 | s = s.lower().replace('_', '-') | 32 | s = s.lower().replace('_', '-') |
33 | m = re.match("^(.*)(.)\.so\.(.*)$", s) | 33 | m = re.match("^(.*)(.)\.so\.(.*)$", s) |
34 | if m is None: | 34 | if m is None: |
35 | return None | 35 | return None |
36 | if m.group(2) in '0123456789': | 36 | if m.group(2) in '0123456789': |
37 | bin = '%s%s-%s' % (m.group(1), m.group(2), m.group(3)) | 37 | bin = '%s%s-%s' % (m.group(1), m.group(2), m.group(3)) |
38 | else: | 38 | else: |
39 | bin = m.group(1) + m.group(2) + m.group(3) | 39 | bin = m.group(1) + m.group(2) + m.group(3) |
40 | dev = m.group(1) + m.group(2) | 40 | dev = m.group(1) + m.group(2) |
41 | return (bin, dev) | 41 | return (bin, dev) |
42 | 42 | ||
43 | def isexec(path): | 43 | def isexec(path): |
44 | try: | 44 | try: |
45 | s = os.stat(path) | 45 | s = os.stat(path) |
46 | except (os.error, AttributeError): | 46 | except (os.error, AttributeError): |
47 | return 0 | 47 | return 0 |
48 | return (s[stat.ST_MODE] & stat.S_IEXEC) | 48 | return (s[stat.ST_MODE] & stat.S_IEXEC) |
49 | 49 | ||
50 | def auto_libname(packages, orig_pkg): | 50 | def auto_libname(packages, orig_pkg): |
51 | sonames = [] | 51 | sonames = [] |
52 | has_bins = 0 | 52 | has_bins = 0 |
53 | has_libs = 0 | 53 | has_libs = 0 |
54 | pkg_dir = os.path.join(pkgdest, orig_pkg) | 54 | pkg_dir = os.path.join(pkgdest, orig_pkg) |
55 | for root, dirs, files in os.walk(pkg_dir): | 55 | for root, dirs, files in os.walk(pkg_dir): |
56 | if bin_re.match(root) and files: | 56 | if bin_re.match(root) and files: |
57 | has_bins = 1 | 57 | has_bins = 1 |
58 | if lib_re.match(root) and files: | 58 | if lib_re.match(root) and files: |
59 | has_libs = 1 | 59 | has_libs = 1 |
60 | for f in files: | 60 | for f in files: |
61 | if so_re.match(f): | 61 | if so_re.match(f): |
62 | fp = os.path.join(root, f) | 62 | fp = os.path.join(root, f) |
63 | cmd = (d.getVar('TARGET_PREFIX', True) or "") + "objdump -p " + fp + " 2>/dev/null" | 63 | cmd = (d.getVar('TARGET_PREFIX', True) or "") + "objdump -p " + fp + " 2>/dev/null" |
64 | fd = os.popen(cmd) | 64 | fd = os.popen(cmd) |
65 | lines = fd.readlines() | 65 | lines = fd.readlines() |
66 | fd.close() | 66 | fd.close() |
67 | for l in lines: | 67 | for l in lines: |
68 | m = re.match("\s+SONAME\s+([^\s]*)", l) | 68 | m = re.match("\s+SONAME\s+([^\s]*)", l) |
69 | if m and not m.group(1) in sonames: | 69 | if m and not m.group(1) in sonames: |
70 | sonames.append(m.group(1)) | 70 | sonames.append(m.group(1)) |
71 | 71 | ||
72 | bb.debug(1, 'LIBNAMES: pkg %s libs %d bins %d sonames %s' % (orig_pkg, has_libs, has_bins, sonames)) | 72 | bb.debug(1, 'LIBNAMES: pkg %s libs %d bins %d sonames %s' % (orig_pkg, has_libs, has_bins, sonames)) |
73 | soname = None | 73 | soname = None |
74 | if len(sonames) == 1: | 74 | if len(sonames) == 1: |
75 | soname = sonames[0] | 75 | soname = sonames[0] |
76 | elif len(sonames) > 1: | 76 | elif len(sonames) > 1: |
77 | lead = d.getVar('LEAD_SONAME', True) | 77 | lead = d.getVar('LEAD_SONAME', True) |
78 | if lead: | 78 | if lead: |
79 | r = re.compile(lead) | 79 | r = re.compile(lead) |
80 | filtered = [] | 80 | filtered = [] |
81 | for s in sonames: | 81 | for s in sonames: |
82 | if r.match(s): | 82 | if r.match(s): |
83 | filtered.append(s) | 83 | filtered.append(s) |
84 | if len(filtered) == 1: | 84 | if len(filtered) == 1: |
85 | soname = filtered[0] | 85 | soname = filtered[0] |
86 | elif len(filtered) > 1: | 86 | elif len(filtered) > 1: |
87 | bb.note("Multiple matches (%s) for LEAD_SONAME '%s'" % (", ".join(filtered), lead)) | 87 | bb.note("Multiple matches (%s) for LEAD_SONAME '%s'" % (", ".join(filtered), lead)) |
88 | else: | 88 | else: |
89 | bb.note("Multiple libraries (%s) found, but LEAD_SONAME '%s' doesn't match any of them" % (", ".join(sonames), lead)) | 89 | bb.note("Multiple libraries (%s) found, but LEAD_SONAME '%s' doesn't match any of them" % (", ".join(sonames), lead)) |
90 | else: | 90 | else: |
91 | bb.note("Multiple libraries (%s) found and LEAD_SONAME not defined" % ", ".join(sonames)) | 91 | bb.note("Multiple libraries (%s) found and LEAD_SONAME not defined" % ", ".join(sonames)) |
92 | 92 | ||
93 | if has_libs and not has_bins and soname: | 93 | if has_libs and not has_bins and soname: |
94 | soname_result = socrunch(soname) | 94 | soname_result = socrunch(soname) |
95 | if soname_result: | 95 | if soname_result: |
96 | (pkgname, devname) = soname_result | 96 | (pkgname, devname) = soname_result |
97 | for pkg in packages.split(): | 97 | for pkg in packages.split(): |
98 | if (d.getVar('PKG_' + pkg) or d.getVar('DEBIAN_NOAUTONAME_' + pkg)): | 98 | if (d.getVar('PKG_' + pkg) or d.getVar('DEBIAN_NOAUTONAME_' + pkg)): |
99 | continue | 99 | continue |
100 | debian_pn = d.getVar('DEBIANNAME_' + pkg) | 100 | debian_pn = d.getVar('DEBIANNAME_' + pkg) |
101 | if debian_pn: | 101 | if debian_pn: |
102 | newpkg = debian_pn | 102 | newpkg = debian_pn |
103 | elif pkg == orig_pkg: | 103 | elif pkg == orig_pkg: |
104 | newpkg = pkgname | 104 | newpkg = pkgname |
105 | else: | 105 | else: |
106 | newpkg = pkg.replace(orig_pkg, devname, 1) | 106 | newpkg = pkg.replace(orig_pkg, devname, 1) |
107 | mlpre=d.getVar('MLPREFIX', True) | 107 | mlpre=d.getVar('MLPREFIX', True) |
108 | if mlpre: | 108 | if mlpre: |
109 | if not newpkg.find(mlpre) == 0: | 109 | if not newpkg.find(mlpre) == 0: |
110 | newpkg = mlpre + newpkg | 110 | newpkg = mlpre + newpkg |
111 | if newpkg != pkg: | 111 | if newpkg != pkg: |
112 | d.setVar('PKG_' + pkg, newpkg) | 112 | d.setVar('PKG_' + pkg, newpkg) |
113 | 113 | ||
114 | # reversed sort is needed when some package is substring of another | 114 | # reversed sort is needed when some package is substring of another |
115 | # ie in ncurses we get without reverse sort: | 115 | # ie in ncurses we get without reverse sort: |
116 | # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libtic orig_pkg ncurses-libtic debian_pn None newpkg libtic5 | 116 | # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libtic orig_pkg ncurses-libtic debian_pn None newpkg libtic5 |
117 | # and later | 117 | # and later |
118 | # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw | 118 | # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw |
119 | # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5 | 119 | # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5 |
120 | for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', True) or "").split(), reverse=True): | 120 | for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', True) or "").split(), reverse=True): |
121 | auto_libname(packages, pkg) | 121 | auto_libname(packages, pkg) |
122 | } | 122 | } |
123 | 123 | ||
124 | EXPORT_FUNCTIONS package_name_hook | 124 | EXPORT_FUNCTIONS package_name_hook |
diff --git a/meta/classes/gconf.bbclass b/meta/classes/gconf.bbclass index fb9f701b37..7a3ee3c28c 100644 --- a/meta/classes/gconf.bbclass +++ b/meta/classes/gconf.bbclass | |||
@@ -39,33 +39,33 @@ done | |||
39 | } | 39 | } |
40 | 40 | ||
41 | python populate_packages_append () { | 41 | python populate_packages_append () { |
42 | import re | 42 | import re |
43 | packages = d.getVar('PACKAGES', True).split() | 43 | packages = d.getVar('PACKAGES', True).split() |
44 | pkgdest = d.getVar('PKGDEST', True) | 44 | pkgdest = d.getVar('PKGDEST', True) |
45 | 45 | ||
46 | for pkg in packages: | 46 | for pkg in packages: |
47 | schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg) | 47 | schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg) |
48 | schemas = [] | 48 | schemas = [] |
49 | schema_re = re.compile(".*\.schemas$") | 49 | schema_re = re.compile(".*\.schemas$") |
50 | if os.path.exists(schema_dir): | 50 | if os.path.exists(schema_dir): |
51 | for f in os.listdir(schema_dir): | 51 | for f in os.listdir(schema_dir): |
52 | if schema_re.match(f): | 52 | if schema_re.match(f): |
53 | schemas.append(f) | 53 | schemas.append(f) |
54 | if schemas != []: | 54 | if schemas != []: |
55 | bb.note("adding gconf postinst and prerm scripts to %s" % pkg) | 55 | bb.note("adding gconf postinst and prerm scripts to %s" % pkg) |
56 | d.setVar('SCHEMA_FILES', " ".join(schemas)) | 56 | d.setVar('SCHEMA_FILES', " ".join(schemas)) |
57 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) | 57 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) |
58 | if not postinst: | 58 | if not postinst: |
59 | postinst = '#!/bin/sh\n' | 59 | postinst = '#!/bin/sh\n' |
60 | postinst += d.getVar('gconf_postinst', True) | 60 | postinst += d.getVar('gconf_postinst', True) |
61 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 61 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
62 | prerm = d.getVar('pkg_prerm_%s' % pkg, True) or d.getVar('pkg_prerm', True) | 62 | prerm = d.getVar('pkg_prerm_%s' % pkg, True) or d.getVar('pkg_prerm', True) |
63 | if not prerm: | 63 | if not prerm: |
64 | prerm = '#!/bin/sh\n' | 64 | prerm = '#!/bin/sh\n' |
65 | prerm += d.getVar('gconf_prerm', True) | 65 | prerm += d.getVar('gconf_prerm', True) |
66 | d.setVar('pkg_prerm_%s' % pkg, prerm) | 66 | d.setVar('pkg_prerm_%s' % pkg, prerm) |
67 | rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or "" | 67 | rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or "" |
68 | rdepends += ' ' + d.getVar('MLPREFIX') + 'gconf' | 68 | rdepends += ' ' + d.getVar('MLPREFIX') + 'gconf' |
69 | d.setVar("RDEPENDS_%s" % pkg, rdepends) | 69 | d.setVar("RDEPENDS_%s" % pkg, rdepends) |
70 | 70 | ||
71 | } | 71 | } |
diff --git a/meta/classes/gnomebase.bbclass b/meta/classes/gnomebase.bbclass index 80b78be48d..19c7f7143d 100644 --- a/meta/classes/gnomebase.bbclass +++ b/meta/classes/gnomebase.bbclass | |||
@@ -1,7 +1,7 @@ | |||
1 | def gnome_verdir(v): | 1 | def gnome_verdir(v): |
2 | import re | 2 | import re |
3 | m = re.match("^([0-9]+)\.([0-9]+)", v) | 3 | m = re.match("^([0-9]+)\.([0-9]+)", v) |
4 | return "%s.%s" % (m.group(1), m.group(2)) | 4 | return "%s.%s" % (m.group(1), m.group(2)) |
5 | 5 | ||
6 | GNOME_COMPRESS_TYPE ?= "bz2" | 6 | GNOME_COMPRESS_TYPE ?= "bz2" |
7 | SECTION ?= "x11/gnome" | 7 | SECTION ?= "x11/gnome" |
diff --git a/meta/classes/gtk-icon-cache.bbclass b/meta/classes/gtk-icon-cache.bbclass index 60e3401f4b..01fb2f3946 100644 --- a/meta/classes/gtk-icon-cache.bbclass +++ b/meta/classes/gtk-icon-cache.bbclass | |||
@@ -28,31 +28,31 @@ done | |||
28 | } | 28 | } |
29 | 29 | ||
30 | python populate_packages_append () { | 30 | python populate_packages_append () { |
31 | packages = d.getVar('PACKAGES', True).split() | 31 | packages = d.getVar('PACKAGES', True).split() |
32 | pkgdest = d.getVar('PKGDEST', True) | 32 | pkgdest = d.getVar('PKGDEST', True) |
33 | 33 | ||
34 | for pkg in packages: | 34 | for pkg in packages: |
35 | icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', True)) | 35 | icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', True)) |
36 | if not os.path.exists(icon_dir): | 36 | if not os.path.exists(icon_dir): |
37 | continue | 37 | continue |
38 | 38 | ||
39 | bb.note("adding hicolor-icon-theme dependency to %s" % pkg) | 39 | bb.note("adding hicolor-icon-theme dependency to %s" % pkg) |
40 | rdepends = d.getVar('RDEPENDS_%s' % pkg, True) | 40 | rdepends = d.getVar('RDEPENDS_%s' % pkg, True) |
41 | rdepends = rdepends + ' ' + d.getVar('MLPREFIX') + "hicolor-icon-theme" | 41 | rdepends = rdepends + ' ' + d.getVar('MLPREFIX') + "hicolor-icon-theme" |
42 | d.setVar('RDEPENDS_%s' % pkg, rdepends) | 42 | d.setVar('RDEPENDS_%s' % pkg, rdepends) |
43 | 43 | ||
44 | bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg) | 44 | bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg) |
45 | 45 | ||
46 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) | 46 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) |
47 | if not postinst: | 47 | if not postinst: |
48 | postinst = '#!/bin/sh\n' | 48 | postinst = '#!/bin/sh\n' |
49 | postinst += d.getVar('gtk_icon_cache_postinst', True) | 49 | postinst += d.getVar('gtk_icon_cache_postinst', True) |
50 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 50 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
51 | 51 | ||
52 | postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True) | 52 | postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True) |
53 | if not postrm: | 53 | if not postrm: |
54 | postrm = '#!/bin/sh\n' | 54 | postrm = '#!/bin/sh\n' |
55 | postrm += d.getVar('gtk_icon_cache_postrm', True) | 55 | postrm += d.getVar('gtk_icon_cache_postrm', True) |
56 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 56 | d.setVar('pkg_postrm_%s' % pkg, postrm) |
57 | } | 57 | } |
58 | 58 | ||
diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass index f1b829fe18..1799bf1865 100644 --- a/meta/classes/image.bbclass +++ b/meta/classes/image.bbclass | |||
@@ -164,28 +164,28 @@ do_rootfs[umask] = "022" | |||
164 | 164 | ||
165 | fakeroot do_rootfs () { | 165 | fakeroot do_rootfs () { |
166 | #set -x | 166 | #set -x |
167 | # When using the rpm incremental image generation, don't remove the rootfs | 167 | # When using the rpm incremental image generation, don't remove the rootfs |
168 | if [ "${INC_RPM_IMAGE_GEN}" != "1" -o "${IMAGE_PKGTYPE}" != "rpm" ]; then | 168 | if [ "${INC_RPM_IMAGE_GEN}" != "1" -o "${IMAGE_PKGTYPE}" != "rpm" ]; then |
169 | rm -rf ${IMAGE_ROOTFS} | 169 | rm -rf ${IMAGE_ROOTFS} |
170 | elif [ -d ${T}/saved_rpmlib/var/lib/rpm ]; then | 170 | elif [ -d ${T}/saved_rpmlib/var/lib/rpm ]; then |
171 | # Move the rpmlib back | 171 | # Move the rpmlib back |
172 | if [ ! -d ${IMAGE_ROOTFS}/var/lib/rpm ]; then | 172 | if [ ! -d ${IMAGE_ROOTFS}/var/lib/rpm ]; then |
173 | mkdir -p ${IMAGE_ROOTFS}/var/lib/ | 173 | mkdir -p ${IMAGE_ROOTFS}/var/lib/ |
174 | mv ${T}/saved_rpmlib/var/lib/rpm ${IMAGE_ROOTFS}/var/lib/ | 174 | mv ${T}/saved_rpmlib/var/lib/rpm ${IMAGE_ROOTFS}/var/lib/ |
175 | fi | 175 | fi |
176 | fi | 176 | fi |
177 | rm -rf ${MULTILIB_TEMP_ROOTFS} | 177 | rm -rf ${MULTILIB_TEMP_ROOTFS} |
178 | mkdir -p ${IMAGE_ROOTFS} | 178 | mkdir -p ${IMAGE_ROOTFS} |
179 | mkdir -p ${DEPLOY_DIR_IMAGE} | 179 | mkdir -p ${DEPLOY_DIR_IMAGE} |
180 | 180 | ||
181 | cp ${COREBASE}/meta/files/deploydir_readme.txt ${DEPLOY_DIR_IMAGE}/README_-_DO_NOT_DELETE_FILES_IN_THIS_DIRECTORY.txt || true | 181 | cp ${COREBASE}/meta/files/deploydir_readme.txt ${DEPLOY_DIR_IMAGE}/README_-_DO_NOT_DELETE_FILES_IN_THIS_DIRECTORY.txt || true |
182 | 182 | ||
183 | # If "${IMAGE_ROOTFS}/dev" exists, then the device had been made by | 183 | # If "${IMAGE_ROOTFS}/dev" exists, then the device had been made by |
184 | # the previous build | 184 | # the previous build |
185 | if [ "${USE_DEVFS}" != "1" -a ! -r "${IMAGE_ROOTFS}/dev" ]; then | 185 | if [ "${USE_DEVFS}" != "1" -a ! -r "${IMAGE_ROOTFS}/dev" ]; then |
186 | for devtable in ${@get_devtable_list(d)}; do | 186 | for devtable in ${@get_devtable_list(d)}; do |
187 | # Always return true since there may already be one when using the | 187 | # Always return true since there may already be one when using the |
188 | # incremental image generation | 188 | # incremental image generation |
189 | makedevs -r ${IMAGE_ROOTFS} -D $devtable | 189 | makedevs -r ${IMAGE_ROOTFS} -D $devtable |
190 | done | 190 | done |
191 | fi | 191 | fi |
@@ -398,7 +398,7 @@ rootfs_trim_schemas () { | |||
398 | # Need this in case no files exist | 398 | # Need this in case no files exist |
399 | if [ -e $schema ]; then | 399 | if [ -e $schema ]; then |
400 | oe-trim-schemas $schema > $schema.new | 400 | oe-trim-schemas $schema > $schema.new |
401 | mv $schema.new $schema | 401 | mv $schema.new $schema |
402 | fi | 402 | fi |
403 | done | 403 | done |
404 | } | 404 | } |
diff --git a/meta/classes/image_types.bbclass b/meta/classes/image_types.bbclass index 727d8d6f8f..d286eeaea9 100644 --- a/meta/classes/image_types.bbclass +++ b/meta/classes/image_types.bbclass | |||
@@ -48,7 +48,7 @@ def get_imagecmds(d): | |||
48 | types.remove("live") | 48 | types.remove("live") |
49 | 49 | ||
50 | if d.getVar('IMAGE_LINK_NAME', True): | 50 | if d.getVar('IMAGE_LINK_NAME', True): |
51 | cmds += " rm -f ${DEPLOY_DIR_IMAGE}/${IMAGE_LINK_NAME}.*" | 51 | cmds += "\trm -f ${DEPLOY_DIR_IMAGE}/${IMAGE_LINK_NAME}.*" |
52 | 52 | ||
53 | for type in types: | 53 | for type in types: |
54 | ccmd = [] | 54 | ccmd = [] |
diff --git a/meta/classes/kernel-arch.bbclass b/meta/classes/kernel-arch.bbclass index 4a30192c15..6446504845 100644 --- a/meta/classes/kernel-arch.bbclass +++ b/meta/classes/kernel-arch.bbclass | |||
@@ -7,38 +7,38 @@ | |||
7 | valid_archs = "alpha cris ia64 \ | 7 | valid_archs = "alpha cris ia64 \ |
8 | i386 x86 \ | 8 | i386 x86 \ |
9 | m68knommu m68k ppc powerpc powerpc64 ppc64 \ | 9 | m68knommu m68k ppc powerpc powerpc64 ppc64 \ |
10 | sparc sparc64 \ | 10 | sparc sparc64 \ |
11 | arm \ | 11 | arm \ |
12 | m32r mips \ | 12 | m32r mips \ |
13 | sh sh64 um h8300 \ | 13 | sh sh64 um h8300 \ |
14 | parisc s390 v850 \ | 14 | parisc s390 v850 \ |
15 | avr32 blackfin \ | 15 | avr32 blackfin \ |
16 | microblaze" | 16 | microblaze" |
17 | 17 | ||
18 | def map_kernel_arch(a, d): | 18 | def map_kernel_arch(a, d): |
19 | import re | 19 | import re |
20 | 20 | ||
21 | valid_archs = d.getVar('valid_archs', True).split() | 21 | valid_archs = d.getVar('valid_archs', True).split() |
22 | 22 | ||
23 | if re.match('(i.86|athlon|x86.64)$', a): return 'x86' | 23 | if re.match('(i.86|athlon|x86.64)$', a): return 'x86' |
24 | elif re.match('armeb$', a): return 'arm' | 24 | elif re.match('armeb$', a): return 'arm' |
25 | elif re.match('mips(el|64|64el)$', a): return 'mips' | 25 | elif re.match('mips(el|64|64el)$', a): return 'mips' |
26 | elif re.match('p(pc|owerpc)(|64)', a): return 'powerpc' | 26 | elif re.match('p(pc|owerpc)(|64)', a): return 'powerpc' |
27 | elif re.match('sh(3|4)$', a): return 'sh' | 27 | elif re.match('sh(3|4)$', a): return 'sh' |
28 | elif re.match('bfin', a): return 'blackfin' | 28 | elif re.match('bfin', a): return 'blackfin' |
29 | elif re.match('microblazeel', a): return 'microblaze' | 29 | elif re.match('microblazeel', a): return 'microblaze' |
30 | elif a in valid_archs: return a | 30 | elif a in valid_archs: return a |
31 | else: | 31 | else: |
32 | bb.error("cannot map '%s' to a linux kernel architecture" % a) | 32 | bb.error("cannot map '%s' to a linux kernel architecture" % a) |
33 | 33 | ||
34 | export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', True), d)}" | 34 | export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', True), d)}" |
35 | 35 | ||
36 | def map_uboot_arch(a, d): | 36 | def map_uboot_arch(a, d): |
37 | import re | 37 | import re |
38 | 38 | ||
39 | if re.match('p(pc|owerpc)(|64)', a): return 'ppc' | 39 | if re.match('p(pc|owerpc)(|64)', a): return 'ppc' |
40 | elif re.match('i.86$', a): return 'x86' | 40 | elif re.match('i.86$', a): return 'x86' |
41 | return a | 41 | return a |
42 | 42 | ||
43 | export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', True), d)}" | 43 | export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', True), d)}" |
44 | 44 | ||
diff --git a/meta/classes/kernel-yocto.bbclass b/meta/classes/kernel-yocto.bbclass index 48c8974257..ab59fc8f13 100644 --- a/meta/classes/kernel-yocto.bbclass +++ b/meta/classes/kernel-yocto.bbclass | |||
@@ -6,41 +6,41 @@ SRCTREECOVEREDTASKS += "do_kernel_link_vmlinux do_kernel_configme do_validate_br | |||
6 | # returns local (absolute) path names for all valid patches in the | 6 | # returns local (absolute) path names for all valid patches in the |
7 | # src_uri | 7 | # src_uri |
8 | def find_patches(d): | 8 | def find_patches(d): |
9 | patches=src_patches(d) | 9 | patches = src_patches(d) |
10 | patch_list=[] | 10 | patch_list=[] |
11 | for p in patches: | 11 | for p in patches: |
12 | _, _, local, _, _, _ = bb.decodeurl(p) | 12 | _, _, local, _, _, _ = bb.decodeurl(p) |
13 | patch_list.append(local) | 13 | patch_list.append(local) |
14 | 14 | ||
15 | return patch_list | 15 | return patch_list |
16 | 16 | ||
17 | # returns all the elements from the src uri that are .scc files | 17 | # returns all the elements from the src uri that are .scc files |
18 | def find_sccs(d): | 18 | def find_sccs(d): |
19 | sources=src_patches(d, True) | 19 | sources=src_patches(d, True) |
20 | sources_list=[] | 20 | sources_list=[] |
21 | for s in sources: | 21 | for s in sources: |
22 | base, ext = os.path.splitext(os.path.basename(s)) | 22 | base, ext = os.path.splitext(os.path.basename(s)) |
23 | if ext and ext in ('.scc' '.cfg'): | 23 | if ext and ext in ('.scc' '.cfg'): |
24 | sources_list.append(s) | 24 | sources_list.append(s) |
25 | elif base and base in 'defconfig': | 25 | elif base and base in 'defconfig': |
26 | sources_list.append(s) | 26 | sources_list.append(s) |
27 | 27 | ||
28 | return sources_list | 28 | return sources_list |
29 | 29 | ||
30 | # this is different from find_patches, in that it returns a colon separated | 30 | # this is different from find_patches, in that it returns a colon separated |
31 | # list of <patches>:<subdir> instead of just a list of patches | 31 | # list of <patches>:<subdir> instead of just a list of patches |
32 | def find_urls(d): | 32 | def find_urls(d): |
33 | patches=src_patches(d) | 33 | patches=src_patches(d) |
34 | fetch = bb.fetch2.Fetch([], d) | 34 | fetch = bb.fetch2.Fetch([], d) |
35 | patch_list=[] | 35 | patch_list=[] |
36 | for p in patches: | 36 | for p in patches: |
37 | _, _, local, _, _, _ = bb.decodeurl(p) | 37 | _, _, local, _, _, _ = bb.decodeurl(p) |
38 | for url in fetch.urls: | 38 | for url in fetch.urls: |
39 | urldata = fetch.ud[url] | 39 | urldata = fetch.ud[url] |
40 | if urldata.localpath == local: | 40 | if urldata.localpath == local: |
41 | patch_list.append(local+':'+urldata.path) | 41 | patch_list.append(local+':'+urldata.path) |
42 | 42 | ||
43 | return patch_list | 43 | return patch_list |
44 | 44 | ||
45 | 45 | ||
46 | do_patch() { | 46 | do_patch() { |
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass index fd744e7ea3..08b5e61fbe 100644 --- a/meta/classes/kernel.bbclass +++ b/meta/classes/kernel.bbclass | |||
@@ -310,177 +310,177 @@ module_conf_sco = "alias bt-proto-2 sco" | |||
310 | module_conf_rfcomm = "alias bt-proto-3 rfcomm" | 310 | module_conf_rfcomm = "alias bt-proto-3 rfcomm" |
311 | 311 | ||
312 | python populate_packages_prepend () { | 312 | python populate_packages_prepend () { |
313 | def extract_modinfo(file): | 313 | def extract_modinfo(file): |
314 | import tempfile, re, subprocess | 314 | import tempfile, re, subprocess |
315 | tempfile.tempdir = d.getVar("WORKDIR", True) | 315 | tempfile.tempdir = d.getVar("WORKDIR", True) |
316 | tf = tempfile.mkstemp() | 316 | tf = tempfile.mkstemp() |
317 | tmpfile = tf[1] | 317 | tmpfile = tf[1] |
318 | cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("PATH", True), d.getVar("HOST_PREFIX", True) or "", file, tmpfile) | 318 | cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("PATH", True), d.getVar("HOST_PREFIX", True) or "", file, tmpfile) |
319 | subprocess.call(cmd, shell=True) | 319 | subprocess.call(cmd, shell=True) |
320 | f = open(tmpfile) | 320 | f = open(tmpfile) |
321 | l = f.read().split("\000") | 321 | l = f.read().split("\000") |
322 | f.close() | 322 | f.close() |
323 | os.close(tf[0]) | 323 | os.close(tf[0]) |
324 | os.unlink(tmpfile) | 324 | os.unlink(tmpfile) |
325 | exp = re.compile("([^=]+)=(.*)") | 325 | exp = re.compile("([^=]+)=(.*)") |
326 | vals = {} | 326 | vals = {} |
327 | for i in l: | 327 | for i in l: |
328 | m = exp.match(i) | 328 | m = exp.match(i) |
329 | if not m: | 329 | if not m: |
330 | continue | 330 | continue |
331 | vals[m.group(1)] = m.group(2) | 331 | vals[m.group(1)] = m.group(2) |
332 | return vals | 332 | return vals |
333 | 333 | ||
334 | def parse_depmod(): | 334 | def parse_depmod(): |
335 | import re | 335 | import re |
336 | 336 | ||
337 | dvar = d.getVar('PKGD', True) | 337 | dvar = d.getVar('PKGD', True) |
338 | if not dvar: | 338 | if not dvar: |
339 | bb.error("PKGD not defined") | 339 | bb.error("PKGD not defined") |
340 | return | 340 | return |
341 | 341 | ||
342 | kernelver = d.getVar('KERNEL_VERSION', True) | 342 | kernelver = d.getVar('KERNEL_VERSION', True) |
343 | kernelver_stripped = kernelver | 343 | kernelver_stripped = kernelver |
344 | m = re.match('^(.*-hh.*)[\.\+].*$', kernelver) | 344 | m = re.match('^(.*-hh.*)[\.\+].*$', kernelver) |
345 | if m: | 345 | if m: |
346 | kernelver_stripped = m.group(1) | 346 | kernelver_stripped = m.group(1) |
347 | path = d.getVar("PATH", True) | 347 | path = d.getVar("PATH", True) |
348 | 348 | ||
349 | cmd = "PATH=\"%s\" depmod -n -a -b %s -F %s/boot/System.map-%s %s" % (path, dvar, dvar, kernelver, kernelver_stripped) | 349 | cmd = "PATH=\"%s\" depmod -n -a -b %s -F %s/boot/System.map-%s %s" % (path, dvar, dvar, kernelver, kernelver_stripped) |
350 | f = os.popen(cmd, 'r') | 350 | f = os.popen(cmd, 'r') |
351 | 351 | ||
352 | deps = {} | 352 | deps = {} |
353 | pattern0 = "^(.*\.k?o):..*$" | 353 | pattern0 = "^(.*\.k?o):..*$" |
354 | pattern1 = "^(.*\.k?o):\s*(.*\.k?o)\s*$" | 354 | pattern1 = "^(.*\.k?o):\s*(.*\.k?o)\s*$" |
355 | pattern2 = "^(.*\.k?o):\s*(.*\.k?o)\s*\\\$" | 355 | pattern2 = "^(.*\.k?o):\s*(.*\.k?o)\s*\\\$" |
356 | pattern3 = "^\t(.*\.k?o)\s*\\\$" | 356 | pattern3 = "^\t(.*\.k?o)\s*\\\$" |
357 | pattern4 = "^\t(.*\.k?o)\s*$" | 357 | pattern4 = "^\t(.*\.k?o)\s*$" |
358 | 358 | ||
359 | line = f.readline() | 359 | line = f.readline() |
360 | while line: | 360 | while line: |
361 | if not re.match(pattern0, line): | 361 | if not re.match(pattern0, line): |
362 | line = f.readline() | 362 | line = f.readline() |
363 | continue | 363 | continue |
364 | m1 = re.match(pattern1, line) | 364 | m1 = re.match(pattern1, line) |
365 | if m1: | 365 | if m1: |
366 | deps[m1.group(1)] = m1.group(2).split() | 366 | deps[m1.group(1)] = m1.group(2).split() |
367 | else: | 367 | else: |
368 | m2 = re.match(pattern2, line) | 368 | m2 = re.match(pattern2, line) |
369 | if m2: | 369 | if m2: |
370 | deps[m2.group(1)] = m2.group(2).split() | 370 | deps[m2.group(1)] = m2.group(2).split() |
371 | line = f.readline() | 371 | line = f.readline() |
372 | m3 = re.match(pattern3, line) | 372 | m3 = re.match(pattern3, line) |
373 | while m3: | 373 | while m3: |
374 | deps[m2.group(1)].extend(m3.group(1).split()) | 374 | deps[m2.group(1)].extend(m3.group(1).split()) |
375 | line = f.readline() | 375 | line = f.readline() |
376 | m3 = re.match(pattern3, line) | 376 | m3 = re.match(pattern3, line) |
377 | m4 = re.match(pattern4, line) | 377 | m4 = re.match(pattern4, line) |
378 | deps[m2.group(1)].extend(m4.group(1).split()) | 378 | deps[m2.group(1)].extend(m4.group(1).split()) |
379 | line = f.readline() | 379 | line = f.readline() |
380 | f.close() | 380 | f.close() |
381 | return deps | 381 | return deps |
382 | 382 | ||
383 | def get_dependencies(file, pattern, format): | 383 | def get_dependencies(file, pattern, format): |
384 | # file no longer includes PKGD | 384 | # file no longer includes PKGD |
385 | file = file.replace(d.getVar('PKGD', True) or '', '', 1) | 385 | file = file.replace(d.getVar('PKGD', True) or '', '', 1) |
386 | # instead is prefixed with /lib/modules/${KERNEL_VERSION} | 386 | # instead is prefixed with /lib/modules/${KERNEL_VERSION} |
387 | file = file.replace("/lib/modules/%s/" % d.getVar('KERNEL_VERSION', True) or '', '', 1) | 387 | file = file.replace("/lib/modules/%s/" % d.getVar('KERNEL_VERSION', True) or '', '', 1) |
388 | 388 | ||
389 | if module_deps.has_key(file): | 389 | if module_deps.has_key(file): |
390 | import re | 390 | import re |
391 | dependencies = [] | 391 | dependencies = [] |
392 | for i in module_deps[file]: | 392 | for i in module_deps[file]: |
393 | m = re.match(pattern, os.path.basename(i)) | 393 | m = re.match(pattern, os.path.basename(i)) |
394 | if not m: | 394 | if not m: |
395 | continue | 395 | continue |
396 | on = legitimize_package_name(m.group(1)) | 396 | on = legitimize_package_name(m.group(1)) |
397 | dependency_pkg = format % on | 397 | dependency_pkg = format % on |
398 | dependencies.append(dependency_pkg) | 398 | dependencies.append(dependency_pkg) |
399 | return dependencies | 399 | return dependencies |
400 | return [] | 400 | return [] |
401 | 401 | ||
402 | def frob_metadata(file, pkg, pattern, format, basename): | 402 | def frob_metadata(file, pkg, pattern, format, basename): |
403 | import re | 403 | import re |
404 | vals = extract_modinfo(file) | 404 | vals = extract_modinfo(file) |
405 | 405 | ||
406 | dvar = d.getVar('PKGD', True) | 406 | dvar = d.getVar('PKGD', True) |
407 | 407 | ||
408 | # If autoloading is requested, output /etc/modules-load.d/<name>.conf and append | 408 | # If autoloading is requested, output /etc/modules-load.d/<name>.conf and append |
409 | # appropriate modprobe commands to the postinst | 409 | # appropriate modprobe commands to the postinst |
410 | autoload = d.getVar('module_autoload_%s' % basename, True) | 410 | autoload = d.getVar('module_autoload_%s' % basename, True) |
411 | if autoload: | 411 | if autoload: |
412 | name = '%s/etc/modules-load.d/%s.conf' % (dvar, basename) | 412 | name = '%s/etc/modules-load.d/%s.conf' % (dvar, basename) |
413 | f = open(name, 'w') | 413 | f = open(name, 'w') |
414 | for m in autoload.split(): | 414 | for m in autoload.split(): |
415 | f.write('%s\n' % m) | 415 | f.write('%s\n' % m) |
416 | f.close() | 416 | f.close() |
417 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) | 417 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) |
418 | if not postinst: | 418 | if not postinst: |
419 | bb.fatal("pkg_postinst_%s not defined" % pkg) | 419 | bb.fatal("pkg_postinst_%s not defined" % pkg) |
420 | postinst += d.getVar('autoload_postinst_fragment', True) % autoload | 420 | postinst += d.getVar('autoload_postinst_fragment', True) % autoload |
421 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 421 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
422 | 422 | ||
423 | # Write out any modconf fragment | 423 | # Write out any modconf fragment |
424 | modconf = d.getVar('module_conf_%s' % basename, True) | 424 | modconf = d.getVar('module_conf_%s' % basename, True) |
425 | if modconf: | 425 | if modconf: |
426 | name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename) | 426 | name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename) |
427 | f = open(name, 'w') | 427 | f = open(name, 'w') |
428 | f.write("%s\n" % modconf) | 428 | f.write("%s\n" % modconf) |
429 | f.close() | 429 | f.close() |
430 | 430 | ||
431 | files = d.getVar('FILES_%s' % pkg, True) | 431 | files = d.getVar('FILES_%s' % pkg, True) |
432 | files = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename) | 432 | files = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename) |
433 | d.setVar('FILES_%s' % pkg, files) | 433 | d.setVar('FILES_%s' % pkg, files) |
434 | 434 | ||
435 | if vals.has_key("description"): | 435 | if vals.has_key("description"): |
436 | old_desc = d.getVar('DESCRIPTION_' + pkg, True) or "" | 436 | old_desc = d.getVar('DESCRIPTION_' + pkg, True) or "" |
437 | d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"]) | 437 | d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"]) |
438 | 438 | ||
439 | rdepends_str = d.getVar('RDEPENDS_' + pkg, True) | 439 | rdepends_str = d.getVar('RDEPENDS_' + pkg, True) |
440 | if rdepends_str: | 440 | if rdepends_str: |
441 | rdepends = rdepends_str.split() | 441 | rdepends = rdepends_str.split() |
442 | else: | 442 | else: |
443 | rdepends = [] | 443 | rdepends = [] |
444 | rdepends.extend(get_dependencies(file, pattern, format)) | 444 | rdepends.extend(get_dependencies(file, pattern, format)) |
445 | d.setVar('RDEPENDS_' + pkg, ' '.join(rdepends)) | 445 | d.setVar('RDEPENDS_' + pkg, ' '.join(rdepends)) |
446 | 446 | ||
447 | module_deps = parse_depmod() | 447 | module_deps = parse_depmod() |
448 | module_regex = '^(.*)\.k?o$' | 448 | module_regex = '^(.*)\.k?o$' |
449 | module_pattern = 'kernel-module-%s' | 449 | module_pattern = 'kernel-module-%s' |
450 | 450 | ||
451 | postinst = d.getVar('pkg_postinst_modules', True) | 451 | postinst = d.getVar('pkg_postinst_modules', True) |
452 | postrm = d.getVar('pkg_postrm_modules', True) | 452 | postrm = d.getVar('pkg_postrm_modules', True) |
453 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') | 453 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') |
454 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') | 454 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') |
455 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') | 455 | do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') |
456 | do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % d.getVar("KERNEL_VERSION", True)) | 456 | do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % d.getVar("KERNEL_VERSION", True)) |
457 | 457 | ||
458 | # If modules-load.d and modprobe.d are empty at this point, remove them to | 458 | # If modules-load.d and modprobe.d are empty at this point, remove them to |
459 | # avoid warnings. removedirs only raises an OSError if an empty | 459 | # avoid warnings. removedirs only raises an OSError if an empty |
460 | # directory cannot be removed. | 460 | # directory cannot be removed. |
461 | dvar = d.getVar('PKGD', True) | 461 | dvar = d.getVar('PKGD', True) |
462 | for dir in ["%s/etc/modprobe.d" % (dvar), "%s/etc/modules-load.d" % (dvar), "%s/etc" % (dvar)]: | 462 | for dir in ["%s/etc/modprobe.d" % (dvar), "%s/etc/modules-load.d" % (dvar), "%s/etc" % (dvar)]: |
463 | if len(os.listdir(dir)) == 0: | 463 | if len(os.listdir(dir)) == 0: |
464 | os.rmdir(dir) | 464 | os.rmdir(dir) |
465 | 465 | ||
466 | import re | 466 | import re |
467 | metapkg = "kernel-modules" | 467 | metapkg = "kernel-modules" |
468 | d.setVar('ALLOW_EMPTY_' + metapkg, "1") | 468 | d.setVar('ALLOW_EMPTY_' + metapkg, "1") |
469 | d.setVar('FILES_' + metapkg, "") | 469 | d.setVar('FILES_' + metapkg, "") |
470 | blacklist = [ 'kernel-dev', 'kernel-image', 'kernel-base', 'kernel-vmlinux' ] | 470 | blacklist = [ 'kernel-dev', 'kernel-image', 'kernel-base', 'kernel-vmlinux' ] |
471 | for l in module_deps.values(): | 471 | for l in module_deps.values(): |
472 | for i in l: | 472 | for i in l: |
473 | pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1)) | 473 | pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1)) |
474 | blacklist.append(pkg) | 474 | blacklist.append(pkg) |
475 | metapkg_rdepends = [] | 475 | metapkg_rdepends = [] |
476 | packages = d.getVar('PACKAGES', True).split() | 476 | packages = d.getVar('PACKAGES', True).split() |
477 | for pkg in packages[1:]: | 477 | for pkg in packages[1:]: |
478 | if not pkg in blacklist and not pkg in metapkg_rdepends: | 478 | if not pkg in blacklist and not pkg in metapkg_rdepends: |
479 | metapkg_rdepends.append(pkg) | 479 | metapkg_rdepends.append(pkg) |
480 | d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends)) | 480 | d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends)) |
481 | d.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package') | 481 | d.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package') |
482 | packages.append(metapkg) | 482 | packages.append(metapkg) |
483 | d.setVar('PACKAGES', ' '.join(packages)) | 483 | d.setVar('PACKAGES', ' '.join(packages)) |
484 | } | 484 | } |
485 | 485 | ||
486 | # Support checking the kernel size since some kernels need to reside in partitions | 486 | # Support checking the kernel size since some kernels need to reside in partitions |
diff --git a/meta/classes/libc-common.bbclass b/meta/classes/libc-common.bbclass index 8145d64e29..06d520164e 100644 --- a/meta/classes/libc-common.bbclass +++ b/meta/classes/libc-common.bbclass | |||
@@ -23,13 +23,13 @@ def get_libc_fpu_setting(bb, d): | |||
23 | return "" | 23 | return "" |
24 | 24 | ||
25 | python populate_packages_prepend () { | 25 | python populate_packages_prepend () { |
26 | if d.getVar('DEBIAN_NAMES', True): | 26 | if d.getVar('DEBIAN_NAMES', True): |
27 | bpn = d.getVar('BPN', True) | 27 | bpn = d.getVar('BPN', True) |
28 | d.setVar('PKG_'+bpn, 'libc6') | 28 | d.setVar('PKG_'+bpn, 'libc6') |
29 | d.setVar('PKG_'+bpn+'-dev', 'libc6-dev') | 29 | d.setVar('PKG_'+bpn+'-dev', 'libc6-dev') |
30 | d.setVar('PKG_'+bpn+'-dbg', 'libc6-dbg') | 30 | d.setVar('PKG_'+bpn+'-dbg', 'libc6-dbg') |
31 | # For backward compatibility with old -dbg package | 31 | # For backward compatibility with old -dbg package |
32 | d.appendVar('RPROVIDES_' + bpn + '-dbg', ' libc-dbg') | 32 | d.appendVar('RPROVIDES_' + bpn + '-dbg', ' libc-dbg') |
33 | d.appendVar('RCONFLICTS_' + bpn + '-dbg', ' libc-dbg') | 33 | d.appendVar('RCONFLICTS_' + bpn + '-dbg', ' libc-dbg') |
34 | d.appendVar('RREPLACES_' + bpn + '-dbg', ' libc-dbg') | 34 | d.appendVar('RREPLACES_' + bpn + '-dbg', ' libc-dbg') |
35 | } | 35 | } |
diff --git a/meta/classes/libc-package.bbclass b/meta/classes/libc-package.bbclass index 9df3c17116..e3214a68a2 100644 --- a/meta/classes/libc-package.bbclass +++ b/meta/classes/libc-package.bbclass | |||
@@ -28,10 +28,10 @@ python __anonymous () { | |||
28 | 28 | ||
29 | if r.match(target_arch): | 29 | if r.match(target_arch): |
30 | depends = d.getVar("DEPENDS", True) | 30 | depends = d.getVar("DEPENDS", True) |
31 | if use_cross_localedef == "1" : | 31 | if use_cross_localedef == "1" : |
32 | depends = "%s cross-localedef-native" % depends | 32 | depends = "%s cross-localedef-native" % depends |
33 | else: | 33 | else: |
34 | depends = "%s qemu-native" % depends | 34 | depends = "%s qemu-native" % depends |
35 | d.setVar("DEPENDS", depends) | 35 | d.setVar("DEPENDS", depends) |
36 | d.setVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", "compile") | 36 | d.setVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", "compile") |
37 | break | 37 | break |
@@ -118,270 +118,270 @@ do_collect_bins_from_locale_tree() { | |||
118 | inherit qemu | 118 | inherit qemu |
119 | 119 | ||
120 | python package_do_split_gconvs () { | 120 | python package_do_split_gconvs () { |
121 | import os, re | 121 | import os, re |
122 | if (d.getVar('PACKAGE_NO_GCONV', True) == '1'): | 122 | if (d.getVar('PACKAGE_NO_GCONV', True) == '1'): |
123 | bb.note("package requested not splitting gconvs") | 123 | bb.note("package requested not splitting gconvs") |
124 | return | 124 | return |
125 | 125 | ||
126 | if not d.getVar('PACKAGES', True): | 126 | if not d.getVar('PACKAGES', True): |
127 | return | 127 | return |
128 | 128 | ||
129 | mlprefix = d.getVar("MLPREFIX", True) or "" | 129 | mlprefix = d.getVar("MLPREFIX", True) or "" |
130 | 130 | ||
131 | bpn = d.getVar('BPN', True) | 131 | bpn = d.getVar('BPN', True) |
132 | libdir = d.getVar('libdir', True) | 132 | libdir = d.getVar('libdir', True) |
133 | if not libdir: | 133 | if not libdir: |
134 | bb.error("libdir not defined") | 134 | bb.error("libdir not defined") |
135 | return | 135 | return |
136 | datadir = d.getVar('datadir', True) | 136 | datadir = d.getVar('datadir', True) |
137 | if not datadir: | 137 | if not datadir: |
138 | bb.error("datadir not defined") | 138 | bb.error("datadir not defined") |
139 | return | 139 | return |
140 | 140 | ||
141 | gconv_libdir = base_path_join(libdir, "gconv") | 141 | gconv_libdir = base_path_join(libdir, "gconv") |
142 | charmap_dir = base_path_join(datadir, "i18n", "charmaps") | 142 | charmap_dir = base_path_join(datadir, "i18n", "charmaps") |
143 | locales_dir = base_path_join(datadir, "i18n", "locales") | 143 | locales_dir = base_path_join(datadir, "i18n", "locales") |
144 | binary_locales_dir = base_path_join(libdir, "locale") | 144 | binary_locales_dir = base_path_join(libdir, "locale") |
145 | 145 | ||
146 | def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group): | 146 | def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group): |
147 | deps = [] | 147 | deps = [] |
148 | f = open(fn, "r") | 148 | f = open(fn, "r") |
149 | c_re = re.compile('^copy "(.*)"') | 149 | c_re = re.compile('^copy "(.*)"') |
150 | i_re = re.compile('^include "(\w+)".*') | 150 | i_re = re.compile('^include "(\w+)".*') |
151 | for l in f.readlines(): | 151 | for l in f.readlines(): |
152 | m = c_re.match(l) or i_re.match(l) | 152 | m = c_re.match(l) or i_re.match(l) |
153 | if m: | 153 | if m: |
154 | dp = legitimize_package_name('%s%s-gconv-%s' % (mlprefix, bpn, m.group(1))) | 154 | dp = legitimize_package_name('%s%s-gconv-%s' % (mlprefix, bpn, m.group(1))) |
155 | if not dp in deps: | 155 | if not dp in deps: |
156 | deps.append(dp) | 156 | deps.append(dp) |
157 | f.close() | 157 | f.close() |
158 | if deps != []: | 158 | if deps != []: |
159 | d.setVar('RDEPENDS_%s' % pkg, " ".join(deps)) | 159 | d.setVar('RDEPENDS_%s' % pkg, " ".join(deps)) |
160 | if bpn != 'glibc': | 160 | if bpn != 'glibc': |
161 | d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc')) | 161 | d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc')) |
162 | 162 | ||
163 | do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \ | 163 | do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \ |
164 | description='gconv module for character set %s', hook=calc_gconv_deps, \ | 164 | description='gconv module for character set %s', hook=calc_gconv_deps, \ |
165 | extra_depends=bpn+'-gconv') | 165 | extra_depends=bpn+'-gconv') |
166 | 166 | ||
167 | def calc_charmap_deps(fn, pkg, file_regex, output_pattern, group): | 167 | def calc_charmap_deps(fn, pkg, file_regex, output_pattern, group): |
168 | deps = [] | 168 | deps = [] |
169 | f = open(fn, "r") | 169 | f = open(fn, "r") |
170 | c_re = re.compile('^copy "(.*)"') | 170 | c_re = re.compile('^copy "(.*)"') |
171 | i_re = re.compile('^include "(\w+)".*') | 171 | i_re = re.compile('^include "(\w+)".*') |
172 | for l in f.readlines(): | 172 | for l in f.readlines(): |
173 | m = c_re.match(l) or i_re.match(l) | 173 | m = c_re.match(l) or i_re.match(l) |
174 | if m: | 174 | if m: |
175 | dp = legitimize_package_name('%s%s-charmap-%s' % (mlprefix, bpn, m.group(1))) | 175 | dp = legitimize_package_name('%s%s-charmap-%s' % (mlprefix, bpn, m.group(1))) |
176 | if not dp in deps: | 176 | if not dp in deps: |
177 | deps.append(dp) | 177 | deps.append(dp) |
178 | f.close() | 178 | f.close() |
179 | if deps != []: | 179 | if deps != []: |
180 | d.setVar('RDEPENDS_%s' % pkg, " ".join(deps)) | 180 | d.setVar('RDEPENDS_%s' % pkg, " ".join(deps)) |
181 | if bpn != 'glibc': | 181 | if bpn != 'glibc': |
182 | d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc')) | 182 | d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc')) |
183 | 183 | ||
184 | do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \ | 184 | do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \ |
185 | description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='') | 185 | description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='') |
186 | 186 | ||
187 | def calc_locale_deps(fn, pkg, file_regex, output_pattern, group): | 187 | def calc_locale_deps(fn, pkg, file_regex, output_pattern, group): |
188 | deps = [] | 188 | deps = [] |
189 | f = open(fn, "r") | 189 | f = open(fn, "r") |
190 | c_re = re.compile('^copy "(.*)"') | 190 | c_re = re.compile('^copy "(.*)"') |
191 | i_re = re.compile('^include "(\w+)".*') | 191 | i_re = re.compile('^include "(\w+)".*') |
192 | for l in f.readlines(): | 192 | for l in f.readlines(): |
193 | m = c_re.match(l) or i_re.match(l) | 193 | m = c_re.match(l) or i_re.match(l) |
194 | if m: | 194 | if m: |
195 | dp = legitimize_package_name(mlprefix+bpn+'-localedata-%s' % m.group(1)) | 195 | dp = legitimize_package_name(mlprefix+bpn+'-localedata-%s' % m.group(1)) |
196 | if not dp in deps: | 196 | if not dp in deps: |
197 | deps.append(dp) | 197 | deps.append(dp) |
198 | f.close() | 198 | f.close() |
199 | if deps != []: | 199 | if deps != []: |
200 | d.setVar('RDEPENDS_%s' % pkg, " ".join(deps)) | 200 | d.setVar('RDEPENDS_%s' % pkg, " ".join(deps)) |
201 | if bpn != 'glibc': | 201 | if bpn != 'glibc': |
202 | d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc')) | 202 | d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc')) |
203 | 203 | ||
204 | do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \ | 204 | do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \ |
205 | description='locale definition for %s', hook=calc_locale_deps, extra_depends='') | 205 | description='locale definition for %s', hook=calc_locale_deps, extra_depends='') |
206 | d.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv') | 206 | d.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv') |
207 | 207 | ||
208 | use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True) | 208 | use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True) |
209 | 209 | ||
210 | dot_re = re.compile("(.*)\.(.*)") | 210 | dot_re = re.compile("(.*)\.(.*)") |
211 | 211 | ||
212 | # Read in supported locales and associated encodings | 212 | # Read in supported locales and associated encodings |
213 | supported = {} | 213 | supported = {} |
214 | with open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED")) as f: | 214 | with open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED")) as f: |
215 | for line in f.readlines(): | 215 | for line in f.readlines(): |
216 | try: | 216 | try: |
217 | locale, charset = line.rstrip().split() | 217 | locale, charset = line.rstrip().split() |
218 | except ValueError: | 218 | except ValueError: |
219 | continue | 219 | continue |
220 | supported[locale] = charset | 220 | supported[locale] = charset |
221 | 221 | ||
222 | # GLIBC_GENERATE_LOCALES var specifies which locales to be generated. empty or "all" means all locales | 222 | # GLIBC_GENERATE_LOCALES var specifies which locales to be generated. empty or "all" means all locales |
223 | to_generate = d.getVar('GLIBC_GENERATE_LOCALES', True) | 223 | to_generate = d.getVar('GLIBC_GENERATE_LOCALES', True) |
224 | if not to_generate or to_generate == 'all': | 224 | if not to_generate or to_generate == 'all': |
225 | to_generate = supported.keys() | 225 | to_generate = supported.keys() |
226 | else: | 226 | else: |
227 | to_generate = to_generate.split() | 227 | to_generate = to_generate.split() |
228 | for locale in to_generate: | 228 | for locale in to_generate: |
229 | if locale not in supported: | 229 | if locale not in supported: |
230 | if '.' in locale: | 230 | if '.' in locale: |
231 | charset = locale.split('.')[1] | 231 | charset = locale.split('.')[1] |
232 | else: | 232 | else: |
233 | charset = 'UTF-8' | 233 | charset = 'UTF-8' |
234 | bb.warn("Unsupported locale '%s', assuming encoding '%s'" % (locale, charset)) | 234 | bb.warn("Unsupported locale '%s', assuming encoding '%s'" % (locale, charset)) |
235 | supported[locale] = charset | 235 | supported[locale] = charset |
236 | 236 | ||
237 | def output_locale_source(name, pkgname, locale, encoding): | 237 | def output_locale_source(name, pkgname, locale, encoding): |
238 | d.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \ | 238 | d.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \ |
239 | (mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding))) | 239 | (mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding))) |
240 | d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \ | 240 | d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \ |
241 | % (locale, encoding, locale)) | 241 | % (locale, encoding, locale)) |
242 | d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \ | 242 | d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \ |
243 | (locale, encoding, locale)) | 243 | (locale, encoding, locale)) |
244 | 244 | ||
245 | def output_locale_binary_rdepends(name, pkgname, locale, encoding): | 245 | def output_locale_binary_rdepends(name, pkgname, locale, encoding): |
246 | m = re.match("(.*)\.(.*)", name) | 246 | m = re.match("(.*)\.(.*)", name) |
247 | if m: | 247 | if m: |
248 | libc_name = "%s.%s" % (m.group(1), m.group(2).lower().replace("-","")) | 248 | libc_name = "%s.%s" % (m.group(1), m.group(2).lower().replace("-","")) |
249 | else: | 249 | else: |
250 | libc_name = name | 250 | libc_name = name |
251 | d.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \ | 251 | d.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \ |
252 | % (mlprefix+bpn, libc_name))) | 252 | % (mlprefix+bpn, libc_name))) |
253 | 253 | ||
254 | commands = {} | 254 | commands = {} |
255 | 255 | ||
256 | def output_locale_binary(name, pkgname, locale, encoding): | 256 | def output_locale_binary(name, pkgname, locale, encoding): |
257 | treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree") | 257 | treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree") |
258 | ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True)) | 258 | ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True)) |
259 | path = d.getVar("PATH", True) | 259 | path = d.getVar("PATH", True) |
260 | i18npath = base_path_join(treedir, datadir, "i18n") | 260 | i18npath = base_path_join(treedir, datadir, "i18n") |
261 | gconvpath = base_path_join(treedir, "iconvdata") | 261 | gconvpath = base_path_join(treedir, "iconvdata") |
262 | outputpath = base_path_join(treedir, libdir, "locale") | 262 | outputpath = base_path_join(treedir, libdir, "locale") |
263 | 263 | ||
264 | use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0" | 264 | use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0" |
265 | if use_cross_localedef == "1": | 265 | if use_cross_localedef == "1": |
266 | target_arch = d.getVar('TARGET_ARCH', True) | 266 | target_arch = d.getVar('TARGET_ARCH', True) |
267 | locale_arch_options = { \ | 267 | locale_arch_options = { \ |
268 | "arm": " --uint32-align=4 --little-endian ", \ | 268 | "arm": " --uint32-align=4 --little-endian ", \ |
269 | "sh4": " --uint32-align=4 --big-endian ", \ | 269 | "sh4": " --uint32-align=4 --big-endian ", \ |
270 | "powerpc": " --uint32-align=4 --big-endian ", \ | 270 | "powerpc": " --uint32-align=4 --big-endian ", \ |
271 | "powerpc64": " --uint32-align=4 --big-endian ", \ | 271 | "powerpc64": " --uint32-align=4 --big-endian ", \ |
272 | "mips": " --uint32-align=4 --big-endian ", \ | 272 | "mips": " --uint32-align=4 --big-endian ", \ |
273 | "mips64": " --uint32-align=4 --big-endian ", \ | 273 | "mips64": " --uint32-align=4 --big-endian ", \ |
274 | "mipsel": " --uint32-align=4 --little-endian ", \ | 274 | "mipsel": " --uint32-align=4 --little-endian ", \ |
275 | "mips64el":" --uint32-align=4 --little-endian ", \ | 275 | "mips64el":" --uint32-align=4 --little-endian ", \ |
276 | "i586": " --uint32-align=4 --little-endian ", \ | 276 | "i586": " --uint32-align=4 --little-endian ", \ |
277 | "i686": " --uint32-align=4 --little-endian ", \ | 277 | "i686": " --uint32-align=4 --little-endian ", \ |
278 | "x86_64": " --uint32-align=4 --little-endian " } | 278 | "x86_64": " --uint32-align=4 --little-endian " } |
279 | 279 | ||
280 | if target_arch in locale_arch_options: | 280 | if target_arch in locale_arch_options: |
281 | localedef_opts = locale_arch_options[target_arch] | 281 | localedef_opts = locale_arch_options[target_arch] |
282 | else: | 282 | else: |
283 | bb.error("locale_arch_options not found for target_arch=" + target_arch) | 283 | bb.error("locale_arch_options not found for target_arch=" + target_arch) |
284 | raise bb.build.FuncFailed("unknown arch:" + target_arch + " for locale_arch_options") | 284 | raise bb.build.FuncFailed("unknown arch:" + target_arch + " for locale_arch_options") |
285 | 285 | ||
286 | localedef_opts += " --force --old-style --no-archive --prefix=%s \ | 286 | localedef_opts += " --force --old-style --no-archive --prefix=%s \ |
287 | --inputfile=%s/%s/i18n/locales/%s --charmap=%s %s/%s" \ | 287 | --inputfile=%s/%s/i18n/locales/%s --charmap=%s %s/%s" \ |
288 | % (treedir, treedir, datadir, locale, encoding, outputpath, name) | 288 | % (treedir, treedir, datadir, locale, encoding, outputpath, name) |
289 | 289 | ||
290 | cmd = "PATH=\"%s\" I18NPATH=\"%s\" GCONV_PATH=\"%s\" cross-localedef %s" % \ | 290 | cmd = "PATH=\"%s\" I18NPATH=\"%s\" GCONV_PATH=\"%s\" cross-localedef %s" % \ |
291 | (path, i18npath, gconvpath, localedef_opts) | 291 | (path, i18npath, gconvpath, localedef_opts) |
292 | else: # earlier slower qemu way | 292 | else: # earlier slower qemu way |
293 | qemu = qemu_target_binary(d) | 293 | qemu = qemu_target_binary(d) |
294 | localedef_opts = "--force --old-style --no-archive --prefix=%s \ | 294 | localedef_opts = "--force --old-style --no-archive --prefix=%s \ |
295 | --inputfile=%s/i18n/locales/%s --charmap=%s %s" \ | 295 | --inputfile=%s/i18n/locales/%s --charmap=%s %s" \ |
296 | % (treedir, datadir, locale, encoding, name) | 296 | % (treedir, datadir, locale, encoding, name) |
297 | 297 | ||
298 | qemu_options = d.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True) | 298 | qemu_options = d.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True) |
299 | if not qemu_options: | 299 | if not qemu_options: |
300 | qemu_options = d.getVar('QEMU_OPTIONS', True) | 300 | qemu_options = d.getVar('QEMU_OPTIONS', True) |
301 | 301 | ||
302 | cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \ | 302 | cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \ |
303 | -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \ | 303 | -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \ |
304 | (path, i18npath, qemu, treedir, ldlibdir, qemu_options, treedir, localedef_opts) | 304 | (path, i18npath, qemu, treedir, ldlibdir, qemu_options, treedir, localedef_opts) |
305 | 305 | ||
306 | commands["%s/%s" % (outputpath, name)] = cmd | 306 | commands["%s/%s" % (outputpath, name)] = cmd |
307 | 307 | ||
308 | bb.note("generating locale %s (%s)" % (locale, encoding)) | 308 | bb.note("generating locale %s (%s)" % (locale, encoding)) |
309 | 309 | ||
310 | def output_locale(name, locale, encoding): | 310 | def output_locale(name, locale, encoding): |
311 | pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name) | 311 | pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name) |
312 | d.setVar('ALLOW_EMPTY_%s' % pkgname, '1') | 312 | d.setVar('ALLOW_EMPTY_%s' % pkgname, '1') |
313 | d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', True))) | 313 | d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', True))) |
314 | rprovides = ' %svirtual-locale-%s' % (mlprefix, legitimize_package_name(name)) | 314 | rprovides = ' %svirtual-locale-%s' % (mlprefix, legitimize_package_name(name)) |
315 | m = re.match("(.*)_(.*)", name) | 315 | m = re.match("(.*)_(.*)", name) |
316 | if m: | 316 | if m: |
317 | rprovides += ' %svirtual-locale-%s' % (mlprefix, m.group(1)) | 317 | rprovides += ' %svirtual-locale-%s' % (mlprefix, m.group(1)) |
318 | d.setVar('RPROVIDES_%s' % pkgname, rprovides) | 318 | d.setVar('RPROVIDES_%s' % pkgname, rprovides) |
319 | 319 | ||
320 | if use_bin == "compile": | 320 | if use_bin == "compile": |
321 | output_locale_binary_rdepends(name, pkgname, locale, encoding) | 321 | output_locale_binary_rdepends(name, pkgname, locale, encoding) |
322 | output_locale_binary(name, pkgname, locale, encoding) | 322 | output_locale_binary(name, pkgname, locale, encoding) |
323 | elif use_bin == "precompiled": | 323 | elif use_bin == "precompiled": |
324 | output_locale_binary_rdepends(name, pkgname, locale, encoding) | 324 | output_locale_binary_rdepends(name, pkgname, locale, encoding) |
325 | else: | 325 | else: |
326 | output_locale_source(name, pkgname, locale, encoding) | 326 | output_locale_source(name, pkgname, locale, encoding) |
327 | 327 | ||
328 | if use_bin == "compile": | 328 | if use_bin == "compile": |
329 | bb.note("preparing tree for binary locale generation") | 329 | bb.note("preparing tree for binary locale generation") |
330 | bb.build.exec_func("do_prep_locale_tree", d) | 330 | bb.build.exec_func("do_prep_locale_tree", d) |
331 | 331 | ||
332 | utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0) | 332 | utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0) |
333 | encodings = {} | 333 | encodings = {} |
334 | for locale in to_generate: | 334 | for locale in to_generate: |
335 | charset = supported[locale] | 335 | charset = supported[locale] |
336 | if utf8_only and charset != 'UTF-8': | 336 | if utf8_only and charset != 'UTF-8': |
337 | continue | 337 | continue |
338 | 338 | ||
339 | m = dot_re.match(locale) | 339 | m = dot_re.match(locale) |
340 | if m: | 340 | if m: |
341 | base = m.group(1) | 341 | base = m.group(1) |
342 | else: | 342 | else: |
343 | base = locale | 343 | base = locale |
344 | 344 | ||
345 | # Precompiled locales are kept as is, obeying SUPPORTED, while | 345 | # Precompiled locales are kept as is, obeying SUPPORTED, while |
346 | # others are adjusted, ensuring that the non-suffixed locales | 346 | # others are adjusted, ensuring that the non-suffixed locales |
347 | # are utf-8, while the suffixed are not. | 347 | # are utf-8, while the suffixed are not. |
348 | if use_bin == "precompiled": | 348 | if use_bin == "precompiled": |
349 | output_locale(locale, base, charset) | 349 | output_locale(locale, base, charset) |
350 | else: | 350 | else: |
351 | if charset == 'UTF-8': | 351 | if charset == 'UTF-8': |
352 | output_locale(base, base, charset) | 352 | output_locale(base, base, charset) |
353 | else: | 353 | else: |
354 | output_locale('%s.%s' % (base, charset), base, charset) | 354 | output_locale('%s.%s' % (base, charset), base, charset) |
355 | 355 | ||
356 | if use_bin == "compile": | 356 | if use_bin == "compile": |
357 | makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile") | 357 | makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile") |
358 | m = open(makefile, "w") | 358 | m = open(makefile, "w") |
359 | m.write("all: %s\n\n" % " ".join(commands.keys())) | 359 | m.write("all: %s\n\n" % " ".join(commands.keys())) |
360 | for cmd in commands: | 360 | for cmd in commands: |
361 | m.write(cmd + ":\n") | 361 | m.write(cmd + ":\n") |
362 | m.write(" " + commands[cmd] + "\n\n") | 362 | m.write("\t" + commands[cmd] + "\n\n") |
363 | m.close() | 363 | m.close() |
364 | d.setVar("B", os.path.dirname(makefile)) | 364 | d.setVar("B", os.path.dirname(makefile)) |
365 | d.setVar("EXTRA_OEMAKE", "${PARALLEL_MAKE}") | 365 | d.setVar("EXTRA_OEMAKE", "${PARALLEL_MAKE}") |
366 | bb.note("Executing binary locale generation makefile") | 366 | bb.note("Executing binary locale generation makefile") |
367 | bb.build.exec_func("oe_runmake", d) | 367 | bb.build.exec_func("oe_runmake", d) |
368 | bb.note("collecting binary locales from locale tree") | 368 | bb.note("collecting binary locales from locale tree") |
369 | bb.build.exec_func("do_collect_bins_from_locale_tree", d) | 369 | bb.build.exec_func("do_collect_bins_from_locale_tree", d) |
370 | do_split_packages(d, binary_locales_dir, file_regex='(.*)', \ | 370 | do_split_packages(d, binary_locales_dir, file_regex='(.*)', \ |
371 | output_pattern=bpn+'-binary-localedata-%s', \ | 371 | output_pattern=bpn+'-binary-localedata-%s', \ |
372 | description='binary locale definition for %s', extra_depends='', allow_dirs=True) | 372 | description='binary locale definition for %s', extra_depends='', allow_dirs=True) |
373 | elif use_bin == "precompiled": | 373 | elif use_bin == "precompiled": |
374 | do_split_packages(d, binary_locales_dir, file_regex='(.*)', \ | 374 | do_split_packages(d, binary_locales_dir, file_regex='(.*)', \ |
375 | output_pattern=bpn+'-binary-localedata-%s', \ | 375 | output_pattern=bpn+'-binary-localedata-%s', \ |
376 | description='binary locale definition for %s', extra_depends='', allow_dirs=True) | 376 | description='binary locale definition for %s', extra_depends='', allow_dirs=True) |
377 | else: | 377 | else: |
378 | bb.note("generation of binary locales disabled. this may break i18n!") | 378 | bb.note("generation of binary locales disabled. this may break i18n!") |
379 | 379 | ||
380 | } | 380 | } |
381 | 381 | ||
382 | # We want to do this indirection so that we can safely 'return' | 382 | # We want to do this indirection so that we can safely 'return' |
383 | # from the called function even though we're prepending | 383 | # from the called function even though we're prepending |
384 | python populate_packages_prepend () { | 384 | python populate_packages_prepend () { |
385 | bb.build.exec_func('package_do_split_gconvs', d) | 385 | bb.build.exec_func('package_do_split_gconvs', d) |
386 | } | 386 | } |
387 | 387 | ||
diff --git a/meta/classes/license.bbclass b/meta/classes/license.bbclass index 4e25cf82b5..03e413b3b8 100644 --- a/meta/classes/license.bbclass +++ b/meta/classes/license.bbclass | |||
@@ -385,6 +385,6 @@ do_populate_lic[sstate-outputdirs] = "${LICENSE_DIRECTORY}/" | |||
385 | ROOTFS_POSTPROCESS_COMMAND_prepend = "license_create_manifest; " | 385 | ROOTFS_POSTPROCESS_COMMAND_prepend = "license_create_manifest; " |
386 | 386 | ||
387 | python do_populate_lic_setscene () { | 387 | python do_populate_lic_setscene () { |
388 | sstate_setscene(d) | 388 | sstate_setscene(d) |
389 | } | 389 | } |
390 | addtask do_populate_lic_setscene | 390 | addtask do_populate_lic_setscene |
diff --git a/meta/classes/metadata_scm.bbclass b/meta/classes/metadata_scm.bbclass index 62650be675..502d400cbb 100644 --- a/meta/classes/metadata_scm.bbclass +++ b/meta/classes/metadata_scm.bbclass | |||
@@ -2,76 +2,76 @@ METADATA_BRANCH ?= "${@base_detect_branch(d)}" | |||
2 | METADATA_REVISION ?= "${@base_detect_revision(d)}" | 2 | METADATA_REVISION ?= "${@base_detect_revision(d)}" |
3 | 3 | ||
4 | def base_detect_revision(d): | 4 | def base_detect_revision(d): |
5 | path = base_get_scmbasepath(d) | 5 | path = base_get_scmbasepath(d) |
6 | 6 | ||
7 | scms = [base_get_metadata_git_revision, \ | 7 | scms = [base_get_metadata_git_revision, \ |
8 | base_get_metadata_svn_revision] | 8 | base_get_metadata_svn_revision] |
9 | 9 | ||
10 | for scm in scms: | 10 | for scm in scms: |
11 | rev = scm(path, d) | 11 | rev = scm(path, d) |
12 | if rev <> "<unknown>": | 12 | if rev <> "<unknown>": |
13 | return rev | 13 | return rev |
14 | 14 | ||
15 | return "<unknown>" | 15 | return "<unknown>" |
16 | 16 | ||
17 | def base_detect_branch(d): | 17 | def base_detect_branch(d): |
18 | path = base_get_scmbasepath(d) | 18 | path = base_get_scmbasepath(d) |
19 | 19 | ||
20 | scms = [base_get_metadata_git_branch] | 20 | scms = [base_get_metadata_git_branch] |
21 | 21 | ||
22 | for scm in scms: | 22 | for scm in scms: |
23 | rev = scm(path, d) | 23 | rev = scm(path, d) |
24 | if rev <> "<unknown>": | 24 | if rev <> "<unknown>": |
25 | return rev.strip() | 25 | return rev.strip() |
26 | 26 | ||
27 | return "<unknown>" | 27 | return "<unknown>" |
28 | 28 | ||
29 | def base_get_scmbasepath(d): | 29 | def base_get_scmbasepath(d): |
30 | return d.getVar( 'COREBASE', True) | 30 | return d.getVar( 'COREBASE', True) |
31 | 31 | ||
32 | def base_get_metadata_monotone_branch(path, d): | 32 | def base_get_metadata_monotone_branch(path, d): |
33 | monotone_branch = "<unknown>" | 33 | monotone_branch = "<unknown>" |
34 | try: | 34 | try: |
35 | monotone_branch = file( "%s/_MTN/options" % path ).read().strip() | 35 | monotone_branch = file( "%s/_MTN/options" % path ).read().strip() |
36 | if monotone_branch.startswith( "database" ): | 36 | if monotone_branch.startswith( "database" ): |
37 | monotone_branch_words = monotone_branch.split() | 37 | monotone_branch_words = monotone_branch.split() |
38 | monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1] | 38 | monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1] |
39 | except: | 39 | except: |
40 | pass | 40 | pass |
41 | return monotone_branch | 41 | return monotone_branch |
42 | 42 | ||
43 | def base_get_metadata_monotone_revision(path, d): | 43 | def base_get_metadata_monotone_revision(path, d): |
44 | monotone_revision = "<unknown>" | 44 | monotone_revision = "<unknown>" |
45 | try: | 45 | try: |
46 | monotone_revision = file( "%s/_MTN/revision" % path ).read().strip() | 46 | monotone_revision = file( "%s/_MTN/revision" % path ).read().strip() |
47 | if monotone_revision.startswith( "format_version" ): | 47 | if monotone_revision.startswith( "format_version" ): |
48 | monotone_revision_words = monotone_revision.split() | 48 | monotone_revision_words = monotone_revision.split() |
49 | monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1] | 49 | monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1] |
50 | except IOError: | 50 | except IOError: |
51 | pass | 51 | pass |
52 | return monotone_revision | 52 | return monotone_revision |
53 | 53 | ||
54 | def base_get_metadata_svn_revision(path, d): | 54 | def base_get_metadata_svn_revision(path, d): |
55 | revision = "<unknown>" | 55 | revision = "<unknown>" |
56 | try: | 56 | try: |
57 | revision = file( "%s/.svn/entries" % path ).readlines()[3].strip() | 57 | revision = file( "%s/.svn/entries" % path ).readlines()[3].strip() |
58 | except IOError: | 58 | except IOError: |
59 | pass | 59 | pass |
60 | return revision | 60 | return revision |
61 | 61 | ||
62 | def base_get_metadata_git_branch(path, d): | 62 | def base_get_metadata_git_branch(path, d): |
63 | branch = os.popen('cd %s; git branch 2>&1 | grep "^* " | tr -d "* "' % path).read() | 63 | branch = os.popen('cd %s; git branch 2>&1 | grep "^* " | tr -d "* "' % path).read() |
64 | 64 | ||
65 | if len(branch) != 0: | 65 | if len(branch) != 0: |
66 | return branch | 66 | return branch |
67 | return "<unknown>" | 67 | return "<unknown>" |
68 | 68 | ||
69 | def base_get_metadata_git_revision(path, d): | 69 | def base_get_metadata_git_revision(path, d): |
70 | f = os.popen("cd %s; git log -n 1 --pretty=oneline -- 2>&1" % path) | 70 | f = os.popen("cd %s; git log -n 1 --pretty=oneline -- 2>&1" % path) |
71 | data = f.read() | 71 | data = f.read() |
72 | if f.close() is None: | 72 | if f.close() is None: |
73 | rev = data.split(" ")[0] | 73 | rev = data.split(" ")[0] |
74 | if len(rev) != 0: | 74 | if len(rev) != 0: |
75 | return rev | 75 | return rev |
76 | return "<unknown>" | 76 | return "<unknown>" |
77 | 77 | ||
diff --git a/meta/classes/mime.bbclass b/meta/classes/mime.bbclass index 6302747dc1..b669418286 100644 --- a/meta/classes/mime.bbclass +++ b/meta/classes/mime.bbclass | |||
@@ -29,32 +29,32 @@ fi | |||
29 | } | 29 | } |
30 | 30 | ||
31 | python populate_packages_append () { | 31 | python populate_packages_append () { |
32 | import re | 32 | import re |
33 | packages = d.getVar('PACKAGES', True).split() | 33 | packages = d.getVar('PACKAGES', True).split() |
34 | pkgdest = d.getVar('PKGDEST', True) | 34 | pkgdest = d.getVar('PKGDEST', True) |
35 | 35 | ||
36 | for pkg in packages: | 36 | for pkg in packages: |
37 | mime_dir = '%s/%s/usr/share/mime/packages' % (pkgdest, pkg) | 37 | mime_dir = '%s/%s/usr/share/mime/packages' % (pkgdest, pkg) |
38 | mimes = [] | 38 | mimes = [] |
39 | mime_re = re.compile(".*\.xml$") | 39 | mime_re = re.compile(".*\.xml$") |
40 | if os.path.exists(mime_dir): | 40 | if os.path.exists(mime_dir): |
41 | for f in os.listdir(mime_dir): | 41 | for f in os.listdir(mime_dir): |
42 | if mime_re.match(f): | 42 | if mime_re.match(f): |
43 | mimes.append(f) | 43 | mimes.append(f) |
44 | if mimes: | 44 | if mimes: |
45 | bb.note("adding mime postinst and postrm scripts to %s" % pkg) | 45 | bb.note("adding mime postinst and postrm scripts to %s" % pkg) |
46 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) | 46 | postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) |
47 | if not postinst: | 47 | if not postinst: |
48 | postinst = '#!/bin/sh\n' | 48 | postinst = '#!/bin/sh\n' |
49 | postinst += d.getVar('mime_postinst', True) | 49 | postinst += d.getVar('mime_postinst', True) |
50 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 50 | d.setVar('pkg_postinst_%s' % pkg, postinst) |
51 | postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True) | 51 | postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True) |
52 | if not postrm: | 52 | if not postrm: |
53 | postrm = '#!/bin/sh\n' | 53 | postrm = '#!/bin/sh\n' |
54 | postrm += d.getVar('mime_postrm', True) | 54 | postrm += d.getVar('mime_postrm', True) |
55 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 55 | d.setVar('pkg_postrm_%s' % pkg, postrm) |
56 | bb.note("adding shared-mime-info-data dependency to %s" % pkg) | 56 | bb.note("adding shared-mime-info-data dependency to %s" % pkg) |
57 | rdepends = explode_deps(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "" ) | 57 | rdepends = explode_deps(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "" ) |
58 | rdepends.append("shared-mime-info-data") | 58 | rdepends.append("shared-mime-info-data") |
59 | d.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends)) | 59 | d.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends)) |
60 | } | 60 | } |
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass index dfd42117c5..a51e955325 100644 --- a/meta/classes/package.bbclass +++ b/meta/classes/package.bbclass | |||
@@ -25,8 +25,8 @@ | |||
25 | # The data is stores in FILER{PROVIDES,DEPENDS}_file_pkg variables with | 25 | # The data is stores in FILER{PROVIDES,DEPENDS}_file_pkg variables with |
26 | # a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg | 26 | # a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg |
27 | # | 27 | # |
28 | # h) package_do_shlibs - Look at the shared libraries generated and autotmatically add any | 28 | # h) package_do_shlibs - Look at the shared libraries generated and autotmatically add any |
29 | # depenedencies found. Also stores the package name so anyone else using this library | 29 | # depenedencies found. Also stores the package name so anyone else using this library |
30 | # knows which package to depend on. | 30 | # knows which package to depend on. |
31 | # | 31 | # |
32 | # i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files | 32 | # i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files |
@@ -35,7 +35,7 @@ | |||
35 | # | 35 | # |
36 | # k) package_depchains - Adds automatic dependencies to -dbg and -dev packages | 36 | # k) package_depchains - Adds automatic dependencies to -dbg and -dev packages |
37 | # | 37 | # |
38 | # l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later | 38 | # l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later |
39 | # packaging steps | 39 | # packaging steps |
40 | 40 | ||
41 | inherit packagedata | 41 | inherit packagedata |
@@ -52,112 +52,112 @@ ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}" | |||
52 | PACKAGE_DEPENDS += "rpm-native" | 52 | PACKAGE_DEPENDS += "rpm-native" |
53 | 53 | ||
54 | def legitimize_package_name(s): | 54 | def legitimize_package_name(s): |
55 | """ | 55 | """ |
56 | Make sure package names are legitimate strings | 56 | Make sure package names are legitimate strings |
57 | """ | 57 | """ |
58 | import re | 58 | import re |
59 | 59 | ||
60 | def fixutf(m): | 60 | def fixutf(m): |
61 | cp = m.group(1) | 61 | cp = m.group(1) |
62 | if cp: | 62 | if cp: |
63 | return ('\u%s' % cp).decode('unicode_escape').encode('utf-8') | 63 | return ('\u%s' % cp).decode('unicode_escape').encode('utf-8') |
64 | 64 | ||
65 | # Handle unicode codepoints encoded as <U0123>, as in glibc locale files. | 65 | # Handle unicode codepoints encoded as <U0123>, as in glibc locale files. |
66 | s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s) | 66 | s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s) |
67 | 67 | ||
68 | # Remaining package name validity fixes | 68 | # Remaining package name validity fixes |
69 | return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-') | 69 | return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-') |
70 | 70 | ||
71 | def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False): | 71 | def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False): |
72 | """ | 72 | """ |
73 | Used in .bb files to split up dynamically generated subpackages of a | 73 | Used in .bb files to split up dynamically generated subpackages of a |
74 | given package, usually plugins or modules. | 74 | given package, usually plugins or modules. |
75 | """ | 75 | """ |
76 | 76 | ||
77 | ml = d.getVar("MLPREFIX", True) | 77 | ml = d.getVar("MLPREFIX", True) |
78 | if ml: | 78 | if ml: |
79 | if not output_pattern.startswith(ml): | 79 | if not output_pattern.startswith(ml): |
80 | output_pattern = ml + output_pattern | 80 | output_pattern = ml + output_pattern |
81 | 81 | ||
82 | newdeps = [] | 82 | newdeps = [] |
83 | for dep in (extra_depends or "").split(): | 83 | for dep in (extra_depends or "").split(): |
84 | if dep.startswith(ml): | 84 | if dep.startswith(ml): |
85 | newdeps.append(dep) | 85 | newdeps.append(dep) |
86 | else: | 86 | else: |
87 | newdeps.append(ml + dep) | 87 | newdeps.append(ml + dep) |
88 | if newdeps: | 88 | if newdeps: |
89 | extra_depends = " ".join(newdeps) | 89 | extra_depends = " ".join(newdeps) |
90 | 90 | ||
91 | dvar = d.getVar('PKGD', True) | 91 | dvar = d.getVar('PKGD', True) |
92 | 92 | ||
93 | packages = d.getVar('PACKAGES', True).split() | 93 | packages = d.getVar('PACKAGES', True).split() |
94 | 94 | ||
95 | if postinst: | 95 | if postinst: |
96 | postinst = '#!/bin/sh\n' + postinst + '\n' | 96 | postinst = '#!/bin/sh\n' + postinst + '\n' |
97 | if postrm: | 97 | if postrm: |
98 | postrm = '#!/bin/sh\n' + postrm + '\n' | 98 | postrm = '#!/bin/sh\n' + postrm + '\n' |
99 | if not recursive: | 99 | if not recursive: |
100 | objs = os.listdir(dvar + root) | 100 | objs = os.listdir(dvar + root) |
101 | else: | 101 | else: |
102 | objs = [] | 102 | objs = [] |
103 | for walkroot, dirs, files in os.walk(dvar + root): | 103 | for walkroot, dirs, files in os.walk(dvar + root): |
104 | for file in files: | 104 | for file in files: |
105 | relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1) | 105 | relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1) |
106 | if relpath: | 106 | if relpath: |
107 | objs.append(relpath) | 107 | objs.append(relpath) |
108 | 108 | ||
109 | if extra_depends == None: | 109 | if extra_depends == None: |
110 | extra_depends = d.getVar("PN", True) | 110 | extra_depends = d.getVar("PN", True) |
111 | 111 | ||
112 | for o in sorted(objs): | 112 | for o in sorted(objs): |
113 | import re, stat | 113 | import re, stat |
114 | if match_path: | 114 | if match_path: |
115 | m = re.match(file_regex, o) | 115 | m = re.match(file_regex, o) |
116 | else: | 116 | else: |
117 | m = re.match(file_regex, os.path.basename(o)) | 117 | m = re.match(file_regex, os.path.basename(o)) |
118 | 118 | ||
119 | if not m: | 119 | if not m: |
120 | continue | 120 | continue |
121 | f = os.path.join(dvar + root, o) | 121 | f = os.path.join(dvar + root, o) |
122 | mode = os.lstat(f).st_mode | 122 | mode = os.lstat(f).st_mode |
123 | if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))): | 123 | if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))): |
124 | continue | 124 | continue |
125 | on = legitimize_package_name(m.group(1)) | 125 | on = legitimize_package_name(m.group(1)) |
126 | pkg = output_pattern % on | 126 | pkg = output_pattern % on |
127 | if not pkg in packages: | 127 | if not pkg in packages: |
128 | if prepend: | 128 | if prepend: |
129 | packages = [pkg] + packages | 129 | packages = [pkg] + packages |
130 | else: | 130 | else: |
131 | packages.append(pkg) | 131 | packages.append(pkg) |
132 | oldfiles = d.getVar('FILES_' + pkg, True) | 132 | oldfiles = d.getVar('FILES_' + pkg, True) |
133 | if not oldfiles: | 133 | if not oldfiles: |
134 | the_files = [os.path.join(root, o)] | 134 | the_files = [os.path.join(root, o)] |
135 | if aux_files_pattern: | 135 | if aux_files_pattern: |
136 | if type(aux_files_pattern) is list: | 136 | if type(aux_files_pattern) is list: |
137 | for fp in aux_files_pattern: | 137 | for fp in aux_files_pattern: |
138 | the_files.append(fp % on) | 138 | the_files.append(fp % on) |
139 | else: | 139 | else: |
140 | the_files.append(aux_files_pattern % on) | 140 | the_files.append(aux_files_pattern % on) |
141 | if aux_files_pattern_verbatim: | 141 | if aux_files_pattern_verbatim: |
142 | if type(aux_files_pattern_verbatim) is list: | 142 | if type(aux_files_pattern_verbatim) is list: |
143 | for fp in aux_files_pattern_verbatim: | 143 | for fp in aux_files_pattern_verbatim: |
144 | the_files.append(fp % m.group(1)) | 144 | the_files.append(fp % m.group(1)) |
145 | else: | 145 | else: |
146 | the_files.append(aux_files_pattern_verbatim % m.group(1)) | 146 | the_files.append(aux_files_pattern_verbatim % m.group(1)) |
147 | d.setVar('FILES_' + pkg, " ".join(the_files)) | 147 | d.setVar('FILES_' + pkg, " ".join(the_files)) |
148 | if extra_depends != '': | 148 | if extra_depends != '': |
149 | d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends) | 149 | d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends) |
150 | d.setVar('DESCRIPTION_' + pkg, description % on) | 150 | d.setVar('DESCRIPTION_' + pkg, description % on) |
151 | if postinst: | 151 | if postinst: |
152 | d.setVar('pkg_postinst_' + pkg, postinst) | 152 | d.setVar('pkg_postinst_' + pkg, postinst) |
153 | if postrm: | 153 | if postrm: |
154 | d.setVar('pkg_postrm_' + pkg, postrm) | 154 | d.setVar('pkg_postrm_' + pkg, postrm) |
155 | else: | 155 | else: |
156 | d.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o)) | 156 | d.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o)) |
157 | if callable(hook): | 157 | if callable(hook): |
158 | hook(f, pkg, file_regex, output_pattern, m.group(1)) | 158 | hook(f, pkg, file_regex, output_pattern, m.group(1)) |
159 | 159 | ||
160 | d.setVar('PACKAGES', ' '.join(packages)) | 160 | d.setVar('PACKAGES', ' '.join(packages)) |
161 | 161 | ||
162 | PACKAGE_DEPENDS += "file-native" | 162 | PACKAGE_DEPENDS += "file-native" |
163 | 163 | ||
@@ -195,7 +195,7 @@ def splitfile(file, debugfile, debugsrcdir, d): | |||
195 | 195 | ||
196 | # We ignore kernel modules, we don't generate debug info files. | 196 | # We ignore kernel modules, we don't generate debug info files. |
197 | if file.find("/lib/modules/") != -1 and file.endswith(".ko"): | 197 | if file.find("/lib/modules/") != -1 and file.endswith(".ko"): |
198 | return 1 | 198 | return 1 |
199 | 199 | ||
200 | newmode = None | 200 | newmode = None |
201 | if not os.access(file, os.W_OK) or os.access(file, os.R_OK): | 201 | if not os.access(file, os.W_OK) or os.access(file, os.R_OK): |
@@ -205,7 +205,7 @@ def splitfile(file, debugfile, debugsrcdir, d): | |||
205 | 205 | ||
206 | # We need to extract the debug src information here... | 206 | # We need to extract the debug src information here... |
207 | if debugsrcdir: | 207 | if debugsrcdir: |
208 | subprocess.call("%s'%s' -b '%s' -d '%s' -i -l '%s' '%s'" % (pathprefix, debugedit, workparentdir, debugsrcdir, sourcefile, file), shell=True) | 208 | subprocess.call("%s'%s' -b '%s' -d '%s' -i -l '%s' '%s'" % (pathprefix, debugedit, workparentdir, debugsrcdir, sourcefile, file), shell=True) |
209 | 209 | ||
210 | bb.mkdirhier(os.path.dirname(debugfile)) | 210 | bb.mkdirhier(os.path.dirname(debugfile)) |
211 | 211 | ||
@@ -316,826 +316,826 @@ def runstrip(file, elftype, d): | |||
316 | # | 316 | # |
317 | 317 | ||
318 | def get_package_mapping (pkg, d): | 318 | def get_package_mapping (pkg, d): |
319 | import oe.packagedata | 319 | import oe.packagedata |
320 | 320 | ||
321 | data = oe.packagedata.read_subpkgdata(pkg, d) | 321 | data = oe.packagedata.read_subpkgdata(pkg, d) |
322 | key = "PKG_%s" % pkg | 322 | key = "PKG_%s" % pkg |
323 | 323 | ||
324 | if key in data: | 324 | if key in data: |
325 | return data[key] | 325 | return data[key] |
326 | 326 | ||
327 | return pkg | 327 | return pkg |
328 | 328 | ||
329 | def runtime_mapping_rename (varname, d): | 329 | def runtime_mapping_rename (varname, d): |
330 | #bb.note("%s before: %s" % (varname, d.getVar(varname, True))) | 330 | #bb.note("%s before: %s" % (varname, d.getVar(varname, True))) |
331 | 331 | ||
332 | new_depends = [] | 332 | new_depends = [] |
333 | deps = bb.utils.explode_dep_versions(d.getVar(varname, True) or "") | 333 | deps = bb.utils.explode_dep_versions(d.getVar(varname, True) or "") |
334 | for depend in deps: | 334 | for depend in deps: |
335 | # Have to be careful with any version component of the depend | 335 | # Have to be careful with any version component of the depend |
336 | new_depend = get_package_mapping(depend, d) | 336 | new_depend = get_package_mapping(depend, d) |
337 | if deps[depend]: | 337 | if deps[depend]: |
338 | new_depends.append("%s (%s)" % (new_depend, deps[depend])) | 338 | new_depends.append("%s (%s)" % (new_depend, deps[depend])) |
339 | else: | 339 | else: |
340 | new_depends.append(new_depend) | 340 | new_depends.append(new_depend) |
341 | 341 | ||
342 | d.setVar(varname, " ".join(new_depends) or None) | 342 | d.setVar(varname, " ".join(new_depends) or None) |
343 | 343 | ||
344 | #bb.note("%s after: %s" % (varname, d.getVar(varname, True))) | 344 | #bb.note("%s after: %s" % (varname, d.getVar(varname, True))) |
345 | 345 | ||
346 | # | 346 | # |
347 | # Package functions suitable for inclusion in PACKAGEFUNCS | 347 | # Package functions suitable for inclusion in PACKAGEFUNCS |
348 | # | 348 | # |
349 | 349 | ||
350 | python package_get_auto_pr() { | 350 | python package_get_auto_pr() { |
351 | # per recipe PRSERV_HOST PRSERV_PORT | 351 | # per recipe PRSERV_HOST PRSERV_PORT |
352 | pn = d.getVar('PN', True) | 352 | pn = d.getVar('PN', True) |
353 | host = d.getVar("PRSERV_HOST_" + pn, True) | 353 | host = d.getVar("PRSERV_HOST_" + pn, True) |
354 | port = d.getVar("PRSERV_PORT_" + pn, True) | 354 | port = d.getVar("PRSERV_PORT_" + pn, True) |
355 | if not (host is None): | 355 | if not (host is None): |
356 | d.setVar("PRSERV_HOST", host) | 356 | d.setVar("PRSERV_HOST", host) |
357 | if not (port is None): | 357 | if not (port is None): |
358 | d.setVar("PRSERV_PORT", port) | 358 | d.setVar("PRSERV_PORT", port) |
359 | if d.getVar('USE_PR_SERV', True) != "0": | 359 | if d.getVar('USE_PR_SERV', True) != "0": |
360 | try: | 360 | try: |
361 | auto_pr=prserv_get_pr_auto(d) | 361 | auto_pr=prserv_get_pr_auto(d) |
362 | except Exception as e: | 362 | except Exception as e: |
363 | bb.fatal("Can NOT get PRAUTO, exception %s" % str(e)) | 363 | bb.fatal("Can NOT get PRAUTO, exception %s" % str(e)) |
364 | return | 364 | return |
365 | if auto_pr is None: | 365 | if auto_pr is None: |
366 | if d.getVar('PRSERV_LOCKDOWN', True): | 366 | if d.getVar('PRSERV_LOCKDOWN', True): |
367 | bb.fatal("Can NOT get PRAUTO from lockdown exported file") | 367 | bb.fatal("Can NOT get PRAUTO from lockdown exported file") |
368 | else: | 368 | else: |
369 | bb.fatal("Can NOT get PRAUTO from remote PR service") | 369 | bb.fatal("Can NOT get PRAUTO from remote PR service") |
370 | return | 370 | return |
371 | d.setVar('PRAUTO',str(auto_pr)) | 371 | d.setVar('PRAUTO',str(auto_pr)) |
372 | } | 372 | } |
373 | 373 | ||
374 | python package_do_split_locales() { | 374 | python package_do_split_locales() { |
375 | if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'): | 375 | if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'): |
376 | bb.debug(1, "package requested not splitting locales") | 376 | bb.debug(1, "package requested not splitting locales") |
377 | return | 377 | return |
378 | 378 | ||
379 | packages = (d.getVar('PACKAGES', True) or "").split() | 379 | packages = (d.getVar('PACKAGES', True) or "").split() |
380 | 380 | ||
381 | datadir = d.getVar('datadir', True) | 381 | datadir = d.getVar('datadir', True) |
382 | if not datadir: | 382 | if not datadir: |
383 | bb.note("datadir not defined") | 383 | bb.note("datadir not defined") |
384 | return | 384 | return |
385 | 385 | ||
386 | dvar = d.getVar('PKGD', True) | 386 | dvar = d.getVar('PKGD', True) |
387 | pn = d.getVar('PN', True) | 387 | pn = d.getVar('PN', True) |
388 | 388 | ||
389 | if pn + '-locale' in packages: | 389 | if pn + '-locale' in packages: |
390 | packages.remove(pn + '-locale') | 390 | packages.remove(pn + '-locale') |
391 | 391 | ||
392 | localedir = os.path.join(dvar + datadir, 'locale') | 392 | localedir = os.path.join(dvar + datadir, 'locale') |
393 | 393 | ||
394 | if not os.path.isdir(localedir): | 394 | if not os.path.isdir(localedir): |
395 | bb.debug(1, "No locale files in this package") | 395 | bb.debug(1, "No locale files in this package") |
396 | return | 396 | return |
397 | 397 | ||
398 | locales = os.listdir(localedir) | 398 | locales = os.listdir(localedir) |
399 | 399 | ||
400 | summary = d.getVar('SUMMARY', True) or pn | 400 | summary = d.getVar('SUMMARY', True) or pn |
401 | description = d.getVar('DESCRIPTION', True) or "" | 401 | description = d.getVar('DESCRIPTION', True) or "" |
402 | locale_section = d.getVar('LOCALE_SECTION', True) | 402 | locale_section = d.getVar('LOCALE_SECTION', True) |
403 | mlprefix = d.getVar('MLPREFIX', True) or "" | 403 | mlprefix = d.getVar('MLPREFIX', True) or "" |
404 | for l in sorted(locales): | 404 | for l in sorted(locales): |
405 | ln = legitimize_package_name(l) | 405 | ln = legitimize_package_name(l) |
406 | pkg = pn + '-locale-' + ln | 406 | pkg = pn + '-locale-' + ln |
407 | packages.append(pkg) | 407 | packages.append(pkg) |
408 | d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l)) | 408 | d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l)) |
409 | d.setVar('RDEPENDS_' + pkg, '%s %svirtual-locale-%s' % (pn, mlprefix, ln)) | 409 | d.setVar('RDEPENDS_' + pkg, '%s %svirtual-locale-%s' % (pn, mlprefix, ln)) |
410 | d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln)) | 410 | d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln)) |
411 | d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l)) | 411 | d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l)) |
412 | d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l)) | 412 | d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l)) |
413 | if locale_section: | 413 | if locale_section: |
414 | d.setVar('SECTION_' + pkg, locale_section) | 414 | d.setVar('SECTION_' + pkg, locale_section) |
415 | 415 | ||
416 | d.setVar('PACKAGES', ' '.join(packages)) | 416 | d.setVar('PACKAGES', ' '.join(packages)) |
417 | 417 | ||
418 | # Disabled by RP 18/06/07 | 418 | # Disabled by RP 18/06/07 |
419 | # Wildcards aren't supported in debian | 419 | # Wildcards aren't supported in debian |
420 | # They break with ipkg since glibc-locale* will mean that | 420 | # They break with ipkg since glibc-locale* will mean that |
421 | # glibc-localedata-translit* won't install as a dependency | 421 | # glibc-localedata-translit* won't install as a dependency |
422 | # for some other package which breaks meta-toolchain | 422 | # for some other package which breaks meta-toolchain |
423 | # Probably breaks since virtual-locale- isn't provided anywhere | 423 | # Probably breaks since virtual-locale- isn't provided anywhere |
424 | #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or d.getVar('RDEPENDS', True) or "").split() | 424 | #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or d.getVar('RDEPENDS', True) or "").split() |
425 | #rdep.append('%s-locale*' % pn) | 425 | #rdep.append('%s-locale*' % pn) |
426 | #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep)) | 426 | #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep)) |
427 | } | 427 | } |
428 | 428 | ||
429 | python perform_packagecopy () { | 429 | python perform_packagecopy () { |
430 | import subprocess | 430 | import subprocess |
431 | dest = d.getVar('D', True) | 431 | dest = d.getVar('D', True) |
432 | dvar = d.getVar('PKGD', True) | 432 | dvar = d.getVar('PKGD', True) |
433 | 433 | ||
434 | bb.mkdirhier(dvar) | 434 | bb.mkdirhier(dvar) |
435 | 435 | ||
436 | # Start by package population by taking a copy of the installed | 436 | # Start by package population by taking a copy of the installed |
437 | # files to operate on | 437 | # files to operate on |
438 | subprocess.call('rm -rf %s/*' % (dvar), shell=True) | 438 | subprocess.call('rm -rf %s/*' % (dvar), shell=True) |
439 | # Preserve sparse files and hard links | 439 | # Preserve sparse files and hard links |
440 | subprocess.call('tar -cf - -C %s -ps . | tar -xf - -C %s' % (dest, dvar), shell=True) | 440 | subprocess.call('tar -cf - -C %s -ps . | tar -xf - -C %s' % (dest, dvar), shell=True) |
441 | } | 441 | } |
442 | 442 | ||
443 | # We generate a master list of directories to process, we start by | 443 | # We generate a master list of directories to process, we start by |
444 | # seeding this list with reasonable defaults, then load from | 444 | # seeding this list with reasonable defaults, then load from |
445 | # the fs-perms.txt files | 445 | # the fs-perms.txt files |
446 | python fixup_perms () { | 446 | python fixup_perms () { |
447 | import os, pwd, grp | 447 | import os, pwd, grp |
448 | 448 | ||
449 | # init using a string with the same format as a line as documented in | 449 | # init using a string with the same format as a line as documented in |
450 | # the fs-perms.txt file | 450 | # the fs-perms.txt file |
451 | # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid> | 451 | # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid> |
452 | # <path> link <link target> | 452 | # <path> link <link target> |
453 | # | 453 | # |
454 | # __str__ can be used to print out an entry in the input format | 454 | # __str__ can be used to print out an entry in the input format |
455 | # | 455 | # |
456 | # if fs_perms_entry.path is None: | 456 | # if fs_perms_entry.path is None: |
457 | # an error occured | 457 | # an error occured |
458 | # if fs_perms_entry.link, you can retrieve: | 458 | # if fs_perms_entry.link, you can retrieve: |
459 | # fs_perms_entry.path = path | 459 | # fs_perms_entry.path = path |
460 | # fs_perms_entry.link = target of link | 460 | # fs_perms_entry.link = target of link |
461 | # if not fs_perms_entry.link, you can retrieve: | 461 | # if not fs_perms_entry.link, you can retrieve: |
462 | # fs_perms_entry.path = path | 462 | # fs_perms_entry.path = path |
463 | # fs_perms_entry.mode = expected dir mode or None | 463 | # fs_perms_entry.mode = expected dir mode or None |
464 | # fs_perms_entry.uid = expected uid or -1 | 464 | # fs_perms_entry.uid = expected uid or -1 |
465 | # fs_perms_entry.gid = expected gid or -1 | 465 | # fs_perms_entry.gid = expected gid or -1 |
466 | # fs_perms_entry.walk = 'true' or something else | 466 | # fs_perms_entry.walk = 'true' or something else |
467 | # fs_perms_entry.fmode = expected file mode or None | 467 | # fs_perms_entry.fmode = expected file mode or None |
468 | # fs_perms_entry.fuid = expected file uid or -1 | 468 | # fs_perms_entry.fuid = expected file uid or -1 |
469 | # fs_perms_entry_fgid = expected file gid or -1 | 469 | # fs_perms_entry_fgid = expected file gid or -1 |
470 | class fs_perms_entry(): | 470 | class fs_perms_entry(): |
471 | def __init__(self, line): | 471 | def __init__(self, line): |
472 | lsplit = line.split() | 472 | lsplit = line.split() |
473 | if len(lsplit) == 3 and lsplit[1].lower() == "link": | 473 | if len(lsplit) == 3 and lsplit[1].lower() == "link": |
474 | self._setlink(lsplit[0], lsplit[2]) | 474 | self._setlink(lsplit[0], lsplit[2]) |
475 | elif len(lsplit) == 8: | 475 | elif len(lsplit) == 8: |
476 | self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7]) | 476 | self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7]) |
477 | else: | 477 | else: |
478 | bb.error("Fixup Perms: invalid config line %s" % line) | 478 | bb.error("Fixup Perms: invalid config line %s" % line) |
479 | self.path = None | 479 | self.path = None |
480 | self.link = None | 480 | self.link = None |
481 | 481 | ||
482 | def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid): | 482 | def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid): |
483 | self.path = os.path.normpath(path) | 483 | self.path = os.path.normpath(path) |
484 | self.link = None | 484 | self.link = None |
485 | self.mode = self._procmode(mode) | 485 | self.mode = self._procmode(mode) |
486 | self.uid = self._procuid(uid) | 486 | self.uid = self._procuid(uid) |
487 | self.gid = self._procgid(gid) | 487 | self.gid = self._procgid(gid) |
488 | self.walk = walk.lower() | 488 | self.walk = walk.lower() |
489 | self.fmode = self._procmode(fmode) | 489 | self.fmode = self._procmode(fmode) |
490 | self.fuid = self._procuid(fuid) | 490 | self.fuid = self._procuid(fuid) |
491 | self.fgid = self._procgid(fgid) | 491 | self.fgid = self._procgid(fgid) |
492 | 492 | ||
493 | def _setlink(self, path, link): | 493 | def _setlink(self, path, link): |
494 | self.path = os.path.normpath(path) | 494 | self.path = os.path.normpath(path) |
495 | self.link = link | 495 | self.link = link |
496 | 496 | ||
497 | def _procmode(self, mode): | 497 | def _procmode(self, mode): |
498 | if not mode or (mode and mode == "-"): | 498 | if not mode or (mode and mode == "-"): |
499 | return None | 499 | return None |
500 | else: | 500 | else: |
501 | return int(mode,8) | 501 | return int(mode,8) |
502 | 502 | ||
503 | # Note uid/gid -1 has special significance in os.lchown | 503 | # Note uid/gid -1 has special significance in os.lchown |
504 | def _procuid(self, uid): | 504 | def _procuid(self, uid): |
505 | if uid is None or uid == "-": | 505 | if uid is None or uid == "-": |
506 | return -1 | 506 | return -1 |
507 | elif uid.isdigit(): | 507 | elif uid.isdigit(): |
508 | return int(uid) | 508 | return int(uid) |
509 | else: | 509 | else: |
510 | return pwd.getpwnam(uid).pw_uid | 510 | return pwd.getpwnam(uid).pw_uid |
511 | 511 | ||
512 | def _procgid(self, gid): | 512 | def _procgid(self, gid): |
513 | if gid is None or gid == "-": | 513 | if gid is None or gid == "-": |
514 | return -1 | 514 | return -1 |
515 | elif gid.isdigit(): | 515 | elif gid.isdigit(): |
516 | return int(gid) | 516 | return int(gid) |
517 | else: | 517 | else: |
518 | return grp.getgrnam(gid).gr_gid | 518 | return grp.getgrnam(gid).gr_gid |
519 | 519 | ||
520 | # Use for debugging the entries | 520 | # Use for debugging the entries |
521 | def __str__(self): | 521 | def __str__(self): |
522 | if self.link: | 522 | if self.link: |
523 | return "%s link %s" % (self.path, self.link) | 523 | return "%s link %s" % (self.path, self.link) |
524 | else: | 524 | else: |
525 | mode = "-" | 525 | mode = "-" |
526 | if self.mode: | 526 | if self.mode: |
527 | mode = "0%o" % self.mode | 527 | mode = "0%o" % self.mode |
528 | fmode = "-" | 528 | fmode = "-" |
529 | if self.fmode: | 529 | if self.fmode: |
530 | fmode = "0%o" % self.fmode | 530 | fmode = "0%o" % self.fmode |
531 | uid = self._mapugid(self.uid) | 531 | uid = self._mapugid(self.uid) |
532 | gid = self._mapugid(self.gid) | 532 | gid = self._mapugid(self.gid) |
533 | fuid = self._mapugid(self.fuid) | 533 | fuid = self._mapugid(self.fuid) |
534 | fgid = self._mapugid(self.fgid) | 534 | fgid = self._mapugid(self.fgid) |
535 | return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid) | 535 | return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid) |
536 | 536 | ||
537 | def _mapugid(self, id): | 537 | def _mapugid(self, id): |
538 | if id is None or id == -1: | 538 | if id is None or id == -1: |
539 | return "-" | 539 | return "-" |
540 | else: | 540 | else: |
541 | return "%d" % id | 541 | return "%d" % id |
542 | 542 | ||
543 | # Fix the permission, owner and group of path | 543 | # Fix the permission, owner and group of path |
544 | def fix_perms(path, mode, uid, gid, dir): | 544 | def fix_perms(path, mode, uid, gid, dir): |
545 | if mode and not os.path.islink(path): | 545 | if mode and not os.path.islink(path): |
546 | #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir)) | 546 | #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir)) |
547 | os.chmod(path, mode) | 547 | os.chmod(path, mode) |
548 | # -1 is a special value that means don't change the uid/gid | 548 | # -1 is a special value that means don't change the uid/gid |
549 | # if they are BOTH -1, don't bother to lchown | 549 | # if they are BOTH -1, don't bother to lchown |
550 | if not (uid == -1 and gid == -1): | 550 | if not (uid == -1 and gid == -1): |
551 | #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir)) | 551 | #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir)) |
552 | os.lchown(path, uid, gid) | 552 | os.lchown(path, uid, gid) |
553 | 553 | ||
554 | # Return a list of configuration files based on either the default | 554 | # Return a list of configuration files based on either the default |
555 | # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES | 555 | # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES |
556 | # paths are resolved via BBPATH | 556 | # paths are resolved via BBPATH |
557 | def get_fs_perms_list(d): | 557 | def get_fs_perms_list(d): |
558 | str = "" | 558 | str = "" |
559 | fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True) | 559 | fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True) |
560 | if not fs_perms_tables: | 560 | if not fs_perms_tables: |
561 | fs_perms_tables = 'files/fs-perms.txt' | 561 | fs_perms_tables = 'files/fs-perms.txt' |
562 | for conf_file in fs_perms_tables.split(): | 562 | for conf_file in fs_perms_tables.split(): |
563 | str += " %s" % bb.which(d.getVar('BBPATH', True), conf_file) | 563 | str += " %s" % bb.which(d.getVar('BBPATH', True), conf_file) |
564 | return str | 564 | return str |
565 | 565 | ||
566 | 566 | ||
567 | 567 | ||
568 | dvar = d.getVar('PKGD', True) | 568 | dvar = d.getVar('PKGD', True) |
569 | 569 | ||
570 | fs_perms_table = {} | 570 | fs_perms_table = {} |
571 | 571 | ||
572 | # By default all of the standard directories specified in | 572 | # By default all of the standard directories specified in |
573 | # bitbake.conf will get 0755 root:root. | 573 | # bitbake.conf will get 0755 root:root. |
574 | target_path_vars = [ 'base_prefix', | 574 | target_path_vars = [ 'base_prefix', |
575 | 'prefix', | 575 | 'prefix', |
576 | 'exec_prefix', | 576 | 'exec_prefix', |
577 | 'base_bindir', | 577 | 'base_bindir', |
578 | 'base_sbindir', | 578 | 'base_sbindir', |
579 | 'base_libdir', | 579 | 'base_libdir', |
580 | 'datadir', | 580 | 'datadir', |
581 | 'sysconfdir', | 581 | 'sysconfdir', |
582 | 'servicedir', | 582 | 'servicedir', |
583 | 'sharedstatedir', | 583 | 'sharedstatedir', |
584 | 'localstatedir', | 584 | 'localstatedir', |
585 | 'infodir', | 585 | 'infodir', |
586 | 'mandir', | 586 | 'mandir', |
587 | 'docdir', | 587 | 'docdir', |
588 | 'bindir', | 588 | 'bindir', |
589 | 'sbindir', | 589 | 'sbindir', |
590 | 'libexecdir', | 590 | 'libexecdir', |
591 | 'libdir', | 591 | 'libdir', |
592 | 'includedir', | 592 | 'includedir', |
593 | 'oldincludedir' ] | 593 | 'oldincludedir' ] |
594 | 594 | ||
595 | for path in target_path_vars: | 595 | for path in target_path_vars: |
596 | dir = d.getVar(path, True) or "" | 596 | dir = d.getVar(path, True) or "" |
597 | if dir == "": | 597 | if dir == "": |
598 | continue | 598 | continue |
599 | fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d)) | 599 | fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d)) |
600 | 600 | ||
601 | # Now we actually load from the configuration files | 601 | # Now we actually load from the configuration files |
602 | for conf in get_fs_perms_list(d).split(): | 602 | for conf in get_fs_perms_list(d).split(): |
603 | if os.path.exists(conf): | 603 | if os.path.exists(conf): |
604 | f = open(conf) | 604 | f = open(conf) |
605 | for line in f: | 605 | for line in f: |
606 | if line.startswith('#'): | 606 | if line.startswith('#'): |
607 | continue | 607 | continue |
608 | lsplit = line.split() | 608 | lsplit = line.split() |
609 | if len(lsplit) == 0: | 609 | if len(lsplit) == 0: |
610 | continue | 610 | continue |
611 | if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"): | 611 | if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"): |
612 | bb.error("Fixup perms: %s invalid line: %s" % (conf, line)) | 612 | bb.error("Fixup perms: %s invalid line: %s" % (conf, line)) |
613 | continue | 613 | continue |
614 | entry = fs_perms_entry(d.expand(line)) | 614 | entry = fs_perms_entry(d.expand(line)) |
615 | if entry and entry.path: | 615 | if entry and entry.path: |
616 | fs_perms_table[entry.path] = entry | 616 | fs_perms_table[entry.path] = entry |
617 | f.close() | 617 | f.close() |
618 | 618 | ||
619 | # Debug -- list out in-memory table | 619 | # Debug -- list out in-memory table |
620 | #for dir in fs_perms_table: | 620 | #for dir in fs_perms_table: |
621 | # bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir]))) | 621 | # bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir]))) |
622 | 622 | ||
623 | # We process links first, so we can go back and fixup directory ownership | 623 | # We process links first, so we can go back and fixup directory ownership |
624 | # for any newly created directories | 624 | # for any newly created directories |
625 | for dir in fs_perms_table: | 625 | for dir in fs_perms_table: |
626 | if not fs_perms_table[dir].link: | 626 | if not fs_perms_table[dir].link: |
627 | continue | 627 | continue |
628 | 628 | ||
629 | origin = dvar + dir | 629 | origin = dvar + dir |
630 | if not (os.path.exists(origin) and os.path.isdir(origin) and not os.path.islink(origin)): | 630 | if not (os.path.exists(origin) and os.path.isdir(origin) and not os.path.islink(origin)): |
631 | continue | 631 | continue |
632 | 632 | ||
633 | link = fs_perms_table[dir].link | 633 | link = fs_perms_table[dir].link |
634 | if link[0] == "/": | 634 | if link[0] == "/": |
635 | target = dvar + link | 635 | target = dvar + link |
636 | ptarget = link | 636 | ptarget = link |
637 | else: | 637 | else: |
638 | target = os.path.join(os.path.dirname(origin), link) | 638 | target = os.path.join(os.path.dirname(origin), link) |
639 | ptarget = os.path.join(os.path.dirname(dir), link) | 639 | ptarget = os.path.join(os.path.dirname(dir), link) |
640 | if os.path.exists(target): | 640 | if os.path.exists(target): |
641 | bb.error("Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)) | 641 | bb.error("Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)) |
642 | continue | 642 | continue |
643 | 643 | ||
644 | # Create path to move directory to, move it, and then setup the symlink | 644 | # Create path to move directory to, move it, and then setup the symlink |
645 | bb.mkdirhier(os.path.dirname(target)) | 645 | bb.mkdirhier(os.path.dirname(target)) |
646 | #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget)) | 646 | #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget)) |
647 | os.rename(origin, target) | 647 | os.rename(origin, target) |
648 | #bb.note("Fixup Perms: Link %s -> %s" % (dir, link)) | 648 | #bb.note("Fixup Perms: Link %s -> %s" % (dir, link)) |
649 | os.symlink(link, origin) | 649 | os.symlink(link, origin) |
650 | 650 | ||
651 | for dir in fs_perms_table: | 651 | for dir in fs_perms_table: |
652 | if fs_perms_table[dir].link: | 652 | if fs_perms_table[dir].link: |
653 | continue | 653 | continue |
654 | 654 | ||
655 | origin = dvar + dir | 655 | origin = dvar + dir |
656 | if not (os.path.exists(origin) and os.path.isdir(origin)): | 656 | if not (os.path.exists(origin) and os.path.isdir(origin)): |
657 | continue | 657 | continue |
658 | 658 | ||
659 | fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir) | 659 | fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir) |
660 | 660 | ||
661 | if fs_perms_table[dir].walk == 'true': | 661 | if fs_perms_table[dir].walk == 'true': |
662 | for root, dirs, files in os.walk(origin): | 662 | for root, dirs, files in os.walk(origin): |
663 | for dr in dirs: | 663 | for dr in dirs: |
664 | each_dir = os.path.join(root, dr) | 664 | each_dir = os.path.join(root, dr) |
665 | fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir) | 665 | fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir) |
666 | for f in files: | 666 | for f in files: |
667 | each_file = os.path.join(root, f) | 667 | each_file = os.path.join(root, f) |
668 | fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir) | 668 | fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir) |
669 | } | 669 | } |
670 | 670 | ||
python split_and_strip_files () {
    # Walk ${PKGD}, split debug info out of ELF binaries into the debug
    # area (".debug" dirs or /usr/lib/debug depending on
    # PACKAGE_DEBUG_SPLIT_STYLE), then strip the binaries and any *.ko
    # kernel modules.  Honours INHIBIT_PACKAGE_DEBUG_SPLIT and
    # INHIBIT_PACKAGE_STRIP.
    import commands, stat, errno, subprocess

    dvar = d.getVar('PKGD', True)
    pn = d.getVar('PN', True)

    # We default to '.debug' style
    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
        # Single debug-file-directory style debug info
        debugappend = ".debug"
        debugdir = ""
        debuglibdir = "/usr/lib/debug"
        debugsrcdir = "/usr/src/debug"
    else:
        # Original OE-core, a.k.a. ".debug", style debug info
        debugappend = ""
        debugdir = "/.debug"
        debuglibdir = ""
        debugsrcdir = "/usr/src/debug"

    os.chdir(dvar)

    # Return type (bits):
    #   0 - not elf
    #   1 - ELF
    #   2 - stripped
    #   4 - executable
    #   8 - shared library
    def isELF(path):
        type = 0
        pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
        ret, result = commands.getstatusoutput("%sfile '%s'" % (pathprefix, path))

        if ret:
            bb.error("split_and_strip_files: 'file %s' failed" % path)
            return type

        # Not stripped
        if "ELF" in result:
            type |= 1
            if "not stripped" not in result:
                type |= 2
            if "executable" in result:
                type |= 4
            if "shared" in result:
                type |= 8
        return type


    #
    # First lets figure out all of the files we may have to process ... do this only once!
    #
    file_list = {}
    file_links = {}
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1') and \
            (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
        for root, dirs, files in os.walk(dvar):
            for f in files:
                file = os.path.join(root, f)
                # Only process files (and symlinks)... Skip files that are obviously debug files
                if not (debugappend != "" and file.endswith(debugappend)) and \
                   not (debugdir != "" and debugdir in os.path.dirname(file[len(dvar):])) and \
                   os.path.isfile(file):
                    try:
                        s = os.stat(file)
                    except OSError, (err, strerror):
                        if err != errno.ENOENT:
                            raise
                        # Skip broken symlinks
                        continue
                    # Is the item excutable? Then we need to process it.
                    if (s[stat.ST_MODE] & stat.S_IXUSR) or \
                       (s[stat.ST_MODE] & stat.S_IXGRP) or \
                       (s[stat.ST_MODE] & stat.S_IXOTH):
                        # If it's a symlink, and points to an ELF file, we capture the readlink target
                        if os.path.islink(file):
                            target = os.readlink(file)
                            if not os.path.isabs(target):
                                ltarget = os.path.join(os.path.dirname(file), target)
                            else:
                                ltarget = target

                            if isELF(ltarget):
                                #bb.note("Sym: %s (%d)" % (ltarget, isELF(ltarget)))
                                file_list[file] = "sym: " + target
                            continue
                        # It's a file (or hardlink), not a link
                        # ...but is it ELF, and is it already stripped?
                        elf_file = isELF(file)
                        if elf_file & 1:
                            # Check if it's a hard link to something else
                            if s.st_nlink > 1:
                                file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                                # Hard link to something else
                                file_list[file] = "hard: " + file_reference
                                continue

                            file_list[file] = "ELF: %d" % elf_file


    #
    # First lets process debug splitting
    #
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
        for file in file_list:
            src = file[len(dvar):]
            dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
            fpath = dvar + dest
            # Preserve symlinks in debug area...
            if file_list[file].startswith("sym: "):
                ltarget = file_list[file][5:]
                lpath = os.path.dirname(ltarget)
                lbase = os.path.basename(ltarget)
                ftarget = ""
                if lpath and lpath != ".":
                    ftarget += lpath + debugdir + "/"
                ftarget += lbase + debugappend
                if lpath.startswith(".."):
                    ftarget = os.path.join("..", ftarget)
                bb.mkdirhier(os.path.dirname(fpath))
                #bb.note("Symlink %s -> %s" % (fpath, ftarget))
                os.symlink(ftarget, fpath)
                continue

            # Preserve hard links in debug area...
            file_reference = ""
            if file_list[file].startswith("hard: "):
                file_reference = file_list[file][6:]
                if file_reference not in file_links:
                    # If this is a new file, add it as a reference, and
                    # update it's type, so we can fall through and split
                    file_list[file] = "ELF: %d" % (isELF(file))
                else:
                    target = file_links[file_reference][len(dvar):]
                    ftarget = dvar + debuglibdir + os.path.dirname(target) + debugdir + "/" + os.path.basename(target) + debugappend
                    bb.mkdirhier(os.path.dirname(fpath))
                    #bb.note("Link %s -> %s" % (fpath, ftarget))
                    os.link(ftarget, fpath)
                    continue

            # It's ELF...
            if file_list[file].startswith("ELF: "):
                elf_file = int(file_list[file][5:])
                if elf_file & 2:
                    bb.warn("File '%s' from %s was already stripped, this will prevent future debugging!" % (src, pn))
                    continue

                # Split the file...
                bb.mkdirhier(os.path.dirname(fpath))
                #bb.note("Split %s -> %s" % (file, fpath))
                # Only store off the hard link reference if we successfully split!
                if splitfile(file, fpath, debugsrcdir, d) == 0 and file_reference != "":
                    file_links[file_reference] = file

        # The above may have generated dangling symlinks, remove them!
        # Dangling symlinks are a result of something NOT being split, such as a stripped binary.
        # This should be a rare occurance, but we want to clean up anyway.
        for file in file_list:
            if file_list[file].startswith("sym: "):
                src = file[len(dvar):]
                dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
                fpath = dvar + dest
                try:
                    s = os.stat(fpath)
                except OSError, (err, strerror):
                    if err != errno.ENOENT:
                        raise
                    #bb.note("Remove dangling link %s -> %s" % (fpath, os.readlink(fpath)))
                    os.unlink(fpath)
                    # This could leave an empty debug directory laying around
                    # take care of the obvious case...
                    subprocess.call("rmdir %s 2>/dev/null" % os.path.dirname(fpath), shell=True)

        # Process the debugsrcdir if requested...
        # This copies and places the referenced sources for later debugging...
        splitfile2(debugsrcdir, d)
    #
    # End of debug splitting
    #

    #
    # Now lets go back over things and strip them
    #
    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
        for file in file_list:
            if file_list[file].startswith("ELF: "):
                elf_file = int(file_list[file][5:])
                #bb.note("Strip %s" % file)
                runstrip(file, elf_file, d)


    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
        for root, dirs, files in os.walk(dvar):
            for f in files:
                if not f.endswith(".ko"):
                    continue
                runstrip(os.path.join(root, f), None, d)
    #
    # End of strip
    #
}
872 | 872 | ||
python populate_packages () {
    # Distribute the contents of ${PKGD} into per-package trees under
    # ${PKGDEST}/<pkg> according to each package's FILES variable.
    # Also sanity-checks PACKAGES (duplicates, LICENSE_EXCLUSION),
    # reports installed-but-unshipped files, and adds RDEPENDS for
    # symlinks whose targets live in a sibling package.
    import glob, stat, errno, re, subprocess

    workdir = d.getVar('WORKDIR', True)
    outdir = d.getVar('DEPLOY_DIR', True)
    dvar = d.getVar('PKGD', True)
    packages = d.getVar('PACKAGES', True)
    pn = d.getVar('PN', True)

    bb.mkdirhier(outdir)
    os.chdir(dvar)

    # Sanity check PACKAGES for duplicates and for LICENSE_EXCLUSION
    # Sanity should be moved to sanity.bbclass once we have the infrastucture
    package_list = []

    for pkg in packages.split():
        if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
            bb.warn("%s has an incompatible license. Excluding from packaging." % pkg)
            packages.remove(pkg)
        else:
            if pkg in package_list:
                bb.error("%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg)
            else:
                package_list.append(pkg)
    d.setVar('PACKAGES', ' '.join(package_list))
    pkgdest = d.getVar('PKGDEST', True)
    subprocess.call('rm -rf %s' % pkgdest, shell=True)

    seen = []

    for pkg in package_list:
        localdata = bb.data.createCopy(d)
        root = os.path.join(pkgdest, pkg)
        bb.mkdirhier(root)

        localdata.setVar('PKG', pkg)
        overrides = localdata.getVar('OVERRIDES', True)
        if not overrides:
            raise bb.build.FuncFailed('OVERRIDES not defined')
        localdata.setVar('OVERRIDES', overrides + ':' + pkg)
        bb.data.update_data(localdata)

        filesvar = localdata.getVar('FILES', True) or ""
        files = filesvar.split()
        file_links = {}
        for file in files:
            if file.find("//") != -1:
                bb.warn("FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg)
                # str.replace returns a new string; the result must be
                # assigned back or the '//' is never actually fixed.
                file = file.replace("//", "/")
            if os.path.isabs(file):
                file = '.' + file
            if not os.path.islink(file):
                if os.path.isdir(file):
                    newfiles = [ os.path.join(file,x) for x in os.listdir(file) ]
                    if newfiles:
                        # Expand a directory entry into its children and
                        # let them be processed by later iterations.
                        files += newfiles
                        continue
                globbed = glob.glob(file)
                if globbed:
                    if [ file ] != globbed:
                        files += globbed
                        continue
            if (not os.path.islink(file)) and (not os.path.exists(file)):
                continue
            if file in seen:
                continue
            seen.append(file)

            # Create directory dest/p mirroring mode/ownership of src/p.
            def mkdir(src, dest, p):
                src = os.path.join(src, p)
                dest = os.path.join(dest, p)
                bb.mkdirhier(dest)
                fstat = os.stat(src)
                os.chmod(dest, fstat.st_mode)
                os.chown(dest, fstat.st_uid, fstat.st_gid)
                if p not in seen:
                    seen.append(p)

            # Create every missing component of 'paths' under dest.
            def mkdir_recurse(src, dest, paths):
                while paths.startswith("./"):
                    paths = paths[2:]
                p = "."
                for c in paths.split("/"):
                    p = os.path.join(p, c)
                    if not os.path.exists(os.path.join(dest, p)):
                        mkdir(src, dest, p)

            if os.path.isdir(file) and not os.path.islink(file):
                mkdir_recurse(dvar, root, file)
                continue

            mkdir_recurse(dvar, root, os.path.dirname(file))
            fpath = os.path.join(root,file)
            if not os.path.islink(file):
                # Regular file: hard link into the package tree and
                # mirror permissions/ownership.
                os.link(file, fpath)
                fstat = os.stat(file)
                os.chmod(fpath, fstat.st_mode)
                os.chown(fpath, fstat.st_uid, fstat.st_gid)
                continue
            ret = bb.copyfile(file, fpath)
            if ret is False or ret == 0:
                raise bb.build.FuncFailed("File population failed")

        del localdata
    os.chdir(workdir)

    # Anything left in ${PKGD} that no package claimed is "unshipped".
    unshipped = []
    for root, dirs, files in os.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = os.path.join(dir, f)
            if ('.' + path) not in seen:
                unshipped.append(path)

    if unshipped != []:
        msg = pn + ": Files/directories were installed but not shipped"
        if "installed_vs_shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
            bb.note("Package %s skipping QA tests: installed_vs_shipped" % pn)
        else:
            for f in unshipped:
                msg = msg + "\n  " + f
            package_qa_handle_error("installed_vs_shipped", msg, d)

    bb.build.exec_func("package_name_hook", d)

    for pkg in package_list:
        pkgname = d.getVar('PKG_%s' % pkg, True)
        if pkgname is None:
            d.setVar('PKG_%s' % pkg, pkg)

    # Record each package's file list and any dangling symlinks
    # (os.stat raising ENOENT on a path found by os.walk means the
    # path is a symlink whose target is missing from this package).
    dangling_links = {}
    pkg_files = {}
    for pkg in package_list:
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(pkgdest, pkg)
        for root, dirs, files in os.walk(inst_root):
            for f in files:
                path = os.path.join(root, f)
                rpath = path[len(inst_root):]
                pkg_files[pkg].append(rpath)
                try:
                    s = os.stat(path)
                except OSError, (err, strerror):
                    if err != errno.ENOENT:
                        raise
                    target = os.readlink(path)
                    if target[0] != '/':
                        target = os.path.join(root[len(inst_root):], target)
                    dangling_links[pkg].append(os.path.normpath(target))

    # Resolve dangling links against sibling packages and add the
    # owning package to this package's RDEPENDS.
    for pkg in package_list:
        rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, True) or d.getVar('RDEPENDS', True) or "")

        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in package_list:
                for f in pkg_files[p]:
                    if f == l:
                        found = True
                        bb.debug(1, "target found in %s" % p)
                        if p == pkg:
                            break
                        if p not in rdepends:
                            rdepends[p] = ""
                        break
            if found == False:
                bb.note("%s contains dangling symlink to %s" % (pkg, l))
        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
}
# Run populate_packages from within ${D} (created if missing).
populate_packages[dirs] = "${D}"

# Staging area where emit_pkgdata writes per-package metadata.
PKGDESTWORK = "${WORKDIR}/pkgdata"
1050 | 1050 | ||
python emit_pkgdata() {
    # Write out the pkgdata files used by later packaging stages:
    #   ${PKGDESTWORK}/${PN}               - PACKAGES list for this recipe
    #   ${PKGDESTWORK}/runtime/<pkg>       - per-package variable dump
    #   ${PKGDESTWORK}/runtime/<pkg>.packaged - marker: <pkg> has content
    #                                           (or is explicitly allowed empty)
    from glob import glob

    def write_if_exists(f, pkg, var):
        # Emit "VAR_pkg: value" if the package-suffixed variable is set,
        # otherwise fall back to the unsuffixed VAR.  Values are escaped so
        # multi-line content (e.g. pkg_postinst scripts) stays on one line.
        def encode(text):
            import codecs
            c = codecs.getencoder("string_escape")
            return c(text)[0]

        val = d.getVar('%s_%s' % (var, pkg), True)
        if val:
            f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
            return
        val = d.getVar('%s' % (var), True)
        if val:
            f.write('%s: %s\n' % (var, encode(val)))
        return

    def get_directory_size(dir):
        # Size of dir in kilobytes as reported by 'du -sk'; 0 for an
        # empty directory (du would still report the dir's own blocks).
        if os.listdir(dir):
            size = int(os.popen('du -sk %s' % dir).readlines()[0].split('\t')[0])
        else:
            size = 0
        return size

    packages = d.getVar('PACKAGES', True)
    pkgdest = d.getVar('PKGDEST', True)
    pkgdatadir = d.getVar('PKGDESTWORK', True)

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)

    data_file = pkgdatadir + d.expand("/${PN}")
    # 'with' guarantees the handle is closed even if the write raises
    # (the original leaked the fd on error and used the Py2-only file()).
    with open(data_file, 'w') as f:
        f.write("PACKAGES: %s\n" % packages)

    workdir = d.getVar('WORKDIR', True)

    for pkg in packages.split():
        subdata_file = pkgdatadir + "/runtime/%s" % pkg

        with open(subdata_file, 'w') as sf:
            write_if_exists(sf, pkg, 'PN')
            write_if_exists(sf, pkg, 'PV')
            write_if_exists(sf, pkg, 'PR')
            write_if_exists(sf, pkg, 'PKGV')
            write_if_exists(sf, pkg, 'PKGR')
            write_if_exists(sf, pkg, 'LICENSE')
            write_if_exists(sf, pkg, 'DESCRIPTION')
            write_if_exists(sf, pkg, 'SUMMARY')
            write_if_exists(sf, pkg, 'RDEPENDS')
            write_if_exists(sf, pkg, 'RPROVIDES')
            write_if_exists(sf, pkg, 'RRECOMMENDS')
            write_if_exists(sf, pkg, 'RSUGGESTS')
            write_if_exists(sf, pkg, 'RREPLACES')
            write_if_exists(sf, pkg, 'RCONFLICTS')
            write_if_exists(sf, pkg, 'SECTION')
            write_if_exists(sf, pkg, 'PKG')
            write_if_exists(sf, pkg, 'ALLOW_EMPTY')
            write_if_exists(sf, pkg, 'FILES')
            write_if_exists(sf, pkg, 'pkg_postinst')
            write_if_exists(sf, pkg, 'pkg_postrm')
            write_if_exists(sf, pkg, 'pkg_preinst')
            write_if_exists(sf, pkg, 'pkg_prerm')
            write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
            # Per-file provides/depends keys recorded by package_do_filedeps
            for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
                write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)

            write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
            for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
                write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)

            sf.write('%s_%s: %s\n' % ('PKGSIZE', pkg, get_directory_size(pkgdest + "/%s" % pkg)))

        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
        if not allow_empty:
            allow_empty = d.getVar('ALLOW_EMPTY', True)
        root = "%s/%s" % (pkgdest, pkg)
        os.chdir(root)
        g = glob('*')
        if g or allow_empty == "1":
            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
            # open() instead of the Python-2-only file() builtin; this just
            # touches an empty marker file.
            open(packagedfile, 'w').close()

    bb.utils.unlockfile(lf)
}
emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime"
1141 | 1141 | ||
@@ -1156,557 +1156,557 @@ RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LI | |||
1156 | # FILERDEPENDS_filepath_pkg - per file dep | 1156 | # FILERDEPENDS_filepath_pkg - per file dep |
1157 | 1157 | ||
python package_do_filedeps() {
    # Use rpmdeps (${RPMDEPS}) to compute per-file Provides/Requires for each
    # package's files, recording them in the datastore as:
    #   FILERPROVIDESFLIST_<pkg> / FILERPROVIDES_<file>_<pkg>
    #   FILERDEPENDSFLIST_<pkg>  / FILERDEPENDS_<file>_<pkg>
    # which emit_pkgdata later writes out to the pkgdata files.
    import re

    # Per-file dependency scanning is expensive; recipes may opt out.
    if d.getVar('SKIP_FILEDEPS', True) == '1':
        return

    pkgdest = d.getVar('PKGDEST', True)
    packages = d.getVar('PACKAGES', True)

    rpmdeps = d.expand("${RPMDEPS}")
    # Matches a version constraint such as ">= 1.2" so it can be
    # parenthesized below into the "(>= 1.2)" dependency syntax.
    r = re.compile(r'[<>=]+ +[^ ]*')

    def file_translate(file):
        # Encode characters that are not legal in a BitBake variable name so
        # that a file path can be embedded in FILERPROVIDES_/FILERDEPENDS_
        # keys.  "@" must be translated first so later @...@ escapes are
        # unambiguous.
        ft = file.replace("@", "@at@")
        ft = ft.replace(" ", "@space@")
        ft = ft.replace("\t", "@tab@")
        ft = ft.replace("[", "@openbrace@")
        ft = ft.replace("]", "@closebrace@")
        ft = ft.replace("_", "@underscore@")
        return ft

    # Quick routine to process the results of the rpmdeps call...
    def process_deps(pipe, pkg, provides_files, requires_files):
        # Each rpmdeps output line is "<filepath> <Requires:|Provides:> ...".
        provides = {}
        requires = {}

        for line in pipe:
            f = line.split(" ", 1)[0].strip()
            line = line.split(" ", 1)[1].strip()

            if line.startswith("Requires:"):
                i = requires
            elif line.startswith("Provides:"):
                i = provides
            else:
                continue

            # Strip the package staging prefix so keys are package-relative.
            file = f.replace(pkgdest + "/" + pkg, "")
            file = file_translate(file)
            value = line.split(":", 1)[1].strip()
            value = r.sub(r'(\g<0>)', value)

            # rpmlib() pseudo-deps and the bare "python" dep are not useful
            # in the OE dependency model; drop them.
            if value.startswith("rpmlib("):
                continue
            if value == "python":
                continue
            if file not in i:
                i[file] = []
            i[file].append(value)

        for file in provides:
            provides_files.append(file)
            key = "FILERPROVIDES_" + file + "_" + pkg
            d.setVar(key, " ".join(provides[file]))

        for file in requires:
            requires_files.append(file)
            key = "FILERDEPENDS_" + file + "_" + pkg
            d.setVar(key, " ".join(requires[file]))

    def chunks(files, n):
        # Split files into n-sized batches to keep the rpmdeps command line
        # below system argument-length limits.
        return [files[i:i+n] for i in range(0, len(files), n)]

    # Determine dependencies
    for pkg in packages.split():
        # Debug/doc/locale/kernel-module packages don't need file-level deps.
        if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'):
            continue

        provides_files = []
        requires_files = []
        rpfiles = []
        for root, dirs, files in os.walk(pkgdest + "/" + pkg):
            for file in files:
                rpfiles.append(os.path.join(root, file))

        for files in chunks(rpfiles, 100):
            dep_pipe = os.popen(rpmdeps + " " + " ".join(files))

            process_deps(dep_pipe, pkg, provides_files, requires_files)

        d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files))
        d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files))
}
1241 | 1241 | ||
# Global (per-host) shared-library provider database, and this recipe's
# per-workdir scratch area for the .list/.ver files it contributes.
SHLIBSDIR = "${STAGING_DIR_HOST}/shlibs"
SHLIBSWORKDIR = "${WORKDIR}/shlibs"
1244 | 1244 | ||
python package_do_shlibs() {
    # Scan each package's files for shared libraries, record which SONAMEs
    # each package provides (written to ${SHLIBSWORKDIR}/<pkg>.list/.ver),
    # and turn each package's NEEDED entries into inter-package dependencies
    # (written to ${PKGDEST}/<pkg>.shlibdeps).  Also schedules an ldconfig
    # postinst when libraries land in the standard library directory.
    import re, pipes

    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', 0)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    lib_re = re.compile("^.*\.so")
    libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True))

    packages = d.getVar('PACKAGES', True)
    targetos = d.getVar('TARGET_OS', True)

    workdir = d.getVar('WORKDIR', True)

    ver = d.getVar('PKGV', True)
    if not ver:
        bb.error("PKGV not defined")
        return

    pkgdest = d.getVar('PKGDEST', True)

    shlibs_dir = d.getVar('SHLIBSDIR', True)
    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))

    # NOTE: linux_so/darwin_so read pkg, private_libs, sonames, renames and
    # needed from the enclosing per-package loop below by closure.
    def linux_so(root, path, file):
        # Parse objdump -p output for one ELF file: collect NEEDED entries
        # into needed[pkg] and the SONAME into sonames.  Returns whether an
        # ldconfig run is needed (library in the standard libdir).
        needs_ldconfig = False
        cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(os.path.join(root, file)) + " 2>/dev/null"
        cmd = "PATH=\"%s\" %s" % (d.getVar('PATH', True), cmd)
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
        for l in lines:
            m = re.match("\s+NEEDED\s+([^\s]*)", l)
            if m:
                if m.group(1) not in needed[pkg]:
                    needed[pkg].append(m.group(1))
            m = re.match("\s+SONAME\s+([^\s]*)", l)
            if m:
                this_soname = m.group(1)
                if not this_soname in sonames:
                    # if library is private (only used by package) then do not build shlib for it
                    if not private_libs or -1 == private_libs.find(this_soname):
                        sonames.append(this_soname)
                if libdir_re.match(root):
                    needs_ldconfig = True
                if snap_symlinks and (file != this_soname):
                    # Schedule renaming the real file to its SONAME.
                    renames.append((os.path.join(root, file), os.path.join(root, this_soname)))
        return needs_ldconfig

    def darwin_so(root, path, file):
        # Mach-O libraries carry no SONAME; derive provider names from the
        # filename and dependency names from the matching libtool .la file.
        fullpath = os.path.join(root, file)
        if not os.path.exists(fullpath):
            return

        def get_combinations(base):
            #
            # Given a base library name, find all combinations of this split by "." and "-"
            #
            combos = []
            options = base.split(".")
            for i in range(1, len(options) + 1):
                combos.append(".".join(options[0:i]))
            options = base.split("-")
            for i in range(1, len(options) + 1):
                combos.append("-".join(options[0:i]))
            return combos

        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg'):
            # Drop suffix
            name = file.rsplit(".",1)[0]
            # Find all combinations
            combos = get_combinations(name)
            for combo in combos:
                if not combo in sonames:
                    sonames.append(combo)
        if file.endswith('.dylib') or file.endswith('.so'):
            # Look for the .la file next to the original (pre-split) install
            # tree, trying each name combination until one exists.
            lafile = fullpath.replace(os.path.join(pkgdest, pkg), d.getVar('PKGD', True))
            # Drop suffix
            lafile = lafile.rsplit(".",1)[0]
            lapath = os.path.dirname(lafile)
            lafile = os.path.basename(lafile)
            # Find all combinations
            combos = get_combinations(lafile)
            for combo in combos:
                if os.path.exists(lapath + '/' + combo + '.la'):
                    break
            lafile = lapath + '/' + combo + '.la'

            #bb.note("Foo2: %s" % lafile)
            #bb.note("Foo %s %s" % (file, fullpath))
            if os.path.exists(lafile):
                fd = open(lafile, 'r')
                lines = fd.readlines()
                fd.close()
                for l in lines:
                    m = re.match("\s*dependency_libs=\s*'(.*)'", l)
                    if m:
                        deps = m.group(1).split(" ")
                        for dep in deps:
                            #bb.note("Trying %s for %s" % (dep, pkg))
                            name = None
                            if dep.endswith(".la"):
                                name = os.path.basename(dep).replace(".la", "")
                            elif dep.startswith("-l"):
                                name = dep.replace("-l", "lib")
                            if pkg not in needed:
                                needed[pkg] = []
                            if name and name not in needed[pkg]:
                                needed[pkg].append(name)
                                #bb.note("Adding %s for %s" % (name, pkg))

    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
        snap_symlinks = True
    else:
        snap_symlinks = False

    if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
        use_ldconfig = True
    else:
        use_ldconfig = False

    # Pass 1: walk every package's files, collecting what each provides
    # (sonames -> shlib_provider) and what it needs (needed[pkg]).
    needed = {}
    shlib_provider = {}
    for pkg in packages.split():
        private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True)
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        # Version recorded for provided libraries: PKGV_<pkg>, then
        # PV_<pkg>, then the recipe-wide PKGV.
        pkgver = d.getVar('PKGV_' + pkg, True)
        if not pkgver:
            pkgver = d.getVar('PV_' + pkg, True)
        if not pkgver:
            pkgver = ver

        needed[pkg] = []
        sonames = list()
        renames = list()
        top = os.path.join(pkgdest, pkg)
        for root, dirs, files in os.walk(top):
            for file in files:
                soname = None
                path = os.path.join(root, file)
                # Symlinks are skipped; only real files are scanned.
                if os.path.islink(path):
                    continue
                if targetos == "darwin" or targetos == "darwin8":
                    darwin_so(root, dirs, file)
                elif os.access(path, os.X_OK) or lib_re.match(file):
                    ldconfig = linux_so(root, dirs, file)
                    needs_ldconfig = needs_ldconfig or ldconfig
        for (old, new) in renames:
            bb.note("Renaming %s to %s" % (old, new))
            os.rename(old, new)
        # Record this package's provided sonames and their version.
        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
        shver_file = os.path.join(shlibswork_dir, pkg + ".ver")
        if len(sonames):
            fd = open(shlibs_file, 'w')
            for s in sonames:
                fd.write(s + '\n')
                shlib_provider[s] = (pkg, pkgver)
            fd.close()
            fd = open(shver_file, 'w')
            fd.write(pkgver + '\n')
            fd.close()
        if needs_ldconfig and use_ldconfig:
            # Append the ldconfig fragment to the package's postinst.
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('ldconfig_postinst_fragment', True)
            d.setVar('pkg_postinst_%s' % pkg, postinst)

    # Merge providers published by other recipes from the global SHLIBSDIR
    # (<dep_pkg>.list holds sonames; <dep_pkg>.ver holds its version).
    list_re = re.compile('^(.*)\.list$')
    for dir in [shlibs_dir]:
        if not os.path.exists(dir):
            continue
        for file in os.listdir(dir):
            m = list_re.match(file)
            if m:
                dep_pkg = m.group(1)
                fd = open(os.path.join(dir, file))
                lines = fd.readlines()
                fd.close()
                ver_file = os.path.join(dir, dep_pkg + '.ver')
                lib_ver = None
                if os.path.exists(ver_file):
                    fd = open(ver_file)
                    lib_ver = fd.readline().rstrip()
                    fd.close()
                for l in lines:
                    shlib_provider[l.rstrip()] = (dep_pkg, lib_ver)

    bb.utils.unlockfile(lf)

    # ASSUME_SHLIBS entries ("lib:pkg" or "lib:pkg_version") declare
    # providers with no recipe-generated shlibs data.
    assumed_libs = d.getVar('ASSUME_SHLIBS', True)
    if assumed_libs:
        for e in assumed_libs.split():
            l, dep_pkg = e.split(":")
            lib_ver = None
            dep_pkg = dep_pkg.rsplit("_", 1)
            if len(dep_pkg) == 2:
                lib_ver = dep_pkg[1]
            dep_pkg = dep_pkg[0]
            shlib_provider[l] = (dep_pkg, lib_ver)

    # Pass 2: map each package's NEEDED sonames to provider packages and
    # write the results to <pkg>.shlibdeps for read_shlibdeps to pick up.
    for pkg in packages.split():
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        deps = list()
        for n in needed[pkg]:
            if n in shlib_provider.keys():
                (dep_pkg, ver_needed) = shlib_provider[n]

                bb.debug(2, '%s: Dependency %s requires package %s' % (pkg, n, dep_pkg))

                # No self-dependencies.
                if dep_pkg == pkg:
                    continue

                if ver_needed:
                    dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                else:
                    dep = dep_pkg
                if not dep in deps:
                    deps.append(dep)
            else:
                bb.note("Couldn't find shared library provider for %s" % n)

        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()
}
1485 | 1485 | ||
1486 | python package_do_pkgconfig () { | 1486 | python package_do_pkgconfig () { |
1487 | import re | 1487 | import re |
1488 | 1488 | ||
1489 | packages = d.getVar('PACKAGES', True) | 1489 | packages = d.getVar('PACKAGES', True) |
1490 | workdir = d.getVar('WORKDIR', True) | 1490 | workdir = d.getVar('WORKDIR', True) |
1491 | pkgdest = d.getVar('PKGDEST', True) | 1491 | pkgdest = d.getVar('PKGDEST', True) |
1492 | 1492 | ||
1493 | shlibs_dir = d.getVar('SHLIBSDIR', True) | 1493 | shlibs_dir = d.getVar('SHLIBSDIR', True) |
1494 | shlibswork_dir = d.getVar('SHLIBSWORKDIR', True) | 1494 | shlibswork_dir = d.getVar('SHLIBSWORKDIR', True) |
1495 | 1495 | ||
1496 | pc_re = re.compile('(.*)\.pc$') | 1496 | pc_re = re.compile('(.*)\.pc$') |
1497 | var_re = re.compile('(.*)=(.*)') | 1497 | var_re = re.compile('(.*)=(.*)') |
1498 | field_re = re.compile('(.*): (.*)') | 1498 | field_re = re.compile('(.*): (.*)') |
1499 | 1499 | ||
1500 | pkgconfig_provided = {} | 1500 | pkgconfig_provided = {} |
1501 | pkgconfig_needed = {} | 1501 | pkgconfig_needed = {} |
1502 | for pkg in packages.split(): | 1502 | for pkg in packages.split(): |
1503 | pkgconfig_provided[pkg] = [] | 1503 | pkgconfig_provided[pkg] = [] |
1504 | pkgconfig_needed[pkg] = [] | 1504 | pkgconfig_needed[pkg] = [] |
1505 | top = os.path.join(pkgdest, pkg) | 1505 | top = os.path.join(pkgdest, pkg) |
1506 | for root, dirs, files in os.walk(top): | 1506 | for root, dirs, files in os.walk(top): |
1507 | for file in files: | 1507 | for file in files: |
1508 | m = pc_re.match(file) | 1508 | m = pc_re.match(file) |
1509 | if m: | 1509 | if m: |
1510 | pd = bb.data.init() | 1510 | pd = bb.data.init() |
1511 | name = m.group(1) | 1511 | name = m.group(1) |
1512 | pkgconfig_provided[pkg].append(name) | 1512 | pkgconfig_provided[pkg].append(name) |
1513 | path = os.path.join(root, file) | 1513 | path = os.path.join(root, file) |
1514 | if not os.access(path, os.R_OK): | 1514 | if not os.access(path, os.R_OK): |
1515 | continue | 1515 | continue |
1516 | f = open(path, 'r') | 1516 | f = open(path, 'r') |
1517 | lines = f.readlines() | 1517 | lines = f.readlines() |
1518 | f.close() | 1518 | f.close() |
1519 | for l in lines: | 1519 | for l in lines: |
1520 | m = var_re.match(l) | 1520 | m = var_re.match(l) |
1521 | if m: | 1521 | if m: |
1522 | name = m.group(1) | 1522 | name = m.group(1) |
1523 | val = m.group(2) | 1523 | val = m.group(2) |
1524 | pd.setVar(name, pd.expand(val)) | 1524 | pd.setVar(name, pd.expand(val)) |
1525 | continue | 1525 | continue |
1526 | m = field_re.match(l) | 1526 | m = field_re.match(l) |
1527 | if m: | 1527 | if m: |
1528 | hdr = m.group(1) | 1528 | hdr = m.group(1) |
1529 | exp = bb.data.expand(m.group(2), pd) | 1529 | exp = bb.data.expand(m.group(2), pd) |
1530 | if hdr == 'Requires': | 1530 | if hdr == 'Requires': |
1531 | pkgconfig_needed[pkg] += exp.replace(',', ' ').split() | 1531 | pkgconfig_needed[pkg] += exp.replace(',', ' ').split() |
1532 | 1532 | ||
1533 | # Take shared lock since we're only reading, not writing | 1533 | # Take shared lock since we're only reading, not writing |
1534 | lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}")) | 1534 | lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}")) |
1535 | 1535 | ||
1536 | for pkg in packages.split(): | 1536 | for pkg in packages.split(): |
1537 | pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist") | 1537 | pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist") |
1538 | if pkgconfig_provided[pkg] != []: | 1538 | if pkgconfig_provided[pkg] != []: |
1539 | f = open(pkgs_file, 'w') | 1539 | f = open(pkgs_file, 'w') |
1540 | for p in pkgconfig_provided[pkg]: | 1540 | for p in pkgconfig_provided[pkg]: |
1541 | f.write('%s\n' % p) | 1541 | f.write('%s\n' % p) |
1542 | f.close() | 1542 | f.close() |
1543 | 1543 | ||
1544 | for dir in [shlibs_dir]: | 1544 | for dir in [shlibs_dir]: |
1545 | if not os.path.exists(dir): | 1545 | if not os.path.exists(dir): |
1546 | continue | 1546 | continue |
1547 | for file in os.listdir(dir): | 1547 | for file in os.listdir(dir): |
1548 | m = re.match('^(.*)\.pclist$', file) | 1548 | m = re.match('^(.*)\.pclist$', file) |
1549 | if m: | 1549 | if m: |
1550 | pkg = m.group(1) | 1550 | pkg = m.group(1) |
1551 | fd = open(os.path.join(dir, file)) | 1551 | fd = open(os.path.join(dir, file)) |
1552 | lines = fd.readlines() | 1552 | lines = fd.readlines() |
1553 | fd.close() | 1553 | fd.close() |
1554 | pkgconfig_provided[pkg] = [] | 1554 | pkgconfig_provided[pkg] = [] |
1555 | for l in lines: | 1555 | for l in lines: |
1556 | pkgconfig_provided[pkg].append(l.rstrip()) | 1556 | pkgconfig_provided[pkg].append(l.rstrip()) |
1557 | 1557 | ||
1558 | for pkg in packages.split(): | 1558 | for pkg in packages.split(): |
1559 | deps = [] | 1559 | deps = [] |
1560 | for n in pkgconfig_needed[pkg]: | 1560 | for n in pkgconfig_needed[pkg]: |
1561 | found = False | 1561 | found = False |
1562 | for k in pkgconfig_provided.keys(): | 1562 | for k in pkgconfig_provided.keys(): |
1563 | if n in pkgconfig_provided[k]: | 1563 | if n in pkgconfig_provided[k]: |
1564 | if k != pkg and not (k in deps): | 1564 | if k != pkg and not (k in deps): |
1565 | deps.append(k) | 1565 | deps.append(k) |
1566 | found = True | 1566 | found = True |
1567 | if found == False: | 1567 | if found == False: |
1568 | bb.note("couldn't find pkgconfig module '%s' in any package" % n) | 1568 | bb.note("couldn't find pkgconfig module '%s' in any package" % n) |
1569 | deps_file = os.path.join(pkgdest, pkg + ".pcdeps") | 1569 | deps_file = os.path.join(pkgdest, pkg + ".pcdeps") |
1570 | if len(deps): | 1570 | if len(deps): |
1571 | fd = open(deps_file, 'w') | 1571 | fd = open(deps_file, 'w') |
1572 | for dep in deps: | 1572 | for dep in deps: |
1573 | fd.write(dep + '\n') | 1573 | fd.write(dep + '\n') |
1574 | fd.close() | 1574 | fd.close() |
1575 | 1575 | ||
1576 | bb.utils.unlockfile(lf) | 1576 | bb.utils.unlockfile(lf) |
1577 | } | 1577 | } |
1578 | 1578 | ||
1579 | python read_shlibdeps () { | 1579 | python read_shlibdeps () { |
1580 | packages = d.getVar('PACKAGES', True).split() | 1580 | packages = d.getVar('PACKAGES', True).split() |
1581 | for pkg in packages: | 1581 | for pkg in packages: |
1582 | rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "") | 1582 | rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "") |
1583 | 1583 | ||
1584 | for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": | 1584 | for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": |
1585 | depsfile = d.expand("${PKGDEST}/" + pkg + extension) | 1585 | depsfile = d.expand("${PKGDEST}/" + pkg + extension) |
1586 | if os.access(depsfile, os.R_OK): | 1586 | if os.access(depsfile, os.R_OK): |
1587 | fd = file(depsfile) | 1587 | fd = file(depsfile) |
1588 | lines = fd.readlines() | 1588 | lines = fd.readlines() |
1589 | fd.close() | 1589 | fd.close() |
1590 | for l in lines: | 1590 | for l in lines: |
1591 | rdepends[l.rstrip()] = "" | 1591 | rdepends[l.rstrip()] = "" |
1592 | d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False)) | 1592 | d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False)) |
1593 | } | 1593 | } |
1594 | 1594 | ||
python package_depchains() {
    """
    For a given set of prefix and postfix modifiers, make those packages
    RRECOMMENDS on the corresponding packages for its RDEPENDS.

    Example: If package A depends upon package B, and A's .bb emits an
    A-dev package, this would make A-dev Recommends: B-dev.

    If only one of a given suffix is specified, it will take the RRECOMMENDS
    based on the RDEPENDS of *all* other packages. If more than one of a given
    suffix is specified, its will only use the RDEPENDS of the single parent
    package.
    """

    packages = d.getVar('PACKAGES', True)
    postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
    prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split()

    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
        # Map each build-time dependency to its modified counterpart
        # (getname applies the suffix/prefix) and add it to RRECOMMENDS_<pkg>.

        #bb.note('depends for %s is %s' % (base, depends))
        rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "")

        for depend in depends:
            # native/cross tools and virtual providers have no runtime
            # counterpart package to recommend
            if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
                #bb.note("Skipping %s" % depend)
                continue
            # strip any -dev/-dbg so the modifier is applied to the base name
            if depend.endswith('-dev'):
                depend = depend.replace('-dev', '')
            if depend.endswith('-dbg'):
                depend = depend.replace('-dbg', '')
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist:
                rreclist[pkgname] = ""

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
        # Same idea as pkg_adddeprrecs, but driven by runtime dependencies
        # and with a different skip list (locale virtuals).

        #bb.note('rdepends for %s is %s' % (base, rdepends))
        rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "")

        for depend in rdepends:
            if depend.find('virtual-locale-') != -1:
                #bb.note("Skipping %s" % depend)
                continue
            if depend.endswith('-dev'):
                depend = depend.replace('-dev', '')
            if depend.endswith('-dbg'):
                depend = depend.replace('-dbg', '')
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist:
                rreclist[pkgname] = ""

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def add_dep(list, dep):
        # Append dep (version constraint stripped) if not already present.
        dep = dep.split(' (')[0].strip()
        if dep not in list:
            list.append(dep)

    # Collect the recipe-wide build-time dependencies...
    depends = []
    for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
        add_dep(depends, dep)

    # ...and the union of runtime dependencies across all packages.
    rdepends = []
    for dep in bb.utils.explode_deps(d.getVar('RDEPENDS', True) or ""):
        add_dep(rdepends, dep)

    for pkg in packages.split():
        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
            add_dep(rdepends, dep)

    #bb.note('rdepends is %s' % rdepends)

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)
    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)

    # Group packages by which DEPCHAIN modifier they match; each entry maps
    # the package to (base name, name-composition function).
    pkgs = {}
    for pkg in packages.split():
        for postfix in postfixes:
            if pkg.endswith(postfix):
                if not postfix in pkgs:
                    pkgs[postfix] = {}
                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)

        for prefix in prefixes:
            if pkg.startswith(prefix):
                if not prefix in pkgs:
                    pkgs[prefix] = {}
                # NOTE(review): this slices len(prefix) chars off the *end*
                # of the name; for a prefix one would expect
                # pkg[len(prefix):] — confirm against upstream before relying
                # on the base name for prefixed packages.
                pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)

    for suffix in pkgs:
        for pkg in pkgs[suffix]:
            # Recipes can opt a package out via the 'nodeprrecs' varflag.
            if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
                continue
            (base, func) = pkgs[suffix][pkg]
            if suffix == "-dev":
                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
            if len(pkgs[suffix]) == 1:
                # sole package with this modifier: chain on the union of
                # all runtime dependencies (see docstring)
                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
            else:
                # several packages share this modifier: use only the
                # parent package's own RDEPENDS
                rdeps = []
                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or d.getVar('RDEPENDS', True) or ""):
                    add_dep(rdeps, dep)
                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
}
1708 | 1708 | ||
1709 | # Since bitbake can't determine which variables are accessed during package | 1709 | # Since bitbake can't determine which variables are accessed during package |
1710 | # iteration, we need to list them here: | 1710 | # iteration, we need to list them here: |
1711 | PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR" | 1711 | PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR" |
1712 | 1712 | ||
@@ -1720,44 +1720,44 @@ def gen_packagevar(d): | |||
1720 | return " ".join(ret) | 1720 | return " ".join(ret) |
1721 | 1721 | ||
1722 | PACKAGE_PREPROCESS_FUNCS ?= "" | 1722 | PACKAGE_PREPROCESS_FUNCS ?= "" |
1723 | PACKAGEFUNCS ?= "package_get_auto_pr \ | 1723 | PACKAGEFUNCS ?= "package_get_auto_pr \ |
1724 | perform_packagecopy \ | 1724 | perform_packagecopy \ |
1725 | ${PACKAGE_PREPROCESS_FUNCS} \ | 1725 | ${PACKAGE_PREPROCESS_FUNCS} \ |
1726 | package_do_split_locales \ | 1726 | package_do_split_locales \ |
1727 | split_and_strip_files \ | 1727 | split_and_strip_files \ |
1728 | fixup_perms \ | 1728 | fixup_perms \ |
1729 | populate_packages \ | 1729 | populate_packages \ |
1730 | package_do_filedeps \ | 1730 | package_do_filedeps \ |
1731 | package_do_shlibs \ | 1731 | package_do_shlibs \ |
1732 | package_do_pkgconfig \ | 1732 | package_do_pkgconfig \ |
1733 | read_shlibdeps \ | 1733 | read_shlibdeps \ |
1734 | package_depchains \ | 1734 | package_depchains \ |
1735 | emit_pkgdata" | 1735 | emit_pkgdata" |
1736 | 1736 | ||
python do_package () {
    # Top-level packaging task: validate the environment, then run every
    # function listed in PACKAGEFUNCS in order.
    #
    # Change the following version to cause sstate to invalidate the package
    # cache. This is useful if an item this class depends on changes in a
    # way that the output of this class changes. rpmdeps is a good example
    # as any change to rpmdeps requires this to be rerun.
    # PACKAGE_BBCLASS_VERSION = "1"

    packages = (d.getVar('PACKAGES', True) or "").split()
    if len(packages) < 1:
        bb.debug(1, "No packages to build, skipping do_package")
        return

    workdir = d.getVar('WORKDIR', True)
    outdir = d.getVar('DEPLOY_DIR', True)
    dest = d.getVar('D', True)
    dvar = d.getVar('PKGD', True)
    pn = d.getVar('PN', True)

    # Sanity-check the variables every packaging step relies on before
    # running any PACKAGEFUNCS. (packages is already known non-empty here.)
    if not workdir or not outdir or not dest or not dvar or not pn or not packages:
        bb.error("WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package")
        return

    for f in (d.getVar('PACKAGEFUNCS', True) or '').split():
        bb.build.exec_func(f, d)
}
1762 | 1762 | ||
1763 | do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}" | 1763 | do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}" |
@@ -1775,7 +1775,7 @@ do_package[stamp-extra-info] = "${MACHINE}" | |||
1775 | do_package_setscene[dirs] = "${STAGING_DIR}" | 1775 | do_package_setscene[dirs] = "${STAGING_DIR}" |
1776 | 1776 | ||
python do_package_setscene () {
    # Restore do_package output from shared state instead of re-running it.
    sstate_setscene(d)
}
1780 | addtask do_package_setscene | 1780 | addtask do_package_setscene |
1781 | 1781 | ||
@@ -1793,14 +1793,14 @@ addtask package_write before do_build after do_package | |||
1793 | # | 1793 | # |
1794 | 1794 | ||
def mapping_rename_hook(d):
    """
    Rewrite variables to account for package renaming in things
    like debian.bbclass or manual PKG variable name changes
    """
    # Apply the same rename pass to every runtime dependency variable.
    for runtime_var in ("RDEPENDS", "RRECOMMENDS", "RSUGGESTS",
                        "RPROVIDES", "RREPLACES", "RCONFLICTS"):
        runtime_mapping_rename(runtime_var, d)
1806 | 1806 | ||
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass index f58fd2be02..d09baeaa28 100644 --- a/meta/classes/package_deb.bbclass +++ b/meta/classes/package_deb.bbclass | |||
@@ -418,8 +418,8 @@ python () { | |||
418 | } | 418 | } |
419 | 419 | ||
python do_package_write_deb () {
    # Refresh per-subpackage metadata from pkgdata, then build the .debs.
    bb.build.exec_func("read_subpackage_metadata", d)
    bb.build.exec_func("do_package_deb", d)
}
424 | do_package_write_deb[dirs] = "${PKGWRITEDIRDEB}" | 424 | do_package_write_deb[dirs] = "${PKGWRITEDIRDEB}" |
425 | do_package_write_deb[umask] = "022" | 425 | do_package_write_deb[umask] = "022" |
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass index 2a2991768b..b20df0f243 100644 --- a/meta/classes/package_rpm.bbclass +++ b/meta/classes/package_rpm.bbclass | |||
@@ -9,11 +9,11 @@ PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms" | |||
9 | PKGWRITEDIRSRPM = "${DEPLOY_DIR}/sources/deploy-srpm" | 9 | PKGWRITEDIRSRPM = "${DEPLOY_DIR}/sources/deploy-srpm" |
10 | 10 | ||
python package_rpm_fn () {
    # The RPM backend names package files after the PKG variable.
    d.setVar('PKGFN', d.getVar('PKG'))
}
14 | 14 | ||
python package_rpm_install () {
    # Direct installation via this task is not supported for RPM.
    bb.fatal("package_rpm_install not implemented!")
}
18 | 18 | ||
19 | RPMCONF_TARGET_BASE = "${DEPLOY_DIR_RPM}/solvedb" | 19 | RPMCONF_TARGET_BASE = "${DEPLOY_DIR_RPM}/solvedb" |
@@ -547,601 +547,601 @@ EOF | |||
547 | } | 547 | } |
548 | 548 | ||
549 | python write_specfile () { | 549 | python write_specfile () { |
550 | import textwrap | 550 | import textwrap |
551 | import oe.packagedata | 551 | import oe.packagedata |
552 | 552 | ||
553 | # append information for logs and patches to %prep | 553 | # append information for logs and patches to %prep |
554 | def add_prep(d,spec_files_bottom): | 554 | def add_prep(d,spec_files_bottom): |
555 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM': | 555 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM': |
556 | spec_files_bottom.append('%%prep -n %s' % d.getVar('PN', True) ) | 556 | spec_files_bottom.append('%%prep -n %s' % d.getVar('PN', True) ) |
557 | spec_files_bottom.append('%s' % "echo \"include logs and patches, Please check them in SOURCES\"") | 557 | spec_files_bottom.append('%s' % "echo \"include logs and patches, Please check them in SOURCES\"") |
558 | spec_files_bottom.append('') | 558 | spec_files_bottom.append('') |
559 | 559 | ||
560 | # get the name of tarball for sources, patches and logs | 560 | # get the name of tarball for sources, patches and logs |
561 | def get_tarballs(d): | 561 | def get_tarballs(d): |
562 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM': | 562 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM': |
563 | return get_package(d) | 563 | return get_package(d) |
564 | 564 | ||
565 | # append the name of tarball to key word 'SOURCE' in xxx.spec. | 565 | # append the name of tarball to key word 'SOURCE' in xxx.spec. |
566 | def tail_source(d,source_list=[],patch_list=None): | 566 | def tail_source(d,source_list=[],patch_list=None): |
567 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM': | 567 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM': |
568 | source_number = 0 | 568 | source_number = 0 |
569 | patch_number = 0 | 569 | patch_number = 0 |
570 | for source in source_list: | 570 | for source in source_list: |
571 | spec_preamble_top.append('Source' + str(source_number) + ': %s' % source) | 571 | spec_preamble_top.append('Source' + str(source_number) + ': %s' % source) |
572 | source_number += 1 | 572 | source_number += 1 |
573 | if patch_list: | 573 | if patch_list: |
574 | for patch in patch_list: | 574 | for patch in patch_list: |
575 | print_deps(patch, "Patch" + str(patch_number), spec_preamble_top, d) | 575 | print_deps(patch, "Patch" + str(patch_number), spec_preamble_top, d) |
576 | patch_number += 1 | 576 | patch_number += 1 |
577 | # We need a simple way to remove the MLPREFIX from the package name, | 577 | # We need a simple way to remove the MLPREFIX from the package name, |
578 | # and dependency information... | 578 | # and dependency information... |
579 | def strip_multilib(name, d): | 579 | def strip_multilib(name, d): |
580 | multilibs = d.getVar('MULTILIBS', True) or "" | 580 | multilibs = d.getVar('MULTILIBS', True) or "" |
581 | for ext in multilibs.split(): | 581 | for ext in multilibs.split(): |
582 | eext = ext.split(':') | 582 | eext = ext.split(':') |
583 | if len(eext) > 1 and eext[0] == 'multilib' and name and name.find(eext[1] + '-') >= 0: | 583 | if len(eext) > 1 and eext[0] == 'multilib' and name and name.find(eext[1] + '-') >= 0: |
584 | name = "".join(name.split(eext[1] + '-')) | 584 | name = "".join(name.split(eext[1] + '-')) |
585 | return name | 585 | return name |
586 | 586 | ||
587 | # ml = d.getVar("MLPREFIX", True) | 587 | # ml = d.getVar("MLPREFIX", True) |
588 | # if ml and name and len(ml) != 0 and name.find(ml) == 0: | 588 | # if ml and name and len(ml) != 0 and name.find(ml) == 0: |
589 | # return ml.join(name.split(ml, 1)[1:]) | 589 | # return ml.join(name.split(ml, 1)[1:]) |
590 | # return name | 590 | # return name |
591 | 591 | ||
592 | # In RPM, dependencies are of the format: pkg <>= Epoch:Version-Release | 592 | # In RPM, dependencies are of the format: pkg <>= Epoch:Version-Release |
593 | # This format is similar to OE, however there are restrictions on the | 593 | # This format is similar to OE, however there are restrictions on the |
594 | # characters that can be in a field. In the Version field, "-" | 594 | # characters that can be in a field. In the Version field, "-" |
595 | # characters are not allowed. "-" is allowed in the Release field. | 595 | # characters are not allowed. "-" is allowed in the Release field. |
596 | # | 596 | # |
597 | # We translate the "-" in the version to a "+", by loading the PKGV | 597 | # We translate the "-" in the version to a "+", by loading the PKGV |
598 | # from the dependent recipe, replacing the - with a +, and then using | 598 | # from the dependent recipe, replacing the - with a +, and then using |
599 | # that value to do a replace inside of this recipe's dependencies. | 599 | # that value to do a replace inside of this recipe's dependencies. |
600 | # This preserves the "-" separator between the version and release, as | 600 | # This preserves the "-" separator between the version and release, as |
601 | # well as any "-" characters inside of the release field. | 601 | # well as any "-" characters inside of the release field. |
602 | # | 602 | # |
603 | # All of this has to happen BEFORE the mapping_rename_hook as | 603 | # All of this has to happen BEFORE the mapping_rename_hook as |
604 | # after renaming we cannot look up the dependencies in the packagedata | 604 | # after renaming we cannot look up the dependencies in the packagedata |
605 | # store. | 605 | # store. |
606 | def translate_vers(varname, d): | 606 | def translate_vers(varname, d): |
607 | depends = d.getVar(varname, True) | 607 | depends = d.getVar(varname, True) |
608 | if depends: | 608 | if depends: |
609 | depends_dict = bb.utils.explode_dep_versions(depends) | 609 | depends_dict = bb.utils.explode_dep_versions(depends) |
610 | newdeps_dict = {} | 610 | newdeps_dict = {} |
611 | for dep in depends_dict: | 611 | for dep in depends_dict: |
612 | ver = depends_dict[dep] | 612 | ver = depends_dict[dep] |
613 | if dep and ver: | 613 | if dep and ver: |
614 | if '-' in ver: | 614 | if '-' in ver: |
615 | subd = oe.packagedata.read_subpkgdata_dict(dep, d) | 615 | subd = oe.packagedata.read_subpkgdata_dict(dep, d) |
616 | if 'PKGV' in subd: | 616 | if 'PKGV' in subd: |
617 | pv = subd['PKGV'] | 617 | pv = subd['PKGV'] |
618 | reppv = pv.replace('-', '+') | 618 | reppv = pv.replace('-', '+') |
619 | ver = ver.replace(pv, reppv) | 619 | ver = ver.replace(pv, reppv) |
620 | newdeps_dict[dep] = ver | 620 | newdeps_dict[dep] = ver |
621 | depends = bb.utils.join_deps(newdeps_dict) | 621 | depends = bb.utils.join_deps(newdeps_dict) |
622 | d.setVar(varname, depends.strip()) | 622 | d.setVar(varname, depends.strip()) |
623 | 623 | ||
624 | # We need to change the style the dependency from BB to RPM | 624 | # We need to change the style the dependency from BB to RPM |
625 | # This needs to happen AFTER the mapping_rename_hook | 625 | # This needs to happen AFTER the mapping_rename_hook |
626 | def print_deps(variable, tag, array, d): | 626 | def print_deps(variable, tag, array, d): |
627 | depends = variable | 627 | depends = variable |
628 | if depends: | 628 | if depends: |
629 | depends_dict = bb.utils.explode_dep_versions(depends) | 629 | depends_dict = bb.utils.explode_dep_versions(depends) |
630 | for dep in depends_dict: | 630 | for dep in depends_dict: |
631 | ver = depends_dict[dep] | 631 | ver = depends_dict[dep] |
632 | if dep and ver: | 632 | if dep and ver: |
633 | ver = ver.replace('(', '') | 633 | ver = ver.replace('(', '') |
634 | ver = ver.replace(')', '') | 634 | ver = ver.replace(')', '') |
635 | array.append("%s: %s %s" % (tag, dep, ver)) | 635 | array.append("%s: %s %s" % (tag, dep, ver)) |
636 | else: | 636 | else: |
637 | array.append("%s: %s" % (tag, dep)) | 637 | array.append("%s: %s" % (tag, dep)) |
638 | 638 | ||
639 | def walk_files(walkpath, target, conffiles): | 639 | def walk_files(walkpath, target, conffiles): |
640 | import os | 640 | import os |
641 | for rootpath, dirs, files in os.walk(walkpath): | 641 | for rootpath, dirs, files in os.walk(walkpath): |
642 | path = rootpath.replace(walkpath, "") | 642 | path = rootpath.replace(walkpath, "") |
643 | for dir in dirs: | 643 | for dir in dirs: |
644 | # All packages own the directories their files are in... | 644 | # All packages own the directories their files are in... |
645 | target.append('%dir "' + path + '/' + dir + '"') | 645 | target.append('%dir "' + path + '/' + dir + '"') |
646 | for file in files: | 646 | for file in files: |
647 | if conffiles.count(path + '/' + file): | 647 | if conffiles.count(path + '/' + file): |
648 | target.append('%config "' + path + '/' + file + '"') | 648 | target.append('%config "' + path + '/' + file + '"') |
649 | else: | 649 | else: |
650 | target.append('"' + path + '/' + file + '"') | 650 | target.append('"' + path + '/' + file + '"') |
651 | 651 | ||
652 | # Prevent the prerm/postrm scripts from being run during an upgrade | 652 | # Prevent the prerm/postrm scripts from being run during an upgrade |
653 | def wrap_uninstall(scriptvar): | 653 | def wrap_uninstall(scriptvar): |
654 | scr = scriptvar.strip() | 654 | scr = scriptvar.strip() |
655 | if scr.startswith("#!"): | 655 | if scr.startswith("#!"): |
656 | pos = scr.find("\n") + 1 | 656 | pos = scr.find("\n") + 1 |
657 | else: | 657 | else: |
658 | pos = 0 | 658 | pos = 0 |
659 | scr = scr[:pos] + 'if [ "$1" = "0" ] ; then\n' + scr[pos:] + '\nfi' | 659 | scr = scr[:pos] + 'if [ "$1" = "0" ] ; then\n' + scr[pos:] + '\nfi' |
660 | return scr | 660 | return scr |
661 | 661 | ||
662 | packages = d.getVar('PACKAGES', True) | 662 | packages = d.getVar('PACKAGES', True) |
663 | if not packages or packages == '': | 663 | if not packages or packages == '': |
664 | bb.debug(1, "No packages; nothing to do") | 664 | bb.debug(1, "No packages; nothing to do") |
665 | return | 665 | return |
666 | 666 | ||
667 | pkgdest = d.getVar('PKGDEST', True) | 667 | pkgdest = d.getVar('PKGDEST', True) |
668 | if not pkgdest: | 668 | if not pkgdest: |
669 | bb.fatal("No PKGDEST") | 669 | bb.fatal("No PKGDEST") |
670 | return | 670 | return |
671 | 671 | ||
672 | outspecfile = d.getVar('OUTSPECFILE', True) | 672 | outspecfile = d.getVar('OUTSPECFILE', True) |
673 | if not outspecfile: | 673 | if not outspecfile: |
674 | bb.fatal("No OUTSPECFILE") | 674 | bb.fatal("No OUTSPECFILE") |
675 | return | 675 | return |
676 | 676 | ||
677 | # Construct the SPEC file... | 677 | # Construct the SPEC file... |
678 | srcname = strip_multilib(d.getVar('PN', True), d) | 678 | srcname = strip_multilib(d.getVar('PN', True), d) |
679 | srcsummary = (d.getVar('SUMMARY', True) or d.getVar('DESCRIPTION', True) or ".") | 679 | srcsummary = (d.getVar('SUMMARY', True) or d.getVar('DESCRIPTION', True) or ".") |
680 | srcversion = d.getVar('PKGV', True).replace('-', '+') | 680 | srcversion = d.getVar('PKGV', True).replace('-', '+') |
681 | srcrelease = d.getVar('PKGR', True) | 681 | srcrelease = d.getVar('PKGR', True) |
682 | srcepoch = (d.getVar('PKGE', True) or "") | 682 | srcepoch = (d.getVar('PKGE', True) or "") |
683 | srclicense = d.getVar('LICENSE', True) | 683 | srclicense = d.getVar('LICENSE', True) |
684 | srcsection = d.getVar('SECTION', True) | 684 | srcsection = d.getVar('SECTION', True) |
685 | srcmaintainer = d.getVar('MAINTAINER', True) | 685 | srcmaintainer = d.getVar('MAINTAINER', True) |
686 | srchomepage = d.getVar('HOMEPAGE', True) | 686 | srchomepage = d.getVar('HOMEPAGE', True) |
687 | srcdescription = d.getVar('DESCRIPTION', True) or "." | 687 | srcdescription = d.getVar('DESCRIPTION', True) or "." |
688 | 688 | ||
689 | srcdepends = strip_multilib(d.getVar('DEPENDS', True), d) | 689 | srcdepends = strip_multilib(d.getVar('DEPENDS', True), d) |
690 | srcrdepends = [] | 690 | srcrdepends = [] |
691 | srcrrecommends = [] | 691 | srcrrecommends = [] |
692 | srcrsuggests = [] | 692 | srcrsuggests = [] |
693 | srcrprovides = [] | 693 | srcrprovides = [] |
694 | srcrreplaces = [] | 694 | srcrreplaces = [] |
695 | srcrconflicts = [] | 695 | srcrconflicts = [] |
696 | srcrobsoletes = [] | 696 | srcrobsoletes = [] |
697 | 697 | ||
698 | srcpreinst = [] | 698 | srcpreinst = [] |
699 | srcpostinst = [] | 699 | srcpostinst = [] |
700 | srcprerm = [] | 700 | srcprerm = [] |
701 | srcpostrm = [] | 701 | srcpostrm = [] |
702 | 702 | ||
703 | spec_preamble_top = [] | 703 | spec_preamble_top = [] |
704 | spec_preamble_bottom = [] | 704 | spec_preamble_bottom = [] |
705 | 705 | ||
706 | spec_scriptlets_top = [] | 706 | spec_scriptlets_top = [] |
707 | spec_scriptlets_bottom = [] | 707 | spec_scriptlets_bottom = [] |
708 | 708 | ||
709 | spec_files_top = [] | 709 | spec_files_top = [] |
710 | spec_files_bottom = [] | 710 | spec_files_bottom = [] |
711 | 711 | ||
712 | for pkg in packages.split(): | 712 | for pkg in packages.split(): |
713 | localdata = bb.data.createCopy(d) | 713 | localdata = bb.data.createCopy(d) |
714 | 714 | ||
715 | root = "%s/%s" % (pkgdest, pkg) | 715 | root = "%s/%s" % (pkgdest, pkg) |
716 | 716 | ||
717 | lf = bb.utils.lockfile(root + ".lock") | 717 | lf = bb.utils.lockfile(root + ".lock") |
718 | 718 | ||
719 | localdata.setVar('ROOT', '') | 719 | localdata.setVar('ROOT', '') |
720 | localdata.setVar('ROOT_%s' % pkg, root) | 720 | localdata.setVar('ROOT_%s' % pkg, root) |
721 | pkgname = localdata.getVar('PKG_%s' % pkg, True) | 721 | pkgname = localdata.getVar('PKG_%s' % pkg, True) |
722 | if not pkgname: | 722 | if not pkgname: |
723 | pkgname = pkg | 723 | pkgname = pkg |
724 | localdata.setVar('PKG', pkgname) | 724 | localdata.setVar('PKG', pkgname) |
725 | 725 | ||
726 | localdata.setVar('OVERRIDES', pkg) | 726 | localdata.setVar('OVERRIDES', pkg) |
727 | 727 | ||
728 | bb.data.update_data(localdata) | 728 | bb.data.update_data(localdata) |
729 | 729 | ||
730 | conffiles = (localdata.getVar('CONFFILES', True) or "").split() | 730 | conffiles = (localdata.getVar('CONFFILES', True) or "").split() |
731 | 731 | ||
732 | splitname = strip_multilib(pkgname, d) | 732 | splitname = strip_multilib(pkgname, d) |
733 | 733 | ||
734 | splitsummary = (localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or ".") | 734 | splitsummary = (localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or ".") |
735 | splitversion = (localdata.getVar('PKGV', True) or "").replace('-', '+') | 735 | splitversion = (localdata.getVar('PKGV', True) or "").replace('-', '+') |
736 | splitrelease = (localdata.getVar('PKGR', True) or "") | 736 | splitrelease = (localdata.getVar('PKGR', True) or "") |
737 | splitepoch = (localdata.getVar('PKGE', True) or "") | 737 | splitepoch = (localdata.getVar('PKGE', True) or "") |
738 | splitlicense = (localdata.getVar('LICENSE', True) or "") | 738 | splitlicense = (localdata.getVar('LICENSE', True) or "") |
739 | splitsection = (localdata.getVar('SECTION', True) or "") | 739 | splitsection = (localdata.getVar('SECTION', True) or "") |
740 | splitdescription = (localdata.getVar('DESCRIPTION', True) or ".") | 740 | splitdescription = (localdata.getVar('DESCRIPTION', True) or ".") |
741 | 741 | ||
742 | translate_vers('RDEPENDS', localdata) | 742 | translate_vers('RDEPENDS', localdata) |
743 | translate_vers('RRECOMMENDS', localdata) | 743 | translate_vers('RRECOMMENDS', localdata) |
744 | translate_vers('RSUGGESTS', localdata) | 744 | translate_vers('RSUGGESTS', localdata) |
745 | translate_vers('RPROVIDES', localdata) | 745 | translate_vers('RPROVIDES', localdata) |
746 | translate_vers('RREPLACES', localdata) | 746 | translate_vers('RREPLACES', localdata) |
747 | translate_vers('RCONFLICTS', localdata) | 747 | translate_vers('RCONFLICTS', localdata) |
748 | 748 | ||
749 | # Map the dependencies into their final form | 749 | # Map the dependencies into their final form |
750 | mapping_rename_hook(localdata) | 750 | mapping_rename_hook(localdata) |
751 | 751 | ||
752 | splitrdepends = strip_multilib(localdata.getVar('RDEPENDS', True), d) or "" | 752 | splitrdepends = strip_multilib(localdata.getVar('RDEPENDS', True), d) or "" |
753 | splitrrecommends = strip_multilib(localdata.getVar('RRECOMMENDS', True), d) or "" | 753 | splitrrecommends = strip_multilib(localdata.getVar('RRECOMMENDS', True), d) or "" |
754 | splitrsuggests = strip_multilib(localdata.getVar('RSUGGESTS', True), d) or "" | 754 | splitrsuggests = strip_multilib(localdata.getVar('RSUGGESTS', True), d) or "" |
755 | splitrprovides = strip_multilib(localdata.getVar('RPROVIDES', True), d) or "" | 755 | splitrprovides = strip_multilib(localdata.getVar('RPROVIDES', True), d) or "" |
756 | splitrreplaces = strip_multilib(localdata.getVar('RREPLACES', True), d) or "" | 756 | splitrreplaces = strip_multilib(localdata.getVar('RREPLACES', True), d) or "" |
757 | splitrconflicts = strip_multilib(localdata.getVar('RCONFLICTS', True), d) or "" | 757 | splitrconflicts = strip_multilib(localdata.getVar('RCONFLICTS', True), d) or "" |
758 | splitrobsoletes = [] | 758 | splitrobsoletes = [] |
759 | 759 | ||
760 | # Gather special src/first package data | 760 | # Gather special src/first package data |
761 | if srcname == splitname: | 761 | if srcname == splitname: |
762 | srcrdepends = splitrdepends | 762 | srcrdepends = splitrdepends |
763 | srcrrecommends = splitrrecommends | 763 | srcrrecommends = splitrrecommends |
764 | srcrsuggests = splitrsuggests | 764 | srcrsuggests = splitrsuggests |
765 | srcrprovides = splitrprovides | 765 | srcrprovides = splitrprovides |
766 | srcrreplaces = splitrreplaces | 766 | srcrreplaces = splitrreplaces |
767 | srcrconflicts = splitrconflicts | 767 | srcrconflicts = splitrconflicts |
768 | 768 | ||
769 | srcpreinst = localdata.getVar('pkg_preinst', True) | 769 | srcpreinst = localdata.getVar('pkg_preinst', True) |
770 | srcpostinst = localdata.getVar('pkg_postinst', True) | 770 | srcpostinst = localdata.getVar('pkg_postinst', True) |
771 | srcprerm = localdata.getVar('pkg_prerm', True) | 771 | srcprerm = localdata.getVar('pkg_prerm', True) |
772 | srcpostrm = localdata.getVar('pkg_postrm', True) | 772 | srcpostrm = localdata.getVar('pkg_postrm', True) |
773 | 773 | ||
774 | file_list = [] | 774 | file_list = [] |
775 | walk_files(root, file_list, conffiles) | 775 | walk_files(root, file_list, conffiles) |
776 | if not file_list and localdata.getVar('ALLOW_EMPTY') != "1": | 776 | if not file_list and localdata.getVar('ALLOW_EMPTY') != "1": |
777 | bb.note("Not creating empty RPM package for %s" % splitname) | 777 | bb.note("Not creating empty RPM package for %s" % splitname) |
778 | else: | 778 | else: |
779 | bb.note("Creating RPM package for %s" % splitname) | 779 | bb.note("Creating RPM package for %s" % splitname) |
780 | spec_files_top.append('%files') | 780 | spec_files_top.append('%files') |
781 | spec_files_top.append('%defattr(-,-,-,-)') | 781 | spec_files_top.append('%defattr(-,-,-,-)') |
782 | if file_list: | 782 | if file_list: |
783 | bb.note("Creating RPM package for %s" % splitname) | 783 | bb.note("Creating RPM package for %s" % splitname) |
784 | spec_files_top.extend(file_list) | 784 | spec_files_top.extend(file_list) |
785 | else: | 785 | else: |
786 | bb.note("Creating EMPTY RPM Package for %s" % splitname) | 786 | bb.note("Creating EMPTY RPM Package for %s" % splitname) |
787 | spec_files_top.append('') | 787 | spec_files_top.append('') |
788 | 788 | ||
789 | bb.utils.unlockfile(lf) | 789 | bb.utils.unlockfile(lf) |
790 | continue | 790 | continue |
791 | 791 | ||
792 | # Process subpackage data | 792 | # Process subpackage data |
793 | spec_preamble_bottom.append('%%package -n %s' % splitname) | 793 | spec_preamble_bottom.append('%%package -n %s' % splitname) |
794 | spec_preamble_bottom.append('Summary: %s' % splitsummary) | 794 | spec_preamble_bottom.append('Summary: %s' % splitsummary) |
795 | if srcversion != splitversion: | 795 | if srcversion != splitversion: |
796 | spec_preamble_bottom.append('Version: %s' % splitversion) | 796 | spec_preamble_bottom.append('Version: %s' % splitversion) |
797 | if srcrelease != splitrelease: | 797 | if srcrelease != splitrelease: |
798 | spec_preamble_bottom.append('Release: %s' % splitrelease) | 798 | spec_preamble_bottom.append('Release: %s' % splitrelease) |
799 | if srcepoch != splitepoch: | 799 | if srcepoch != splitepoch: |
800 | spec_preamble_bottom.append('Epoch: %s' % splitepoch) | 800 | spec_preamble_bottom.append('Epoch: %s' % splitepoch) |
801 | if srclicense != splitlicense: | 801 | if srclicense != splitlicense: |
802 | spec_preamble_bottom.append('License: %s' % splitlicense) | 802 | spec_preamble_bottom.append('License: %s' % splitlicense) |
803 | spec_preamble_bottom.append('Group: %s' % splitsection) | 803 | spec_preamble_bottom.append('Group: %s' % splitsection) |
804 | 804 | ||
805 | # Replaces == Obsoletes && Provides | 805 | # Replaces == Obsoletes && Provides |
806 | if splitrreplaces and splitrreplaces.strip() != "": | 806 | if splitrreplaces and splitrreplaces.strip() != "": |
807 | for dep in splitrreplaces.split(','): | 807 | for dep in splitrreplaces.split(','): |
808 | if splitrprovides: | 808 | if splitrprovides: |
809 | splitrprovides = splitrprovides + ", " + dep | 809 | splitrprovides = splitrprovides + ", " + dep |
810 | else: | 810 | else: |
811 | splitrprovides = dep | 811 | splitrprovides = dep |
812 | if splitrobsoletes: | 812 | if splitrobsoletes: |
813 | splitrobsoletes = splitrobsoletes + ", " + dep | 813 | splitrobsoletes = splitrobsoletes + ", " + dep |
814 | else: | 814 | else: |
815 | splitrobsoletes = dep | 815 | splitrobsoletes = dep |
816 | 816 | ||
817 | print_deps(splitrdepends, "Requires", spec_preamble_bottom, d) | 817 | print_deps(splitrdepends, "Requires", spec_preamble_bottom, d) |
818 | # Suggests in RPM are like recommends in OE-core! | 818 | # Suggests in RPM are like recommends in OE-core! |
819 | print_deps(splitrrecommends, "Suggests", spec_preamble_bottom, d) | 819 | print_deps(splitrrecommends, "Suggests", spec_preamble_bottom, d) |
820 | # While there is no analog for suggests... (So call them recommends for now) | 820 | # While there is no analog for suggests... (So call them recommends for now) |
821 | print_deps(splitrsuggests, "Recommends", spec_preamble_bottom, d) | 821 | print_deps(splitrsuggests, "Recommends", spec_preamble_bottom, d) |
822 | print_deps(splitrprovides, "Provides", spec_preamble_bottom, d) | 822 | print_deps(splitrprovides, "Provides", spec_preamble_bottom, d) |
823 | print_deps(splitrobsoletes, "Obsoletes", spec_preamble_bottom, d) | 823 | print_deps(splitrobsoletes, "Obsoletes", spec_preamble_bottom, d) |
824 | 824 | ||
825 | # conflicts can not be in a provide! We will need to filter it. | 825 | # conflicts can not be in a provide! We will need to filter it. |
826 | if splitrconflicts: | 826 | if splitrconflicts: |
827 | depends_dict = bb.utils.explode_dep_versions(splitrconflicts) | 827 | depends_dict = bb.utils.explode_dep_versions(splitrconflicts) |
828 | newdeps_dict = {} | 828 | newdeps_dict = {} |
829 | for dep in depends_dict: | 829 | for dep in depends_dict: |
830 | if dep not in splitrprovides: | 830 | if dep not in splitrprovides: |
831 | newdeps_dict[dep] = depends_dict[dep] | 831 | newdeps_dict[dep] = depends_dict[dep] |
832 | if newdeps_dict: | 832 | if newdeps_dict: |
833 | splitrconflicts = bb.utils.join_deps(newdeps_dict) | 833 | splitrconflicts = bb.utils.join_deps(newdeps_dict) |
834 | else: | 834 | else: |
835 | splitrconflicts = "" | 835 | splitrconflicts = "" |
836 | 836 | ||
837 | print_deps(splitrconflicts, "Conflicts", spec_preamble_bottom, d) | 837 | print_deps(splitrconflicts, "Conflicts", spec_preamble_bottom, d) |
838 | 838 | ||
839 | spec_preamble_bottom.append('') | 839 | spec_preamble_bottom.append('') |
840 | 840 | ||
841 | spec_preamble_bottom.append('%%description -n %s' % splitname) | 841 | spec_preamble_bottom.append('%%description -n %s' % splitname) |
842 | dedent_text = textwrap.dedent(splitdescription).strip() | 842 | dedent_text = textwrap.dedent(splitdescription).strip() |
843 | spec_preamble_bottom.append('%s' % textwrap.fill(dedent_text, width=75)) | 843 | spec_preamble_bottom.append('%s' % textwrap.fill(dedent_text, width=75)) |
844 | 844 | ||
845 | spec_preamble_bottom.append('') | 845 | spec_preamble_bottom.append('') |
846 | 846 | ||
847 | # Now process scriptlets | 847 | # Now process scriptlets |
848 | for script in ["preinst", "postinst", "prerm", "postrm"]: | 848 | for script in ["preinst", "postinst", "prerm", "postrm"]: |
849 | scriptvar = localdata.getVar('pkg_%s' % script, True) | 849 | scriptvar = localdata.getVar('pkg_%s' % script, True) |
850 | if not scriptvar: | 850 | if not scriptvar: |
851 | continue | 851 | continue |
852 | if script == 'preinst': | 852 | if script == 'preinst': |
853 | spec_scriptlets_bottom.append('%%pre -n %s' % splitname) | 853 | spec_scriptlets_bottom.append('%%pre -n %s' % splitname) |
854 | elif script == 'postinst': | 854 | elif script == 'postinst': |
855 | spec_scriptlets_bottom.append('%%post -n %s' % splitname) | 855 | spec_scriptlets_bottom.append('%%post -n %s' % splitname) |
856 | elif script == 'prerm': | 856 | elif script == 'prerm': |
857 | spec_scriptlets_bottom.append('%%preun -n %s' % splitname) | 857 | spec_scriptlets_bottom.append('%%preun -n %s' % splitname) |
858 | scriptvar = wrap_uninstall(scriptvar) | 858 | scriptvar = wrap_uninstall(scriptvar) |
859 | elif script == 'postrm': | 859 | elif script == 'postrm': |
860 | spec_scriptlets_bottom.append('%%postun -n %s' % splitname) | 860 | spec_scriptlets_bottom.append('%%postun -n %s' % splitname) |
861 | scriptvar = wrap_uninstall(scriptvar) | 861 | scriptvar = wrap_uninstall(scriptvar) |
862 | spec_scriptlets_bottom.append('# %s - %s' % (splitname, script)) | 862 | spec_scriptlets_bottom.append('# %s - %s' % (splitname, script)) |
863 | spec_scriptlets_bottom.append(scriptvar) | 863 | spec_scriptlets_bottom.append(scriptvar) |
864 | spec_scriptlets_bottom.append('') | 864 | spec_scriptlets_bottom.append('') |
865 | 865 | ||
866 | # Now process files | 866 | # Now process files |
867 | file_list = [] | 867 | file_list = [] |
868 | walk_files(root, file_list, conffiles) | 868 | walk_files(root, file_list, conffiles) |
869 | if not file_list and localdata.getVar('ALLOW_EMPTY') != "1": | 869 | if not file_list and localdata.getVar('ALLOW_EMPTY') != "1": |
870 | bb.note("Not creating empty RPM package for %s" % splitname) | 870 | bb.note("Not creating empty RPM package for %s" % splitname) |
871 | else: | 871 | else: |
872 | spec_files_bottom.append('%%files -n %s' % splitname) | 872 | spec_files_bottom.append('%%files -n %s' % splitname) |
873 | spec_files_bottom.append('%defattr(-,-,-,-)') | 873 | spec_files_bottom.append('%defattr(-,-,-,-)') |
874 | if file_list: | 874 | if file_list: |
875 | bb.note("Creating RPM package for %s" % splitname) | 875 | bb.note("Creating RPM package for %s" % splitname) |
876 | spec_files_bottom.extend(file_list) | 876 | spec_files_bottom.extend(file_list) |
877 | else: | 877 | else: |
878 | bb.note("Creating EMPTY RPM Package for %s" % splitname) | 878 | bb.note("Creating EMPTY RPM Package for %s" % splitname) |
879 | spec_files_bottom.append('') | 879 | spec_files_bottom.append('') |
880 | 880 | ||
881 | del localdata | 881 | del localdata |
882 | bb.utils.unlockfile(lf) | 882 | bb.utils.unlockfile(lf) |
883 | |||
884 | add_prep(d,spec_files_bottom) | ||
885 | spec_preamble_top.append('Summary: %s' % srcsummary) | ||
886 | spec_preamble_top.append('Name: %s' % srcname) | ||
887 | spec_preamble_top.append('Version: %s' % srcversion) | ||
888 | spec_preamble_top.append('Release: %s' % srcrelease) | ||
889 | if srcepoch and srcepoch.strip() != "": | ||
890 | spec_preamble_top.append('Epoch: %s' % srcepoch) | ||
891 | spec_preamble_top.append('License: %s' % srclicense) | ||
892 | spec_preamble_top.append('Group: %s' % srcsection) | ||
893 | spec_preamble_top.append('Packager: %s' % srcmaintainer) | ||
894 | spec_preamble_top.append('URL: %s' % srchomepage) | ||
895 | source_list = get_tarballs(d) | ||
896 | tail_source(d,source_list,None) | ||
897 | |||
898 | # Replaces == Obsoletes && Provides | ||
899 | if srcrreplaces and srcrreplaces.strip() != "": | ||
900 | for dep in srcrreplaces.split(','): | ||
901 | if srcrprovides: | ||
902 | srcrprovides = srcrprovides + ", " + dep | ||
903 | else: | ||
904 | srcrprovides = dep | ||
905 | if srcrobsoletes: | ||
906 | srcrobsoletes = srcrobsoletes + ", " + dep | ||
907 | else: | ||
908 | srcrobsoletes = dep | ||
909 | |||
910 | print_deps(srcdepends, "BuildRequires", spec_preamble_top, d) | ||
911 | print_deps(srcrdepends, "Requires", spec_preamble_top, d) | ||
912 | # Suggests in RPM are like recommends in OE-core! | ||
913 | print_deps(srcrrecommends, "Suggests", spec_preamble_top, d) | ||
914 | # While there is no analog for suggests... (So call them recommends for now) | ||
915 | print_deps(srcrsuggests, "Recommends", spec_preamble_top, d) | ||
916 | print_deps(srcrprovides, "Provides", spec_preamble_top, d) | ||
917 | print_deps(srcrobsoletes, "Obsoletes", spec_preamble_top, d) | ||
918 | 883 | ||
919 | # conflicts can not be in a provide! We will need to filter it. | 884 | add_prep(d,spec_files_bottom) |
920 | if srcrconflicts: | 885 | spec_preamble_top.append('Summary: %s' % srcsummary) |
921 | depends_dict = bb.utils.explode_dep_versions(srcrconflicts) | 886 | spec_preamble_top.append('Name: %s' % srcname) |
922 | newdeps_dict = {} | 887 | spec_preamble_top.append('Version: %s' % srcversion) |
923 | for dep in depends_dict: | 888 | spec_preamble_top.append('Release: %s' % srcrelease) |
924 | if dep not in srcrprovides: | 889 | if srcepoch and srcepoch.strip() != "": |
925 | newdeps_dict[dep] = depends_dict[dep] | 890 | spec_preamble_top.append('Epoch: %s' % srcepoch) |
926 | if newdeps_dict: | 891 | spec_preamble_top.append('License: %s' % srclicense) |
927 | srcrconflicts = bb.utils.join_deps(newdeps_dict) | 892 | spec_preamble_top.append('Group: %s' % srcsection) |
928 | else: | 893 | spec_preamble_top.append('Packager: %s' % srcmaintainer) |
929 | srcrconflicts = "" | 894 | spec_preamble_top.append('URL: %s' % srchomepage) |
930 | 895 | source_list = get_tarballs(d) | |
931 | print_deps(srcrconflicts, "Conflicts", spec_preamble_top, d) | 896 | tail_source(d,source_list,None) |
932 | 897 | ||
933 | spec_preamble_top.append('') | 898 | # Replaces == Obsoletes && Provides |
934 | 899 | if srcrreplaces and srcrreplaces.strip() != "": | |
935 | spec_preamble_top.append('%description') | 900 | for dep in srcrreplaces.split(','): |
936 | dedent_text = textwrap.dedent(srcdescription).strip() | 901 | if srcrprovides: |
937 | spec_preamble_top.append('%s' % textwrap.fill(dedent_text, width=75)) | 902 | srcrprovides = srcrprovides + ", " + dep |
938 | 903 | else: | |
939 | spec_preamble_top.append('') | 904 | srcrprovides = dep |
940 | 905 | if srcrobsoletes: | |
941 | if srcpreinst: | 906 | srcrobsoletes = srcrobsoletes + ", " + dep |
942 | spec_scriptlets_top.append('%pre') | 907 | else: |
943 | spec_scriptlets_top.append('# %s - preinst' % srcname) | 908 | srcrobsoletes = dep |
944 | spec_scriptlets_top.append(srcpreinst) | 909 | |
945 | spec_scriptlets_top.append('') | 910 | print_deps(srcdepends, "BuildRequires", spec_preamble_top, d) |
946 | if srcpostinst: | 911 | print_deps(srcrdepends, "Requires", spec_preamble_top, d) |
947 | spec_scriptlets_top.append('%post') | 912 | # Suggests in RPM are like recommends in OE-core! |
948 | spec_scriptlets_top.append('# %s - postinst' % srcname) | 913 | print_deps(srcrrecommends, "Suggests", spec_preamble_top, d) |
949 | spec_scriptlets_top.append(srcpostinst) | 914 | # While there is no analog for suggests... (So call them recommends for now) |
950 | spec_scriptlets_top.append('') | 915 | print_deps(srcrsuggests, "Recommends", spec_preamble_top, d) |
951 | if srcprerm: | 916 | print_deps(srcrprovides, "Provides", spec_preamble_top, d) |
952 | spec_scriptlets_top.append('%preun') | 917 | print_deps(srcrobsoletes, "Obsoletes", spec_preamble_top, d) |
953 | spec_scriptlets_top.append('# %s - prerm' % srcname) | 918 | |
954 | scriptvar = wrap_uninstall(srcprerm) | 919 | # conflicts can not be in a provide! We will need to filter it. |
955 | spec_scriptlets_top.append(scriptvar) | 920 | if srcrconflicts: |
956 | spec_scriptlets_top.append('') | 921 | depends_dict = bb.utils.explode_dep_versions(srcrconflicts) |
957 | if srcpostrm: | 922 | newdeps_dict = {} |
958 | spec_scriptlets_top.append('%postun') | 923 | for dep in depends_dict: |
959 | spec_scriptlets_top.append('# %s - postrm' % srcname) | 924 | if dep not in srcrprovides: |
960 | scriptvar = wrap_uninstall(srcpostrm) | 925 | newdeps_dict[dep] = depends_dict[dep] |
961 | spec_scriptlets_top.append(scriptvar) | 926 | if newdeps_dict: |
962 | spec_scriptlets_top.append('') | 927 | srcrconflicts = bb.utils.join_deps(newdeps_dict) |
963 | 928 | else: | |
964 | # Write the SPEC file | 929 | srcrconflicts = "" |
965 | try: | 930 | |
966 | from __builtin__ import file | 931 | print_deps(srcrconflicts, "Conflicts", spec_preamble_top, d) |
967 | specfile = file(outspecfile, 'w') | 932 | |
968 | except OSError: | 933 | spec_preamble_top.append('') |
969 | raise bb.build.FuncFailed("unable to open spec file for writing.") | 934 | |
970 | 935 | spec_preamble_top.append('%description') | |
971 | # RPMSPEC_PREAMBLE is a way to add arbitrary text to the top | 936 | dedent_text = textwrap.dedent(srcdescription).strip() |
972 | # of the generated spec file | 937 | spec_preamble_top.append('%s' % textwrap.fill(dedent_text, width=75)) |
973 | external_preamble = d.getVar("RPMSPEC_PREAMBLE", True) | 938 | |
974 | if external_preamble: | 939 | spec_preamble_top.append('') |
975 | specfile.write(external_preamble + "\n") | 940 | |
976 | 941 | if srcpreinst: | |
977 | for line in spec_preamble_top: | 942 | spec_scriptlets_top.append('%pre') |
978 | specfile.write(line + "\n") | 943 | spec_scriptlets_top.append('# %s - preinst' % srcname) |
979 | 944 | spec_scriptlets_top.append(srcpreinst) | |
980 | for line in spec_preamble_bottom: | 945 | spec_scriptlets_top.append('') |
981 | specfile.write(line + "\n") | 946 | if srcpostinst: |
982 | 947 | spec_scriptlets_top.append('%post') | |
983 | for line in spec_scriptlets_top: | 948 | spec_scriptlets_top.append('# %s - postinst' % srcname) |
984 | specfile.write(line + "\n") | 949 | spec_scriptlets_top.append(srcpostinst) |
985 | 950 | spec_scriptlets_top.append('') | |
986 | for line in spec_scriptlets_bottom: | 951 | if srcprerm: |
987 | specfile.write(line + "\n") | 952 | spec_scriptlets_top.append('%preun') |
988 | 953 | spec_scriptlets_top.append('# %s - prerm' % srcname) | |
989 | for line in spec_files_top: | 954 | scriptvar = wrap_uninstall(srcprerm) |
990 | specfile.write(line + "\n") | 955 | spec_scriptlets_top.append(scriptvar) |
991 | 956 | spec_scriptlets_top.append('') | |
992 | for line in spec_files_bottom: | 957 | if srcpostrm: |
993 | specfile.write(line + "\n") | 958 | spec_scriptlets_top.append('%postun') |
994 | 959 | spec_scriptlets_top.append('# %s - postrm' % srcname) | |
995 | specfile.close() | 960 | scriptvar = wrap_uninstall(srcpostrm) |
961 | spec_scriptlets_top.append(scriptvar) | ||
962 | spec_scriptlets_top.append('') | ||
963 | |||
964 | # Write the SPEC file | ||
965 | try: | ||
966 | from __builtin__ import file | ||
967 | specfile = file(outspecfile, 'w') | ||
968 | except OSError: | ||
969 | raise bb.build.FuncFailed("unable to open spec file for writing.") | ||
970 | |||
971 | # RPMSPEC_PREAMBLE is a way to add arbitrary text to the top | ||
972 | # of the generated spec file | ||
973 | external_preamble = d.getVar("RPMSPEC_PREAMBLE", True) | ||
974 | if external_preamble: | ||
975 | specfile.write(external_preamble + "\n") | ||
976 | |||
977 | for line in spec_preamble_top: | ||
978 | specfile.write(line + "\n") | ||
979 | |||
980 | for line in spec_preamble_bottom: | ||
981 | specfile.write(line + "\n") | ||
982 | |||
983 | for line in spec_scriptlets_top: | ||
984 | specfile.write(line + "\n") | ||
985 | |||
986 | for line in spec_scriptlets_bottom: | ||
987 | specfile.write(line + "\n") | ||
988 | |||
989 | for line in spec_files_top: | ||
990 | specfile.write(line + "\n") | ||
991 | |||
992 | for line in spec_files_bottom: | ||
993 | specfile.write(line + "\n") | ||
994 | |||
995 | specfile.close() | ||
996 | } | 996 | } |
997 | 997 | ||
998 | python do_package_rpm () { | 998 | python do_package_rpm () { |
999 | import os | 999 | import os |
1000 | 1000 | ||
1001 | def creat_srpm_dir(d): | 1001 | def creat_srpm_dir(d): |
1002 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM': | 1002 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM': |
1003 | clean_licenses = get_licenses(d) | 1003 | clean_licenses = get_licenses(d) |
1004 | pkgwritesrpmdir = bb.data.expand('${PKGWRITEDIRSRPM}/${PACKAGE_ARCH_EXTEND}', d) | 1004 | pkgwritesrpmdir = bb.data.expand('${PKGWRITEDIRSRPM}/${PACKAGE_ARCH_EXTEND}', d) |
1005 | pkgwritesrpmdir = pkgwritesrpmdir + '/' + clean_licenses | 1005 | pkgwritesrpmdir = pkgwritesrpmdir + '/' + clean_licenses |
1006 | bb.mkdirhier(pkgwritesrpmdir) | 1006 | bb.mkdirhier(pkgwritesrpmdir) |
1007 | os.chmod(pkgwritesrpmdir, 0755) | 1007 | os.chmod(pkgwritesrpmdir, 0755) |
1008 | return pkgwritesrpmdir | 1008 | return pkgwritesrpmdir |
1009 | 1009 | ||
1010 | # We need a simple way to remove the MLPREFIX from the package name, | 1010 | # We need a simple way to remove the MLPREFIX from the package name, |
1011 | # and dependency information... | 1011 | # and dependency information... |
1012 | def strip_multilib(name, d): | 1012 | def strip_multilib(name, d): |
1013 | ml = d.getVar("MLPREFIX", True) | 1013 | ml = d.getVar("MLPREFIX", True) |
1014 | if ml and name and len(ml) != 0 and name.find(ml) >= 0: | 1014 | if ml and name and len(ml) != 0 and name.find(ml) >= 0: |
1015 | return "".join(name.split(ml)) | 1015 | return "".join(name.split(ml)) |
1016 | return name | 1016 | return name |
1017 | 1017 | ||
1018 | workdir = d.getVar('WORKDIR', True) | 1018 | workdir = d.getVar('WORKDIR', True) |
1019 | outdir = d.getVar('DEPLOY_DIR_IPK', True) | 1019 | outdir = d.getVar('DEPLOY_DIR_IPK', True) |
1020 | tmpdir = d.getVar('TMPDIR', True) | 1020 | tmpdir = d.getVar('TMPDIR', True) |
1021 | pkgd = d.getVar('PKGD', True) | 1021 | pkgd = d.getVar('PKGD', True) |
1022 | pkgdest = d.getVar('PKGDEST', True) | 1022 | pkgdest = d.getVar('PKGDEST', True) |
1023 | if not workdir or not outdir or not pkgd or not tmpdir: | 1023 | if not workdir or not outdir or not pkgd or not tmpdir: |
1024 | bb.error("Variables incorrectly set, unable to package") | 1024 | bb.error("Variables incorrectly set, unable to package") |
1025 | return | 1025 | return |
1026 | 1026 | ||
1027 | packages = d.getVar('PACKAGES', True) | 1027 | packages = d.getVar('PACKAGES', True) |
1028 | if not packages or packages == '': | 1028 | if not packages or packages == '': |
1029 | bb.debug(1, "No packages; nothing to do") | 1029 | bb.debug(1, "No packages; nothing to do") |
1030 | return | 1030 | return |
1031 | 1031 | ||
1032 | # Construct the spec file... | 1032 | # Construct the spec file... |
1033 | srcname = strip_multilib(d.getVar('PN', True), d) | 1033 | srcname = strip_multilib(d.getVar('PN', True), d) |
1034 | outspecfile = workdir + "/" + srcname + ".spec" | 1034 | outspecfile = workdir + "/" + srcname + ".spec" |
1035 | d.setVar('OUTSPECFILE', outspecfile) | 1035 | d.setVar('OUTSPECFILE', outspecfile) |
1036 | bb.build.exec_func('write_specfile', d) | 1036 | bb.build.exec_func('write_specfile', d) |
1037 | 1037 | ||
1038 | # Construct per file dependencies file | 1038 | # Construct per file dependencies file |
1039 | def dump_filerdeps(varname, outfile, d): | 1039 | def dump_filerdeps(varname, outfile, d): |
1040 | outfile.write("#!/usr/bin/env python\n\n") | 1040 | outfile.write("#!/usr/bin/env python\n\n") |
1041 | outfile.write("# Dependency table\n") | 1041 | outfile.write("# Dependency table\n") |
1042 | outfile.write('deps = {\n') | 1042 | outfile.write('deps = {\n') |
1043 | for pkg in packages.split(): | 1043 | for pkg in packages.split(): |
1044 | dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg | 1044 | dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg |
1045 | dependsflist = (d.getVar(dependsflist_key, True) or "") | 1045 | dependsflist = (d.getVar(dependsflist_key, True) or "") |
1046 | for dfile in dependsflist.split(): | 1046 | for dfile in dependsflist.split(): |
1047 | key = "FILE" + varname + "_" + dfile + "_" + pkg | 1047 | key = "FILE" + varname + "_" + dfile + "_" + pkg |
1048 | depends_dict = bb.utils.explode_dep_versions(d.getVar(key, True) or "") | 1048 | depends_dict = bb.utils.explode_dep_versions(d.getVar(key, True) or "") |
1049 | file = dfile.replace("@underscore@", "_") | 1049 | file = dfile.replace("@underscore@", "_") |
1050 | file = file.replace("@closebrace@", "]") | 1050 | file = file.replace("@closebrace@", "]") |
1051 | file = file.replace("@openbrace@", "[") | 1051 | file = file.replace("@openbrace@", "[") |
1052 | file = file.replace("@tab@", "\t") | 1052 | file = file.replace("@tab@", "\t") |
1053 | file = file.replace("@space@", " ") | 1053 | file = file.replace("@space@", " ") |
1054 | file = file.replace("@at@", "@") | 1054 | file = file.replace("@at@", "@") |
1055 | outfile.write('"' + pkgd + file + '" : "') | 1055 | outfile.write('"' + pkgd + file + '" : "') |
1056 | for dep in depends_dict: | 1056 | for dep in depends_dict: |
1057 | ver = depends_dict[dep] | 1057 | ver = depends_dict[dep] |
1058 | if dep and ver: | 1058 | if dep and ver: |
1059 | ver = ver.replace("(","") | 1059 | ver = ver.replace("(","") |
1060 | ver = ver.replace(")","") | 1060 | ver = ver.replace(")","") |
1061 | outfile.write(dep + " " + ver + " ") | 1061 | outfile.write(dep + " " + ver + " ") |
1062 | else: | 1062 | else: |
1063 | outfile.write(dep + " ") | 1063 | outfile.write(dep + " ") |
1064 | outfile.write('",\n') | 1064 | outfile.write('",\n') |
1065 | outfile.write('}\n\n') | 1065 | outfile.write('}\n\n') |
1066 | outfile.write("import sys\n") | 1066 | outfile.write("import sys\n") |
1067 | outfile.write("while 1:\n") | 1067 | outfile.write("while 1:\n") |
1068 | outfile.write("\tline = sys.stdin.readline().strip()\n") | 1068 | outfile.write("\tline = sys.stdin.readline().strip()\n") |
1069 | outfile.write("\tif not line:\n") | 1069 | outfile.write("\tif not line:\n") |
1070 | outfile.write("\t\tsys.exit(0)\n") | 1070 | outfile.write("\t\tsys.exit(0)\n") |
1071 | outfile.write("\tif line in deps:\n") | 1071 | outfile.write("\tif line in deps:\n") |
1072 | outfile.write("\t\tprint(deps[line] + '\\n')\n") | 1072 | outfile.write("\t\tprint(deps[line] + '\\n')\n") |
1073 | 1073 | ||
1074 | # OE-core dependencies a.k.a. RPM requires | 1074 | # OE-core dependencies a.k.a. RPM requires |
1075 | outdepends = workdir + "/" + srcname + ".requires" | 1075 | outdepends = workdir + "/" + srcname + ".requires" |
1076 | 1076 | ||
1077 | try: | 1077 | try: |
1078 | from __builtin__ import file | 1078 | from __builtin__ import file |
1079 | dependsfile = file(outdepends, 'w') | 1079 | dependsfile = file(outdepends, 'w') |
1080 | except OSError: | 1080 | except OSError: |
1081 | raise bb.build.FuncFailed("unable to open spec file for writing.") | 1081 | raise bb.build.FuncFailed("unable to open spec file for writing.") |
1082 | 1082 | ||
1083 | dump_filerdeps('RDEPENDS', dependsfile, d) | 1083 | dump_filerdeps('RDEPENDS', dependsfile, d) |
1084 | 1084 | ||
1085 | dependsfile.close() | 1085 | dependsfile.close() |
1086 | os.chmod(outdepends, 0755) | 1086 | os.chmod(outdepends, 0755) |
1087 | 1087 | ||
1088 | # OE-core / RPM Provides | 1088 | # OE-core / RPM Provides |
1089 | outprovides = workdir + "/" + srcname + ".provides" | 1089 | outprovides = workdir + "/" + srcname + ".provides" |
1090 | 1090 | ||
1091 | try: | 1091 | try: |
1092 | from __builtin__ import file | 1092 | from __builtin__ import file |
1093 | providesfile = file(outprovides, 'w') | 1093 | providesfile = file(outprovides, 'w') |
1094 | except OSError: | 1094 | except OSError: |
1095 | raise bb.build.FuncFailed("unable to open spec file for writing.") | 1095 | raise bb.build.FuncFailed("unable to open spec file for writing.") |
1096 | 1096 | ||
1097 | dump_filerdeps('RPROVIDES', providesfile, d) | 1097 | dump_filerdeps('RPROVIDES', providesfile, d) |
1098 | 1098 | ||
1099 | providesfile.close() | 1099 | providesfile.close() |
1100 | os.chmod(outprovides, 0755) | 1100 | os.chmod(outprovides, 0755) |
1101 | 1101 | ||
1102 | # Setup the rpmbuild arguments... | 1102 | # Setup the rpmbuild arguments... |
1103 | rpmbuild = d.getVar('RPMBUILD', True) | 1103 | rpmbuild = d.getVar('RPMBUILD', True) |
1104 | targetsys = d.getVar('TARGET_SYS', True) | 1104 | targetsys = d.getVar('TARGET_SYS', True) |
1105 | targetvendor = d.getVar('TARGET_VENDOR', True) | 1105 | targetvendor = d.getVar('TARGET_VENDOR', True) |
1106 | package_arch = d.getVar('PACKAGE_ARCH', True) or "" | 1106 | package_arch = d.getVar('PACKAGE_ARCH', True) or "" |
1107 | if package_arch not in "all any noarch".split(): | 1107 | if package_arch not in "all any noarch".split(): |
1108 | ml_prefix = (d.getVar('MLPREFIX', True) or "").replace("-", "_") | 1108 | ml_prefix = (d.getVar('MLPREFIX', True) or "").replace("-", "_") |
1109 | d.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch) | 1109 | d.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch) |
1110 | else: | 1110 | else: |
1111 | d.setVar('PACKAGE_ARCH_EXTEND', package_arch) | 1111 | d.setVar('PACKAGE_ARCH_EXTEND', package_arch) |
1112 | pkgwritedir = d.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}') | 1112 | pkgwritedir = d.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}') |
1113 | pkgarch = d.expand('${PACKAGE_ARCH_EXTEND}${TARGET_VENDOR}-${TARGET_OS}') | 1113 | pkgarch = d.expand('${PACKAGE_ARCH_EXTEND}${TARGET_VENDOR}-${TARGET_OS}') |
1114 | magicfile = d.expand('${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc') | 1114 | magicfile = d.expand('${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc') |
1115 | bb.mkdirhier(pkgwritedir) | 1115 | bb.mkdirhier(pkgwritedir) |
1116 | os.chmod(pkgwritedir, 0755) | 1116 | os.chmod(pkgwritedir, 0755) |
1117 | 1117 | ||
1118 | cmd = rpmbuild | 1118 | cmd = rpmbuild |
1119 | cmd = cmd + " --nodeps --short-circuit --target " + pkgarch + " --buildroot " + pkgd | 1119 | cmd = cmd + " --nodeps --short-circuit --target " + pkgarch + " --buildroot " + pkgd |
1120 | cmd = cmd + " --define '_topdir " + workdir + "' --define '_rpmdir " + pkgwritedir + "'" | 1120 | cmd = cmd + " --define '_topdir " + workdir + "' --define '_rpmdir " + pkgwritedir + "'" |
1121 | cmd = cmd + " --define '_build_name_fmt %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm'" | 1121 | cmd = cmd + " --define '_build_name_fmt %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm'" |
1122 | cmd = cmd + " --define '_use_internal_dependency_generator 0'" | 1122 | cmd = cmd + " --define '_use_internal_dependency_generator 0'" |
1123 | cmd = cmd + " --define '__find_requires " + outdepends + "'" | 1123 | cmd = cmd + " --define '__find_requires " + outdepends + "'" |
1124 | cmd = cmd + " --define '__find_provides " + outprovides + "'" | 1124 | cmd = cmd + " --define '__find_provides " + outprovides + "'" |
1125 | cmd = cmd + " --define '_unpackaged_files_terminate_build 0'" | 1125 | cmd = cmd + " --define '_unpackaged_files_terminate_build 0'" |
1126 | cmd = cmd + " --define 'debug_package %{nil}'" | 1126 | cmd = cmd + " --define 'debug_package %{nil}'" |
1127 | cmd = cmd + " --define '_rpmfc_magic_path " + magicfile + "'" | 1127 | cmd = cmd + " --define '_rpmfc_magic_path " + magicfile + "'" |
1128 | cmd = cmd + " --define '_tmppath " + workdir + "'" | 1128 | cmd = cmd + " --define '_tmppath " + workdir + "'" |
1129 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM': | 1129 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM': |
1130 | cmdsrpm = cmd + " --define '_sourcedir " + workdir + "' --define '_srcrpmdir " + creat_srpm_dir(d) + "'" | 1130 | cmdsrpm = cmd + " --define '_sourcedir " + workdir + "' --define '_srcrpmdir " + creat_srpm_dir(d) + "'" |
1131 | cmdsrpm = 'fakeroot ' + cmdsrpm + " -bs " + outspecfile | 1131 | cmdsrpm = 'fakeroot ' + cmdsrpm + " -bs " + outspecfile |
1132 | cmd = cmd + " -bb " + outspecfile | 1132 | cmd = cmd + " -bb " + outspecfile |
1133 | 1133 | ||
1134 | # Build the source rpm package ! | 1134 | # Build the source rpm package ! |
1135 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM': | 1135 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM': |
1136 | d.setVar('SBUILDSPEC', cmdsrpm + "\n") | 1136 | d.setVar('SBUILDSPEC', cmdsrpm + "\n") |
1137 | d.setVarFlag('SBUILDSPEC', 'func', '1') | 1137 | d.setVarFlag('SBUILDSPEC', 'func', '1') |
1138 | bb.build.exec_func('SBUILDSPEC', d) | 1138 | bb.build.exec_func('SBUILDSPEC', d) |
1139 | 1139 | ||
1140 | 1140 | ||
1141 | # Build the rpm package! | 1141 | # Build the rpm package! |
1142 | d.setVar('BUILDSPEC', cmd + "\n") | 1142 | d.setVar('BUILDSPEC', cmd + "\n") |
1143 | d.setVarFlag('BUILDSPEC', 'func', '1') | 1143 | d.setVarFlag('BUILDSPEC', 'func', '1') |
1144 | bb.build.exec_func('BUILDSPEC', d) | 1144 | bb.build.exec_func('BUILDSPEC', d) |
1145 | } | 1145 | } |
1146 | 1146 | ||
1147 | python () { | 1147 | python () { |
@@ -1161,13 +1161,13 @@ do_package_write_rpm[sstate-outputdirs] = "${DEPLOY_DIR_RPM}" | |||
1161 | do_package_write_rpm[sstate-lockfile-shared] += "${DEPLOY_DIR_RPM}/rpm.lock" | 1161 | do_package_write_rpm[sstate-lockfile-shared] += "${DEPLOY_DIR_RPM}/rpm.lock" |
1162 | 1162 | ||
1163 | python do_package_write_rpm_setscene () { | 1163 | python do_package_write_rpm_setscene () { |
1164 | sstate_setscene(d) | 1164 | sstate_setscene(d) |
1165 | } | 1165 | } |
1166 | addtask do_package_write_rpm_setscene | 1166 | addtask do_package_write_rpm_setscene |
1167 | 1167 | ||
1168 | python do_package_write_rpm () { | 1168 | python do_package_write_rpm () { |
1169 | bb.build.exec_func("read_subpackage_metadata", d) | 1169 | bb.build.exec_func("read_subpackage_metadata", d) |
1170 | bb.build.exec_func("do_package_rpm", d) | 1170 | bb.build.exec_func("do_package_rpm", d) |
1171 | } | 1171 | } |
1172 | 1172 | ||
1173 | do_package_write_rpm[dirs] = "${PKGWRITEDIRRPM}" | 1173 | do_package_write_rpm[dirs] = "${PKGWRITEDIRRPM}" |
diff --git a/meta/classes/packagedata.bbclass b/meta/classes/packagedata.bbclass index 60f1aded0d..790d874c1c 100644 --- a/meta/classes/packagedata.bbclass +++ b/meta/classes/packagedata.bbclass | |||
@@ -1,13 +1,13 @@ | |||
1 | python read_subpackage_metadata () { | 1 | python read_subpackage_metadata () { |
2 | import oe.packagedata | 2 | import oe.packagedata |
3 | 3 | ||
4 | data = oe.packagedata.read_pkgdata(d.getVar('PN', True), d) | 4 | data = oe.packagedata.read_pkgdata(d.getVar('PN', True), d) |
5 | 5 | ||
6 | for key in data.keys(): | 6 | for key in data.keys(): |
7 | d.setVar(key, data[key]) | 7 | d.setVar(key, data[key]) |
8 | 8 | ||
9 | for pkg in d.getVar('PACKAGES', True).split(): | 9 | for pkg in d.getVar('PACKAGES', True).split(): |
10 | sdata = oe.packagedata.read_subpkgdata(pkg, d) | 10 | sdata = oe.packagedata.read_subpkgdata(pkg, d) |
11 | for key in sdata.keys(): | 11 | for key in sdata.keys(): |
12 | d.setVar(key, sdata[key]) | 12 | d.setVar(key, sdata[key]) |
13 | } | 13 | } |
diff --git a/meta/classes/patch.bbclass b/meta/classes/patch.bbclass index 3c4d997833..a724972821 100644 --- a/meta/classes/patch.bbclass +++ b/meta/classes/patch.bbclass | |||
@@ -8,164 +8,164 @@ PATCHDEPENDENCY = "${PATCHTOOL}-native:do_populate_sysroot" | |||
8 | inherit terminal | 8 | inherit terminal |
9 | 9 | ||
10 | def src_patches(d, all = False ): | 10 | def src_patches(d, all = False ): |
11 | workdir = d.getVar('WORKDIR', True) | 11 | workdir = d.getVar('WORKDIR', True) |
12 | fetch = bb.fetch2.Fetch([], d) | 12 | fetch = bb.fetch2.Fetch([], d) |
13 | patches = [] | 13 | patches = [] |
14 | sources = [] | 14 | sources = [] |
15 | for url in fetch.urls: | 15 | for url in fetch.urls: |
16 | local = patch_path(url, fetch, workdir) | 16 | local = patch_path(url, fetch, workdir) |
17 | if not local: | 17 | if not local: |
18 | if all: | 18 | if all: |
19 | local = fetch.localpath(url) | 19 | local = fetch.localpath(url) |
20 | sources.append(local) | 20 | sources.append(local) |
21 | continue | 21 | continue |
22 | 22 | ||
23 | urldata = fetch.ud[url] | 23 | urldata = fetch.ud[url] |
24 | parm = urldata.parm | 24 | parm = urldata.parm |
25 | patchname = parm.get('pname') or os.path.basename(local) | 25 | patchname = parm.get('pname') or os.path.basename(local) |
26 | 26 | ||
27 | apply, reason = should_apply(parm, d) | 27 | apply, reason = should_apply(parm, d) |
28 | if not apply: | 28 | if not apply: |
29 | if reason: | 29 | if reason: |
30 | bb.note("Patch %s %s" % (patchname, reason)) | 30 | bb.note("Patch %s %s" % (patchname, reason)) |
31 | continue | 31 | continue |
32 | 32 | ||
33 | patchparm = {'patchname': patchname} | 33 | patchparm = {'patchname': patchname} |
34 | if "striplevel" in parm: | 34 | if "striplevel" in parm: |
35 | striplevel = parm["striplevel"] | 35 | striplevel = parm["striplevel"] |
36 | elif "pnum" in parm: | 36 | elif "pnum" in parm: |
37 | #bb.msg.warn(None, "Deprecated usage of 'pnum' url parameter in '%s', please use 'striplevel'" % url) | 37 | #bb.msg.warn(None, "Deprecated usage of 'pnum' url parameter in '%s', please use 'striplevel'" % url) |
38 | striplevel = parm["pnum"] | 38 | striplevel = parm["pnum"] |
39 | else: | 39 | else: |
40 | striplevel = '1' | 40 | striplevel = '1' |
41 | patchparm['striplevel'] = striplevel | 41 | patchparm['striplevel'] = striplevel |
42 | 42 | ||
43 | patchdir = parm.get('patchdir') | 43 | patchdir = parm.get('patchdir') |
44 | if patchdir: | 44 | if patchdir: |
45 | patchparm['patchdir'] = patchdir | 45 | patchparm['patchdir'] = patchdir |
46 | 46 | ||
47 | localurl = bb.encodeurl(('file', '', local, '', '', patchparm)) | 47 | localurl = bb.encodeurl(('file', '', local, '', '', patchparm)) |
48 | patches.append(localurl) | 48 | patches.append(localurl) |
49 | 49 | ||
50 | if all: | 50 | if all: |
51 | return sources | 51 | return sources |
52 | 52 | ||
53 | return patches | 53 | return patches |
54 | 54 | ||
55 | def patch_path(url, fetch, workdir): | 55 | def patch_path(url, fetch, workdir): |
56 | """Return the local path of a patch, or None if this isn't a patch""" | 56 | """Return the local path of a patch, or None if this isn't a patch""" |
57 | 57 | ||
58 | local = fetch.localpath(url) | 58 | local = fetch.localpath(url) |
59 | base, ext = os.path.splitext(os.path.basename(local)) | 59 | base, ext = os.path.splitext(os.path.basename(local)) |
60 | if ext in ('.gz', '.bz2', '.Z'): | 60 | if ext in ('.gz', '.bz2', '.Z'): |
61 | local = os.path.join(workdir, base) | 61 | local = os.path.join(workdir, base) |
62 | ext = os.path.splitext(base)[1] | 62 | ext = os.path.splitext(base)[1] |
63 | 63 | ||
64 | urldata = fetch.ud[url] | 64 | urldata = fetch.ud[url] |
65 | if "apply" in urldata.parm: | 65 | if "apply" in urldata.parm: |
66 | apply = oe.types.boolean(urldata.parm["apply"]) | 66 | apply = oe.types.boolean(urldata.parm["apply"]) |
67 | if not apply: | 67 | if not apply: |
68 | return | 68 | return |
69 | elif ext not in (".diff", ".patch"): | 69 | elif ext not in (".diff", ".patch"): |
70 | return | 70 | return |
71 | 71 | ||
72 | return local | 72 | return local |
73 | 73 | ||
74 | def should_apply(parm, d): | 74 | def should_apply(parm, d): |
75 | """Determine if we should apply the given patch""" | 75 | """Determine if we should apply the given patch""" |
76 | 76 | ||
77 | if "mindate" in parm or "maxdate" in parm: | 77 | if "mindate" in parm or "maxdate" in parm: |
78 | pn = d.getVar('PN', True) | 78 | pn = d.getVar('PN', True) |
79 | srcdate = d.getVar('SRCDATE_%s' % pn, True) | 79 | srcdate = d.getVar('SRCDATE_%s' % pn, True) |
80 | if not srcdate: | 80 | if not srcdate: |
81 | srcdate = d.getVar('SRCDATE', True) | 81 | srcdate = d.getVar('SRCDATE', True) |
82 | 82 | ||
83 | if srcdate == "now": | 83 | if srcdate == "now": |
84 | srcdate = d.getVar('DATE', True) | 84 | srcdate = d.getVar('DATE', True) |
85 | 85 | ||
86 | if "maxdate" in parm and parm["maxdate"] < srcdate: | 86 | if "maxdate" in parm and parm["maxdate"] < srcdate: |
87 | return False, 'is outdated' | 87 | return False, 'is outdated' |
88 | 88 | ||
89 | if "mindate" in parm and parm["mindate"] > srcdate: | 89 | if "mindate" in parm and parm["mindate"] > srcdate: |
90 | return False, 'is predated' | 90 | return False, 'is predated' |
91 | 91 | ||
92 | 92 | ||
93 | if "minrev" in parm: | 93 | if "minrev" in parm: |
94 | srcrev = d.getVar('SRCREV', True) | 94 | srcrev = d.getVar('SRCREV', True) |
95 | if srcrev and srcrev < parm["minrev"]: | 95 | if srcrev and srcrev < parm["minrev"]: |
96 | return False, 'applies to later revisions' | 96 | return False, 'applies to later revisions' |
97 | 97 | ||
98 | if "maxrev" in parm: | 98 | if "maxrev" in parm: |
99 | srcrev = d.getVar('SRCREV', True) | 99 | srcrev = d.getVar('SRCREV', True) |
100 | if srcrev and srcrev > parm["maxrev"]: | 100 | if srcrev and srcrev > parm["maxrev"]: |
101 | return False, 'applies to earlier revisions' | 101 | return False, 'applies to earlier revisions' |
102 | 102 | ||
103 | if "rev" in parm: | 103 | if "rev" in parm: |
104 | srcrev = d.getVar('SRCREV', True) | 104 | srcrev = d.getVar('SRCREV', True) |
105 | if srcrev and parm["rev"] not in srcrev: | 105 | if srcrev and parm["rev"] not in srcrev: |
106 | return False, "doesn't apply to revision" | 106 | return False, "doesn't apply to revision" |
107 | 107 | ||
108 | if "notrev" in parm: | 108 | if "notrev" in parm: |
109 | srcrev = d.getVar('SRCREV', True) | 109 | srcrev = d.getVar('SRCREV', True) |
110 | if srcrev and parm["notrev"] in srcrev: | 110 | if srcrev and parm["notrev"] in srcrev: |
111 | return False, "doesn't apply to revision" | 111 | return False, "doesn't apply to revision" |
112 | 112 | ||
113 | return True, None | 113 | return True, None |
114 | 114 | ||
115 | should_apply[vardepsexclude] = "DATE SRCDATE" | 115 | should_apply[vardepsexclude] = "DATE SRCDATE" |
116 | 116 | ||
117 | python patch_do_patch() { | 117 | python patch_do_patch() { |
118 | import oe.patch | 118 | import oe.patch |
119 | 119 | ||
120 | patchsetmap = { | 120 | patchsetmap = { |
121 | "patch": oe.patch.PatchTree, | 121 | "patch": oe.patch.PatchTree, |
122 | "quilt": oe.patch.QuiltTree, | 122 | "quilt": oe.patch.QuiltTree, |
123 | "git": oe.patch.GitApplyTree, | 123 | "git": oe.patch.GitApplyTree, |
124 | } | 124 | } |
125 | 125 | ||
126 | cls = patchsetmap[d.getVar('PATCHTOOL', True) or 'quilt'] | 126 | cls = patchsetmap[d.getVar('PATCHTOOL', True) or 'quilt'] |
127 | 127 | ||
128 | resolvermap = { | 128 | resolvermap = { |
129 | "noop": oe.patch.NOOPResolver, | 129 | "noop": oe.patch.NOOPResolver, |
130 | "user": oe.patch.UserResolver, | 130 | "user": oe.patch.UserResolver, |
131 | } | 131 | } |
132 | 132 | ||
133 | rcls = resolvermap[d.getVar('PATCHRESOLVE', True) or 'user'] | 133 | rcls = resolvermap[d.getVar('PATCHRESOLVE', True) or 'user'] |
134 | 134 | ||
135 | classes = {} | 135 | classes = {} |
136 | 136 | ||
137 | s = d.getVar('S', True) | 137 | s = d.getVar('S', True) |
138 | 138 | ||
139 | path = os.getenv('PATH') | 139 | path = os.getenv('PATH') |
140 | os.putenv('PATH', d.getVar('PATH', True)) | 140 | os.putenv('PATH', d.getVar('PATH', True)) |
141 | 141 | ||
142 | for patch in src_patches(d): | 142 | for patch in src_patches(d): |
143 | _, _, local, _, _, parm = bb.decodeurl(patch) | 143 | _, _, local, _, _, parm = bb.decodeurl(patch) |
144 | 144 | ||
145 | if "patchdir" in parm: | 145 | if "patchdir" in parm: |
146 | patchdir = parm["patchdir"] | 146 | patchdir = parm["patchdir"] |
147 | if not os.path.isabs(patchdir): | 147 | if not os.path.isabs(patchdir): |
148 | patchdir = os.path.join(s, patchdir) | 148 | patchdir = os.path.join(s, patchdir) |
149 | else: | 149 | else: |
150 | patchdir = s | 150 | patchdir = s |
151 | 151 | ||
152 | if not patchdir in classes: | 152 | if not patchdir in classes: |
153 | patchset = cls(patchdir, d) | 153 | patchset = cls(patchdir, d) |
154 | resolver = rcls(patchset, oe_terminal) | 154 | resolver = rcls(patchset, oe_terminal) |
155 | classes[patchdir] = (patchset, resolver) | 155 | classes[patchdir] = (patchset, resolver) |
156 | patchset.Clean() | 156 | patchset.Clean() |
157 | else: | 157 | else: |
158 | patchset, resolver = classes[patchdir] | 158 | patchset, resolver = classes[patchdir] |
159 | 159 | ||
160 | bb.note("Applying patch '%s' (%s)" % (parm['patchname'], oe.path.format_display(local, d))) | 160 | bb.note("Applying patch '%s' (%s)" % (parm['patchname'], oe.path.format_display(local, d))) |
161 | try: | 161 | try: |
162 | patchset.Import({"file":local, "strippath": parm['striplevel']}, True) | 162 | patchset.Import({"file":local, "strippath": parm['striplevel']}, True) |
163 | except Exception as exc: | 163 | except Exception as exc: |
164 | bb.fatal(str(exc)) | 164 | bb.fatal(str(exc)) |
165 | try: | 165 | try: |
166 | resolver.Resolve() | 166 | resolver.Resolve() |
167 | except bb.BBHandledException as e: | 167 | except bb.BBHandledException as e: |
168 | bb.fatal(str(e)) | 168 | bb.fatal(str(e)) |
169 | } | 169 | } |
170 | patch_do_patch[vardepsexclude] = "PATCHRESOLVE" | 170 | patch_do_patch[vardepsexclude] = "PATCHRESOLVE" |
171 | 171 | ||
diff --git a/meta/classes/pkg_metainfo.bbclass b/meta/classes/pkg_metainfo.bbclass index 4b182690f2..80f6244fca 100644 --- a/meta/classes/pkg_metainfo.bbclass +++ b/meta/classes/pkg_metainfo.bbclass | |||
@@ -1,22 +1,22 @@ | |||
1 | python do_pkg_write_metainfo () { | 1 | python do_pkg_write_metainfo () { |
2 | deploydir = d.getVar('DEPLOY_DIR', True) | 2 | deploydir = d.getVar('DEPLOY_DIR', True) |
3 | if not deploydir: | 3 | if not deploydir: |
4 | bb.error("DEPLOY_DIR not defined, unable to write package info") | 4 | bb.error("DEPLOY_DIR not defined, unable to write package info") |
5 | return | 5 | return |
6 | 6 | ||
7 | try: | 7 | try: |
8 | infofile = file(os.path.join(deploydir, 'package-metainfo'), 'a') | 8 | infofile = file(os.path.join(deploydir, 'package-metainfo'), 'a') |
9 | except OSError: | 9 | except OSError: |
10 | raise bb.build.FuncFailed("unable to open package-info file for writing.") | 10 | raise bb.build.FuncFailed("unable to open package-info file for writing.") |
11 | 11 | ||
12 | name = d.getVar('PN', True) | 12 | name = d.getVar('PN', True) |
13 | version = d.getVar('PV', True) | 13 | version = d.getVar('PV', True) |
14 | desc = d.getVar('DESCRIPTION', True) | 14 | desc = d.getVar('DESCRIPTION', True) |
15 | page = d.getVar('HOMEPAGE', True) | 15 | page = d.getVar('HOMEPAGE', True) |
16 | lic = d.getVar('LICENSE', True) | 16 | lic = d.getVar('LICENSE', True) |
17 | 17 | ||
18 | infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" ) | 18 | infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" ) |
19 | infofile.close() | 19 | infofile.close() |
20 | } | 20 | } |
21 | 21 | ||
22 | addtask pkg_write_metainfo after do_package before do_build \ No newline at end of file | 22 | addtask pkg_write_metainfo after do_package before do_build |
diff --git a/meta/classes/populate_sdk_base.bbclass b/meta/classes/populate_sdk_base.bbclass index ed2dca0780..9483e93f3b 100644 --- a/meta/classes/populate_sdk_base.bbclass +++ b/meta/classes/populate_sdk_base.bbclass | |||
@@ -32,29 +32,29 @@ python () { | |||
32 | } | 32 | } |
33 | 33 | ||
34 | fakeroot python do_populate_sdk() { | 34 | fakeroot python do_populate_sdk() { |
35 | bb.build.exec_func("populate_sdk_image", d) | 35 | bb.build.exec_func("populate_sdk_image", d) |
36 | 36 | ||
37 | # Handle multilibs in the SDK environment, siteconfig, etc files... | 37 | # Handle multilibs in the SDK environment, siteconfig, etc files... |
38 | localdata = bb.data.createCopy(d) | 38 | localdata = bb.data.createCopy(d) |
39 | 39 | ||
40 | # make sure we only use the WORKDIR value from 'd', or it can change | 40 | # make sure we only use the WORKDIR value from 'd', or it can change |
41 | localdata.setVar('WORKDIR', d.getVar('WORKDIR', True)) | 41 | localdata.setVar('WORKDIR', d.getVar('WORKDIR', True)) |
42 | 42 | ||
43 | # make sure we only use the SDKTARGETSYSROOT value from 'd' | 43 | # make sure we only use the SDKTARGETSYSROOT value from 'd' |
44 | localdata.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT', True)) | 44 | localdata.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT', True)) |
45 | 45 | ||
46 | # Process DEFAULTTUNE | 46 | # Process DEFAULTTUNE |
47 | bb.build.exec_func("create_sdk_files", localdata) | 47 | bb.build.exec_func("create_sdk_files", localdata) |
48 | 48 | ||
49 | variants = d.getVar("MULTILIB_VARIANTS", True) or "" | 49 | variants = d.getVar("MULTILIB_VARIANTS", True) or "" |
50 | for item in variants.split(): | 50 | for item in variants.split(): |
51 | # Load overrides from 'd' to avoid having to reset the value... | 51 | # Load overrides from 'd' to avoid having to reset the value... |
52 | overrides = d.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item | 52 | overrides = d.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item |
53 | localdata.setVar("OVERRIDES", overrides) | 53 | localdata.setVar("OVERRIDES", overrides) |
54 | bb.data.update_data(localdata) | 54 | bb.data.update_data(localdata) |
55 | bb.build.exec_func("create_sdk_files", localdata) | 55 | bb.build.exec_func("create_sdk_files", localdata) |
56 | 56 | ||
57 | bb.build.exec_func("tar_sdk", d) | 57 | bb.build.exec_func("tar_sdk", d) |
58 | } | 58 | } |
59 | 59 | ||
60 | fakeroot populate_sdk_image() { | 60 | fakeroot populate_sdk_image() { |
diff --git a/meta/classes/qemu.bbclass b/meta/classes/qemu.bbclass index c0a538036f..0e75ac4e38 100644 --- a/meta/classes/qemu.bbclass +++ b/meta/classes/qemu.bbclass | |||
@@ -4,12 +4,12 @@ | |||
4 | # | 4 | # |
5 | 5 | ||
6 | def qemu_target_binary(data): | 6 | def qemu_target_binary(data): |
7 | import bb | 7 | import bb |
8 | 8 | ||
9 | target_arch = data.getVar("TARGET_ARCH", True) | 9 | target_arch = data.getVar("TARGET_ARCH", True) |
10 | if target_arch in ("i486", "i586", "i686"): | 10 | if target_arch in ("i486", "i586", "i686"): |
11 | target_arch = "i386" | 11 | target_arch = "i386" |
12 | elif target_arch == "powerpc": | 12 | elif target_arch == "powerpc": |
13 | target_arch = "ppc" | 13 | target_arch = "ppc" |
14 | 14 | ||
15 | return "qemu-" + target_arch | 15 | return "qemu-" + target_arch |
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass index c2c5bd5c25..d572f0edcf 100644 --- a/meta/classes/sstate.bbclass +++ b/meta/classes/sstate.bbclass | |||
@@ -185,18 +185,18 @@ def sstate_installpkg(ss, d): | |||
185 | staging_target = d.getVar('STAGING_DIR_TARGET', True) | 185 | staging_target = d.getVar('STAGING_DIR_TARGET', True) |
186 | staging_host = d.getVar('STAGING_DIR_HOST', True) | 186 | staging_host = d.getVar('STAGING_DIR_HOST', True) |
187 | 187 | ||
188 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d): | 188 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d): |
189 | sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIR:%s:g'" % (staging) | 189 | sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIR:%s:g'" % (staging) |
190 | elif bb.data.inherits_class('cross', d): | 190 | elif bb.data.inherits_class('cross', d): |
191 | sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIR:%s:g'" % (staging_target, staging) | 191 | sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIR:%s:g'" % (staging_target, staging) |
192 | else: | 192 | else: |
193 | sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host) | 193 | sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host) |
194 | 194 | ||
195 | # Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed | 195 | # Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed |
196 | sstate_hardcode_cmd = "sed -e 's:^:%s:g' %s | xargs %s" % (sstateinst, fixmefn, sstate_sed_cmd) | 196 | sstate_hardcode_cmd = "sed -e 's:^:%s:g' %s | xargs %s" % (sstateinst, fixmefn, sstate_sed_cmd) |
197 | 197 | ||
198 | print "Replacing fixme paths in sstate package: %s" % (sstate_hardcode_cmd) | 198 | print "Replacing fixme paths in sstate package: %s" % (sstate_hardcode_cmd) |
199 | subprocess.call(sstate_hardcode_cmd, shell=True) | 199 | subprocess.call(sstate_hardcode_cmd, shell=True) |
200 | 200 | ||
201 | # Need to remove this or we'd copy it into the target directory and may | 201 | # Need to remove this or we'd copy it into the target directory and may |
202 | # conflict with another writer | 202 | # conflict with another writer |
@@ -310,50 +310,50 @@ python sstate_cleanall() { | |||
310 | } | 310 | } |
311 | 311 | ||
312 | def sstate_hardcode_path(d): | 312 | def sstate_hardcode_path(d): |
313 | import subprocess | 313 | import subprocess |
314 | 314 | ||
315 | # Need to remove hardcoded paths and fix these when we install the | 315 | # Need to remove hardcoded paths and fix these when we install the |
316 | # staging packages. | 316 | # staging packages. |
317 | # | 317 | # |
318 | # Note: the logic in this function needs to match the reverse logic | 318 | # Note: the logic in this function needs to match the reverse logic |
319 | # in sstate_installpkg(ss, d) | 319 | # in sstate_installpkg(ss, d) |
320 | 320 | ||
321 | staging = d.getVar('STAGING_DIR', True) | 321 | staging = d.getVar('STAGING_DIR', True) |
322 | staging_target = d.getVar('STAGING_DIR_TARGET', True) | 322 | staging_target = d.getVar('STAGING_DIR_TARGET', True) |
323 | staging_host = d.getVar('STAGING_DIR_HOST', True) | 323 | staging_host = d.getVar('STAGING_DIR_HOST', True) |
324 | sstate_builddir = d.getVar('SSTATE_BUILDDIR', True) | 324 | sstate_builddir = d.getVar('SSTATE_BUILDDIR', True) |
325 | 325 | ||
326 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d): | 326 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d): |
327 | sstate_grep_cmd = "grep -l -e '%s'" % (staging) | 327 | sstate_grep_cmd = "grep -l -e '%s'" % (staging) |
328 | sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIR:g'" % (staging) | 328 | sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIR:g'" % (staging) |
329 | elif bb.data.inherits_class('cross', d): | 329 | elif bb.data.inherits_class('cross', d): |
330 | sstate_grep_cmd = "grep -l -e '(%s|%s)'" % (staging_target, staging) | 330 | sstate_grep_cmd = "grep -l -e '(%s|%s)'" % (staging_target, staging) |
331 | sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g; s:%s:FIXMESTAGINGDIR:g'" % (staging_target, staging) | 331 | sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g; s:%s:FIXMESTAGINGDIR:g'" % (staging_target, staging) |
332 | else: | 332 | else: |
333 | sstate_grep_cmd = "grep -l -e '%s'" % (staging_host) | 333 | sstate_grep_cmd = "grep -l -e '%s'" % (staging_host) |
334 | sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host) | 334 | sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host) |
335 | 335 | ||
336 | fixmefn = sstate_builddir + "fixmepath" | 336 | fixmefn = sstate_builddir + "fixmepath" |
337 | 337 | ||
338 | sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD', True) | 338 | sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD', True) |
339 | sstate_filelist_cmd = "tee %s" % (fixmefn) | 339 | sstate_filelist_cmd = "tee %s" % (fixmefn) |
340 | 340 | ||
341 | # fixmepath file needs relative paths, drop sstate_builddir prefix | 341 | # fixmepath file needs relative paths, drop sstate_builddir prefix |
342 | sstate_filelist_relative_cmd = "sed -i -e 's:^%s::g' %s" % (sstate_builddir, fixmefn) | 342 | sstate_filelist_relative_cmd = "sed -i -e 's:^%s::g' %s" % (sstate_builddir, fixmefn) |
343 | 343 | ||
344 | # Limit the fixpaths and sed operations based on the initial grep search | 344 | # Limit the fixpaths and sed operations based on the initial grep search |
345 | # This has the side effect of making sure the vfs cache is hot | 345 | # This has the side effect of making sure the vfs cache is hot |
346 | sstate_hardcode_cmd = "%s | xargs %s | %s | xargs --no-run-if-empty %s" % (sstate_scan_cmd, sstate_grep_cmd, sstate_filelist_cmd, sstate_sed_cmd) | 346 | sstate_hardcode_cmd = "%s | xargs %s | %s | xargs --no-run-if-empty %s" % (sstate_scan_cmd, sstate_grep_cmd, sstate_filelist_cmd, sstate_sed_cmd) |
347 | 347 | ||
348 | print "Removing hardcoded paths from sstate package: '%s'" % (sstate_hardcode_cmd) | 348 | print "Removing hardcoded paths from sstate package: '%s'" % (sstate_hardcode_cmd) |
349 | subprocess.call(sstate_hardcode_cmd, shell=True) | 349 | subprocess.call(sstate_hardcode_cmd, shell=True) |
350 | 350 | ||
351 | # If the fixmefn is empty, remove it.. | 351 | # If the fixmefn is empty, remove it.. |
352 | if os.stat(fixmefn).st_size == 0: | 352 | if os.stat(fixmefn).st_size == 0: |
353 | os.remove(fixmefn) | 353 | os.remove(fixmefn) |
354 | else: | 354 | else: |
355 | print "Replacing absolute paths in fixmepath file: '%s'" % (sstate_filelist_relative_cmd) | 355 | print "Replacing absolute paths in fixmepath file: '%s'" % (sstate_filelist_relative_cmd) |
356 | subprocess.call(sstate_filelist_relative_cmd, shell=True) | 356 | subprocess.call(sstate_filelist_relative_cmd, shell=True) |
357 | 357 | ||
358 | def sstate_package(ss, d): | 358 | def sstate_package(ss, d): |
359 | import oe.path | 359 | import oe.path |
diff --git a/meta/classes/staging.bbclass b/meta/classes/staging.bbclass index a98f51deb5..ee5a025a3b 100644 --- a/meta/classes/staging.bbclass +++ b/meta/classes/staging.bbclass | |||
@@ -109,7 +109,7 @@ do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR_HOST}/" | |||
109 | do_populate_sysroot[stamp-extra-info] = "${MACHINE}" | 109 | do_populate_sysroot[stamp-extra-info] = "${MACHINE}" |
110 | 110 | ||
# Setscene variant of do_populate_sysroot: restore the sysroot contents
# from the shared-state (sstate) cache instead of rebuilding them.
python do_populate_sysroot_setscene () {
    sstate_setscene(d)
}
addtask do_populate_sysroot_setscene
115 | 115 | ||
diff --git a/meta/classes/syslinux.bbclass b/meta/classes/syslinux.bbclass index 700ea53911..c4596bffce 100644 --- a/meta/classes/syslinux.bbclass +++ b/meta/classes/syslinux.bbclass | |||
@@ -54,146 +54,146 @@ syslinux_hddimg_install() { | |||
54 | } | 54 | } |
55 | 55 | ||
python build_syslinux_menu () {
    # Write the syslinux boot menu file (SYSLINUXMENU): one line per boot
    # label in LABELS, each showing the label and its per-label USAGE text.
    import copy
    import sys

    workdir = d.getVar('WORKDIR', True)
    if not workdir:
        bb.error("WORKDIR is not defined")
        return

    labels = d.getVar('LABELS', True)
    if not labels:
        bb.debug(1, "LABELS not defined, nothing to do")
        return

    if labels == []:
        bb.debug(1, "No labels, nothing to do")
        return

    cfile = d.getVar('SYSLINUXMENU', True)
    if not cfile:
        raise bb.build.FuncFailed('Unable to read SYSLINUXMENU')

    try:
        cfgfile = file(cfile, 'w')
    # BUGFIX: Python 2 file() raises IOError on open failure, which the old
    # "except OSError" never caught; catch both so the handler actually runs.
    except (IOError, OSError):
        # BUGFIX: was bb.build.funcFailed (lowercase 'f'), which does not
        # exist and would have produced an AttributeError instead of the
        # intended build failure.
        raise bb.build.FuncFailed('Unable to open %s' % (cfile))

    # Beep the speaker and Clear the screen
    cfgfile.write('\x07\x0C')

    # The title should be configurable
    cfgfile.write('Linux Boot Menu\n')
    cfgfile.write('The following targets are available on this image:\n')
    cfgfile.write('\n')

    for label in labels.split():
        from copy import deepcopy
        # NOTE(review): build_syslinux_cfg below uses bb.data.createCopy(d)
        # for the same purpose; deepcopy of the datastore looks suspect but
        # is kept as-is to avoid a behavior change.
        localdata = deepcopy(d)

        # Fetch OVERRIDES unexpanded here; it is expanded explicitly below.
        overrides = localdata.getVar('OVERRIDES')
        if not overrides:
            raise bb.build.FuncFailed('OVERRIDES not defined')
        overrides = localdata.expand(overrides)

        # Prepend the label so label-specific overrides (e.g. USAGE_<label>)
        # take effect in this copy of the datastore.
        localdata.setVar('OVERRIDES', label + ':' + overrides)
        bb.data.update_data(localdata)

        usage = localdata.getVar('USAGE', True)
        # The \x0F\x30\x3E / \x0F\x30\x37 sequences select display colours
        # for the label column and its description.
        cfgfile.write(' \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label))
        cfgfile.write('%s\n' % (usage))

        del localdata

    cfgfile.write('\n')
    cfgfile.close()
}
112 | 112 | ||
python build_syslinux_cfg () {
    # Generate the syslinux configuration file (SYSLINUXCFG): global options
    # (SYSLINUX_OPTS, TIMEOUT, PROMPT, optional menu DISPLAY), then one
    # LABEL/KERNEL/APPEND stanza per boot label in LABELS.
    import copy
    import sys

    workdir = d.getVar('WORKDIR', True)
    if not workdir:
        bb.error("WORKDIR not defined, unable to package")
        return

    labels = d.getVar('LABELS', True)
    if not labels:
        bb.debug(1, "LABELS not defined, nothing to do")
        return

    if labels == []:
        bb.debug(1, "No labels, nothing to do")
        return

    cfile = d.getVar('SYSLINUXCFG', True)
    if not cfile:
        raise bb.build.FuncFailed('Unable to read SYSLINUXCFG')

    try:
        cfgfile = file(cfile, 'w')
    # BUGFIX: Python 2 file() raises IOError on open failure, which the old
    # "except OSError" never caught; catch both so the handler actually runs.
    except (IOError, OSError):
        # BUGFIX: was bb.build.funcFailed (lowercase 'f'), which does not
        # exist -- it would have raised AttributeError, not the intended
        # build failure.
        raise bb.build.FuncFailed('Unable to open %s' % (cfile))

    cfgfile.write('# Automatically created by OE\n')

    # SYSLINUX_OPTS is a ';'-separated list of raw config lines.
    opts = d.getVar('SYSLINUX_OPTS', True)

    if opts:
        for opt in opts.split(';'):
            cfgfile.write('%s\n' % opt)

    cfgfile.write('ALLOWOPTIONS 1\n')
    # The first label in LABELS becomes the default boot target.
    cfgfile.write('DEFAULT %s\n' % (labels.split()[0]))

    timeout = d.getVar('SYSLINUX_TIMEOUT', True)

    if timeout:
        cfgfile.write('TIMEOUT %s\n' % timeout)
    else:
        cfgfile.write('TIMEOUT 50\n')

    prompt = d.getVar('SYSLINUX_PROMPT', True)
    if prompt:
        cfgfile.write('PROMPT %s\n' % prompt)
    else:
        cfgfile.write('PROMPT 1\n')

    menu = d.getVar('AUTO_SYSLINUXMENU', True)

    # This is ugly. My bad.

    if menu:
        # Generate the menu file first, then point syslinux at it.
        bb.build.exec_func('build_syslinux_menu', d)
        mfile = d.getVar('SYSLINUXMENU', True)
        cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) )

    for label in labels.split():
        localdata = bb.data.createCopy(d)

        overrides = localdata.getVar('OVERRIDES', True)
        if not overrides:
            raise bb.build.FuncFailed('OVERRIDES not defined')

        # Prepend the label so label-specific APPEND/INITRD overrides apply.
        localdata.setVar('OVERRIDES', label + ':' + overrides)
        bb.data.update_data(localdata)

        cfgfile.write('LABEL %s\nKERNEL /vmlinuz\n' % (label))

        append = localdata.getVar('APPEND', True)
        initrd = localdata.getVar('INITRD', True)

        if append:
            cfgfile.write('APPEND ')

            if initrd:
                cfgfile.write('initrd=/initrd ')

            cfgfile.write('LABEL=%s '% (label))

            cfgfile.write('%s\n' % (append))

    cfgfile.close()
}
diff --git a/meta/classes/update-alternatives.bbclass b/meta/classes/update-alternatives.bbclass index 9f2e59f584..47215add1e 100644 --- a/meta/classes/update-alternatives.bbclass +++ b/meta/classes/update-alternatives.bbclass | |||
@@ -113,13 +113,13 @@ def update_alternatives_after_parse(d): | |||
113 | # Convert old format to new format... | 113 | # Convert old format to new format... |
114 | alt_links = d.getVar('ALTERNATIVE_LINKS', True) or "" | 114 | alt_links = d.getVar('ALTERNATIVE_LINKS', True) or "" |
115 | for alt_link in alt_links.split(): | 115 | for alt_link in alt_links.split(): |
116 | alt_name = os.path.basename(alt_link) | 116 | alt_name = os.path.basename(alt_link) |
117 | 117 | ||
118 | alternative = d.getVar('ALTERNATIVE_%s' % pn, True) or "" | 118 | alternative = d.getVar('ALTERNATIVE_%s' % pn, True) or "" |
119 | alternative += " " + alt_name | 119 | alternative += " " + alt_name |
120 | d.setVar('ALTERNATIVE_%s' % pn, alternative) | 120 | d.setVar('ALTERNATIVE_%s' % pn, alternative) |
121 | d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link) | 121 | d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link) |
122 | d.setVarFlag('ALTERNATIVE_TARGET', alt_name, alt_link) | 122 | d.setVarFlag('ALTERNATIVE_TARGET', alt_name, alt_link) |
123 | return | 123 | return |
124 | 124 | ||
125 | if d.getVar('ALTERNATIVE_NAME') != None or d.getVar('ALTERNATIVE_PATH') != None: | 125 | if d.getVar('ALTERNATIVE_NAME') != None or d.getVar('ALTERNATIVE_PATH') != None: |
@@ -128,15 +128,15 @@ def update_alternatives_after_parse(d): | |||
128 | alt_path = d.getVar('ALTERNATIVE_PATH', True) | 128 | alt_path = d.getVar('ALTERNATIVE_PATH', True) |
129 | alt_link = d.getVar('ALTERNATIVE_LINK', True) or ("%s/%s" % (d.getVar('bindir', True), alt_name)) | 129 | alt_link = d.getVar('ALTERNATIVE_LINK', True) or ("%s/%s" % (d.getVar('bindir', True), alt_name)) |
130 | if alt_name == None: | 130 | if alt_name == None: |
131 | raise bb.build.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % d.getVar('FILE') | 131 | raise bb.build.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % d.getVar('FILE') |
132 | if alt_path == None: | 132 | if alt_path == None: |
133 | raise bb.build.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % d.getVar('FILE') | 133 | raise bb.build.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % d.getVar('FILE') |
134 | 134 | ||
135 | alternative = d.getVar('ALTERNATIVE_%s' % pn, True) or "" | 135 | alternative = d.getVar('ALTERNATIVE_%s' % pn, True) or "" |
136 | alternative += " " + alt_name | 136 | alternative += " " + alt_name |
137 | 137 | ||
138 | # Fix the alt_path if it's relative | 138 | # Fix the alt_path if it's relative |
139 | alt_path = os.path.join(os.path.dirname(alt_link), alt_path) | 139 | alt_path = os.path.join(os.path.dirname(alt_link), alt_path) |
140 | 140 | ||
141 | d.setVar('ALTERNATIVE_%s' % pn, alternative) | 141 | d.setVar('ALTERNATIVE_%s' % pn, alternative) |
142 | d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link) | 142 | d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link) |
@@ -199,144 +199,144 @@ populate_packages[vardeps] += "${UPDALTVARS} ${@gen_updatealternativesvars(d)}" | |||
199 | # the split and strip steps.. packagecopy seems to be the earliest reasonable | 199 | # the split and strip steps.. packagecopy seems to be the earliest reasonable |
200 | # place. | 200 | # place. |
python perform_packagecopy_append () {
    # Appended to perform_packagecopy: when an alternative's link path and
    # its target path collide, rename the packaged target to <target>.${BPN}
    # so update-alternatives can manage the link at runtime.

    # Check for deprecated usage...
    pn = d.getVar('BPN', True)
    if d.getVar('ALTERNATIVE_LINKS', True) != None:
        bb.warn('%s: Use of ALTERNATIVE_LINKS is deprecated, see update-alternatives.bbclass for more info.' % pn)

    if d.getVar('ALTERNATIVE_NAME', True) != None or d.getVar('ALTERNATIVE_PATH', True) != None:
        bb.warn('%s: Use of ALTERNATIVE_NAME is deprecated, see update-alternatives.bbclass for more info.' % pn)

    # Do actual update alternatives processing
    pkgdest = d.getVar('PKGD', True)
    for pkg in (d.getVar('PACKAGES', True) or "").split():
        # If the src == dest, we know we need to rename the dest by appending ${BPN}
        link_rename = {}
        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
            # Default the link location to ${bindir}/<alt_name> when unset,
            # and record that default back into the datastore.
            alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
            if not alt_link:
                alt_link = "%s/%s" % (d.getVar('bindir', True), alt_name)
                d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)

            # Target resolution order: per-package varflag, global varflag,
            # per-package variable, global variable, finally the link itself.
            alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
            alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
            # Sometimes alt_target is specified as relative to the link name.
            alt_target = os.path.join(os.path.dirname(alt_link), alt_target)

            # If the link and target are the same name, we need to rename the target.
            if alt_link == alt_target:
                src = '%s/%s' % (pkgdest, alt_target)
                alt_target_rename = '%s.%s' % (alt_target, pn)
                dest = '%s/%s' % (pkgdest, alt_target_rename)
                if os.path.lexists(dest):
                    bb.note('%s: Already renamed: %s' % (pn, alt_target_rename))
                elif os.path.lexists(src):
                    if os.path.islink(src):
                        # Delay rename of links
                        link_rename[alt_target] = alt_target_rename
                    else:
                        bb.note('%s: Rename %s -> %s' % (pn, alt_target, alt_target_rename))
                        os.rename(src, dest)
                else:
                    bb.warn("%s: alternative target (%s or %s) does not exist, skipping..." % (pn, alt_target, alt_target_rename))
                    continue
                # Record the renamed target so later packaging steps use it.
                d.setVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, alt_target_rename)

        # Process delayed link names
        # Do these after other renames so we can correct broken links
        for alt_target in link_rename:
            src = '%s/%s' % (pkgdest, alt_target)
            dest = '%s/%s' % (pkgdest, link_rename[alt_target])
            link = os.readlink(src)
            if os.path.isabs(link):
                # Absolute symlink: resolve it inside the package staging root.
                link_target = pkgdest + os.readlink(src)
            else:
                link_target = os.path.join(os.path.dirname(src), link)

            if os.path.lexists(link_target):
                # Ok, the link_target exists, we can rename
                bb.note('%s: Rename (link) %s -> %s' % (pn, alt_target, link_rename[alt_target]))
                os.rename(src, dest)
            else:
                # Try to resolve the broken link to link.${BPN}
                link_maybe = '%s.%s' % (os.readlink(src), pn)
                if os.path.lexists(os.path.join(os.path.dirname(src), link_maybe)):
                    # Ok, the renamed link target exists.. create a new link, and remove the original
                    bb.note('%s: Creating new link %s -> %s' % (pn, link_rename[alt_target], link_maybe))
                    os.symlink(link_maybe, dest)
                    os.unlink(src)
                else:
                    bb.warn('%s: Unable to resolve dangling symlink: %s' % (pn, alt_target))
}
271 | 271 | ||
python populate_packages_prepend () {
    # Prepended to populate_packages: for every alternative declared per
    # package, append matching update-alternatives --install / --remove
    # calls to that package's postinst and postrm scripts.
    pn = d.getVar('BPN', True)

    # Do actual update alternatives processing
    pkgdest = d.getVar('PKGD', True)
    for pkg in (d.getVar('PACKAGES', True) or "").split():
        # Create post install/removal scripts
        alt_setup_links = ""
        alt_remove_links = ""
        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
            alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
            # Target resolution order: per-package varflag, global varflag,
            # per-package variable, global variable, finally the link itself.
            alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
            alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
            # Sometimes alt_target is specified as relative to the link name.
            alt_target = os.path.join(os.path.dirname(alt_link), alt_target)

            # Priority resolution mirrors the target resolution above.
            alt_priority = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_PRIORITY', alt_name, True)
            alt_priority = alt_priority or d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg, True) or d.getVar('ALTERNATIVE_PRIORITY', True)

            # This shouldn't trigger, as it should have been resolved earlier!
            if alt_link == alt_target:
                bb.note('alt_link == alt_target: %s == %s -- correcting, this should not happen!' % (alt_link, alt_target))
                alt_target = '%s.%s' % (alt_target, pn)

            if not os.path.lexists('%s/%s' % (pkgdest, alt_target)):
                bb.warn('%s: NOT adding alternative provide %s: %s does not exist' % (pn, alt_link, alt_target))
                continue

            # Default to generate shell script.. eventually we may want to change this...
            alt_target = os.path.relpath(alt_target, os.path.dirname(alt_link))

            alt_setup_links += '\tupdate-alternatives --install %s %s %s %s\n' % (alt_link, alt_name, alt_target, alt_priority)
            alt_remove_links += '\tupdate-alternatives --remove %s %s\n' % (alt_name, alt_target)

        if alt_setup_links:
            # Append the generated calls to any existing scripts, creating a
            # minimal '#!/bin/sh' script when none exists yet.
            bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg)
            bb.note('%s' % alt_setup_links)
            postinst = (d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)) or '#!/bin/sh\n'
            postinst += alt_setup_links
            d.setVar('pkg_postinst_%s' % pkg, postinst)

            bb.note('%s' % alt_remove_links)
            postrm = (d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)) or '#!/bin/sh\n'
            postrm += alt_remove_links
            d.setVar('pkg_postrm_%s' % pkg, postrm)
}
318 | 318 | ||
python package_do_filedeps_append () {
    # Appended to package_do_filedeps: register each alternative link path
    # as a file-level provide (FILERPROVIDES) of the package shipping the
    # (possibly renamed) target file.
    pn = d.getVar('BPN', True)
    pkgdest = d.getVar('PKGDEST', True)

    # NOTE(review): 'packages' is not defined in this snippet; as an _append
    # this presumably relies on the variable set by the base
    # package_do_filedeps body -- verify against package.bbclass.
    for pkg in packages.split():
        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
            alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
            # Target resolution order: per-package varflag, global varflag,
            # per-package variable, global variable, finally the link itself.
            alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
            alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link

            if alt_link == alt_target:
                # Mirror the rename done in perform_packagecopy_append.
                bb.warn('alt_link == alt_target: %s == %s' % (alt_link, alt_target))
                alt_target = '%s.%s' % (alt_target, pn)

            if not os.path.lexists('%s/%s/%s' % (pkgdest, pkg, alt_target)):
                continue

            # Add file provide
            trans_target = file_translate(alt_target)
            d.appendVar('FILERPROVIDES_%s_%s' % (trans_target, pkg), " " + alt_link)
            if not trans_target in (d.getVar('FILERPROVIDESFLIST_%s' % pkg, True) or ""):
                d.appendVar('FILERPROVIDESFLIST_%s' % pkg, " " + trans_target)
}
342 | 342 | ||
diff --git a/meta/classes/update-rc.d.bbclass b/meta/classes/update-rc.d.bbclass index bddead4a25..eef2e8caa0 100644 --- a/meta/classes/update-rc.d.bbclass +++ b/meta/classes/update-rc.d.bbclass | |||
@@ -44,42 +44,42 @@ python __anonymous() { | |||
44 | } | 44 | } |
45 | 45 | ||
python populate_packages_prepend () {
    # Prepended to populate_packages: attach the update-rc.d postinst,
    # prerm and postrm script fragments to each package listed in
    # INITSCRIPT_PACKAGES (or UPDATERCPN as a fallback).
    def update_rcd_package(pkg):
        # Stitch the updatercd_* fragments onto pkg's maintainer scripts,
        # evaluated with pkg-specific OVERRIDES active.
        bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg)
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", True)
        localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides))
        bb.data.update_data(localdata)

        """
        update_rc.d postinst is appended here because pkg_postinst may require to
        execute on the target. Not doing so may cause update_rc.d postinst invoked
        twice to cause unwanted warnings.
        """
        postinst = localdata.getVar('pkg_postinst', True)
        if not postinst:
            postinst = '#!/bin/sh\n'
        postinst += localdata.getVar('updatercd_postinst', True)
        d.setVar('pkg_postinst_%s' % pkg, postinst)

        prerm = localdata.getVar('pkg_prerm', True)
        if not prerm:
            prerm = '#!/bin/sh\n'
        prerm += localdata.getVar('updatercd_prerm', True)
        d.setVar('pkg_prerm_%s' % pkg, prerm)

        postrm = localdata.getVar('pkg_postrm', True)
        if not postrm:
            postrm = '#!/bin/sh\n'
        postrm += localdata.getVar('updatercd_postrm', True)
        d.setVar('pkg_postrm_%s' % pkg, postrm)

    pkgs = d.getVar('INITSCRIPT_PACKAGES', True)
    if pkgs == None:
        pkgs = d.getVar('UPDATERCPN', True)
        packages = (d.getVar('PACKAGES', True) or "").split()
        # Fall back to the first package when UPDATERCPN isn't one of
        # PACKAGES (note: 'pkgs in packages' is a whole-string membership
        # test, so a multi-word UPDATERCPN also falls back here).
        if not pkgs in packages and packages != []:
            pkgs = packages[0]
    for pkg in pkgs.split():
        update_rcd_package(pkg)
}
diff --git a/meta/classes/useradd.bbclass b/meta/classes/useradd.bbclass index 6ae5e48420..bb8f42b357 100644 --- a/meta/classes/useradd.bbclass +++ b/meta/classes/useradd.bbclass | |||
@@ -154,61 +154,61 @@ do_package_setscene[depends] = "${USERADDSETSCENEDEPS}" | |||
154 | 154 | ||
155 | # Recipe parse-time sanity checks | 155 | # Recipe parse-time sanity checks |
def update_useradd_after_parse(d):
    """Recipe parse-time sanity check for the useradd class.

    Every recipe inheriting useradd must set USERADD_PACKAGES, and each
    package listed there needs a USERADD_PARAM_<pkg> or GROUPADD_PARAM_<pkg>.
    Raises bb.build.FuncFailed on violation.
    """
    useradd_packages = d.getVar('USERADD_PACKAGES', True)

    if not useradd_packages:
        # 'raise Exc, msg' is Python-2-only statement syntax; the call
        # form below behaves identically and is valid on Python 3 too.
        raise bb.build.FuncFailed("%s inherits useradd but doesn't set USERADD_PACKAGES" % d.getVar('FILE'))

    for pkg in useradd_packages.split():
        if not d.getVar('USERADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPADD_PARAM_%s' % pkg, True):
            raise bb.build.FuncFailed("%s inherits useradd but doesn't set USERADD_PARAM or GROUPADD_PARAM for package %s" % (d.getVar('FILE'), pkg))
165 | 165 | ||
python __anonymous() {
    # Anonymous functions run when the recipe finishes parsing; invoke
    # the USERADD_PACKAGES/USERADD_PARAM sanity checks immediately so
    # misconfigured recipes fail at parse time rather than at build time.
    update_useradd_after_parse(d)
}
169 | 169 | ||
170 | # Return a single [GROUP|USER]ADD_PARAM formatted string which includes the | 170 | # Return a single [GROUP|USER]ADD_PARAM formatted string which includes the |
171 | # [group|user]add parameters for all USERADD_PACKAGES in this recipe | 171 | # [group|user]add parameters for all USERADD_PACKAGES in this recipe |
def get_all_cmd_params(d, cmd_type):
    """Return a single "; "-joined string of the [GROUP|USER]ADD_PARAM_<pkg>
    values for every package in USERADD_PACKAGES.

    cmd_type selects the variable family: "user" -> USERADD_PARAM_*,
    "group" -> GROUPADD_PARAM_* (it is upper-cased before use).
    Packages with no matching parameter are skipped.
    """
    param_type = cmd_type.upper() + "ADD_PARAM_%s"
    params = []

    useradd_packages = d.getVar('USERADD_PACKAGES', True) or ""
    for pkg in useradd_packages.split():
        param = d.getVar(param_type % pkg, True)
        if param:
            params.append(param)

    # str.join replaces the deprecated string.join() helper (removed in
    # Python 3); the result is identical for a list of strings, and the
    # now-unneeded 'import string' has been dropped.
    return "; ".join(params)
185 | 185 | ||
186 | # Adds the preinst script into generated packages | 186 | # Adds the preinst script into generated packages |
fakeroot python populate_packages_prepend () {
    # Packaging-time hook: append the useradd preinst fragment and its
    # runtime dependencies to every package listed in USERADD_PACKAGES.
    def update_useradd_package(pkg):
        bb.debug(1, 'adding user/group calls to preinst for %s' % pkg)

        """
        useradd preinst is appended here because pkg_preinst may be
        required to execute on the target. Not doing so may cause
        useradd preinst to be invoked twice, causing unwanted warnings.
        """
        # Prefer the package-specific preinst, fall back to the generic
        # one, or start a fresh shell script if neither exists yet.
        preinst = d.getVar('pkg_preinst_%s' % pkg, True) or d.getVar('pkg_preinst', True)
        if not preinst:
            preinst = '#!/bin/sh\n'
        preinst += d.getVar('useradd_preinst', True)
        d.setVar('pkg_preinst_%s' % pkg, preinst)

        # RDEPENDS setup
        # The appended preinst presumably invokes useradd/groupadd on the
        # target, so pull in base-passwd and shadow (MLPREFIX keeps the
        # names correct for multilib packages) — TODO confirm against
        # the useradd_preinst shell function.
        rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or ""
        rdepends += ' ' + d.getVar('MLPREFIX') + 'base-passwd'
        rdepends += ' ' + d.getVar('MLPREFIX') + 'shadow'
        d.setVar("RDEPENDS_%s" % pkg, rdepends)

    # Add the user/group preinstall scripts and RDEPENDS requirements
    # to packages specified by USERADD_PACKAGES
    if not bb.data.inherits_class('nativesdk', d):
        useradd_packages = d.getVar('USERADD_PACKAGES', True) or ""
        for pkg in useradd_packages.split():
            update_useradd_package(pkg)
}
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes/utility-tasks.bbclass index 6c2232ec57..d150ec57ba 100644 --- a/meta/classes/utility-tasks.bbclass +++ b/meta/classes/utility-tasks.bbclass | |||
@@ -1,13 +1,13 @@ | |||
1 | addtask listtasks | 1 | addtask listtasks |
2 | do_listtasks[nostamp] = "1" | 2 | do_listtasks[nostamp] = "1" |
python do_listtasks() {
    # Print the name of every task defined for this recipe: tasks are
    # the datastore entries carrying the 'task' flag.
    # ('import sys' and the commented-out bb.data.emit_env(sys.__stdout__, d)
    # debugging call were dead code and have been removed.)
    for e in d.keys():
        if d.getVarFlag(e, 'task'):
            bb.plain("%s" % e)
}
12 | 12 | ||
13 | CLEANFUNCS ?= "" | 13 | CLEANFUNCS ?= "" |
@@ -15,34 +15,34 @@ CLEANFUNCS ?= "" | |||
15 | addtask clean | 15 | addtask clean |
16 | do_clean[nostamp] = "1" | 16 | do_clean[nostamp] = "1" |
python do_clean() {
    """clear the build and temp directories"""
    # Remove the whole per-recipe work directory.
    dir = d.expand("${WORKDIR}")
    bb.note("Removing " + dir)
    oe.path.remove(dir)

    # Remove every stamp file for this recipe.  d.getVar(..., True) is
    # equivalent to the legacy bb.data.expand(d.getVar('STAMP'), d) call
    # and matches the datastore style used elsewhere in this task.
    dir = "%s.*" % d.getVar('STAMP', True)
    bb.note("Removing " + dir)
    oe.path.remove(dir)

    # Allow recipes/classes to hook extra cleanup steps via CLEANFUNCS.
    for f in (d.getVar('CLEANFUNCS', True) or '').split():
        bb.build.exec_func(f, d)
}
30 | 30 | ||
31 | addtask checkuri | 31 | addtask checkuri |
32 | do_checkuri[nostamp] = "1" | 32 | do_checkuri[nostamp] = "1" |
python do_checkuri() {
    # Verify that every SRC_URI entry is reachable without downloading.
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.checkstatus()
    # 'except Exc as e' replaces the Python-2-only 'except Exc, e' form;
    # behaviour is identical.
    except bb.fetch2.BBFetchException as e:
        raise bb.build.FuncFailed(e)
}
47 | 47 | ||
48 | addtask checkuriall after do_checkuri | 48 | addtask checkuriall after do_checkuri |
diff --git a/meta/classes/utils.bbclass b/meta/classes/utils.bbclass index ccf78fcfee..57406109de 100644 --- a/meta/classes/utils.bbclass +++ b/meta/classes/utils.bbclass | |||
@@ -292,77 +292,77 @@ END | |||
292 | } | 292 | } |
293 | 293 | ||
def check_app_exists(app, d):
    """Return True if 'app' (after datastore variable expansion) can be
    located on the expanded PATH, False otherwise."""
    from bb import which

    # Use the datastore methods directly; d.expand(app) and
    # d.getVar('PATH', True) are equivalent to the legacy
    # bb.data.expand(app, d) / data.getVar('PATH', d, 1) calls and match
    # the API style used by the rest of this file.
    app = d.expand(app)
    path = d.getVar('PATH', True)
    return bool(which(path, app))
300 | 300 | ||
def explode_deps(s):
    # Thin compatibility wrapper: the real implementation lives in
    # bb.utils.  Kept so metadata can call explode_deps() directly.
    return bb.utils.explode_deps(s)
303 | 303 | ||
def base_set_filespath(path, d):
    """Build the FILESPATH search string.

    Each directory in 'path' (optionally prefixed by the colon-separated
    FILESEXTRAPATHS entries) is crossed with every OVERRIDES element and
    the results are joined with ':'.
    """
    extrapaths = d.getVar("FILESEXTRAPATHS", True) or ""
    # Skip the prepend entirely when FILESEXTRAPATHS is unset so no
    # empty strings sneak into the path list.
    if extrapaths:
        path = extrapaths.split(":") + path
    # The trailing ":" yields one empty override, i.e. the bare directory.
    overrides = (d.getVar("OVERRIDES", True) or "") + ":"
    filespath = [os.path.join(base, override)
                 for base in path if base
                 for override in overrides.split(":")]
    return ":".join(filespath)
317 | 317 | ||
def extend_variants(d, var, extend, delim=':'):
    """Return a space-separated string of the variant names declared in
    ${var} for the given class-extend mechanism.

    Entries in ${var} have the form '<extend><delim><variant>'; only
    entries whose first field equals 'extend' contribute their variant.
    """
    entries = (d.getVar(var, True) or "").split()
    fields = [entry.split(delim) for entry in entries]
    return " ".join(f[1] for f in fields if len(f) > 1 and f[0] == extend)
327 | 327 | ||
def multilib_pkg_extend(d, pkg):
    """Expand a package name to cover every multilib variant.

    e.g. with MULTILIB_VARIANTS = "lib32", 'foo' -> 'foo lib32-foo'.
    Returns the name unchanged when no variants are configured.
    """
    variants = (d.getVar("MULTILIB_VARIANTS", True) or "").split()
    if not variants:
        return pkg
    # Plain name first, then one '<variant>-<pkg>' entry per variant.
    return " ".join([pkg] + ["%s-%s" % (variant, pkg) for variant in variants])
336 | 336 | ||
def all_multilib_tune_values(d, var, unique = True, need_split = True, delim = ' '):
    """Return a string of all ${var} in all multilib tune configuration"""
    # Collect the base (non-multilib) value of ${var} first, split on
    # 'delim' when need_split is set.
    values = []
    value = d.getVar(var, True) or ""
    if value != "":
        if need_split:
            for item in value.split(delim):
                values.append(item)
        else:
            values.append(value)
    # Then re-evaluate ${var} once per multilib variant by applying the
    # virtclass-multilib-<variant> override on a copy of the datastore,
    # so per-multilib overrides of ${var} are picked up.
    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)
        bb.data.update_data(localdata)
        value = localdata.getVar(var, True) or ""
        if value != "":
            if need_split:
                # NOTE(review): this inner 'item' shadows the variant
                # loop variable; harmless here because the variant name
                # is not used again within the iteration.
                for item in value.split(delim):
                    values.append(item)
            else:
                values.append(value)
    if unique:
        #we do this to keep order as much as possible
        ret = []
        for value in values:
            if not value in ret:
                ret.append(value)
    else:
        ret = values
    # Result is always re-joined with spaces, regardless of 'delim'.
    return " ".join(ret)