author     Richard Purdie <richard.purdie@linuxfoundation.org>  2016-05-20 11:17:05 +0100
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2016-06-02 08:24:00 +0100
commit     a7309d5790f5dac46e84d3c14959943eb2496fda (patch)
tree       48e1fcb886b8ef2974bade09694356f3230fb8a8 /meta/classes
parent     297438e965053b2eb56cc8ef3e59465642f10a24 (diff)
download   poky-a7309d5790f5dac46e84d3c14959943eb2496fda.tar.gz
classes/lib: Update to use python3 command pipeline decoding
In Python 3, strings are unicode by default. Data crossing the boundary
to the real world, such as command pipeline output, must be encoded or
decoded explicitly using the correct locale. This patch updates the
affected call sites to use the correct encodings and decodings.
(From OE-Core rev: bb4685af1bffe17b3aa92a6d21398f38a44ea874)
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
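
A minimal sketch of the pattern these changes apply, for context (the command
and output file below are hypothetical, not taken from the patch): in Python 3,
subprocess pipelines return bytes, which must be decoded before any string
handling, while plain text-mode open() already encodes on write, so the
codecs.open() wrappers become unnecessary.

    import subprocess

    # Command pipelines return bytes under Python 3; decode explicitly
    # before doing any string manipulation on the output.
    raw = subprocess.check_output(['git', 'rev-parse', 'HEAD'])
    sha1 = raw.decode('utf-8').strip()

    # Text-mode open() encodes on write, so no codecs.open() wrapper is needed.
    with open('latest', 'w') as f:
        f.write("SHA1 = %s\n" % sha1)
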
Diffstat (limited to 'meta/classes')

 meta/classes/buildhistory.bbclass | 12
 meta/classes/chrpath.bbclass      |  2
 meta/classes/externalsrc.bbclass  |  2
 meta/classes/insane.bbclass       |  8
 meta/classes/libc-package.bbclass |  3
 meta/classes/package.bbclass      |  6
 meta/classes/package_deb.bbclass  | 26
 meta/classes/toaster.bbclass      |  1
 8 files changed, 29 insertions(+), 31 deletions(-)
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass
index 581d532693..e3b5c44a09 100644
--- a/meta/classes/buildhistory.bbclass
+++ b/meta/classes/buildhistory.bbclass
@@ -233,7 +233,7 @@ python buildhistory_emit_pkghistory() {
                 key = item[0]
                 if key.endswith('_' + pkg):
                     key = key[:-len(pkg)-1]
-                pkgdata[key] = item[1].decode('utf-8').decode('string_escape')
+                pkgdata[key] = item[1]
 
         pkge = pkgdata.get('PKGE', '0')
         pkgv = pkgdata['PKGV']
@@ -288,14 +288,12 @@ python buildhistory_emit_pkghistory() {
 
 
 def write_recipehistory(rcpinfo, d):
-    import codecs
-
     bb.debug(2, "Writing recipe history")
 
     pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
 
     infofile = os.path.join(pkghistdir, "latest")
-    with codecs.open(infofile, "w", encoding='utf8') as f:
+    with open(infofile, "w") as f:
         if rcpinfo.pe != "0":
             f.write(u"PE = %s\n" % rcpinfo.pe)
         f.write(u"PV = %s\n" % rcpinfo.pv)
@@ -305,8 +303,6 @@ def write_recipehistory(rcpinfo, d):
 
 
 def write_pkghistory(pkginfo, d):
-    import codecs
-
     bb.debug(2, "Writing package history for package %s" % pkginfo.name)
 
     pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
@@ -316,7 +312,7 @@ def write_pkghistory(pkginfo, d):
     bb.utils.mkdirhier(pkgpath)
 
     infofile = os.path.join(pkgpath, "latest")
-    with codecs.open(infofile, "w", encoding='utf8') as f:
+    with open(infofile, "w") as f:
         if pkginfo.pe != "0":
             f.write(u"PE = %s\n" % pkginfo.pe)
         f.write(u"PV = %s\n" % pkginfo.pv)
@@ -349,7 +345,7 @@ def write_pkghistory(pkginfo, d):
         filevarpath = os.path.join(pkgpath, "latest.%s" % filevar)
         val = pkginfo.filevars[filevar]
         if val:
-            with codecs.open(filevarpath, "w", encoding='utf8') as f:
+            with open(filevarpath, "w") as f:
                 f.write(val)
         else:
             if os.path.exists(filevarpath):
diff --git a/meta/classes/chrpath.bbclass b/meta/classes/chrpath.bbclass
index 9c68855ab2..cdd7f27600 100644
--- a/meta/classes/chrpath.bbclass
+++ b/meta/classes/chrpath.bbclass
@@ -10,6 +10,8 @@ def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d):
     if p.returncode != 0:
         return
 
+    err = err.decode('utf-8')
+
     # Handle RUNPATH as well as RPATH
     err = err.replace("RUNPATH=","RPATH=")
     # Throw away everything other than the rpath list
diff --git a/meta/classes/externalsrc.bbclass b/meta/classes/externalsrc.bbclass
index da7eb4781c..b7140a321d 100644
--- a/meta/classes/externalsrc.bbclass
+++ b/meta/classes/externalsrc.bbclass
@@ -145,7 +145,7 @@ def srctree_hash_files(d):
             env = os.environ.copy()
             env['GIT_INDEX_FILE'] = tmp_index.name
             subprocess.check_output(['git', 'add', '.'], cwd=s_dir, env=env)
-            sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env)
+            sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
         with open(oe_hash_file, 'w') as fobj:
             fobj.write(sha1)
         ret = oe_hash_file + ':True'
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass
index 71999ada34..9b2337cdbf 100644
--- a/meta/classes/insane.bbclass
+++ b/meta/classes/insane.bbclass
@@ -399,7 +399,7 @@ def package_qa_check_unsafe_references_in_binaries(path, name, d, elf, messages)
         sysroot_path_usr = sysroot_path + exec_prefix
 
         try:
-            ldd_output = bb.process.Popen(["prelink-rtld", "--root", sysroot_path, path], stdout=sub.PIPE).stdout.read()
+            ldd_output = bb.process.Popen(["prelink-rtld", "--root", sysroot_path, path], stdout=sub.PIPE).stdout.read().decode("utf-8")
         except bb.process.CmdError:
             error_msg = pn + ": prelink-rtld aborted when processing %s" % path
             package_qa_handle_error("unsafe-references-in-binaries", error_msg, d)
@@ -986,12 +986,12 @@ def package_qa_check_expanded_d(path,name,d,elf,messages):
     return sane
 
 def package_qa_check_encoding(keys, encode, d):
-    def check_encoding(key,enc):
+    def check_encoding(key, enc):
         sane = True
         value = d.getVar(key, True)
         if value:
             try:
-                s = unicode(value, enc)
+                s = value.encode(enc)
             except UnicodeDecodeError as e:
                 error_msg = "%s has non %s characters" % (key,enc)
                 sane = False
@@ -1217,7 +1217,7 @@ Missing inherit gettext?""" % (gt, config))
         try:
             flag = "WARNING: unrecognized options:"
             log = os.path.join(d.getVar('B', True), 'config.log')
-            output = subprocess.check_output(['grep', '-F', flag, log]).replace(', ', ' ')
+            output = subprocess.check_output(['grep', '-F', flag, log]).decode("utf-8").replace(', ', ' ')
             options = set()
             for line in output.splitlines():
                 options |= set(line.partition(flag)[2].split())
diff --git a/meta/classes/libc-package.bbclass b/meta/classes/libc-package.bbclass
index 467d567923..70f479bb41 100644
--- a/meta/classes/libc-package.bbclass
+++ b/meta/classes/libc-package.bbclass
@@ -150,6 +150,7 @@ python package_do_split_gconvs () {
         c_re = re.compile('^copy "(.*)"')
         i_re = re.compile('^include "(\w+)".*')
         for l in f.readlines():
+            l = l.decode("latin-1")
             m = c_re.match(l) or i_re.match(l)
             if m:
                 dp = legitimize_package_name('%s%s-gconv-%s' % (mlprefix, bpn, m.group(1)))
@@ -171,6 +172,7 @@ python package_do_split_gconvs () {
         c_re = re.compile('^copy "(.*)"')
         i_re = re.compile('^include "(\w+)".*')
         for l in f.readlines():
+            l = l.decode("latin-1")
             m = c_re.match(l) or i_re.match(l)
             if m:
                 dp = legitimize_package_name('%s%s-charmap-%s' % (mlprefix, bpn, m.group(1)))
@@ -191,6 +193,7 @@ python package_do_split_gconvs () {
         c_re = re.compile('^copy "(.*)"')
         i_re = re.compile('^include "(\w+)".*')
         for l in f.readlines():
+            l = l.decode("latin-1")
             m = c_re.match(l) or i_re.match(l)
             if m:
                 dp = legitimize_package_name(mlprefix+bpn+'-localedata-%s' % m.group(1))
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index a4125a0e98..501004ed48 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -63,7 +63,7 @@ def legitimize_package_name(s):
     def fixutf(m):
         cp = m.group(1)
         if cp:
-            return ('\u%s' % cp).decode('unicode_escape').encode('utf-8')
+            return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')
 
     # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
     s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s)
@@ -1259,8 +1259,8 @@ python emit_pkgdata() {
     def write_if_exists(f, pkg, var):
         def encode(str):
             import codecs
-            c = codecs.getencoder("string_escape")
-            return c(str)[0]
+            c = codecs.getencoder("unicode_escape")
+            return c(str)[0].decode("latin1")
 
         val = d.getVar('%s_%s' % (var, pkg), True)
         if val:
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass
index bb5220ed00..e35f427ea2 100644
--- a/meta/classes/package_deb.bbclass
+++ b/meta/classes/package_deb.bbclass
@@ -173,7 +173,7 @@ python do_package_deb () {
                 # Special behavior for description...
                 if 'DESCRIPTION' in fs:
                     summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "."
-                    ctrlfile.write('Description: %s\n' % unicode(summary,'utf-8'))
+                    ctrlfile.write('Description: %s\n' % summary)
                     description = localdata.getVar('DESCRIPTION', True) or "."
                     description = textwrap.dedent(description).strip()
                     if '\\n' in description:
@@ -182,29 +182,25 @@ python do_package_deb () {
                             # We don't limit the width when manually indent, but we do
                             # need the textwrap.fill() to set the initial_indent and
                             # subsequent_indent, so set a large width
-                            ctrlfile.write('%s\n' % unicode(textwrap.fill(t, width=100000, initial_indent=' ', subsequent_indent=' '),'utf-8'))
+                            ctrlfile.write('%s\n' % textwrap.fill(t, width=100000, initial_indent=' ', subsequent_indent=' '))
                     else:
                         # Auto indent
-                        ctrlfile.write('%s\n' % unicode(textwrap.fill(description.strip(), width=74, initial_indent=' ', subsequent_indent=' '),'utf-8'))
+                        ctrlfile.write('%s\n' % textwrap.fill(description.strip(), width=74, initial_indent=' ', subsequent_indent=' '))
 
                 else:
-                    ctrlfile.write(unicode(c % tuple(pullData(fs, localdata)),'utf-8'))
+                    ctrlfile.write(c % tuple(pullData(fs, localdata)))
         except KeyError:
             import sys
             (type, value, traceback) = sys.exc_info()
             bb.utils.unlockfile(lf)
             ctrlfile.close()
             raise bb.build.FuncFailed("Missing field for deb generation: %s" % value)
-        except UnicodeDecodeError:
-            bb.utils.unlockfile(lf)
-            ctrlfile.close()
-            raise bb.build.FuncFailed("Non UTF-8 characters found in one of the fields")
 
         # more fields
 
         custom_fields_chunk = get_package_additional_metadata("deb", localdata)
         if custom_fields_chunk is not None:
-            ctrlfile.write(unicode(custom_fields_chunk))
+            ctrlfile.write(custom_fields_chunk)
             ctrlfile.write("\n")
 
         mapping_rename_hook(localdata)
@@ -255,17 +251,17 @@ python do_package_deb () {
         rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS", True) or "")
         debian_cmp_remap(rconflicts)
         if rdepends:
-            ctrlfile.write("Depends: %s\n" % unicode(bb.utils.join_deps(rdepends)))
+            ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends))
         if rsuggests:
-            ctrlfile.write("Suggests: %s\n" % unicode(bb.utils.join_deps(rsuggests)))
+            ctrlfile.write("Suggests: %s\n" % bb.utils.join_deps(rsuggests))
         if rrecommends:
-            ctrlfile.write("Recommends: %s\n" % unicode(bb.utils.join_deps(rrecommends)))
+            ctrlfile.write("Recommends: %s\n" % bb.utils.join_deps(rrecommends))
         if rprovides:
-            ctrlfile.write("Provides: %s\n" % unicode(bb.utils.join_deps(rprovides)))
+            ctrlfile.write("Provides: %s\n" % bb.utils.join_deps(rprovides))
         if rreplaces:
-            ctrlfile.write("Replaces: %s\n" % unicode(bb.utils.join_deps(rreplaces)))
+            ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces))
         if rconflicts:
-            ctrlfile.write("Conflicts: %s\n" % unicode(bb.utils.join_deps(rconflicts)))
+            ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts))
         ctrlfile.close()
 
         for script in ["preinst", "postinst", "prerm", "postrm"]:
diff --git a/meta/classes/toaster.bbclass b/meta/classes/toaster.bbclass
index 1a70f14a92..1878fe095d 100644
--- a/meta/classes/toaster.bbclass
+++ b/meta/classes/toaster.bbclass
@@ -33,6 +33,7 @@ python toaster_layerinfo_dumpdata() {
 
     def _get_git_branch(layer_path):
         branch = subprocess.Popen("git symbolic-ref HEAD 2>/dev/null ", cwd=layer_path, shell=True, stdout=subprocess.PIPE).communicate()[0]
+        branch = branch.decode('utf-8')
         branch = branch.replace('refs/heads/', '').rstrip()
         return branch
 