diff options
Diffstat (limited to 'meta/classes')
-rw-r--r--  meta/classes/base.bbclass          |  4
-rw-r--r--  meta/classes/buildhistory.bbclass  | 21
-rw-r--r--  meta/classes/chrpath.bbclass       |  3
-rw-r--r--  meta/classes/insane.bbclass        | 11
-rw-r--r--  meta/classes/kernel-uimage.bbclass | 12
-rw-r--r--  meta/classes/libc-package.bbclass  |  3
-rw-r--r--  meta/classes/license.bbclass       |  6
-rw-r--r--  meta/classes/metadata_scm.bbclass  |  9
-rw-r--r--  meta/classes/package.bbclass       | 10
-rw-r--r--  meta/classes/package_deb.bbclass   | 30
-rw-r--r--  meta/classes/package_ipk.bbclass   |  2
-rw-r--r--  meta/classes/report-error.bbclass  |  2
-rw-r--r--  meta/classes/sstate.bbclass        |  5
-rw-r--r--  meta/classes/staging.bbclass       |  2
-rw-r--r--  meta/classes/terminal.bbclass      |  2
-rw-r--r--  meta/classes/toaster.bbclass       |  1
-rw-r--r--  meta/classes/utility-tasks.bbclass |  2
17 files changed, 64 insertions, 61 deletions
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass index e5c58b1e20..4b6aab4562 100644 --- a/meta/classes/base.bbclass +++ b/meta/classes/base.bbclass | |||
@@ -10,7 +10,7 @@ inherit utility-tasks | |||
10 | inherit metadata_scm | 10 | inherit metadata_scm |
11 | inherit logging | 11 | inherit logging |
12 | 12 | ||
13 | OE_IMPORTS += "os sys time oe.path oe.utils oe.data oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath" | 13 | OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath" |
14 | OE_IMPORTS[type] = "list" | 14 | OE_IMPORTS[type] = "list" |
15 | 15 | ||
16 | def oe_import(d): | 16 | def oe_import(d): |
@@ -658,7 +658,7 @@ python do_cleanall() { | |||
658 | try: | 658 | try: |
659 | fetcher = bb.fetch2.Fetch(src_uri, d) | 659 | fetcher = bb.fetch2.Fetch(src_uri, d) |
660 | fetcher.clean() | 660 | fetcher.clean() |
661 | except bb.fetch2.BBFetchException, e: | 661 | except bb.fetch2.BBFetchException as e: |
662 | raise bb.build.FuncFailed(e) | 662 | raise bb.build.FuncFailed(e) |
663 | } | 663 | } |
664 | do_cleanall[nostamp] = "1" | 664 | do_cleanall[nostamp] = "1" |
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass index 581d532693..3db92d4de7 100644 --- a/meta/classes/buildhistory.bbclass +++ b/meta/classes/buildhistory.bbclass | |||
@@ -64,6 +64,11 @@ python buildhistory_emit_pkghistory() { | |||
64 | if not d.getVar('BB_CURRENTTASK', True) in ['packagedata', 'packagedata_setscene']: | 64 | if not d.getVar('BB_CURRENTTASK', True) in ['packagedata', 'packagedata_setscene']: |
65 | return 0 | 65 | return 0 |
66 | 66 | ||
67 | import re | ||
68 | import sys | ||
69 | if sys.version > '3': | ||
70 | long = int | ||
71 | |||
67 | if not "package" in (d.getVar('BUILDHISTORY_FEATURES', True) or "").split(): | 72 | if not "package" in (d.getVar('BUILDHISTORY_FEATURES', True) or "").split(): |
68 | return 0 | 73 | return 0 |
69 | 74 | ||
@@ -233,7 +238,7 @@ python buildhistory_emit_pkghistory() { | |||
233 | key = item[0] | 238 | key = item[0] |
234 | if key.endswith('_' + pkg): | 239 | if key.endswith('_' + pkg): |
235 | key = key[:-len(pkg)-1] | 240 | key = key[:-len(pkg)-1] |
236 | pkgdata[key] = item[1].decode('utf-8').decode('string_escape') | 241 | pkgdata[key] = item[1] |
237 | 242 | ||
238 | pkge = pkgdata.get('PKGE', '0') | 243 | pkge = pkgdata.get('PKGE', '0') |
239 | pkgv = pkgdata['PKGV'] | 244 | pkgv = pkgdata['PKGV'] |
@@ -274,7 +279,7 @@ python buildhistory_emit_pkghistory() { | |||
274 | # Gather information about packaged files | 279 | # Gather information about packaged files |
275 | val = pkgdata.get('FILES_INFO', '') | 280 | val = pkgdata.get('FILES_INFO', '') |
276 | dictval = json.loads(val) | 281 | dictval = json.loads(val) |
277 | filelist = dictval.keys() | 282 | filelist = list(dictval.keys()) |
278 | filelist.sort() | 283 | filelist.sort() |
279 | pkginfo.filelist = " ".join(filelist) | 284 | pkginfo.filelist = " ".join(filelist) |
280 | 285 | ||
@@ -288,14 +293,12 @@ python buildhistory_emit_pkghistory() { | |||
288 | 293 | ||
289 | 294 | ||
290 | def write_recipehistory(rcpinfo, d): | 295 | def write_recipehistory(rcpinfo, d): |
291 | import codecs | ||
292 | |||
293 | bb.debug(2, "Writing recipe history") | 296 | bb.debug(2, "Writing recipe history") |
294 | 297 | ||
295 | pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) | 298 | pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) |
296 | 299 | ||
297 | infofile = os.path.join(pkghistdir, "latest") | 300 | infofile = os.path.join(pkghistdir, "latest") |
298 | with codecs.open(infofile, "w", encoding='utf8') as f: | 301 | with open(infofile, "w") as f: |
299 | if rcpinfo.pe != "0": | 302 | if rcpinfo.pe != "0": |
300 | f.write(u"PE = %s\n" % rcpinfo.pe) | 303 | f.write(u"PE = %s\n" % rcpinfo.pe) |
301 | f.write(u"PV = %s\n" % rcpinfo.pv) | 304 | f.write(u"PV = %s\n" % rcpinfo.pv) |
@@ -305,8 +308,6 @@ def write_recipehistory(rcpinfo, d): | |||
305 | 308 | ||
306 | 309 | ||
307 | def write_pkghistory(pkginfo, d): | 310 | def write_pkghistory(pkginfo, d): |
308 | import codecs | ||
309 | |||
310 | bb.debug(2, "Writing package history for package %s" % pkginfo.name) | 311 | bb.debug(2, "Writing package history for package %s" % pkginfo.name) |
311 | 312 | ||
312 | pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) | 313 | pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) |
@@ -316,7 +317,7 @@ def write_pkghistory(pkginfo, d): | |||
316 | bb.utils.mkdirhier(pkgpath) | 317 | bb.utils.mkdirhier(pkgpath) |
317 | 318 | ||
318 | infofile = os.path.join(pkgpath, "latest") | 319 | infofile = os.path.join(pkgpath, "latest") |
319 | with codecs.open(infofile, "w", encoding='utf8') as f: | 320 | with open(infofile, "w") as f: |
320 | if pkginfo.pe != "0": | 321 | if pkginfo.pe != "0": |
321 | f.write(u"PE = %s\n" % pkginfo.pe) | 322 | f.write(u"PE = %s\n" % pkginfo.pe) |
322 | f.write(u"PV = %s\n" % pkginfo.pv) | 323 | f.write(u"PV = %s\n" % pkginfo.pv) |
@@ -349,7 +350,7 @@ def write_pkghistory(pkginfo, d): | |||
349 | filevarpath = os.path.join(pkgpath, "latest.%s" % filevar) | 350 | filevarpath = os.path.join(pkgpath, "latest.%s" % filevar) |
350 | val = pkginfo.filevars[filevar] | 351 | val = pkginfo.filevars[filevar] |
351 | if val: | 352 | if val: |
352 | with codecs.open(filevarpath, "w", encoding='utf8') as f: | 353 | with open(filevarpath, "w") as f: |
353 | f.write(val) | 354 | f.write(val) |
354 | else: | 355 | else: |
355 | if os.path.exists(filevarpath): | 356 | if os.path.exists(filevarpath): |
@@ -842,7 +843,7 @@ python write_srcrev() { | |||
842 | f.write('# SRCREV_%s = "%s"\n' % (name, orig_srcrev)) | 843 | f.write('# SRCREV_%s = "%s"\n' % (name, orig_srcrev)) |
843 | f.write('SRCREV_%s = "%s"\n' % (name, srcrev)) | 844 | f.write('SRCREV_%s = "%s"\n' % (name, srcrev)) |
844 | else: | 845 | else: |
845 | f.write('SRCREV = "%s"\n' % srcrevs.itervalues().next()) | 846 | f.write('SRCREV = "%s"\n' % srcrevs.values()) |
846 | if len(tag_srcrevs) > 0: | 847 | if len(tag_srcrevs) > 0: |
847 | for name, srcrev in tag_srcrevs.items(): | 848 | for name, srcrev in tag_srcrevs.items(): |
848 | f.write('# tag_%s = "%s"\n' % (name, srcrev)) | 849 | f.write('# tag_%s = "%s"\n' % (name, srcrev)) |
diff --git a/meta/classes/chrpath.bbclass b/meta/classes/chrpath.bbclass index 9c68855ab2..0182062adf 100644 --- a/meta/classes/chrpath.bbclass +++ b/meta/classes/chrpath.bbclass | |||
@@ -10,6 +10,8 @@ def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d): | |||
10 | if p.returncode != 0: | 10 | if p.returncode != 0: |
11 | return | 11 | return |
12 | 12 | ||
13 | err = err.decode('ascii') | ||
14 | |||
13 | # Handle RUNPATH as well as RPATH | 15 | # Handle RUNPATH as well as RPATH |
14 | err = err.replace("RUNPATH=","RPATH=") | 16 | err = err.replace("RUNPATH=","RPATH=") |
15 | # Throw away everything other than the rpath list | 17 | # Throw away everything other than the rpath list |
@@ -103,7 +105,6 @@ def process_dir (rootdir, directory, d): | |||
103 | # Temporarily make the file writeable so we can chrpath it | 105 | # Temporarily make the file writeable so we can chrpath it |
104 | os.chmod(fpath, perms|stat.S_IRWXU) | 106 | os.chmod(fpath, perms|stat.S_IRWXU) |
105 | process_file(cmd, fpath, rootdir, baseprefix, tmpdir, d) | 107 | process_file(cmd, fpath, rootdir, baseprefix, tmpdir, d) |
106 | |||
107 | if perms: | 108 | if perms: |
108 | os.chmod(fpath, perms) | 109 | os.chmod(fpath, perms) |
109 | 110 | ||
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass index 0b151c214d..d4b9825c93 100644 --- a/meta/classes/insane.bbclass +++ b/meta/classes/insane.bbclass | |||
@@ -177,9 +177,8 @@ def package_qa_write_error(type, error, d): | |||
177 | logfile = d.getVar('QA_LOGFILE', True) | 177 | logfile = d.getVar('QA_LOGFILE', True) |
178 | if logfile: | 178 | if logfile: |
179 | p = d.getVar('P', True) | 179 | p = d.getVar('P', True) |
180 | f = file( logfile, "a+") | 180 | with open(logfile, "a+") as f: |
181 | print >> f, "%s: %s [%s]" % (p, error, type) | 181 | f.write("%s: %s [%s]" % (p, error, type)) |
182 | f.close() | ||
183 | 182 | ||
184 | def package_qa_handle_error(error_class, error_msg, d): | 183 | def package_qa_handle_error(error_class, error_msg, d): |
185 | package_qa_write_error(error_class, error_msg, d) | 184 | package_qa_write_error(error_class, error_msg, d) |
@@ -987,12 +986,12 @@ def package_qa_check_expanded_d(path,name,d,elf,messages): | |||
987 | return sane | 986 | return sane |
988 | 987 | ||
989 | def package_qa_check_encoding(keys, encode, d): | 988 | def package_qa_check_encoding(keys, encode, d): |
990 | def check_encoding(key,enc): | 989 | def check_encoding(key, enc): |
991 | sane = True | 990 | sane = True |
992 | value = d.getVar(key, True) | 991 | value = d.getVar(key, True) |
993 | if value: | 992 | if value: |
994 | try: | 993 | try: |
995 | s = unicode(value, enc) | 994 | s = value.encode(enc) |
996 | except UnicodeDecodeError as e: | 995 | except UnicodeDecodeError as e: |
997 | error_msg = "%s has non %s characters" % (key,enc) | 996 | error_msg = "%s has non %s characters" % (key,enc) |
998 | sane = False | 997 | sane = False |
@@ -1218,7 +1217,7 @@ Missing inherit gettext?""" % (gt, config)) | |||
1218 | try: | 1217 | try: |
1219 | flag = "WARNING: unrecognized options:" | 1218 | flag = "WARNING: unrecognized options:" |
1220 | log = os.path.join(d.getVar('B', True), 'config.log') | 1219 | log = os.path.join(d.getVar('B', True), 'config.log') |
1221 | output = subprocess.check_output(['grep', '-F', flag, log]).replace(', ', ' ') | 1220 | output = subprocess.check_output(['grep', '-F', flag, log]).decode("utf-8").replace(', ', ' ') |
1222 | options = set() | 1221 | options = set() |
1223 | for line in output.splitlines(): | 1222 | for line in output.splitlines(): |
1224 | options |= set(line.partition(flag)[2].split()) | 1223 | options |= set(line.partition(flag)[2].split()) |
diff --git a/meta/classes/kernel-uimage.bbclass b/meta/classes/kernel-uimage.bbclass index de7ca38e94..543f5432ba 100644 --- a/meta/classes/kernel-uimage.bbclass +++ b/meta/classes/kernel-uimage.bbclass | |||
@@ -7,12 +7,12 @@ python __anonymous () { | |||
7 | depends = "%s u-boot-mkimage-native" % depends | 7 | depends = "%s u-boot-mkimage-native" % depends |
8 | d.setVar("DEPENDS", depends) | 8 | d.setVar("DEPENDS", depends) |
9 | 9 | ||
10 | # Override KERNEL_IMAGETYPE_FOR_MAKE variable, which is internal | 10 | # Override KERNEL_IMAGETYPE_FOR_MAKE variable, which is internal |
11 | # to kernel.bbclass . We override the variable here, since we need | 11 | # to kernel.bbclass . We override the variable here, since we need |
12 | # to build uImage using the kernel build system if and only if | 12 | # to build uImage using the kernel build system if and only if |
13 | # KEEPUIMAGE == yes. Otherwise, we pack compressed vmlinux into | 13 | # KEEPUIMAGE == yes. Otherwise, we pack compressed vmlinux into |
14 | # the uImage . | 14 | # the uImage . |
15 | if d.getVar("KEEPUIMAGE", True) != 'yes': | 15 | if d.getVar("KEEPUIMAGE", True) != 'yes': |
16 | d.setVar("KERNEL_IMAGETYPE_FOR_MAKE", "vmlinux") | 16 | d.setVar("KERNEL_IMAGETYPE_FOR_MAKE", "vmlinux") |
17 | } | 17 | } |
18 | 18 | ||
diff --git a/meta/classes/libc-package.bbclass b/meta/classes/libc-package.bbclass index 467d567923..70f479bb41 100644 --- a/meta/classes/libc-package.bbclass +++ b/meta/classes/libc-package.bbclass | |||
@@ -150,6 +150,7 @@ python package_do_split_gconvs () { | |||
150 | c_re = re.compile('^copy "(.*)"') | 150 | c_re = re.compile('^copy "(.*)"') |
151 | i_re = re.compile('^include "(\w+)".*') | 151 | i_re = re.compile('^include "(\w+)".*') |
152 | for l in f.readlines(): | 152 | for l in f.readlines(): |
153 | l = l.decode("latin-1") | ||
153 | m = c_re.match(l) or i_re.match(l) | 154 | m = c_re.match(l) or i_re.match(l) |
154 | if m: | 155 | if m: |
155 | dp = legitimize_package_name('%s%s-gconv-%s' % (mlprefix, bpn, m.group(1))) | 156 | dp = legitimize_package_name('%s%s-gconv-%s' % (mlprefix, bpn, m.group(1))) |
@@ -171,6 +172,7 @@ python package_do_split_gconvs () { | |||
171 | c_re = re.compile('^copy "(.*)"') | 172 | c_re = re.compile('^copy "(.*)"') |
172 | i_re = re.compile('^include "(\w+)".*') | 173 | i_re = re.compile('^include "(\w+)".*') |
173 | for l in f.readlines(): | 174 | for l in f.readlines(): |
175 | l = l.decode("latin-1") | ||
174 | m = c_re.match(l) or i_re.match(l) | 176 | m = c_re.match(l) or i_re.match(l) |
175 | if m: | 177 | if m: |
176 | dp = legitimize_package_name('%s%s-charmap-%s' % (mlprefix, bpn, m.group(1))) | 178 | dp = legitimize_package_name('%s%s-charmap-%s' % (mlprefix, bpn, m.group(1))) |
@@ -191,6 +193,7 @@ python package_do_split_gconvs () { | |||
191 | c_re = re.compile('^copy "(.*)"') | 193 | c_re = re.compile('^copy "(.*)"') |
192 | i_re = re.compile('^include "(\w+)".*') | 194 | i_re = re.compile('^include "(\w+)".*') |
193 | for l in f.readlines(): | 195 | for l in f.readlines(): |
196 | l = l.decode("latin-1") | ||
194 | m = c_re.match(l) or i_re.match(l) | 197 | m = c_re.match(l) or i_re.match(l) |
195 | if m: | 198 | if m: |
196 | dp = legitimize_package_name(mlprefix+bpn+'-localedata-%s' % m.group(1)) | 199 | dp = legitimize_package_name(mlprefix+bpn+'-localedata-%s' % m.group(1)) |
diff --git a/meta/classes/license.bbclass b/meta/classes/license.bbclass index 69335d650d..10d6ed853a 100644 --- a/meta/classes/license.bbclass +++ b/meta/classes/license.bbclass | |||
@@ -200,7 +200,7 @@ def get_deployed_dependencies(d): | |||
200 | # it might contain the bootloader. | 200 | # it might contain the bootloader. |
201 | taskdata = d.getVar("BB_TASKDEPDATA", False) | 201 | taskdata = d.getVar("BB_TASKDEPDATA", False) |
202 | depends = list(set([dep[0] for dep | 202 | depends = list(set([dep[0] for dep |
203 | in taskdata.itervalues() | 203 | in list(taskdata.values()) |
204 | if not dep[0].endswith("-native")])) | 204 | if not dep[0].endswith("-native")])) |
205 | extra_depends = d.getVar("EXTRA_IMAGEDEPENDS", True) | 205 | extra_depends = d.getVar("EXTRA_IMAGEDEPENDS", True) |
206 | boot_depends = get_boot_dependencies(d) | 206 | boot_depends = get_boot_dependencies(d) |
@@ -261,7 +261,7 @@ def get_boot_dependencies(d): | |||
261 | depends.append(dep) | 261 | depends.append(dep) |
262 | # We need to search for the provider of the dependency | 262 | # We need to search for the provider of the dependency |
263 | else: | 263 | else: |
264 | for taskdep in taskdepdata.itervalues(): | 264 | for taskdep in taskdepdata.values(): |
265 | # The fifth field contains what the task provides | 265 | # The fifth field contains what the task provides |
266 | if dep in taskdep[4]: | 266 | if dep in taskdep[4]: |
267 | info_file = os.path.join( | 267 | info_file = os.path.join( |
@@ -635,7 +635,7 @@ def check_license_format(d): | |||
635 | licenses = d.getVar('LICENSE', True) | 635 | licenses = d.getVar('LICENSE', True) |
636 | from oe.license import license_operator, license_operator_chars, license_pattern | 636 | from oe.license import license_operator, license_operator_chars, license_pattern |
637 | 637 | ||
638 | elements = filter(lambda x: x.strip(), license_operator.split(licenses)) | 638 | elements = list(filter(lambda x: x.strip(), license_operator.split(licenses))) |
639 | for pos, element in enumerate(elements): | 639 | for pos, element in enumerate(elements): |
640 | if license_pattern.match(element): | 640 | if license_pattern.match(element): |
641 | if pos > 0 and license_pattern.match(elements[pos - 1]): | 641 | if pos > 0 and license_pattern.match(elements[pos - 1]): |
diff --git a/meta/classes/metadata_scm.bbclass b/meta/classes/metadata_scm.bbclass index 2e6fac209a..257df3c99a 100644 --- a/meta/classes/metadata_scm.bbclass +++ b/meta/classes/metadata_scm.bbclass | |||
@@ -69,8 +69,8 @@ def base_get_metadata_git_branch(path, d): | |||
69 | try: | 69 | try: |
70 | rev, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', cwd=path) | 70 | rev, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', cwd=path) |
71 | except bb.process.ExecutionError: | 71 | except bb.process.ExecutionError: |
72 | rev = '<unknown>' | 72 | return '<unknown>' |
73 | return rev.strip() | 73 | return rev.strip().decode("utf-8") |
74 | 74 | ||
75 | def base_get_metadata_git_revision(path, d): | 75 | def base_get_metadata_git_revision(path, d): |
76 | import bb.process | 76 | import bb.process |
@@ -78,5 +78,6 @@ def base_get_metadata_git_revision(path, d): | |||
78 | try: | 78 | try: |
79 | rev, _ = bb.process.run('git rev-parse HEAD', cwd=path) | 79 | rev, _ = bb.process.run('git rev-parse HEAD', cwd=path) |
80 | except bb.process.ExecutionError: | 80 | except bb.process.ExecutionError: |
81 | rev = '<unknown>' | 81 | return '<unknown>' |
82 | return rev.strip() | 82 | return rev.strip().decode("utf-8") |
83 | |||
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass index ffd4eff7b1..f2df923273 100644 --- a/meta/classes/package.bbclass +++ b/meta/classes/package.bbclass | |||
@@ -63,7 +63,7 @@ def legitimize_package_name(s): | |||
63 | def fixutf(m): | 63 | def fixutf(m): |
64 | cp = m.group(1) | 64 | cp = m.group(1) |
65 | if cp: | 65 | if cp: |
66 | return ('\u%s' % cp).decode('unicode_escape').encode('utf-8') | 66 | return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape') |
67 | 67 | ||
68 | # Handle unicode codepoints encoded as <U0123>, as in glibc locale files. | 68 | # Handle unicode codepoints encoded as <U0123>, as in glibc locale files. |
69 | s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s) | 69 | s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s) |
@@ -1258,8 +1258,8 @@ python emit_pkgdata() { | |||
1258 | def write_if_exists(f, pkg, var): | 1258 | def write_if_exists(f, pkg, var): |
1259 | def encode(str): | 1259 | def encode(str): |
1260 | import codecs | 1260 | import codecs |
1261 | c = codecs.getencoder("string_escape") | 1261 | c = codecs.getencoder("unicode_escape") |
1262 | return c(str)[0] | 1262 | return c(str)[0].decode("latin1") |
1263 | 1263 | ||
1264 | val = d.getVar('%s_%s' % (var, pkg), True) | 1264 | val = d.getVar('%s_%s' % (var, pkg), True) |
1265 | if val: | 1265 | if val: |
@@ -1503,7 +1503,7 @@ python package_do_shlibs() { | |||
1503 | m = re.match("\s+RPATH\s+([^\s]*)", l) | 1503 | m = re.match("\s+RPATH\s+([^\s]*)", l) |
1504 | if m: | 1504 | if m: |
1505 | rpaths = m.group(1).replace("$ORIGIN", ldir).split(":") | 1505 | rpaths = m.group(1).replace("$ORIGIN", ldir).split(":") |
1506 | rpath = map(os.path.normpath, rpaths) | 1506 | rpath = list(map(os.path.normpath, rpaths)) |
1507 | for l in lines: | 1507 | for l in lines: |
1508 | m = re.match("\s+NEEDED\s+([^\s]*)", l) | 1508 | m = re.match("\s+NEEDED\s+([^\s]*)", l) |
1509 | if m: | 1509 | if m: |
@@ -1673,7 +1673,7 @@ python package_do_shlibs() { | |||
1673 | bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0])) | 1673 | bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0])) |
1674 | continue | 1674 | continue |
1675 | if n[0] in shlib_provider.keys(): | 1675 | if n[0] in shlib_provider.keys(): |
1676 | shlib_provider_path = list() | 1676 | shlib_provider_path = [] |
1677 | for k in shlib_provider[n[0]].keys(): | 1677 | for k in shlib_provider[n[0]].keys(): |
1678 | shlib_provider_path.append(k) | 1678 | shlib_provider_path.append(k) |
1679 | match = None | 1679 | match = None |
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass index e1d05a74c2..e35f427ea2 100644 --- a/meta/classes/package_deb.bbclass +++ b/meta/classes/package_deb.bbclass | |||
@@ -117,7 +117,7 @@ python do_package_deb () { | |||
117 | 117 | ||
118 | controldir = os.path.join(root, 'DEBIAN') | 118 | controldir = os.path.join(root, 'DEBIAN') |
119 | bb.utils.mkdirhier(controldir) | 119 | bb.utils.mkdirhier(controldir) |
120 | os.chmod(controldir, 0755) | 120 | os.chmod(controldir, 0o755) |
121 | try: | 121 | try: |
122 | import codecs | 122 | import codecs |
123 | ctrlfile = codecs.open(os.path.join(controldir, 'control'), 'w', 'utf-8') | 123 | ctrlfile = codecs.open(os.path.join(controldir, 'control'), 'w', 'utf-8') |
@@ -173,7 +173,7 @@ python do_package_deb () { | |||
173 | # Special behavior for description... | 173 | # Special behavior for description... |
174 | if 'DESCRIPTION' in fs: | 174 | if 'DESCRIPTION' in fs: |
175 | summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "." | 175 | summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "." |
176 | ctrlfile.write('Description: %s\n' % unicode(summary,'utf-8')) | 176 | ctrlfile.write('Description: %s\n' % summary) |
177 | description = localdata.getVar('DESCRIPTION', True) or "." | 177 | description = localdata.getVar('DESCRIPTION', True) or "." |
178 | description = textwrap.dedent(description).strip() | 178 | description = textwrap.dedent(description).strip() |
179 | if '\\n' in description: | 179 | if '\\n' in description: |
@@ -182,29 +182,25 @@ python do_package_deb () { | |||
182 | # We don't limit the width when manually indent, but we do | 182 | # We don't limit the width when manually indent, but we do |
183 | # need the textwrap.fill() to set the initial_indent and | 183 | # need the textwrap.fill() to set the initial_indent and |
184 | # subsequent_indent, so set a large width | 184 | # subsequent_indent, so set a large width |
185 | ctrlfile.write('%s\n' % unicode(textwrap.fill(t, width=100000, initial_indent=' ', subsequent_indent=' '),'utf-8')) | 185 | ctrlfile.write('%s\n' % textwrap.fill(t, width=100000, initial_indent=' ', subsequent_indent=' ')) |
186 | else: | 186 | else: |
187 | # Auto indent | 187 | # Auto indent |
188 | ctrlfile.write('%s\n' % unicode(textwrap.fill(description.strip(), width=74, initial_indent=' ', subsequent_indent=' '),'utf-8')) | 188 | ctrlfile.write('%s\n' % textwrap.fill(description.strip(), width=74, initial_indent=' ', subsequent_indent=' ')) |
189 | 189 | ||
190 | else: | 190 | else: |
191 | ctrlfile.write(unicode(c % tuple(pullData(fs, localdata)),'utf-8')) | 191 | ctrlfile.write(c % tuple(pullData(fs, localdata))) |
192 | except KeyError: | 192 | except KeyError: |
193 | import sys | 193 | import sys |
194 | (type, value, traceback) = sys.exc_info() | 194 | (type, value, traceback) = sys.exc_info() |
195 | bb.utils.unlockfile(lf) | 195 | bb.utils.unlockfile(lf) |
196 | ctrlfile.close() | 196 | ctrlfile.close() |
197 | raise bb.build.FuncFailed("Missing field for deb generation: %s" % value) | 197 | raise bb.build.FuncFailed("Missing field for deb generation: %s" % value) |
198 | except UnicodeDecodeError: | ||
199 | bb.utils.unlockfile(lf) | ||
200 | ctrlfile.close() | ||
201 | raise bb.build.FuncFailed("Non UTF-8 characters found in one of the fields") | ||
202 | 198 | ||
203 | # more fields | 199 | # more fields |
204 | 200 | ||
205 | custom_fields_chunk = get_package_additional_metadata("deb", localdata) | 201 | custom_fields_chunk = get_package_additional_metadata("deb", localdata) |
206 | if custom_fields_chunk is not None: | 202 | if custom_fields_chunk is not None: |
207 | ctrlfile.write(unicode(custom_fields_chunk)) | 203 | ctrlfile.write(custom_fields_chunk) |
208 | ctrlfile.write("\n") | 204 | ctrlfile.write("\n") |
209 | 205 | ||
210 | mapping_rename_hook(localdata) | 206 | mapping_rename_hook(localdata) |
@@ -255,17 +251,17 @@ python do_package_deb () { | |||
255 | rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS", True) or "") | 251 | rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS", True) or "") |
256 | debian_cmp_remap(rconflicts) | 252 | debian_cmp_remap(rconflicts) |
257 | if rdepends: | 253 | if rdepends: |
258 | ctrlfile.write("Depends: %s\n" % unicode(bb.utils.join_deps(rdepends))) | 254 | ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends)) |
259 | if rsuggests: | 255 | if rsuggests: |
260 | ctrlfile.write("Suggests: %s\n" % unicode(bb.utils.join_deps(rsuggests))) | 256 | ctrlfile.write("Suggests: %s\n" % bb.utils.join_deps(rsuggests)) |
261 | if rrecommends: | 257 | if rrecommends: |
262 | ctrlfile.write("Recommends: %s\n" % unicode(bb.utils.join_deps(rrecommends))) | 258 | ctrlfile.write("Recommends: %s\n" % bb.utils.join_deps(rrecommends)) |
263 | if rprovides: | 259 | if rprovides: |
264 | ctrlfile.write("Provides: %s\n" % unicode(bb.utils.join_deps(rprovides))) | 260 | ctrlfile.write("Provides: %s\n" % bb.utils.join_deps(rprovides)) |
265 | if rreplaces: | 261 | if rreplaces: |
266 | ctrlfile.write("Replaces: %s\n" % unicode(bb.utils.join_deps(rreplaces))) | 262 | ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces)) |
267 | if rconflicts: | 263 | if rconflicts: |
268 | ctrlfile.write("Conflicts: %s\n" % unicode(bb.utils.join_deps(rconflicts))) | 264 | ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts)) |
269 | ctrlfile.close() | 265 | ctrlfile.close() |
270 | 266 | ||
271 | for script in ["preinst", "postinst", "prerm", "postrm"]: | 267 | for script in ["preinst", "postinst", "prerm", "postrm"]: |
@@ -293,7 +289,7 @@ python do_package_deb () { | |||
293 | scriptfile.write(scriptvar[pos:]) | 289 | scriptfile.write(scriptvar[pos:]) |
294 | scriptfile.write('\n') | 290 | scriptfile.write('\n') |
295 | scriptfile.close() | 291 | scriptfile.close() |
296 | os.chmod(os.path.join(controldir, script), 0755) | 292 | os.chmod(os.path.join(controldir, script), 0o755) |
297 | 293 | ||
298 | conffiles_str = ' '.join(get_conffiles(pkg, d)) | 294 | conffiles_str = ' '.join(get_conffiles(pkg, d)) |
299 | if conffiles_str: | 295 | if conffiles_str: |
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass index f1ad1d5c17..08f7020a9e 100644 --- a/meta/classes/package_ipk.bbclass +++ b/meta/classes/package_ipk.bbclass | |||
@@ -225,7 +225,7 @@ python do_package_ipk () { | |||
225 | raise bb.build.FuncFailed("unable to open %s script file for writing." % script) | 225 | raise bb.build.FuncFailed("unable to open %s script file for writing." % script) |
226 | scriptfile.write(scriptvar) | 226 | scriptfile.write(scriptvar) |
227 | scriptfile.close() | 227 | scriptfile.close() |
228 | os.chmod(os.path.join(controldir, script), 0755) | 228 | os.chmod(os.path.join(controldir, script), 0o755) |
229 | 229 | ||
230 | conffiles_str = ' '.join(get_conffiles(pkg, d)) | 230 | conffiles_str = ' '.join(get_conffiles(pkg, d)) |
231 | if conffiles_str: | 231 | if conffiles_str: |
diff --git a/meta/classes/report-error.bbclass b/meta/classes/report-error.bbclass index 20d2bef797..5cbf8f07fd 100644 --- a/meta/classes/report-error.bbclass +++ b/meta/classes/report-error.bbclass | |||
@@ -43,7 +43,7 @@ python errorreport_handler () { | |||
43 | data['target_sys'] = e.data.getVar("TARGET_SYS", True) | 43 | data['target_sys'] = e.data.getVar("TARGET_SYS", True) |
44 | data['failures'] = [] | 44 | data['failures'] = [] |
45 | data['component'] = e.getPkgs()[0] | 45 | data['component'] = e.getPkgs()[0] |
46 | data['branch_commit'] = base_detect_branch(e.data) + ": " + base_detect_revision(e.data) | 46 | data['branch_commit'] = str(base_detect_branch(e.data)) + ": " + str(base_detect_revision(e.data)) |
47 | lock = bb.utils.lockfile(datafile + '.lock') | 47 | lock = bb.utils.lockfile(datafile + '.lock') |
48 | errorreport_savedata(e, data, "error-report.txt") | 48 | errorreport_savedata(e, data, "error-report.txt") |
49 | bb.utils.unlockfile(lock) | 49 | bb.utils.unlockfile(lock) |
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass index ab8ca6e215..6491bad204 100644 --- a/meta/classes/sstate.bbclass +++ b/meta/classes/sstate.bbclass | |||
@@ -235,10 +235,11 @@ def sstate_install(ss, d): | |||
235 | bb.fatal("If the above message is too much, the simpler version is you're advised to wipe out tmp and rebuild (reusing sstate is fine). That will likely fix things in most (but not all) cases.") | 235 | bb.fatal("If the above message is too much, the simpler version is you're advised to wipe out tmp and rebuild (reusing sstate is fine). That will likely fix things in most (but not all) cases.") |
236 | 236 | ||
237 | # Write out the manifest | 237 | # Write out the manifest |
238 | f = open(manifest, "w") | 238 | f = open(manifest, "w", encoding="utf-8", errors="surrogateescape") |
239 | for file in sharedfiles: | 239 | for file in sharedfiles: |
240 | f.write(file + "\n") | 240 | f.write(file + "\n") |
241 | 241 | ||
242 | |||
242 | # We want to ensure that directories appear at the end of the manifest | 243 | # We want to ensure that directories appear at the end of the manifest |
243 | # so that when we test to see if they should be deleted any contents | 244 | # so that when we test to see if they should be deleted any contents |
244 | # added by the task will have been removed first. | 245 | # added by the task will have been removed first. |
@@ -382,7 +383,7 @@ def sstate_clean_cachefiles(d): | |||
382 | def sstate_clean_manifest(manifest, d): | 383 | def sstate_clean_manifest(manifest, d): |
383 | import oe.path | 384 | import oe.path |
384 | 385 | ||
385 | mfile = open(manifest) | 386 | mfile = open(manifest, encoding="utf-8") |
386 | entries = mfile.readlines() | 387 | entries = mfile.readlines() |
387 | mfile.close() | 388 | mfile.close() |
388 | 389 | ||
diff --git a/meta/classes/staging.bbclass b/meta/classes/staging.bbclass index a0f82be4ab..30f2b507e1 100644 --- a/meta/classes/staging.bbclass +++ b/meta/classes/staging.bbclass | |||
@@ -187,7 +187,7 @@ def sysroot_checkhashes(covered, tasknames, fnids, fns, d, invalidtasks = None): | |||
187 | problems = set() | 187 | problems = set() |
188 | configurefnids = set() | 188 | configurefnids = set() |
189 | if not invalidtasks: | 189 | if not invalidtasks: |
190 | invalidtasks = xrange(len(tasknames)) | 190 | invalidtasks = range(len(tasknames)) |
191 | for task in invalidtasks: | 191 | for task in invalidtasks: |
192 | if tasknames[task] == "do_configure" and task not in covered: | 192 | if tasknames[task] == "do_configure" and task not in covered: |
193 | configurefnids.add(fnids[task]) | 193 | configurefnids.add(fnids[task]) |
diff --git a/meta/classes/terminal.bbclass b/meta/classes/terminal.bbclass index a94f755a40..d1cc4d2c16 100644 --- a/meta/classes/terminal.bbclass +++ b/meta/classes/terminal.bbclass | |||
@@ -73,7 +73,7 @@ def oe_terminal(command, title, d): | |||
73 | # Replace command with an executable wrapper script | 73 | # Replace command with an executable wrapper script |
74 | command = emit_terminal_func(command, envdata, d) | 74 | command = emit_terminal_func(command, envdata, d) |
75 | 75 | ||
76 | terminal = oe.data.typed_value('OE_TERMINAL', d).lower() | 76 | terminal = "auto" #oe.data.typed_value('OE_TERMINAL', d).lower() |
77 | if terminal == 'none': | 77 | if terminal == 'none': |
78 | bb.fatal('Devshell usage disabled with OE_TERMINAL') | 78 | bb.fatal('Devshell usage disabled with OE_TERMINAL') |
79 | elif terminal != 'auto': | 79 | elif terminal != 'auto': |
diff --git a/meta/classes/toaster.bbclass b/meta/classes/toaster.bbclass index 1a70f14a92..1878fe095d 100644 --- a/meta/classes/toaster.bbclass +++ b/meta/classes/toaster.bbclass | |||
@@ -33,6 +33,7 @@ python toaster_layerinfo_dumpdata() { | |||
33 | 33 | ||
34 | def _get_git_branch(layer_path): | 34 | def _get_git_branch(layer_path): |
35 | branch = subprocess.Popen("git symbolic-ref HEAD 2>/dev/null ", cwd=layer_path, shell=True, stdout=subprocess.PIPE).communicate()[0] | 35 | branch = subprocess.Popen("git symbolic-ref HEAD 2>/dev/null ", cwd=layer_path, shell=True, stdout=subprocess.PIPE).communicate()[0] |
36 | branch = branch.decode('utf-8') | ||
36 | branch = branch.replace('refs/heads/', '').rstrip() | 37 | branch = branch.replace('refs/heads/', '').rstrip() |
37 | return branch | 38 | return branch |
38 | 39 | ||
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes/utility-tasks.bbclass index 5bcfd0b72c..7bc584abb9 100644 --- a/meta/classes/utility-tasks.bbclass +++ b/meta/classes/utility-tasks.bbclass | |||
@@ -46,7 +46,7 @@ python do_checkuri() { | |||
46 | try: | 46 | try: |
47 | fetcher = bb.fetch2.Fetch(src_uri, d) | 47 | fetcher = bb.fetch2.Fetch(src_uri, d) |
48 | fetcher.checkstatus() | 48 | fetcher.checkstatus() |
49 | except bb.fetch2.BBFetchException, e: | 49 | except bb.fetch2.BBFetchException as e: |
50 | raise bb.build.FuncFailed(e) | 50 | raise bb.build.FuncFailed(e) |
51 | } | 51 | } |
52 | 52 | ||