diff options
author | Richard Purdie <richard.purdie@linuxfoundation.org> | 2013-05-09 16:31:22 +0000 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2016-05-16 23:32:39 +0100 |
commit | 1fc840ffc0267ecf3a15c4a59ab44869ef1d6339 (patch) | |
tree | b2932a0d0d7146bde799db47497236063050fc84 /meta | |
parent | 22c8ed6484e0ee9328e3844c9e794f3d89ebb9f7 (diff) | |
download | poky-1fc840ffc0267ecf3a15c4a59ab44869ef1d6339.tar.gz |
meta: python3 megapatch
This needs splitting into smaller units, WIP atm.
(From OE-Core rev: 21529228a7dca96a6a1b44ed9380c523efdeeb3e)
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta')
66 files changed, 304 insertions, 275 deletions
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass index e5c58b1e20..4b6aab4562 100644 --- a/meta/classes/base.bbclass +++ b/meta/classes/base.bbclass | |||
@@ -10,7 +10,7 @@ inherit utility-tasks | |||
10 | inherit metadata_scm | 10 | inherit metadata_scm |
11 | inherit logging | 11 | inherit logging |
12 | 12 | ||
13 | OE_IMPORTS += "os sys time oe.path oe.utils oe.data oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath" | 13 | OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath" |
14 | OE_IMPORTS[type] = "list" | 14 | OE_IMPORTS[type] = "list" |
15 | 15 | ||
16 | def oe_import(d): | 16 | def oe_import(d): |
@@ -658,7 +658,7 @@ python do_cleanall() { | |||
658 | try: | 658 | try: |
659 | fetcher = bb.fetch2.Fetch(src_uri, d) | 659 | fetcher = bb.fetch2.Fetch(src_uri, d) |
660 | fetcher.clean() | 660 | fetcher.clean() |
661 | except bb.fetch2.BBFetchException, e: | 661 | except bb.fetch2.BBFetchException as e: |
662 | raise bb.build.FuncFailed(e) | 662 | raise bb.build.FuncFailed(e) |
663 | } | 663 | } |
664 | do_cleanall[nostamp] = "1" | 664 | do_cleanall[nostamp] = "1" |
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass index 581d532693..3db92d4de7 100644 --- a/meta/classes/buildhistory.bbclass +++ b/meta/classes/buildhistory.bbclass | |||
@@ -64,6 +64,11 @@ python buildhistory_emit_pkghistory() { | |||
64 | if not d.getVar('BB_CURRENTTASK', True) in ['packagedata', 'packagedata_setscene']: | 64 | if not d.getVar('BB_CURRENTTASK', True) in ['packagedata', 'packagedata_setscene']: |
65 | return 0 | 65 | return 0 |
66 | 66 | ||
67 | import re | ||
68 | import sys | ||
69 | if sys.version > '3': | ||
70 | long = int | ||
71 | |||
67 | if not "package" in (d.getVar('BUILDHISTORY_FEATURES', True) or "").split(): | 72 | if not "package" in (d.getVar('BUILDHISTORY_FEATURES', True) or "").split(): |
68 | return 0 | 73 | return 0 |
69 | 74 | ||
@@ -233,7 +238,7 @@ python buildhistory_emit_pkghistory() { | |||
233 | key = item[0] | 238 | key = item[0] |
234 | if key.endswith('_' + pkg): | 239 | if key.endswith('_' + pkg): |
235 | key = key[:-len(pkg)-1] | 240 | key = key[:-len(pkg)-1] |
236 | pkgdata[key] = item[1].decode('utf-8').decode('string_escape') | 241 | pkgdata[key] = item[1] |
237 | 242 | ||
238 | pkge = pkgdata.get('PKGE', '0') | 243 | pkge = pkgdata.get('PKGE', '0') |
239 | pkgv = pkgdata['PKGV'] | 244 | pkgv = pkgdata['PKGV'] |
@@ -274,7 +279,7 @@ python buildhistory_emit_pkghistory() { | |||
274 | # Gather information about packaged files | 279 | # Gather information about packaged files |
275 | val = pkgdata.get('FILES_INFO', '') | 280 | val = pkgdata.get('FILES_INFO', '') |
276 | dictval = json.loads(val) | 281 | dictval = json.loads(val) |
277 | filelist = dictval.keys() | 282 | filelist = list(dictval.keys()) |
278 | filelist.sort() | 283 | filelist.sort() |
279 | pkginfo.filelist = " ".join(filelist) | 284 | pkginfo.filelist = " ".join(filelist) |
280 | 285 | ||
@@ -288,14 +293,12 @@ python buildhistory_emit_pkghistory() { | |||
288 | 293 | ||
289 | 294 | ||
290 | def write_recipehistory(rcpinfo, d): | 295 | def write_recipehistory(rcpinfo, d): |
291 | import codecs | ||
292 | |||
293 | bb.debug(2, "Writing recipe history") | 296 | bb.debug(2, "Writing recipe history") |
294 | 297 | ||
295 | pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) | 298 | pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) |
296 | 299 | ||
297 | infofile = os.path.join(pkghistdir, "latest") | 300 | infofile = os.path.join(pkghistdir, "latest") |
298 | with codecs.open(infofile, "w", encoding='utf8') as f: | 301 | with open(infofile, "w") as f: |
299 | if rcpinfo.pe != "0": | 302 | if rcpinfo.pe != "0": |
300 | f.write(u"PE = %s\n" % rcpinfo.pe) | 303 | f.write(u"PE = %s\n" % rcpinfo.pe) |
301 | f.write(u"PV = %s\n" % rcpinfo.pv) | 304 | f.write(u"PV = %s\n" % rcpinfo.pv) |
@@ -305,8 +308,6 @@ def write_recipehistory(rcpinfo, d): | |||
305 | 308 | ||
306 | 309 | ||
307 | def write_pkghistory(pkginfo, d): | 310 | def write_pkghistory(pkginfo, d): |
308 | import codecs | ||
309 | |||
310 | bb.debug(2, "Writing package history for package %s" % pkginfo.name) | 311 | bb.debug(2, "Writing package history for package %s" % pkginfo.name) |
311 | 312 | ||
312 | pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) | 313 | pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) |
@@ -316,7 +317,7 @@ def write_pkghistory(pkginfo, d): | |||
316 | bb.utils.mkdirhier(pkgpath) | 317 | bb.utils.mkdirhier(pkgpath) |
317 | 318 | ||
318 | infofile = os.path.join(pkgpath, "latest") | 319 | infofile = os.path.join(pkgpath, "latest") |
319 | with codecs.open(infofile, "w", encoding='utf8') as f: | 320 | with open(infofile, "w") as f: |
320 | if pkginfo.pe != "0": | 321 | if pkginfo.pe != "0": |
321 | f.write(u"PE = %s\n" % pkginfo.pe) | 322 | f.write(u"PE = %s\n" % pkginfo.pe) |
322 | f.write(u"PV = %s\n" % pkginfo.pv) | 323 | f.write(u"PV = %s\n" % pkginfo.pv) |
@@ -349,7 +350,7 @@ def write_pkghistory(pkginfo, d): | |||
349 | filevarpath = os.path.join(pkgpath, "latest.%s" % filevar) | 350 | filevarpath = os.path.join(pkgpath, "latest.%s" % filevar) |
350 | val = pkginfo.filevars[filevar] | 351 | val = pkginfo.filevars[filevar] |
351 | if val: | 352 | if val: |
352 | with codecs.open(filevarpath, "w", encoding='utf8') as f: | 353 | with open(filevarpath, "w") as f: |
353 | f.write(val) | 354 | f.write(val) |
354 | else: | 355 | else: |
355 | if os.path.exists(filevarpath): | 356 | if os.path.exists(filevarpath): |
@@ -842,7 +843,7 @@ python write_srcrev() { | |||
842 | f.write('# SRCREV_%s = "%s"\n' % (name, orig_srcrev)) | 843 | f.write('# SRCREV_%s = "%s"\n' % (name, orig_srcrev)) |
843 | f.write('SRCREV_%s = "%s"\n' % (name, srcrev)) | 844 | f.write('SRCREV_%s = "%s"\n' % (name, srcrev)) |
844 | else: | 845 | else: |
845 | f.write('SRCREV = "%s"\n' % srcrevs.itervalues().next()) | 846 | f.write('SRCREV = "%s"\n' % srcrevs.values()) |
846 | if len(tag_srcrevs) > 0: | 847 | if len(tag_srcrevs) > 0: |
847 | for name, srcrev in tag_srcrevs.items(): | 848 | for name, srcrev in tag_srcrevs.items(): |
848 | f.write('# tag_%s = "%s"\n' % (name, srcrev)) | 849 | f.write('# tag_%s = "%s"\n' % (name, srcrev)) |
diff --git a/meta/classes/chrpath.bbclass b/meta/classes/chrpath.bbclass index 9c68855ab2..0182062adf 100644 --- a/meta/classes/chrpath.bbclass +++ b/meta/classes/chrpath.bbclass | |||
@@ -10,6 +10,8 @@ def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d): | |||
10 | if p.returncode != 0: | 10 | if p.returncode != 0: |
11 | return | 11 | return |
12 | 12 | ||
13 | err = err.decode('ascii') | ||
14 | |||
13 | # Handle RUNPATH as well as RPATH | 15 | # Handle RUNPATH as well as RPATH |
14 | err = err.replace("RUNPATH=","RPATH=") | 16 | err = err.replace("RUNPATH=","RPATH=") |
15 | # Throw away everything other than the rpath list | 17 | # Throw away everything other than the rpath list |
@@ -103,7 +105,6 @@ def process_dir (rootdir, directory, d): | |||
103 | # Temporarily make the file writeable so we can chrpath it | 105 | # Temporarily make the file writeable so we can chrpath it |
104 | os.chmod(fpath, perms|stat.S_IRWXU) | 106 | os.chmod(fpath, perms|stat.S_IRWXU) |
105 | process_file(cmd, fpath, rootdir, baseprefix, tmpdir, d) | 107 | process_file(cmd, fpath, rootdir, baseprefix, tmpdir, d) |
106 | |||
107 | if perms: | 108 | if perms: |
108 | os.chmod(fpath, perms) | 109 | os.chmod(fpath, perms) |
109 | 110 | ||
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass index 0b151c214d..d4b9825c93 100644 --- a/meta/classes/insane.bbclass +++ b/meta/classes/insane.bbclass | |||
@@ -177,9 +177,8 @@ def package_qa_write_error(type, error, d): | |||
177 | logfile = d.getVar('QA_LOGFILE', True) | 177 | logfile = d.getVar('QA_LOGFILE', True) |
178 | if logfile: | 178 | if logfile: |
179 | p = d.getVar('P', True) | 179 | p = d.getVar('P', True) |
180 | f = file( logfile, "a+") | 180 | with open(logfile, "a+") as f: |
181 | print >> f, "%s: %s [%s]" % (p, error, type) | 181 | f.write("%s: %s [%s]" % (p, error, type)) |
182 | f.close() | ||
183 | 182 | ||
184 | def package_qa_handle_error(error_class, error_msg, d): | 183 | def package_qa_handle_error(error_class, error_msg, d): |
185 | package_qa_write_error(error_class, error_msg, d) | 184 | package_qa_write_error(error_class, error_msg, d) |
@@ -987,12 +986,12 @@ def package_qa_check_expanded_d(path,name,d,elf,messages): | |||
987 | return sane | 986 | return sane |
988 | 987 | ||
989 | def package_qa_check_encoding(keys, encode, d): | 988 | def package_qa_check_encoding(keys, encode, d): |
990 | def check_encoding(key,enc): | 989 | def check_encoding(key, enc): |
991 | sane = True | 990 | sane = True |
992 | value = d.getVar(key, True) | 991 | value = d.getVar(key, True) |
993 | if value: | 992 | if value: |
994 | try: | 993 | try: |
995 | s = unicode(value, enc) | 994 | s = value.encode(enc) |
996 | except UnicodeDecodeError as e: | 995 | except UnicodeDecodeError as e: |
997 | error_msg = "%s has non %s characters" % (key,enc) | 996 | error_msg = "%s has non %s characters" % (key,enc) |
998 | sane = False | 997 | sane = False |
@@ -1218,7 +1217,7 @@ Missing inherit gettext?""" % (gt, config)) | |||
1218 | try: | 1217 | try: |
1219 | flag = "WARNING: unrecognized options:" | 1218 | flag = "WARNING: unrecognized options:" |
1220 | log = os.path.join(d.getVar('B', True), 'config.log') | 1219 | log = os.path.join(d.getVar('B', True), 'config.log') |
1221 | output = subprocess.check_output(['grep', '-F', flag, log]).replace(', ', ' ') | 1220 | output = subprocess.check_output(['grep', '-F', flag, log]).decode("utf-8").replace(', ', ' ') |
1222 | options = set() | 1221 | options = set() |
1223 | for line in output.splitlines(): | 1222 | for line in output.splitlines(): |
1224 | options |= set(line.partition(flag)[2].split()) | 1223 | options |= set(line.partition(flag)[2].split()) |
diff --git a/meta/classes/kernel-uimage.bbclass b/meta/classes/kernel-uimage.bbclass index de7ca38e94..543f5432ba 100644 --- a/meta/classes/kernel-uimage.bbclass +++ b/meta/classes/kernel-uimage.bbclass | |||
@@ -7,12 +7,12 @@ python __anonymous () { | |||
7 | depends = "%s u-boot-mkimage-native" % depends | 7 | depends = "%s u-boot-mkimage-native" % depends |
8 | d.setVar("DEPENDS", depends) | 8 | d.setVar("DEPENDS", depends) |
9 | 9 | ||
10 | # Override KERNEL_IMAGETYPE_FOR_MAKE variable, which is internal | 10 | # Override KERNEL_IMAGETYPE_FOR_MAKE variable, which is internal |
11 | # to kernel.bbclass . We override the variable here, since we need | 11 | # to kernel.bbclass . We override the variable here, since we need |
12 | # to build uImage using the kernel build system if and only if | 12 | # to build uImage using the kernel build system if and only if |
13 | # KEEPUIMAGE == yes. Otherwise, we pack compressed vmlinux into | 13 | # KEEPUIMAGE == yes. Otherwise, we pack compressed vmlinux into |
14 | # the uImage . | 14 | # the uImage . |
15 | if d.getVar("KEEPUIMAGE", True) != 'yes': | 15 | if d.getVar("KEEPUIMAGE", True) != 'yes': |
16 | d.setVar("KERNEL_IMAGETYPE_FOR_MAKE", "vmlinux") | 16 | d.setVar("KERNEL_IMAGETYPE_FOR_MAKE", "vmlinux") |
17 | } | 17 | } |
18 | 18 | ||
diff --git a/meta/classes/libc-package.bbclass b/meta/classes/libc-package.bbclass index 467d567923..70f479bb41 100644 --- a/meta/classes/libc-package.bbclass +++ b/meta/classes/libc-package.bbclass | |||
@@ -150,6 +150,7 @@ python package_do_split_gconvs () { | |||
150 | c_re = re.compile('^copy "(.*)"') | 150 | c_re = re.compile('^copy "(.*)"') |
151 | i_re = re.compile('^include "(\w+)".*') | 151 | i_re = re.compile('^include "(\w+)".*') |
152 | for l in f.readlines(): | 152 | for l in f.readlines(): |
153 | l = l.decode("latin-1") | ||
153 | m = c_re.match(l) or i_re.match(l) | 154 | m = c_re.match(l) or i_re.match(l) |
154 | if m: | 155 | if m: |
155 | dp = legitimize_package_name('%s%s-gconv-%s' % (mlprefix, bpn, m.group(1))) | 156 | dp = legitimize_package_name('%s%s-gconv-%s' % (mlprefix, bpn, m.group(1))) |
@@ -171,6 +172,7 @@ python package_do_split_gconvs () { | |||
171 | c_re = re.compile('^copy "(.*)"') | 172 | c_re = re.compile('^copy "(.*)"') |
172 | i_re = re.compile('^include "(\w+)".*') | 173 | i_re = re.compile('^include "(\w+)".*') |
173 | for l in f.readlines(): | 174 | for l in f.readlines(): |
175 | l = l.decode("latin-1") | ||
174 | m = c_re.match(l) or i_re.match(l) | 176 | m = c_re.match(l) or i_re.match(l) |
175 | if m: | 177 | if m: |
176 | dp = legitimize_package_name('%s%s-charmap-%s' % (mlprefix, bpn, m.group(1))) | 178 | dp = legitimize_package_name('%s%s-charmap-%s' % (mlprefix, bpn, m.group(1))) |
@@ -191,6 +193,7 @@ python package_do_split_gconvs () { | |||
191 | c_re = re.compile('^copy "(.*)"') | 193 | c_re = re.compile('^copy "(.*)"') |
192 | i_re = re.compile('^include "(\w+)".*') | 194 | i_re = re.compile('^include "(\w+)".*') |
193 | for l in f.readlines(): | 195 | for l in f.readlines(): |
196 | l = l.decode("latin-1") | ||
194 | m = c_re.match(l) or i_re.match(l) | 197 | m = c_re.match(l) or i_re.match(l) |
195 | if m: | 198 | if m: |
196 | dp = legitimize_package_name(mlprefix+bpn+'-localedata-%s' % m.group(1)) | 199 | dp = legitimize_package_name(mlprefix+bpn+'-localedata-%s' % m.group(1)) |
diff --git a/meta/classes/license.bbclass b/meta/classes/license.bbclass index 69335d650d..10d6ed853a 100644 --- a/meta/classes/license.bbclass +++ b/meta/classes/license.bbclass | |||
@@ -200,7 +200,7 @@ def get_deployed_dependencies(d): | |||
200 | # it might contain the bootloader. | 200 | # it might contain the bootloader. |
201 | taskdata = d.getVar("BB_TASKDEPDATA", False) | 201 | taskdata = d.getVar("BB_TASKDEPDATA", False) |
202 | depends = list(set([dep[0] for dep | 202 | depends = list(set([dep[0] for dep |
203 | in taskdata.itervalues() | 203 | in list(taskdata.values()) |
204 | if not dep[0].endswith("-native")])) | 204 | if not dep[0].endswith("-native")])) |
205 | extra_depends = d.getVar("EXTRA_IMAGEDEPENDS", True) | 205 | extra_depends = d.getVar("EXTRA_IMAGEDEPENDS", True) |
206 | boot_depends = get_boot_dependencies(d) | 206 | boot_depends = get_boot_dependencies(d) |
@@ -261,7 +261,7 @@ def get_boot_dependencies(d): | |||
261 | depends.append(dep) | 261 | depends.append(dep) |
262 | # We need to search for the provider of the dependency | 262 | # We need to search for the provider of the dependency |
263 | else: | 263 | else: |
264 | for taskdep in taskdepdata.itervalues(): | 264 | for taskdep in taskdepdata.values(): |
265 | # The fifth field contains what the task provides | 265 | # The fifth field contains what the task provides |
266 | if dep in taskdep[4]: | 266 | if dep in taskdep[4]: |
267 | info_file = os.path.join( | 267 | info_file = os.path.join( |
@@ -635,7 +635,7 @@ def check_license_format(d): | |||
635 | licenses = d.getVar('LICENSE', True) | 635 | licenses = d.getVar('LICENSE', True) |
636 | from oe.license import license_operator, license_operator_chars, license_pattern | 636 | from oe.license import license_operator, license_operator_chars, license_pattern |
637 | 637 | ||
638 | elements = filter(lambda x: x.strip(), license_operator.split(licenses)) | 638 | elements = list(filter(lambda x: x.strip(), license_operator.split(licenses))) |
639 | for pos, element in enumerate(elements): | 639 | for pos, element in enumerate(elements): |
640 | if license_pattern.match(element): | 640 | if license_pattern.match(element): |
641 | if pos > 0 and license_pattern.match(elements[pos - 1]): | 641 | if pos > 0 and license_pattern.match(elements[pos - 1]): |
diff --git a/meta/classes/metadata_scm.bbclass b/meta/classes/metadata_scm.bbclass index 2e6fac209a..257df3c99a 100644 --- a/meta/classes/metadata_scm.bbclass +++ b/meta/classes/metadata_scm.bbclass | |||
@@ -69,8 +69,8 @@ def base_get_metadata_git_branch(path, d): | |||
69 | try: | 69 | try: |
70 | rev, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', cwd=path) | 70 | rev, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', cwd=path) |
71 | except bb.process.ExecutionError: | 71 | except bb.process.ExecutionError: |
72 | rev = '<unknown>' | 72 | return '<unknown>' |
73 | return rev.strip() | 73 | return rev.strip().decode("utf-8") |
74 | 74 | ||
75 | def base_get_metadata_git_revision(path, d): | 75 | def base_get_metadata_git_revision(path, d): |
76 | import bb.process | 76 | import bb.process |
@@ -78,5 +78,6 @@ def base_get_metadata_git_revision(path, d): | |||
78 | try: | 78 | try: |
79 | rev, _ = bb.process.run('git rev-parse HEAD', cwd=path) | 79 | rev, _ = bb.process.run('git rev-parse HEAD', cwd=path) |
80 | except bb.process.ExecutionError: | 80 | except bb.process.ExecutionError: |
81 | rev = '<unknown>' | 81 | return '<unknown>' |
82 | return rev.strip() | 82 | return rev.strip().decode("utf-8") |
83 | |||
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass index ffd4eff7b1..f2df923273 100644 --- a/meta/classes/package.bbclass +++ b/meta/classes/package.bbclass | |||
@@ -63,7 +63,7 @@ def legitimize_package_name(s): | |||
63 | def fixutf(m): | 63 | def fixutf(m): |
64 | cp = m.group(1) | 64 | cp = m.group(1) |
65 | if cp: | 65 | if cp: |
66 | return ('\u%s' % cp).decode('unicode_escape').encode('utf-8') | 66 | return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape') |
67 | 67 | ||
68 | # Handle unicode codepoints encoded as <U0123>, as in glibc locale files. | 68 | # Handle unicode codepoints encoded as <U0123>, as in glibc locale files. |
69 | s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s) | 69 | s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s) |
@@ -1258,8 +1258,8 @@ python emit_pkgdata() { | |||
1258 | def write_if_exists(f, pkg, var): | 1258 | def write_if_exists(f, pkg, var): |
1259 | def encode(str): | 1259 | def encode(str): |
1260 | import codecs | 1260 | import codecs |
1261 | c = codecs.getencoder("string_escape") | 1261 | c = codecs.getencoder("unicode_escape") |
1262 | return c(str)[0] | 1262 | return c(str)[0].decode("latin1") |
1263 | 1263 | ||
1264 | val = d.getVar('%s_%s' % (var, pkg), True) | 1264 | val = d.getVar('%s_%s' % (var, pkg), True) |
1265 | if val: | 1265 | if val: |
@@ -1503,7 +1503,7 @@ python package_do_shlibs() { | |||
1503 | m = re.match("\s+RPATH\s+([^\s]*)", l) | 1503 | m = re.match("\s+RPATH\s+([^\s]*)", l) |
1504 | if m: | 1504 | if m: |
1505 | rpaths = m.group(1).replace("$ORIGIN", ldir).split(":") | 1505 | rpaths = m.group(1).replace("$ORIGIN", ldir).split(":") |
1506 | rpath = map(os.path.normpath, rpaths) | 1506 | rpath = list(map(os.path.normpath, rpaths)) |
1507 | for l in lines: | 1507 | for l in lines: |
1508 | m = re.match("\s+NEEDED\s+([^\s]*)", l) | 1508 | m = re.match("\s+NEEDED\s+([^\s]*)", l) |
1509 | if m: | 1509 | if m: |
@@ -1673,7 +1673,7 @@ python package_do_shlibs() { | |||
1673 | bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0])) | 1673 | bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0])) |
1674 | continue | 1674 | continue |
1675 | if n[0] in shlib_provider.keys(): | 1675 | if n[0] in shlib_provider.keys(): |
1676 | shlib_provider_path = list() | 1676 | shlib_provider_path = [] |
1677 | for k in shlib_provider[n[0]].keys(): | 1677 | for k in shlib_provider[n[0]].keys(): |
1678 | shlib_provider_path.append(k) | 1678 | shlib_provider_path.append(k) |
1679 | match = None | 1679 | match = None |
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass index e1d05a74c2..e35f427ea2 100644 --- a/meta/classes/package_deb.bbclass +++ b/meta/classes/package_deb.bbclass | |||
@@ -117,7 +117,7 @@ python do_package_deb () { | |||
117 | 117 | ||
118 | controldir = os.path.join(root, 'DEBIAN') | 118 | controldir = os.path.join(root, 'DEBIAN') |
119 | bb.utils.mkdirhier(controldir) | 119 | bb.utils.mkdirhier(controldir) |
120 | os.chmod(controldir, 0755) | 120 | os.chmod(controldir, 0o755) |
121 | try: | 121 | try: |
122 | import codecs | 122 | import codecs |
123 | ctrlfile = codecs.open(os.path.join(controldir, 'control'), 'w', 'utf-8') | 123 | ctrlfile = codecs.open(os.path.join(controldir, 'control'), 'w', 'utf-8') |
@@ -173,7 +173,7 @@ python do_package_deb () { | |||
173 | # Special behavior for description... | 173 | # Special behavior for description... |
174 | if 'DESCRIPTION' in fs: | 174 | if 'DESCRIPTION' in fs: |
175 | summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "." | 175 | summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "." |
176 | ctrlfile.write('Description: %s\n' % unicode(summary,'utf-8')) | 176 | ctrlfile.write('Description: %s\n' % summary) |
177 | description = localdata.getVar('DESCRIPTION', True) or "." | 177 | description = localdata.getVar('DESCRIPTION', True) or "." |
178 | description = textwrap.dedent(description).strip() | 178 | description = textwrap.dedent(description).strip() |
179 | if '\\n' in description: | 179 | if '\\n' in description: |
@@ -182,29 +182,25 @@ python do_package_deb () { | |||
182 | # We don't limit the width when manually indent, but we do | 182 | # We don't limit the width when manually indent, but we do |
183 | # need the textwrap.fill() to set the initial_indent and | 183 | # need the textwrap.fill() to set the initial_indent and |
184 | # subsequent_indent, so set a large width | 184 | # subsequent_indent, so set a large width |
185 | ctrlfile.write('%s\n' % unicode(textwrap.fill(t, width=100000, initial_indent=' ', subsequent_indent=' '),'utf-8')) | 185 | ctrlfile.write('%s\n' % textwrap.fill(t, width=100000, initial_indent=' ', subsequent_indent=' ')) |
186 | else: | 186 | else: |
187 | # Auto indent | 187 | # Auto indent |
188 | ctrlfile.write('%s\n' % unicode(textwrap.fill(description.strip(), width=74, initial_indent=' ', subsequent_indent=' '),'utf-8')) | 188 | ctrlfile.write('%s\n' % textwrap.fill(description.strip(), width=74, initial_indent=' ', subsequent_indent=' ')) |
189 | 189 | ||
190 | else: | 190 | else: |
191 | ctrlfile.write(unicode(c % tuple(pullData(fs, localdata)),'utf-8')) | 191 | ctrlfile.write(c % tuple(pullData(fs, localdata))) |
192 | except KeyError: | 192 | except KeyError: |
193 | import sys | 193 | import sys |
194 | (type, value, traceback) = sys.exc_info() | 194 | (type, value, traceback) = sys.exc_info() |
195 | bb.utils.unlockfile(lf) | 195 | bb.utils.unlockfile(lf) |
196 | ctrlfile.close() | 196 | ctrlfile.close() |
197 | raise bb.build.FuncFailed("Missing field for deb generation: %s" % value) | 197 | raise bb.build.FuncFailed("Missing field for deb generation: %s" % value) |
198 | except UnicodeDecodeError: | ||
199 | bb.utils.unlockfile(lf) | ||
200 | ctrlfile.close() | ||
201 | raise bb.build.FuncFailed("Non UTF-8 characters found in one of the fields") | ||
202 | 198 | ||
203 | # more fields | 199 | # more fields |
204 | 200 | ||
205 | custom_fields_chunk = get_package_additional_metadata("deb", localdata) | 201 | custom_fields_chunk = get_package_additional_metadata("deb", localdata) |
206 | if custom_fields_chunk is not None: | 202 | if custom_fields_chunk is not None: |
207 | ctrlfile.write(unicode(custom_fields_chunk)) | 203 | ctrlfile.write(custom_fields_chunk) |
208 | ctrlfile.write("\n") | 204 | ctrlfile.write("\n") |
209 | 205 | ||
210 | mapping_rename_hook(localdata) | 206 | mapping_rename_hook(localdata) |
@@ -255,17 +251,17 @@ python do_package_deb () { | |||
255 | rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS", True) or "") | 251 | rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS", True) or "") |
256 | debian_cmp_remap(rconflicts) | 252 | debian_cmp_remap(rconflicts) |
257 | if rdepends: | 253 | if rdepends: |
258 | ctrlfile.write("Depends: %s\n" % unicode(bb.utils.join_deps(rdepends))) | 254 | ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends)) |
259 | if rsuggests: | 255 | if rsuggests: |
260 | ctrlfile.write("Suggests: %s\n" % unicode(bb.utils.join_deps(rsuggests))) | 256 | ctrlfile.write("Suggests: %s\n" % bb.utils.join_deps(rsuggests)) |
261 | if rrecommends: | 257 | if rrecommends: |
262 | ctrlfile.write("Recommends: %s\n" % unicode(bb.utils.join_deps(rrecommends))) | 258 | ctrlfile.write("Recommends: %s\n" % bb.utils.join_deps(rrecommends)) |
263 | if rprovides: | 259 | if rprovides: |
264 | ctrlfile.write("Provides: %s\n" % unicode(bb.utils.join_deps(rprovides))) | 260 | ctrlfile.write("Provides: %s\n" % bb.utils.join_deps(rprovides)) |
265 | if rreplaces: | 261 | if rreplaces: |
266 | ctrlfile.write("Replaces: %s\n" % unicode(bb.utils.join_deps(rreplaces))) | 262 | ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces)) |
267 | if rconflicts: | 263 | if rconflicts: |
268 | ctrlfile.write("Conflicts: %s\n" % unicode(bb.utils.join_deps(rconflicts))) | 264 | ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts)) |
269 | ctrlfile.close() | 265 | ctrlfile.close() |
270 | 266 | ||
271 | for script in ["preinst", "postinst", "prerm", "postrm"]: | 267 | for script in ["preinst", "postinst", "prerm", "postrm"]: |
@@ -293,7 +289,7 @@ python do_package_deb () { | |||
293 | scriptfile.write(scriptvar[pos:]) | 289 | scriptfile.write(scriptvar[pos:]) |
294 | scriptfile.write('\n') | 290 | scriptfile.write('\n') |
295 | scriptfile.close() | 291 | scriptfile.close() |
296 | os.chmod(os.path.join(controldir, script), 0755) | 292 | os.chmod(os.path.join(controldir, script), 0o755) |
297 | 293 | ||
298 | conffiles_str = ' '.join(get_conffiles(pkg, d)) | 294 | conffiles_str = ' '.join(get_conffiles(pkg, d)) |
299 | if conffiles_str: | 295 | if conffiles_str: |
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass index f1ad1d5c17..08f7020a9e 100644 --- a/meta/classes/package_ipk.bbclass +++ b/meta/classes/package_ipk.bbclass | |||
@@ -225,7 +225,7 @@ python do_package_ipk () { | |||
225 | raise bb.build.FuncFailed("unable to open %s script file for writing." % script) | 225 | raise bb.build.FuncFailed("unable to open %s script file for writing." % script) |
226 | scriptfile.write(scriptvar) | 226 | scriptfile.write(scriptvar) |
227 | scriptfile.close() | 227 | scriptfile.close() |
228 | os.chmod(os.path.join(controldir, script), 0755) | 228 | os.chmod(os.path.join(controldir, script), 0o755) |
229 | 229 | ||
230 | conffiles_str = ' '.join(get_conffiles(pkg, d)) | 230 | conffiles_str = ' '.join(get_conffiles(pkg, d)) |
231 | if conffiles_str: | 231 | if conffiles_str: |
diff --git a/meta/classes/report-error.bbclass b/meta/classes/report-error.bbclass index 20d2bef797..5cbf8f07fd 100644 --- a/meta/classes/report-error.bbclass +++ b/meta/classes/report-error.bbclass | |||
@@ -43,7 +43,7 @@ python errorreport_handler () { | |||
43 | data['target_sys'] = e.data.getVar("TARGET_SYS", True) | 43 | data['target_sys'] = e.data.getVar("TARGET_SYS", True) |
44 | data['failures'] = [] | 44 | data['failures'] = [] |
45 | data['component'] = e.getPkgs()[0] | 45 | data['component'] = e.getPkgs()[0] |
46 | data['branch_commit'] = base_detect_branch(e.data) + ": " + base_detect_revision(e.data) | 46 | data['branch_commit'] = str(base_detect_branch(e.data)) + ": " + str(base_detect_revision(e.data)) |
47 | lock = bb.utils.lockfile(datafile + '.lock') | 47 | lock = bb.utils.lockfile(datafile + '.lock') |
48 | errorreport_savedata(e, data, "error-report.txt") | 48 | errorreport_savedata(e, data, "error-report.txt") |
49 | bb.utils.unlockfile(lock) | 49 | bb.utils.unlockfile(lock) |
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass index ab8ca6e215..6491bad204 100644 --- a/meta/classes/sstate.bbclass +++ b/meta/classes/sstate.bbclass | |||
@@ -235,10 +235,11 @@ def sstate_install(ss, d): | |||
235 | bb.fatal("If the above message is too much, the simpler version is you're advised to wipe out tmp and rebuild (reusing sstate is fine). That will likely fix things in most (but not all) cases.") | 235 | bb.fatal("If the above message is too much, the simpler version is you're advised to wipe out tmp and rebuild (reusing sstate is fine). That will likely fix things in most (but not all) cases.") |
236 | 236 | ||
237 | # Write out the manifest | 237 | # Write out the manifest |
238 | f = open(manifest, "w") | 238 | f = open(manifest, "w", encoding="utf-8", errors="surrogateescape") |
239 | for file in sharedfiles: | 239 | for file in sharedfiles: |
240 | f.write(file + "\n") | 240 | f.write(file + "\n") |
241 | 241 | ||
242 | |||
242 | # We want to ensure that directories appear at the end of the manifest | 243 | # We want to ensure that directories appear at the end of the manifest |
243 | # so that when we test to see if they should be deleted any contents | 244 | # so that when we test to see if they should be deleted any contents |
244 | # added by the task will have been removed first. | 245 | # added by the task will have been removed first. |
@@ -382,7 +383,7 @@ def sstate_clean_cachefiles(d): | |||
382 | def sstate_clean_manifest(manifest, d): | 383 | def sstate_clean_manifest(manifest, d): |
383 | import oe.path | 384 | import oe.path |
384 | 385 | ||
385 | mfile = open(manifest) | 386 | mfile = open(manifest, encoding="utf-8") |
386 | entries = mfile.readlines() | 387 | entries = mfile.readlines() |
387 | mfile.close() | 388 | mfile.close() |
388 | 389 | ||
diff --git a/meta/classes/staging.bbclass b/meta/classes/staging.bbclass index a0f82be4ab..30f2b507e1 100644 --- a/meta/classes/staging.bbclass +++ b/meta/classes/staging.bbclass | |||
@@ -187,7 +187,7 @@ def sysroot_checkhashes(covered, tasknames, fnids, fns, d, invalidtasks = None): | |||
187 | problems = set() | 187 | problems = set() |
188 | configurefnids = set() | 188 | configurefnids = set() |
189 | if not invalidtasks: | 189 | if not invalidtasks: |
190 | invalidtasks = xrange(len(tasknames)) | 190 | invalidtasks = range(len(tasknames)) |
191 | for task in invalidtasks: | 191 | for task in invalidtasks: |
192 | if tasknames[task] == "do_configure" and task not in covered: | 192 | if tasknames[task] == "do_configure" and task not in covered: |
193 | configurefnids.add(fnids[task]) | 193 | configurefnids.add(fnids[task]) |
diff --git a/meta/classes/terminal.bbclass b/meta/classes/terminal.bbclass index a94f755a40..d1cc4d2c16 100644 --- a/meta/classes/terminal.bbclass +++ b/meta/classes/terminal.bbclass | |||
@@ -73,7 +73,7 @@ def oe_terminal(command, title, d): | |||
73 | # Replace command with an executable wrapper script | 73 | # Replace command with an executable wrapper script |
74 | command = emit_terminal_func(command, envdata, d) | 74 | command = emit_terminal_func(command, envdata, d) |
75 | 75 | ||
76 | terminal = oe.data.typed_value('OE_TERMINAL', d).lower() | 76 | terminal = "auto" #oe.data.typed_value('OE_TERMINAL', d).lower() |
77 | if terminal == 'none': | 77 | if terminal == 'none': |
78 | bb.fatal('Devshell usage disabled with OE_TERMINAL') | 78 | bb.fatal('Devshell usage disabled with OE_TERMINAL') |
79 | elif terminal != 'auto': | 79 | elif terminal != 'auto': |
diff --git a/meta/classes/toaster.bbclass b/meta/classes/toaster.bbclass index 1a70f14a92..1878fe095d 100644 --- a/meta/classes/toaster.bbclass +++ b/meta/classes/toaster.bbclass | |||
@@ -33,6 +33,7 @@ python toaster_layerinfo_dumpdata() { | |||
33 | 33 | ||
34 | def _get_git_branch(layer_path): | 34 | def _get_git_branch(layer_path): |
35 | branch = subprocess.Popen("git symbolic-ref HEAD 2>/dev/null ", cwd=layer_path, shell=True, stdout=subprocess.PIPE).communicate()[0] | 35 | branch = subprocess.Popen("git symbolic-ref HEAD 2>/dev/null ", cwd=layer_path, shell=True, stdout=subprocess.PIPE).communicate()[0] |
36 | branch = branch.decode('utf-8') | ||
36 | branch = branch.replace('refs/heads/', '').rstrip() | 37 | branch = branch.replace('refs/heads/', '').rstrip() |
37 | return branch | 38 | return branch |
38 | 39 | ||
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes/utility-tasks.bbclass index 5bcfd0b72c..7bc584abb9 100644 --- a/meta/classes/utility-tasks.bbclass +++ b/meta/classes/utility-tasks.bbclass | |||
@@ -46,7 +46,7 @@ python do_checkuri() { | |||
46 | try: | 46 | try: |
47 | fetcher = bb.fetch2.Fetch(src_uri, d) | 47 | fetcher = bb.fetch2.Fetch(src_uri, d) |
48 | fetcher.checkstatus() | 48 | fetcher.checkstatus() |
49 | except bb.fetch2.BBFetchException, e: | 49 | except bb.fetch2.BBFetchException as e: |
50 | raise bb.build.FuncFailed(e) | 50 | raise bb.build.FuncFailed(e) |
51 | } | 51 | } |
52 | 52 | ||
diff --git a/meta/conf/bitbake.conf b/meta/conf/bitbake.conf index 382dadca6d..f5675577fa 100644 --- a/meta/conf/bitbake.conf +++ b/meta/conf/bitbake.conf | |||
@@ -484,7 +484,7 @@ export BUILD_NM = "${BUILD_PREFIX}nm" | |||
484 | export MAKE = "make" | 484 | export MAKE = "make" |
485 | EXTRA_OEMAKE = "" | 485 | EXTRA_OEMAKE = "" |
486 | EXTRA_OECONF = "" | 486 | EXTRA_OECONF = "" |
487 | export LC_ALL = "C" | 487 | export LC_ALL = "us_EN.UTF-8" |
488 | 488 | ||
489 | ################################################################## | 489 | ################################################################## |
490 | # Patch handling. | 490 | # Patch handling. |
diff --git a/meta/lib/oe/buildhistory_analysis.py b/meta/lib/oe/buildhistory_analysis.py index 5395c768a3..0dcd49f4f1 100644 --- a/meta/lib/oe/buildhistory_analysis.py +++ b/meta/lib/oe/buildhistory_analysis.py | |||
@@ -62,7 +62,7 @@ class ChangeRecord: | |||
62 | 62 | ||
63 | def pkglist_combine(depver): | 63 | def pkglist_combine(depver): |
64 | pkglist = [] | 64 | pkglist = [] |
65 | for k,v in depver.iteritems(): | 65 | for k,v in depver.items(): |
66 | if v: | 66 | if v: |
67 | pkglist.append("%s (%s)" % (k,v)) | 67 | pkglist.append("%s (%s)" % (k,v)) |
68 | else: | 68 | else: |
@@ -220,7 +220,7 @@ def compare_file_lists(alines, blines): | |||
220 | adict = file_list_to_dict(alines) | 220 | adict = file_list_to_dict(alines) |
221 | bdict = file_list_to_dict(blines) | 221 | bdict = file_list_to_dict(blines) |
222 | filechanges = [] | 222 | filechanges = [] |
223 | for path, splitv in adict.iteritems(): | 223 | for path, splitv in adict.items(): |
224 | newsplitv = bdict.pop(path, None) | 224 | newsplitv = bdict.pop(path, None) |
225 | if newsplitv: | 225 | if newsplitv: |
226 | # Check type | 226 | # Check type |
diff --git a/meta/lib/oe/classutils.py b/meta/lib/oe/classutils.py index 58188fdd6e..e7856c86f2 100644 --- a/meta/lib/oe/classutils.py +++ b/meta/lib/oe/classutils.py | |||
@@ -1,4 +1,11 @@ | |||
1 | class ClassRegistry(type): | 1 | |
2 | class ClassRegistryMeta(type): | ||
3 | """Give each ClassRegistry their own registry""" | ||
4 | def __init__(cls, name, bases, attrs): | ||
5 | cls.registry = {} | ||
6 | type.__init__(cls, name, bases, attrs) | ||
7 | |||
8 | class ClassRegistry(type, metaclass=ClassRegistryMeta): | ||
2 | """Maintain a registry of classes, indexed by name. | 9 | """Maintain a registry of classes, indexed by name. |
3 | 10 | ||
4 | Note that this implementation requires that the names be unique, as it uses | 11 | Note that this implementation requires that the names be unique, as it uses |
@@ -12,12 +19,6 @@ Subclasses of ClassRegistry may define an 'implemented' property to exert | |||
12 | control over whether the class will be added to the registry (e.g. to keep | 19 | control over whether the class will be added to the registry (e.g. to keep |
13 | abstract base classes out of the registry).""" | 20 | abstract base classes out of the registry).""" |
14 | priority = 0 | 21 | priority = 0 |
15 | class __metaclass__(type): | ||
16 | """Give each ClassRegistry their own registry""" | ||
17 | def __init__(cls, name, bases, attrs): | ||
18 | cls.registry = {} | ||
19 | type.__init__(cls, name, bases, attrs) | ||
20 | |||
21 | def __init__(cls, name, bases, attrs): | 22 | def __init__(cls, name, bases, attrs): |
22 | super(ClassRegistry, cls).__init__(name, bases, attrs) | 23 | super(ClassRegistry, cls).__init__(name, bases, attrs) |
23 | try: | 24 | try: |
@@ -34,7 +35,7 @@ abstract base classes out of the registry).""" | |||
34 | 35 | ||
35 | @classmethod | 36 | @classmethod |
36 | def prioritized(tcls): | 37 | def prioritized(tcls): |
37 | return sorted(tcls.registry.values(), | 38 | return sorted(list(tcls.registry.values()), |
38 | key=lambda v: v.priority, reverse=True) | 39 | key=lambda v: v.priority, reverse=True) |
39 | 40 | ||
40 | def unregister(cls): | 41 | def unregister(cls): |
diff --git a/meta/lib/oe/copy_buildsystem.py b/meta/lib/oe/copy_buildsystem.py index 7b9a0ee065..eddf5bb2da 100644 --- a/meta/lib/oe/copy_buildsystem.py +++ b/meta/lib/oe/copy_buildsystem.py | |||
@@ -124,7 +124,7 @@ class BuildSystem(object): | |||
124 | def generate_locked_sigs(sigfile, d): | 124 | def generate_locked_sigs(sigfile, d): |
125 | bb.utils.mkdirhier(os.path.dirname(sigfile)) | 125 | bb.utils.mkdirhier(os.path.dirname(sigfile)) |
126 | depd = d.getVar('BB_TASKDEPDATA', False) | 126 | depd = d.getVar('BB_TASKDEPDATA', False) |
127 | tasks = ['%s.%s' % (v[2], v[1]) for v in depd.itervalues()] | 127 | tasks = ['%s.%s' % (v[2], v[1]) for v in depd.values()] |
128 | bb.parse.siggen.dump_lockedsigs(sigfile, tasks) | 128 | bb.parse.siggen.dump_lockedsigs(sigfile, tasks) |
129 | 129 | ||
130 | def prune_lockedsigs(excluded_tasks, excluded_targets, lockedsigs, pruned_output): | 130 | def prune_lockedsigs(excluded_tasks, excluded_targets, lockedsigs, pruned_output): |
@@ -195,7 +195,7 @@ def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_outpu | |||
195 | fulltypes.append(typename) | 195 | fulltypes.append(typename) |
196 | f.write('SIGGEN_LOCKEDSIGS_TYPES = "%s"\n' % ' '.join(fulltypes)) | 196 | f.write('SIGGEN_LOCKEDSIGS_TYPES = "%s"\n' % ' '.join(fulltypes)) |
197 | 197 | ||
198 | write_sigs_file(copy_output, tocopy.keys(), tocopy) | 198 | write_sigs_file(copy_output, list(tocopy.keys()), tocopy) |
199 | if merged_output: | 199 | if merged_output: |
200 | write_sigs_file(merged_output, arch_order, merged) | 200 | write_sigs_file(merged_output, arch_order, merged) |
201 | 201 | ||
diff --git a/meta/lib/oe/data.py b/meta/lib/oe/data.py index e49572177b..58e764699d 100644 --- a/meta/lib/oe/data.py +++ b/meta/lib/oe/data.py | |||
@@ -1,17 +1,20 @@ | |||
1 | import oe.maketype | 1 | import oe.maketype |
2 | 2 | ||
3 | def typed_value(key, d): | 3 | def typed_value(key, d): |
4 | #return d.getVar(key, True).split() | ||
5 | |||
4 | """Construct a value for the specified metadata variable, using its flags | 6 | """Construct a value for the specified metadata variable, using its flags |
5 | to determine the type and parameters for construction.""" | 7 | to determine the type and parameters for construction.""" |
6 | var_type = d.getVarFlag(key, 'type', True) | 8 | var_type = d.getVarFlag(key, 'type', True) |
7 | flags = d.getVarFlags(key) | 9 | flags = d.getVarFlags(key) |
8 | if flags is not None: | 10 | if flags is not None: |
9 | flags = dict((flag, d.expand(value)) | 11 | flags = dict((flag, d.expand(value)) |
10 | for flag, value in flags.iteritems()) | 12 | for flag, value in list(flags.items())) |
11 | else: | 13 | else: |
12 | flags = {} | 14 | flags = {} |
13 | 15 | ||
14 | try: | 16 | try: |
15 | return oe.maketype.create(d.getVar(key, True) or '', var_type, **flags) | 17 | return oe.maketype.create(d.getVar(key, True) or '', var_type, **flags) |
16 | except (TypeError, ValueError), exc: | 18 | except (TypeError, ValueError) as exc: |
17 | bb.msg.fatal("Data", "%s: %s" % (key, str(exc))) | 19 | bb.msg.fatal("Data", "%s: %s" % (key, str(exc))) |
20 | |||
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py index 8655a6fc14..746e242f5d 100644 --- a/meta/lib/oe/distro_check.py +++ b/meta/lib/oe/distro_check.py | |||
@@ -1,8 +1,8 @@ | |||
1 | from contextlib import contextmanager | 1 | from contextlib import contextmanager |
2 | @contextmanager | 2 | @contextmanager |
3 | def create_socket(url, d): | 3 | def create_socket(url, d): |
4 | import urllib | 4 | import urllib.request, urllib.parse, urllib.error |
5 | socket = urllib.urlopen(url, proxies=get_proxies(d)) | 5 | socket = urllib.request.urlopen(url, proxies=get_proxies(d)) |
6 | try: | 6 | try: |
7 | yield socket | 7 | yield socket |
8 | finally: | 8 | finally: |
@@ -104,8 +104,8 @@ def get_source_package_list_from_url(url, section, d): | |||
104 | 104 | ||
105 | bb.note("Reading %s: %s" % (url, section)) | 105 | bb.note("Reading %s: %s" % (url, section)) |
106 | links = get_links_from_url(url, d) | 106 | links = get_links_from_url(url, d) |
107 | srpms = filter(is_src_rpm, links) | 107 | srpms = list(filter(is_src_rpm, links)) |
108 | names_list = map(package_name_from_srpm, srpms) | 108 | names_list = list(map(package_name_from_srpm, srpms)) |
109 | 109 | ||
110 | new_pkgs = [] | 110 | new_pkgs = [] |
111 | for pkgs in names_list: | 111 | for pkgs in names_list: |
diff --git a/meta/lib/oe/license.py b/meta/lib/oe/license.py index f0f661c3ba..39ef9654fc 100644 --- a/meta/lib/oe/license.py +++ b/meta/lib/oe/license.py | |||
@@ -47,7 +47,7 @@ class LicenseVisitor(ast.NodeVisitor): | |||
47 | """Get elements based on OpenEmbedded license strings""" | 47 | """Get elements based on OpenEmbedded license strings""" |
48 | def get_elements(self, licensestr): | 48 | def get_elements(self, licensestr): |
49 | new_elements = [] | 49 | new_elements = [] |
50 | elements = filter(lambda x: x.strip(), license_operator.split(licensestr)) | 50 | elements = list([x for x in license_operator.split(licensestr) if x.strip()]) |
51 | for pos, element in enumerate(elements): | 51 | for pos, element in enumerate(elements): |
52 | if license_pattern.match(element): | 52 | if license_pattern.match(element): |
53 | if pos > 0 and license_pattern.match(elements[pos-1]): | 53 | if pos > 0 and license_pattern.match(elements[pos-1]): |
@@ -118,8 +118,8 @@ def is_included(licensestr, whitelist=None, blacklist=None): | |||
118 | def choose_licenses(alpha, beta): | 118 | def choose_licenses(alpha, beta): |
119 | """Select the option in an OR which is the 'best' (has the most | 119 | """Select the option in an OR which is the 'best' (has the most |
120 | included licenses).""" | 120 | included licenses).""" |
121 | alpha_weight = len(filter(include_license, alpha)) | 121 | alpha_weight = len(list(filter(include_license, alpha))) |
122 | beta_weight = len(filter(include_license, beta)) | 122 | beta_weight = len(list(filter(include_license, beta))) |
123 | if alpha_weight > beta_weight: | 123 | if alpha_weight > beta_weight: |
124 | return alpha | 124 | return alpha |
125 | else: | 125 | else: |
@@ -132,8 +132,8 @@ def is_included(licensestr, whitelist=None, blacklist=None): | |||
132 | blacklist = [] | 132 | blacklist = [] |
133 | 133 | ||
134 | licenses = flattened_licenses(licensestr, choose_licenses) | 134 | licenses = flattened_licenses(licensestr, choose_licenses) |
135 | excluded = filter(lambda lic: exclude_license(lic), licenses) | 135 | excluded = [lic for lic in licenses if exclude_license(lic)] |
136 | included = filter(lambda lic: include_license(lic), licenses) | 136 | included = [lic for lic in licenses if include_license(lic)] |
137 | if excluded: | 137 | if excluded: |
138 | return False, excluded | 138 | return False, excluded |
139 | else: | 139 | else: |
diff --git a/meta/lib/oe/lsb.py b/meta/lib/oe/lsb.py index e0bdfba255..b7e142387c 100644 --- a/meta/lib/oe/lsb.py +++ b/meta/lib/oe/lsb.py | |||
@@ -9,9 +9,9 @@ def release_dict(): | |||
9 | 9 | ||
10 | data = {} | 10 | data = {} |
11 | for line in output.splitlines(): | 11 | for line in output.splitlines(): |
12 | if line.startswith("-e"): line = line[3:] | 12 | if line.startswith(b"-e"): line = line[3:] |
13 | try: | 13 | try: |
14 | key, value = line.split(":\t", 1) | 14 | key, value = line.decode().split(":\t", 1) |
15 | except ValueError: | 15 | except ValueError: |
16 | continue | 16 | continue |
17 | else: | 17 | else: |
diff --git a/meta/lib/oe/maketype.py b/meta/lib/oe/maketype.py index 139f333691..de344a802c 100644 --- a/meta/lib/oe/maketype.py +++ b/meta/lib/oe/maketype.py | |||
@@ -6,7 +6,8 @@ the arguments of the type's factory for details. | |||
6 | """ | 6 | """ |
7 | 7 | ||
8 | import inspect | 8 | import inspect |
9 | import types | 9 | import oe.types as types |
10 | import collections | ||
10 | 11 | ||
11 | available_types = {} | 12 | available_types = {} |
12 | 13 | ||
@@ -53,7 +54,9 @@ def get_callable_args(obj): | |||
53 | if type(obj) is type: | 54 | if type(obj) is type: |
54 | obj = obj.__init__ | 55 | obj = obj.__init__ |
55 | 56 | ||
56 | args, varargs, keywords, defaults = inspect.getargspec(obj) | 57 | sig = inspect.signature(obj) |
58 | args = list(sig.parameters.keys()) | ||
59 | defaults = list(s for s in sig.parameters.keys() if sig.parameters[s].default != inspect.Parameter.empty) | ||
57 | flaglist = [] | 60 | flaglist = [] |
58 | if args: | 61 | if args: |
59 | if len(args) > 1 and args[0] == 'self': | 62 | if len(args) > 1 and args[0] == 'self': |
@@ -93,7 +96,8 @@ for name in dir(types): | |||
93 | continue | 96 | continue |
94 | 97 | ||
95 | obj = getattr(types, name) | 98 | obj = getattr(types, name) |
96 | if not callable(obj): | 99 | if not isinstance(obj, collections.Callable): |
97 | continue | 100 | continue |
98 | 101 | ||
99 | register(name, obj) | 102 | register(name, obj) |
103 | |||
diff --git a/meta/lib/oe/manifest.py b/meta/lib/oe/manifest.py index 42832f15d2..95f8eb2df3 100644 --- a/meta/lib/oe/manifest.py +++ b/meta/lib/oe/manifest.py | |||
@@ -4,11 +4,10 @@ import re | |||
4 | import bb | 4 | import bb |
5 | 5 | ||
6 | 6 | ||
7 | class Manifest(object): | 7 | class Manifest(object, metaclass=ABCMeta): |
8 | """ | 8 | """ |
9 | This is an abstract class. Do not instantiate this directly. | 9 | This is an abstract class. Do not instantiate this directly. |
10 | """ | 10 | """ |
11 | __metaclass__ = ABCMeta | ||
12 | 11 | ||
13 | PKG_TYPE_MUST_INSTALL = "mip" | 12 | PKG_TYPE_MUST_INSTALL = "mip" |
14 | PKG_TYPE_MULTILIB = "mlp" | 13 | PKG_TYPE_MULTILIB = "mlp" |
@@ -219,7 +218,7 @@ class RpmManifest(Manifest): | |||
219 | if var in self.vars_to_split: | 218 | if var in self.vars_to_split: |
220 | split_pkgs = self._split_multilib(self.d.getVar(var, True)) | 219 | split_pkgs = self._split_multilib(self.d.getVar(var, True)) |
221 | if split_pkgs is not None: | 220 | if split_pkgs is not None: |
222 | pkgs = dict(pkgs.items() + split_pkgs.items()) | 221 | pkgs = dict(list(pkgs.items()) + list(split_pkgs.items())) |
223 | else: | 222 | else: |
224 | pkg_list = self.d.getVar(var, True) | 223 | pkg_list = self.d.getVar(var, True) |
225 | if pkg_list is not None: | 224 | if pkg_list is not None: |
@@ -269,7 +268,7 @@ class OpkgManifest(Manifest): | |||
269 | if var in self.vars_to_split: | 268 | if var in self.vars_to_split: |
270 | split_pkgs = self._split_multilib(self.d.getVar(var, True)) | 269 | split_pkgs = self._split_multilib(self.d.getVar(var, True)) |
271 | if split_pkgs is not None: | 270 | if split_pkgs is not None: |
272 | pkgs = dict(pkgs.items() + split_pkgs.items()) | 271 | pkgs = dict(list(pkgs.items()) + list(split_pkgs.items())) |
273 | else: | 272 | else: |
274 | pkg_list = self.d.getVar(var, True) | 273 | pkg_list = self.d.getVar(var, True) |
275 | if pkg_list is not None: | 274 | if pkg_list is not None: |
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py index 252e32d1df..faa0ab2edb 100644 --- a/meta/lib/oe/package.py +++ b/meta/lib/oe/package.py | |||
@@ -8,7 +8,7 @@ def runstrip(arg): | |||
8 | # 8 - shared library | 8 | # 8 - shared library |
9 | # 16 - kernel module | 9 | # 16 - kernel module |
10 | 10 | ||
11 | import commands, stat, subprocess | 11 | import stat, subprocess |
12 | 12 | ||
13 | (file, elftype, strip) = arg | 13 | (file, elftype, strip) = arg |
14 | 14 | ||
@@ -64,8 +64,8 @@ def filedeprunner(arg): | |||
64 | 64 | ||
65 | def process_deps(pipe, pkg, pkgdest, provides, requires): | 65 | def process_deps(pipe, pkg, pkgdest, provides, requires): |
66 | for line in pipe: | 66 | for line in pipe: |
67 | f = line.split(" ", 1)[0].strip() | 67 | f = line.decode("utf-8").split(" ", 1)[0].strip() |
68 | line = line.split(" ", 1)[1].strip() | 68 | line = line.decode("utf-8").split(" ", 1)[1].strip() |
69 | 69 | ||
70 | if line.startswith("Requires:"): | 70 | if line.startswith("Requires:"): |
71 | i = requires | 71 | i = requires |
diff --git a/meta/lib/oe/package_manager.py b/meta/lib/oe/package_manager.py index 1eedeb8526..0ea17f8153 100644 --- a/meta/lib/oe/package_manager.py +++ b/meta/lib/oe/package_manager.py | |||
@@ -17,7 +17,7 @@ def create_index(arg): | |||
17 | 17 | ||
18 | try: | 18 | try: |
19 | bb.note("Executing '%s' ..." % index_cmd) | 19 | bb.note("Executing '%s' ..." % index_cmd) |
20 | result = subprocess.check_output(index_cmd, stderr=subprocess.STDOUT, shell=True) | 20 | result = subprocess.check_output(index_cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8") |
21 | except subprocess.CalledProcessError as e: | 21 | except subprocess.CalledProcessError as e: |
22 | return("Index creation command '%s' failed with return code %d:\n%s" % | 22 | return("Index creation command '%s' failed with return code %d:\n%s" % |
23 | (e.cmd, e.returncode, e.output)) | 23 | (e.cmd, e.returncode, e.output)) |
@@ -89,9 +89,7 @@ def opkg_query(cmd_output): | |||
89 | return output | 89 | return output |
90 | 90 | ||
91 | 91 | ||
92 | class Indexer(object): | 92 | class Indexer(object, metaclass=ABCMeta): |
93 | __metaclass__ = ABCMeta | ||
94 | |||
95 | def __init__(self, d, deploy_dir): | 93 | def __init__(self, d, deploy_dir): |
96 | self.d = d | 94 | self.d = d |
97 | self.deploy_dir = deploy_dir | 95 | self.deploy_dir = deploy_dir |
@@ -342,9 +340,7 @@ class DpkgIndexer(Indexer): | |||
342 | 340 | ||
343 | 341 | ||
344 | 342 | ||
345 | class PkgsList(object): | 343 | class PkgsList(object, metaclass=ABCMeta): |
346 | __metaclass__ = ABCMeta | ||
347 | |||
348 | def __init__(self, d, rootfs_dir): | 344 | def __init__(self, d, rootfs_dir): |
349 | self.d = d | 345 | self.d = d |
350 | self.rootfs_dir = rootfs_dir | 346 | self.rootfs_dir = rootfs_dir |
@@ -367,7 +363,7 @@ class RpmPkgsList(PkgsList): | |||
367 | # Determine rpm version | 363 | # Determine rpm version |
368 | cmd = "%s --version" % self.rpm_cmd | 364 | cmd = "%s --version" % self.rpm_cmd |
369 | try: | 365 | try: |
370 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) | 366 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8") |
371 | except subprocess.CalledProcessError as e: | 367 | except subprocess.CalledProcessError as e: |
372 | bb.fatal("Getting rpm version failed. Command '%s' " | 368 | bb.fatal("Getting rpm version failed. Command '%s' " |
373 | "returned %d:\n%s" % (cmd, e.returncode, e.output)) | 369 | "returned %d:\n%s" % (cmd, e.returncode, e.output)) |
@@ -411,7 +407,7 @@ class RpmPkgsList(PkgsList): | |||
411 | "-t", self.image_rpmlib] | 407 | "-t", self.image_rpmlib] |
412 | 408 | ||
413 | try: | 409 | try: |
414 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip() | 410 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip().decode("utf-8") |
415 | except subprocess.CalledProcessError as e: | 411 | except subprocess.CalledProcessError as e: |
416 | bb.fatal("Cannot get the package dependencies. Command '%s' " | 412 | bb.fatal("Cannot get the package dependencies. Command '%s' " |
417 | "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output)) | 413 | "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output)) |
@@ -425,7 +421,7 @@ class RpmPkgsList(PkgsList): | |||
425 | 421 | ||
426 | try: | 422 | try: |
427 | # bb.note(cmd) | 423 | # bb.note(cmd) |
428 | tmp_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip() | 424 | tmp_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip().decode("utf-8") |
429 | except subprocess.CalledProcessError as e: | 425 | except subprocess.CalledProcessError as e: |
430 | bb.fatal("Cannot get the installed packages list. Command '%s' " | 426 | bb.fatal("Cannot get the installed packages list. Command '%s' " |
431 | "returned %d:\n%s" % (cmd, e.returncode, e.output)) | 427 | "returned %d:\n%s" % (cmd, e.returncode, e.output)) |
@@ -485,6 +481,8 @@ class OpkgPkgsList(PkgsList): | |||
485 | # output streams separately and check for empty stderr. | 481 | # output streams separately and check for empty stderr. |
486 | p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) | 482 | p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) |
487 | cmd_output, cmd_stderr = p.communicate() | 483 | cmd_output, cmd_stderr = p.communicate() |
484 | cmd_output = cmd_output.decode("utf-8") | ||
485 | cmd_stderr = cmd_stderr.decode("utf-8") | ||
488 | if p.returncode or cmd_stderr: | 486 | if p.returncode or cmd_stderr: |
489 | bb.fatal("Cannot get the installed packages list. Command '%s' " | 487 | bb.fatal("Cannot get the installed packages list. Command '%s' " |
490 | "returned %d and stderr:\n%s" % (cmd, p.returncode, cmd_stderr)) | 488 | "returned %d and stderr:\n%s" % (cmd, p.returncode, cmd_stderr)) |
@@ -502,7 +500,7 @@ class DpkgPkgsList(PkgsList): | |||
502 | cmd.append("-f=Package: ${Package}\nArchitecture: ${PackageArch}\nVersion: ${Version}\nFile: ${Package}_${Version}_${Architecture}.deb\nDepends: ${Depends}\nRecommends: ${Recommends}\n\n") | 500 | cmd.append("-f=Package: ${Package}\nArchitecture: ${PackageArch}\nVersion: ${Version}\nFile: ${Package}_${Version}_${Architecture}.deb\nDepends: ${Depends}\nRecommends: ${Recommends}\n\n") |
503 | 501 | ||
504 | try: | 502 | try: |
505 | cmd_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip() | 503 | cmd_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip().decode("utf-8") |
506 | except subprocess.CalledProcessError as e: | 504 | except subprocess.CalledProcessError as e: |
507 | bb.fatal("Cannot get the installed packages list. Command '%s' " | 505 | bb.fatal("Cannot get the installed packages list. Command '%s' " |
508 | "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output)) | 506 | "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output)) |
@@ -510,11 +508,10 @@ class DpkgPkgsList(PkgsList): | |||
510 | return opkg_query(cmd_output) | 508 | return opkg_query(cmd_output) |
511 | 509 | ||
512 | 510 | ||
513 | class PackageManager(object): | 511 | class PackageManager(object, metaclass=ABCMeta): |
514 | """ | 512 | """ |
515 | This is an abstract class. Do not instantiate this directly. | 513 | This is an abstract class. Do not instantiate this directly. |
516 | """ | 514 | """ |
517 | __metaclass__ = ABCMeta | ||
518 | 515 | ||
519 | def __init__(self, d): | 516 | def __init__(self, d): |
520 | self.d = d | 517 | self.d = d |
@@ -608,7 +605,7 @@ class PackageManager(object): | |||
608 | try: | 605 | try: |
609 | bb.note("Installing complementary packages ...") | 606 | bb.note("Installing complementary packages ...") |
610 | bb.note('Running %s' % cmd) | 607 | bb.note('Running %s' % cmd) |
611 | complementary_pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | 608 | complementary_pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode("utf-8") |
612 | except subprocess.CalledProcessError as e: | 609 | except subprocess.CalledProcessError as e: |
613 | bb.fatal("Could not compute complementary packages list. Command " | 610 | bb.fatal("Could not compute complementary packages list. Command " |
614 | "'%s' returned %d:\n%s" % | 611 | "'%s' returned %d:\n%s" % |
@@ -641,8 +638,8 @@ class PackageManager(object): | |||
641 | def construct_uris(self, uris, base_paths): | 638 | def construct_uris(self, uris, base_paths): |
642 | def _append(arr1, arr2, sep='/'): | 639 | def _append(arr1, arr2, sep='/'): |
643 | res = [] | 640 | res = [] |
644 | narr1 = map(lambda a: string.rstrip(a, sep), arr1) | 641 | narr1 = [string.rstrip(a, sep) for a in arr1] |
645 | narr2 = map(lambda a: string.lstrip(string.rstrip(a, sep), sep), arr2) | 642 | narr2 = [string.lstrip(string.rstrip(a, sep), sep) for a in arr2] |
646 | for a1 in narr1: | 643 | for a1 in narr1: |
647 | if arr2: | 644 | if arr2: |
648 | for a2 in narr2: | 645 | for a2 in narr2: |
@@ -784,7 +781,7 @@ class RpmPM(PackageManager): | |||
784 | try: | 781 | try: |
785 | complementary_pkgs = subprocess.check_output(cmd, | 782 | complementary_pkgs = subprocess.check_output(cmd, |
786 | stderr=subprocess.STDOUT, | 783 | stderr=subprocess.STDOUT, |
787 | shell=True) | 784 | shell=True).decode("utf-8") |
788 | # bb.note(complementary_pkgs) | 785 | # bb.note(complementary_pkgs) |
789 | return complementary_pkgs | 786 | return complementary_pkgs |
790 | except subprocess.CalledProcessError as e: | 787 | except subprocess.CalledProcessError as e: |
@@ -808,7 +805,7 @@ class RpmPM(PackageManager): | |||
808 | (self.smart_cmd, self.smart_opt, pkg) | 805 | (self.smart_cmd, self.smart_opt, pkg) |
809 | cmd += " | sed -ne 's/ *Provides://p'" | 806 | cmd += " | sed -ne 's/ *Provides://p'" |
810 | bb.note('cmd: %s' % cmd) | 807 | bb.note('cmd: %s' % cmd) |
811 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) | 808 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8") |
812 | # Found a provider | 809 | # Found a provider |
813 | if output: | 810 | if output: |
814 | bb.note('Found providers for %s: %s' % (pkg, output)) | 811 | bb.note('Found providers for %s: %s' % (pkg, output)) |
@@ -1091,7 +1088,7 @@ class RpmPM(PackageManager): | |||
1091 | open(self.scriptlet_wrapper, 'w+').write(scriptlet_content) | 1088 | open(self.scriptlet_wrapper, 'w+').write(scriptlet_content) |
1092 | 1089 | ||
1093 | bb.note("Note: configuring RPM cross-install scriptlet_wrapper") | 1090 | bb.note("Note: configuring RPM cross-install scriptlet_wrapper") |
1094 | os.chmod(self.scriptlet_wrapper, 0755) | 1091 | os.chmod(self.scriptlet_wrapper, 0o755) |
1095 | cmd = 'config --set rpm-extra-macros._cross_scriptlet_wrapper=%s' % \ | 1092 | cmd = 'config --set rpm-extra-macros._cross_scriptlet_wrapper=%s' % \ |
1096 | self.scriptlet_wrapper | 1093 | self.scriptlet_wrapper |
1097 | self._invoke_smart(cmd) | 1094 | self._invoke_smart(cmd) |
@@ -1109,7 +1106,7 @@ class RpmPM(PackageManager): | |||
1109 | sub_rdep = sub_data.get("RDEPENDS_" + pkg) | 1106 | sub_rdep = sub_data.get("RDEPENDS_" + pkg) |
1110 | if not sub_rdep: | 1107 | if not sub_rdep: |
1111 | continue | 1108 | continue |
1112 | done = bb.utils.explode_dep_versions2(sub_rdep).keys() | 1109 | done = list(bb.utils.explode_dep_versions2(sub_rdep).keys()) |
1113 | next = done | 1110 | next = done |
1114 | # Find all the rdepends on dependency chain | 1111 | # Find all the rdepends on dependency chain |
1115 | while next: | 1112 | while next: |
@@ -1203,7 +1200,7 @@ class RpmPM(PackageManager): | |||
1203 | cmd = "%s %s install --attempt -y %s" % \ | 1200 | cmd = "%s %s install --attempt -y %s" % \ |
1204 | (self.smart_cmd, self.smart_opt, ' '.join(pkgs)) | 1201 | (self.smart_cmd, self.smart_opt, ' '.join(pkgs)) |
1205 | try: | 1202 | try: |
1206 | output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT) | 1203 | output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT).decode("utf-8") |
1207 | bb.note(output) | 1204 | bb.note(output) |
1208 | except subprocess.CalledProcessError as e: | 1205 | except subprocess.CalledProcessError as e: |
1209 | bb.fatal("Unable to install packages. Command '%s' " | 1206 | bb.fatal("Unable to install packages. Command '%s' " |
@@ -1233,7 +1230,7 @@ class RpmPM(PackageManager): | |||
1233 | 1230 | ||
1234 | try: | 1231 | try: |
1235 | bb.note(cmd) | 1232 | bb.note(cmd) |
1236 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) | 1233 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8") |
1237 | bb.note(output) | 1234 | bb.note(output) |
1238 | except subprocess.CalledProcessError as e: | 1235 | except subprocess.CalledProcessError as e: |
1239 | bb.note("Unable to remove packages. Command '%s' " | 1236 | bb.note("Unable to remove packages. Command '%s' " |
@@ -1379,9 +1376,9 @@ class RpmPM(PackageManager): | |||
1379 | 1376 | ||
1380 | try: | 1377 | try: |
1381 | bb.note(cmd) | 1378 | bb.note(cmd) |
1382 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip() | 1379 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip().decode("utf-8") |
1383 | bb.note(output) | 1380 | bb.note(output) |
1384 | os.chmod(saved_dir, 0755) | 1381 | os.chmod(saved_dir, 0o755) |
1385 | except subprocess.CalledProcessError as e: | 1382 | except subprocess.CalledProcessError as e: |
1386 | bb.fatal("Invoke save_rpmpostinst failed. Command '%s' " | 1383 | bb.fatal("Invoke save_rpmpostinst failed. Command '%s' " |
1387 | "returned %d:\n%s" % (cmd, e.returncode, e.output)) | 1384 | "returned %d:\n%s" % (cmd, e.returncode, e.output)) |
@@ -1753,7 +1750,7 @@ class OpkgPM(OpkgDpkgPM): | |||
1753 | try: | 1750 | try: |
1754 | bb.note("Installing the following packages: %s" % ' '.join(pkgs)) | 1751 | bb.note("Installing the following packages: %s" % ' '.join(pkgs)) |
1755 | bb.note(cmd) | 1752 | bb.note(cmd) |
1756 | output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT) | 1753 | output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT).decode("utf-8") |
1757 | bb.note(output) | 1754 | bb.note(output) |
1758 | except subprocess.CalledProcessError as e: | 1755 | except subprocess.CalledProcessError as e: |
1759 | (bb.fatal, bb.note)[attempt_only]("Unable to install packages. " | 1756 | (bb.fatal, bb.note)[attempt_only]("Unable to install packages. " |
@@ -1770,7 +1767,7 @@ class OpkgPM(OpkgDpkgPM): | |||
1770 | 1767 | ||
1771 | try: | 1768 | try: |
1772 | bb.note(cmd) | 1769 | bb.note(cmd) |
1773 | output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT) | 1770 | output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT).decode("utf-8") |
1774 | bb.note(output) | 1771 | bb.note(output) |
1775 | except subprocess.CalledProcessError as e: | 1772 | except subprocess.CalledProcessError as e: |
1776 | bb.fatal("Unable to remove packages. Command '%s' " | 1773 | bb.fatal("Unable to remove packages. Command '%s' " |
@@ -1817,7 +1814,7 @@ class OpkgPM(OpkgDpkgPM): | |||
1817 | pkg_info = cmd + pkg | 1814 | pkg_info = cmd + pkg |
1818 | 1815 | ||
1819 | try: | 1816 | try: |
1820 | output = subprocess.check_output(pkg_info.split(), stderr=subprocess.STDOUT).strip() | 1817 | output = subprocess.check_output(pkg_info.split(), stderr=subprocess.STDOUT).strip().decode("utf-8") |
1821 | except subprocess.CalledProcessError as e: | 1818 | except subprocess.CalledProcessError as e: |
1822 | bb.fatal("Cannot get package info. Command '%s' " | 1819 | bb.fatal("Cannot get package info. Command '%s' " |
1823 | "returned %d:\n%s" % (pkg_info, e.returncode, e.output)) | 1820 | "returned %d:\n%s" % (pkg_info, e.returncode, e.output)) |
diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py index bc0fd06bce..21d4de914f 100644 --- a/meta/lib/oe/packagedata.py +++ b/meta/lib/oe/packagedata.py | |||
@@ -8,7 +8,7 @@ def read_pkgdatafile(fn): | |||
8 | pkgdata = {} | 8 | pkgdata = {} |
9 | 9 | ||
10 | def decode(str): | 10 | def decode(str): |
11 | c = codecs.getdecoder("string_escape") | 11 | c = codecs.getdecoder("unicode_escape") |
12 | return c(str)[0] | 12 | return c(str)[0] |
13 | 13 | ||
14 | if os.access(fn, os.R_OK): | 14 | if os.access(fn, os.R_OK): |
@@ -66,7 +66,7 @@ def _pkgmap(d): | |||
66 | bb.warn("No files in %s?" % pkgdatadir) | 66 | bb.warn("No files in %s?" % pkgdatadir) |
67 | files = [] | 67 | files = [] |
68 | 68 | ||
69 | for pn in filter(lambda f: not os.path.isdir(os.path.join(pkgdatadir, f)), files): | 69 | for pn in [f for f in files if not os.path.isdir(os.path.join(pkgdatadir, f))]: |
70 | try: | 70 | try: |
71 | pkgdata = read_pkgdatafile(os.path.join(pkgdatadir, pn)) | 71 | pkgdata = read_pkgdatafile(os.path.join(pkgdatadir, pn)) |
72 | except OSError: | 72 | except OSError: |
diff --git a/meta/lib/oe/packagegroup.py b/meta/lib/oe/packagegroup.py index a6fee5f950..97819279b7 100644 --- a/meta/lib/oe/packagegroup.py +++ b/meta/lib/oe/packagegroup.py | |||
@@ -16,11 +16,11 @@ def packages(features, d): | |||
16 | yield pkg | 16 | yield pkg |
17 | 17 | ||
18 | def required_packages(features, d): | 18 | def required_packages(features, d): |
19 | req = filter(lambda feature: not is_optional(feature, d), features) | 19 | req = [feature for feature in features if not is_optional(feature, d)] |
20 | return packages(req, d) | 20 | return packages(req, d) |
21 | 21 | ||
22 | def optional_packages(features, d): | 22 | def optional_packages(features, d): |
23 | opt = filter(lambda feature: is_optional(feature, d), features) | 23 | opt = [feature for feature in features if is_optional(feature, d)] |
24 | return packages(opt, d) | 24 | return packages(opt, d) |
25 | 25 | ||
26 | def active_packages(features, d): | 26 | def active_packages(features, d): |
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py index add41045f3..a25fd527f6 100644 --- a/meta/lib/oe/patch.py +++ b/meta/lib/oe/patch.py | |||
@@ -434,7 +434,7 @@ class GitApplyTree(PatchTree): | |||
434 | # change other places which read it back | 434 | # change other places which read it back |
435 | f.write('echo >> $1\n') | 435 | f.write('echo >> $1\n') |
436 | f.write('echo "%s: $PATCHFILE" >> $1\n' % GitApplyTree.patch_line_prefix) | 436 | f.write('echo "%s: $PATCHFILE" >> $1\n' % GitApplyTree.patch_line_prefix) |
437 | os.chmod(commithook, 0755) | 437 | os.chmod(commithook, 0o755) |
438 | shutil.copy2(commithook, applyhook) | 438 | shutil.copy2(commithook, applyhook) |
439 | try: | 439 | try: |
440 | patchfilevar = 'PATCHFILE="%s"' % os.path.basename(patch['file']) | 440 | patchfilevar = 'PATCHFILE="%s"' % os.path.basename(patch['file']) |
@@ -672,7 +672,7 @@ class UserResolver(Resolver): | |||
672 | f.write("echo 'Run \"quilt refresh\" when patch is corrected, press CTRL+D to exit.'\n") | 672 | f.write("echo 'Run \"quilt refresh\" when patch is corrected, press CTRL+D to exit.'\n") |
673 | f.write("echo ''\n") | 673 | f.write("echo ''\n") |
674 | f.write(" ".join(patchcmd) + "\n") | 674 | f.write(" ".join(patchcmd) + "\n") |
675 | os.chmod(rcfile, 00775) | 675 | os.chmod(rcfile, 0o775) |
676 | 676 | ||
677 | self.terminal("bash --rcfile " + rcfile, 'Patch Rejects: Please fix patch rejects manually', self.patchset.d) | 677 | self.terminal("bash --rcfile " + rcfile, 'Patch Rejects: Please fix patch rejects manually', self.patchset.d) |
678 | 678 | ||
diff --git a/meta/lib/oe/prservice.py b/meta/lib/oe/prservice.py index b0cbcb1fbc..0054f954cc 100644 --- a/meta/lib/oe/prservice.py +++ b/meta/lib/oe/prservice.py | |||
@@ -1,7 +1,7 @@ | |||
1 | 1 | ||
2 | def prserv_make_conn(d, check = False): | 2 | def prserv_make_conn(d, check = False): |
3 | import prserv.serv | 3 | import prserv.serv |
4 | host_params = filter(None, (d.getVar("PRSERV_HOST", True) or '').split(':')) | 4 | host_params = list([_f for _f in (d.getVar("PRSERV_HOST", True) or '').split(':') if _f]) |
5 | try: | 5 | try: |
6 | conn = None | 6 | conn = None |
7 | conn = prserv.serv.PRServerConnection(host_params[0], int(host_params[1])) | 7 | conn = prserv.serv.PRServerConnection(host_params[0], int(host_params[1])) |
@@ -9,7 +9,7 @@ def prserv_make_conn(d, check = False): | |||
9 | if not conn.ping(): | 9 | if not conn.ping(): |
10 | raise Exception('service not available') | 10 | raise Exception('service not available') |
11 | d.setVar("__PRSERV_CONN",conn) | 11 | d.setVar("__PRSERV_CONN",conn) |
12 | except Exception, exc: | 12 | except Exception as exc: |
13 | bb.fatal("Connecting to PR service %s:%s failed: %s" % (host_params[0], host_params[1], str(exc))) | 13 | bb.fatal("Connecting to PR service %s:%s failed: %s" % (host_params[0], host_params[1], str(exc))) |
14 | 14 | ||
15 | return conn | 15 | return conn |
@@ -114,7 +114,7 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False): | |||
114 | bb.utils.unlockfile(lf) | 114 | bb.utils.unlockfile(lf) |
115 | 115 | ||
116 | def prserv_check_avail(d): | 116 | def prserv_check_avail(d): |
117 | host_params = filter(None, (d.getVar("PRSERV_HOST", True) or '').split(':')) | 117 | host_params = list([_f for _f in (d.getVar("PRSERV_HOST", True) or '').split(':') if _f]) |
118 | try: | 118 | try: |
119 | if len(host_params) != 2: | 119 | if len(host_params) != 2: |
120 | raise TypeError | 120 | raise TypeError |
diff --git a/meta/lib/oe/qa.py b/meta/lib/oe/qa.py index 3cfeee737b..ff0c87a383 100644 --- a/meta/lib/oe/qa.py +++ b/meta/lib/oe/qa.py | |||
@@ -43,17 +43,17 @@ class ELFFile: | |||
43 | if not os.path.isfile(self.name): | 43 | if not os.path.isfile(self.name): |
44 | raise NotELFFileError("%s is not a normal file" % self.name) | 44 | raise NotELFFileError("%s is not a normal file" % self.name) |
45 | 45 | ||
46 | self.file = file(self.name, "r") | 46 | with open(self.name, "rb") as f: |
47 | # Read 4k which should cover most of the headers we're after | 47 | # Read 4k which should cover most of the headers we're after |
48 | self.data = self.file.read(4096) | 48 | self.data = f.read(4096) |
49 | 49 | ||
50 | if len(self.data) < ELFFile.EI_NIDENT + 4: | 50 | if len(self.data) < ELFFile.EI_NIDENT + 4: |
51 | raise NotELFFileError("%s is not an ELF" % self.name) | 51 | raise NotELFFileError("%s is not an ELF" % self.name) |
52 | 52 | ||
53 | self.my_assert(self.data[0], chr(0x7f) ) | 53 | self.my_assert(self.data[0], chr(0x7f) ) |
54 | self.my_assert(self.data[1], 'E') | 54 | self.my_assert(self.data[1], b'E') |
55 | self.my_assert(self.data[2], 'L') | 55 | self.my_assert(self.data[2], b'L') |
56 | self.my_assert(self.data[3], 'F') | 56 | self.my_assert(self.data[3], b'F') |
57 | if self.bits == 0: | 57 | if self.bits == 0: |
58 | if self.data[ELFFile.EI_CLASS] == chr(ELFFile.ELFCLASS32): | 58 | if self.data[ELFFile.EI_CLASS] == chr(ELFFile.ELFCLASS32): |
59 | self.bits = 32 | 59 | self.bits = 32 |
@@ -148,4 +148,4 @@ if __name__ == "__main__": | |||
148 | import sys | 148 | import sys |
149 | elf = ELFFile(sys.argv[1]) | 149 | elf = ELFFile(sys.argv[1]) |
150 | elf.open() | 150 | elf.open() |
151 | print elf.isDynamic() | 151 | print(elf.isDynamic()) |
diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py index 6c7adb5bdb..c183f71d3c 100644 --- a/meta/lib/oe/recipeutils.py +++ b/meta/lib/oe/recipeutils.py | |||
@@ -11,7 +11,7 @@ import os.path | |||
11 | import tempfile | 11 | import tempfile |
12 | import textwrap | 12 | import textwrap |
13 | import difflib | 13 | import difflib |
14 | import utils | 14 | from . import utils |
15 | import shutil | 15 | import shutil |
16 | import re | 16 | import re |
17 | import fnmatch | 17 | import fnmatch |
@@ -318,7 +318,7 @@ def patch_recipe(d, fn, varvalues, patch=False, relpath=''): | |||
318 | varfiles = get_var_files(fn, varlist, d) | 318 | varfiles = get_var_files(fn, varlist, d) |
319 | locs = localise_file_vars(fn, varfiles, varlist) | 319 | locs = localise_file_vars(fn, varfiles, varlist) |
320 | patches = [] | 320 | patches = [] |
321 | for f,v in locs.iteritems(): | 321 | for f,v in locs.items(): |
322 | vals = {k: varvalues[k] for k in v} | 322 | vals = {k: varvalues[k] for k in v} |
323 | patchdata = patch_recipe_file(f, vals, patch, relpath) | 323 | patchdata = patch_recipe_file(f, vals, patch, relpath) |
324 | if patch: | 324 | if patch: |
@@ -536,7 +536,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, | |||
536 | bbappendlines = [] | 536 | bbappendlines = [] |
537 | if extralines: | 537 | if extralines: |
538 | if isinstance(extralines, dict): | 538 | if isinstance(extralines, dict): |
539 | for name, value in extralines.iteritems(): | 539 | for name, value in extralines.items(): |
540 | bbappendlines.append((name, '=', value)) | 540 | bbappendlines.append((name, '=', value)) |
541 | else: | 541 | else: |
542 | # Do our best to split it | 542 | # Do our best to split it |
@@ -550,14 +550,14 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, | |||
550 | raise Exception('Invalid extralines value passed') | 550 | raise Exception('Invalid extralines value passed') |
551 | 551 | ||
552 | def popline(varname): | 552 | def popline(varname): |
553 | for i in xrange(0, len(bbappendlines)): | 553 | for i in range(0, len(bbappendlines)): |
554 | if bbappendlines[i][0] == varname: | 554 | if bbappendlines[i][0] == varname: |
555 | line = bbappendlines.pop(i) | 555 | line = bbappendlines.pop(i) |
556 | return line | 556 | return line |
557 | return None | 557 | return None |
558 | 558 | ||
559 | def appendline(varname, op, value): | 559 | def appendline(varname, op, value): |
560 | for i in xrange(0, len(bbappendlines)): | 560 | for i in range(0, len(bbappendlines)): |
561 | item = bbappendlines[i] | 561 | item = bbappendlines[i] |
562 | if item[0] == varname: | 562 | if item[0] == varname: |
563 | bbappendlines[i] = (item[0], item[1], item[2] + ' ' + value) | 563 | bbappendlines[i] = (item[0], item[1], item[2] + ' ' + value) |
@@ -576,7 +576,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, | |||
576 | copyfiles = {} | 576 | copyfiles = {} |
577 | if srcfiles: | 577 | if srcfiles: |
578 | instfunclines = [] | 578 | instfunclines = [] |
579 | for newfile, origsrcfile in srcfiles.iteritems(): | 579 | for newfile, origsrcfile in srcfiles.items(): |
580 | srcfile = origsrcfile | 580 | srcfile = origsrcfile |
581 | srcurientry = None | 581 | srcurientry = None |
582 | if not srcfile: | 582 | if not srcfile: |
@@ -644,7 +644,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, | |||
644 | 644 | ||
645 | if removevar in removevalues: | 645 | if removevar in removevalues: |
646 | remove = removevalues[removevar] | 646 | remove = removevalues[removevar] |
647 | if isinstance(remove, basestring): | 647 | if isinstance(remove, str): |
648 | if remove in splitval: | 648 | if remove in splitval: |
649 | splitval.remove(remove) | 649 | splitval.remove(remove) |
650 | changed = True | 650 | changed = True |
@@ -674,7 +674,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, | |||
674 | 674 | ||
675 | varnames = [item[0] for item in bbappendlines] | 675 | varnames = [item[0] for item in bbappendlines] |
676 | if removevalues: | 676 | if removevalues: |
677 | varnames.extend(removevalues.keys()) | 677 | varnames.extend(list(removevalues.keys())) |
678 | 678 | ||
679 | with open(appendpath, 'r') as f: | 679 | with open(appendpath, 'r') as f: |
680 | (updated, newlines) = bb.utils.edit_metadata(f, varnames, appendfile_varfunc) | 680 | (updated, newlines) = bb.utils.edit_metadata(f, varnames, appendfile_varfunc) |
@@ -699,7 +699,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, | |||
699 | if copyfiles: | 699 | if copyfiles: |
700 | if machine: | 700 | if machine: |
701 | destsubdir = os.path.join(destsubdir, machine) | 701 | destsubdir = os.path.join(destsubdir, machine) |
702 | for newfile, srcfile in copyfiles.iteritems(): | 702 | for newfile, srcfile in copyfiles.items(): |
703 | filedest = os.path.join(appenddir, destsubdir, os.path.basename(srcfile)) | 703 | filedest = os.path.join(appenddir, destsubdir, os.path.basename(srcfile)) |
704 | if os.path.abspath(newfile) != os.path.abspath(filedest): | 704 | if os.path.abspath(newfile) != os.path.abspath(filedest): |
705 | bb.note('Copying %s to %s' % (newfile, filedest)) | 705 | bb.note('Copying %s to %s' % (newfile, filedest)) |
@@ -725,12 +725,12 @@ def replace_dir_vars(path, d): | |||
725 | """Replace common directory paths with appropriate variable references (e.g. /etc becomes ${sysconfdir})""" | 725 | """Replace common directory paths with appropriate variable references (e.g. /etc becomes ${sysconfdir})""" |
726 | dirvars = {} | 726 | dirvars = {} |
727 | # Sort by length so we get the variables we're interested in first | 727 | # Sort by length so we get the variables we're interested in first |
728 | for var in sorted(d.keys(), key=len): | 728 | for var in sorted(list(d.keys()), key=len): |
729 | if var.endswith('dir') and var.lower() == var: | 729 | if var.endswith('dir') and var.lower() == var: |
730 | value = d.getVar(var, True) | 730 | value = d.getVar(var, True) |
731 | if value.startswith('/') and not '\n' in value and value not in dirvars: | 731 | if value.startswith('/') and not '\n' in value and value not in dirvars: |
732 | dirvars[value] = var | 732 | dirvars[value] = var |
733 | for dirpath in sorted(dirvars.keys(), reverse=True): | 733 | for dirpath in sorted(list(dirvars.keys()), reverse=True): |
734 | path = path.replace(dirpath, '${%s}' % dirvars[dirpath]) | 734 | path = path.replace(dirpath, '${%s}' % dirvars[dirpath]) |
735 | return path | 735 | return path |
736 | 736 | ||
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py index 95fd3ab7f1..528f50c0df 100644 --- a/meta/lib/oe/rootfs.py +++ b/meta/lib/oe/rootfs.py | |||
@@ -10,11 +10,10 @@ import subprocess | |||
10 | import re | 10 | import re |
11 | 11 | ||
12 | 12 | ||
13 | class Rootfs(object): | 13 | class Rootfs(object, metaclass=ABCMeta): |
14 | """ | 14 | """ |
15 | This is an abstract class. Do not instantiate this directly. | 15 | This is an abstract class. Do not instantiate this directly. |
16 | """ | 16 | """ |
17 | __metaclass__ = ABCMeta | ||
18 | 17 | ||
19 | def __init__(self, d): | 18 | def __init__(self, d): |
20 | self.d = d | 19 | self.d = d |
@@ -51,8 +50,8 @@ class Rootfs(object): | |||
51 | m = r.search(line) | 50 | m = r.search(line) |
52 | if m: | 51 | if m: |
53 | bb.warn('[log_check] %s: found a warning message in the logfile (keyword \'%s\'):\n[log_check] %s' | 52 | bb.warn('[log_check] %s: found a warning message in the logfile (keyword \'%s\'):\n[log_check] %s' |
54 | % (self.d.getVar('PN', True), m.group(), line)) | 53 | % (self.d.getVar('PN', True), m.group(), line)) |
55 | 54 | ||
56 | def _log_check_error(self): | 55 | def _log_check_error(self): |
57 | r = re.compile(self.log_check_regex) | 56 | r = re.compile(self.log_check_regex) |
58 | log_path = self.d.expand("${T}/log.do_rootfs") | 57 | log_path = self.d.expand("${T}/log.do_rootfs") |
@@ -77,7 +76,7 @@ class Rootfs(object): | |||
77 | found_error = 1 | 76 | found_error = 1 |
78 | bb.warn('[log_check] In line: [%s]' % line) | 77 | bb.warn('[log_check] In line: [%s]' % line) |
79 | bb.warn('[log_check] %s: found an error message in the logfile (keyword \'%s\'):\n[log_check] %s' | 78 | bb.warn('[log_check] %s: found an error message in the logfile (keyword \'%s\'):\n[log_check] %s' |
80 | % (self.d.getVar('PN', True), m.group(), line)) | 79 | % (self.d.getVar('PN', True), m.group(), line)) |
81 | 80 | ||
82 | if found_error >= 1 and found_error <= 5: | 81 | if found_error >= 1 and found_error <= 5: |
83 | message += line + '\n' | 82 | message += line + '\n' |
@@ -485,7 +484,7 @@ class RpmRootfs(Rootfs): | |||
485 | continue | 484 | continue |
486 | # sh -x may emit code which isn't actually executed | 485 | # sh -x may emit code which isn't actually executed |
487 | if line.startswith('+'): | 486 | if line.startswith('+'): |
488 | continue | 487 | continue |
489 | 488 | ||
490 | m = r.search(line) | 489 | m = r.search(line) |
491 | if m: | 490 | if m: |
@@ -565,7 +564,7 @@ class DpkgOpkgRootfs(Rootfs): | |||
565 | pkg_depends = m_depends.group(1) | 564 | pkg_depends = m_depends.group(1) |
566 | 565 | ||
567 | # remove package dependencies not in postinsts | 566 | # remove package dependencies not in postinsts |
568 | pkg_names = pkgs.keys() | 567 | pkg_names = list(pkgs.keys()) |
569 | for pkg_name in pkg_names: | 568 | for pkg_name in pkg_names: |
570 | deps = pkgs[pkg_name][:] | 569 | deps = pkgs[pkg_name][:] |
571 | 570 | ||
@@ -598,7 +597,7 @@ class DpkgOpkgRootfs(Rootfs): | |||
598 | pkgs = self._get_pkgs_postinsts(status_file) | 597 | pkgs = self._get_pkgs_postinsts(status_file) |
599 | if pkgs: | 598 | if pkgs: |
600 | root = "__packagegroup_postinst__" | 599 | root = "__packagegroup_postinst__" |
601 | pkgs[root] = pkgs.keys() | 600 | pkgs[root] = list(pkgs.keys()) |
602 | _dep_resolve(pkgs, root, pkg_list, []) | 601 | _dep_resolve(pkgs, root, pkg_list, []) |
603 | pkg_list.remove(root) | 602 | pkg_list.remove(root) |
604 | 603 | ||
diff --git a/meta/lib/oe/sdk.py b/meta/lib/oe/sdk.py index f1bbef6f58..c74525f929 100644 --- a/meta/lib/oe/sdk.py +++ b/meta/lib/oe/sdk.py | |||
@@ -8,9 +8,7 @@ import glob | |||
8 | import traceback | 8 | import traceback |
9 | 9 | ||
10 | 10 | ||
11 | class Sdk(object): | 11 | class Sdk(object, metaclass=ABCMeta): |
12 | __metaclass__ = ABCMeta | ||
13 | |||
14 | def __init__(self, d, manifest_dir): | 12 | def __init__(self, d, manifest_dir): |
15 | self.d = d | 13 | self.d = d |
16 | self.sdk_output = self.d.getVar('SDK_OUTPUT', True) | 14 | self.sdk_output = self.d.getVar('SDK_OUTPUT', True) |
@@ -251,12 +249,12 @@ class OpkgSdk(Sdk): | |||
251 | self.mkdirhier(target_sysconfdir) | 249 | self.mkdirhier(target_sysconfdir) |
252 | shutil.copy(self.target_conf, target_sysconfdir) | 250 | shutil.copy(self.target_conf, target_sysconfdir) |
253 | os.chmod(os.path.join(target_sysconfdir, | 251 | os.chmod(os.path.join(target_sysconfdir, |
254 | os.path.basename(self.target_conf)), 0644) | 252 | os.path.basename(self.target_conf)), 0o644) |
255 | 253 | ||
256 | self.mkdirhier(host_sysconfdir) | 254 | self.mkdirhier(host_sysconfdir) |
257 | shutil.copy(self.host_conf, host_sysconfdir) | 255 | shutil.copy(self.host_conf, host_sysconfdir) |
258 | os.chmod(os.path.join(host_sysconfdir, | 256 | os.chmod(os.path.join(host_sysconfdir, |
259 | os.path.basename(self.host_conf)), 0644) | 257 | os.path.basename(self.host_conf)), 0o644) |
260 | 258 | ||
261 | native_opkg_state_dir = os.path.join(self.sdk_output, self.sdk_native_path, | 259 | native_opkg_state_dir = os.path.join(self.sdk_output, self.sdk_native_path, |
262 | self.d.getVar('localstatedir_nativesdk', True).strip('/'), | 260 | self.d.getVar('localstatedir_nativesdk', True).strip('/'), |
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py index 01dce660cf..a58f03a342 100644 --- a/meta/lib/oe/sstatesig.py +++ b/meta/lib/oe/sstatesig.py | |||
@@ -210,7 +210,7 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash): | |||
210 | continue | 210 | continue |
211 | f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.taskhash[k] + " \\\n") | 211 | f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.taskhash[k] + " \\\n") |
212 | f.write(' "\n') | 212 | f.write(' "\n') |
213 | f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(types.keys()))) | 213 | f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(list(types.keys())))) |
214 | 214 | ||
215 | def checkhashes(self, missed, ret, sq_fn, sq_task, sq_hash, sq_hashfn, d): | 215 | def checkhashes(self, missed, ret, sq_fn, sq_task, sq_hash, sq_hashfn, d): |
216 | warn_msgs = [] | 216 | warn_msgs = [] |
@@ -220,7 +220,7 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash): | |||
220 | for task in range(len(sq_fn)): | 220 | for task in range(len(sq_fn)): |
221 | if task not in ret: | 221 | if task not in ret: |
222 | for pn in self.lockedsigs: | 222 | for pn in self.lockedsigs: |
223 | if sq_hash[task] in self.lockedsigs[pn].itervalues(): | 223 | if sq_hash[task] in iter(self.lockedsigs[pn].values()): |
224 | if sq_task[task] == 'do_shared_workdir': | 224 | if sq_task[task] == 'do_shared_workdir': |
225 | continue | 225 | continue |
226 | sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?" | 226 | sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?" |
diff --git a/meta/lib/oe/terminal.py b/meta/lib/oe/terminal.py index 634daa9033..dc25d14ff6 100644 --- a/meta/lib/oe/terminal.py +++ b/meta/lib/oe/terminal.py | |||
@@ -25,9 +25,7 @@ class Registry(oe.classutils.ClassRegistry): | |||
25 | return bool(cls.command) | 25 | return bool(cls.command) |
26 | 26 | ||
27 | 27 | ||
28 | class Terminal(Popen): | 28 | class Terminal(Popen, metaclass=Registry): |
29 | __metaclass__ = Registry | ||
30 | |||
31 | def __init__(self, sh_cmd, title=None, env=None, d=None): | 29 | def __init__(self, sh_cmd, title=None, env=None, d=None): |
32 | fmt_sh_cmd = self.format_command(sh_cmd, title) | 30 | fmt_sh_cmd = self.format_command(sh_cmd, title) |
33 | try: | 31 | try: |
@@ -41,7 +39,7 @@ class Terminal(Popen): | |||
41 | 39 | ||
42 | def format_command(self, sh_cmd, title): | 40 | def format_command(self, sh_cmd, title): |
43 | fmt = {'title': title or 'Terminal', 'command': sh_cmd} | 41 | fmt = {'title': title or 'Terminal', 'command': sh_cmd} |
44 | if isinstance(self.command, basestring): | 42 | if isinstance(self.command, str): |
45 | return shlex.split(self.command.format(**fmt)) | 43 | return shlex.split(self.command.format(**fmt)) |
46 | else: | 44 | else: |
47 | return [element.format(**fmt) for element in self.command] | 45 | return [element.format(**fmt) for element in self.command] |
diff --git a/meta/lib/oe/tests/test_path.py b/meta/lib/oe/tests/test_path.py index 3d41ce157a..5aa024d393 100644 --- a/meta/lib/oe/tests/test_path.py +++ b/meta/lib/oe/tests/test_path.py | |||
@@ -85,5 +85,5 @@ class TestRealPath(unittest.TestCase): | |||
85 | 85 | ||
86 | def test_loop(self): | 86 | def test_loop(self): |
87 | for e in self.EXCEPTIONS: | 87 | for e in self.EXCEPTIONS: |
88 | self.assertRaisesRegexp(OSError, r'\[Errno %u\]' % e[1], | 88 | self.assertRaisesRegex(OSError, r'\[Errno %u\]' % e[1], |
89 | self.__realpath, e[0], False, False) | 89 | self.__realpath, e[0], False, False) |
diff --git a/meta/lib/oe/types.py b/meta/lib/oe/types.py index 7f47c17d0e..4ae58acfac 100644 --- a/meta/lib/oe/types.py +++ b/meta/lib/oe/types.py | |||
@@ -33,7 +33,7 @@ def choice(value, choices): | |||
33 | Acts as a multiple choice for the user. To use this, set the variable | 33 | Acts as a multiple choice for the user. To use this, set the variable |
34 | type flag to 'choice', and set the 'choices' flag to a space separated | 34 | type flag to 'choice', and set the 'choices' flag to a space separated |
35 | list of valid values.""" | 35 | list of valid values.""" |
36 | if not isinstance(value, basestring): | 36 | if not isinstance(value, str): |
37 | raise TypeError("choice accepts a string, not '%s'" % type(value)) | 37 | raise TypeError("choice accepts a string, not '%s'" % type(value)) |
38 | 38 | ||
39 | value = value.lower() | 39 | value = value.lower() |
@@ -106,7 +106,7 @@ def boolean(value): | |||
106 | Valid values for false: 'no', 'n', 'false', 'f', '0' | 106 | Valid values for false: 'no', 'n', 'false', 'f', '0' |
107 | """ | 107 | """ |
108 | 108 | ||
109 | if not isinstance(value, basestring): | 109 | if not isinstance(value, str): |
110 | raise TypeError("boolean accepts a string, not '%s'" % type(value)) | 110 | raise TypeError("boolean accepts a string, not '%s'" % type(value)) |
111 | 111 | ||
112 | value = value.lower() | 112 | value = value.lower() |
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py index 30d30629f1..cecddc657f 100644 --- a/meta/lib/oe/utils.py +++ b/meta/lib/oe/utils.py | |||
@@ -46,7 +46,7 @@ def both_contain(variable1, variable2, checkvalue, d): | |||
46 | val2 = d.getVar(variable2, True) | 46 | val2 = d.getVar(variable2, True) |
47 | val1 = set(val1.split()) | 47 | val1 = set(val1.split()) |
48 | val2 = set(val2.split()) | 48 | val2 = set(val2.split()) |
49 | if isinstance(checkvalue, basestring): | 49 | if isinstance(checkvalue, str): |
50 | checkvalue = set(checkvalue.split()) | 50 | checkvalue = set(checkvalue.split()) |
51 | else: | 51 | else: |
52 | checkvalue = set(checkvalue) | 52 | checkvalue = set(checkvalue) |
@@ -85,11 +85,11 @@ def prune_suffix(var, suffixes, d): | |||
85 | 85 | ||
86 | def str_filter(f, str, d): | 86 | def str_filter(f, str, d): |
87 | from re import match | 87 | from re import match |
88 | return " ".join(filter(lambda x: match(f, x, 0), str.split())) | 88 | return " ".join([x for x in str.split() if match(f, x, 0)]) |
89 | 89 | ||
90 | def str_filter_out(f, str, d): | 90 | def str_filter_out(f, str, d): |
91 | from re import match | 91 | from re import match |
92 | return " ".join(filter(lambda x: not match(f, x, 0), str.split())) | 92 | return " ".join([x for x in str.split() if not match(f, x, 0)]) |
93 | 93 | ||
94 | def param_bool(cfg, field, dflt = None): | 94 | def param_bool(cfg, field, dflt = None): |
95 | """Lookup <field> in <cfg> map and convert it to a boolean; take | 95 | """Lookup <field> in <cfg> map and convert it to a boolean; take |
@@ -134,7 +134,7 @@ def packages_filter_out_system(d): | |||
134 | PN-dbg PN-doc PN-locale-eb-gb removed. | 134 | PN-dbg PN-doc PN-locale-eb-gb removed. |
135 | """ | 135 | """ |
136 | pn = d.getVar('PN', True) | 136 | pn = d.getVar('PN', True) |
137 | blacklist = map(lambda suffix: pn + suffix, ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev')) | 137 | blacklist = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev')] |
138 | localepkg = pn + "-locale-" | 138 | localepkg = pn + "-locale-" |
139 | pkgs = [] | 139 | pkgs = [] |
140 | 140 | ||
@@ -235,7 +235,7 @@ def format_pkg_list(pkg_dict, ret_format=None): | |||
235 | # so implement a version here | 235 | # so implement a version here |
236 | # | 236 | # |
237 | 237 | ||
238 | from Queue import Queue | 238 | from queue import Queue |
239 | from threading import Thread | 239 | from threading import Thread |
240 | 240 | ||
241 | class ThreadedWorker(Thread): | 241 | class ThreadedWorker(Thread): |
@@ -249,7 +249,7 @@ class ThreadedWorker(Thread): | |||
249 | self.worker_end = worker_end | 249 | self.worker_end = worker_end |
250 | 250 | ||
251 | def run(self): | 251 | def run(self): |
252 | from Queue import Empty | 252 | from queue import Empty |
253 | 253 | ||
254 | if self.worker_init is not None: | 254 | if self.worker_init is not None: |
255 | self.worker_init(self) | 255 | self.worker_init(self) |
@@ -264,8 +264,8 @@ class ThreadedWorker(Thread): | |||
264 | 264 | ||
265 | try: | 265 | try: |
266 | func(self, *args, **kargs) | 266 | func(self, *args, **kargs) |
267 | except Exception, e: | 267 | except Exception as e: |
268 | print e | 268 | print(e) |
269 | finally: | 269 | finally: |
270 | self.tasks.task_done() | 270 | self.tasks.task_done() |
271 | 271 | ||
diff --git a/meta/lib/oeqa/controllers/masterimage.py b/meta/lib/oeqa/controllers/masterimage.py index 522f9ebd76..4cb75539ee 100644 --- a/meta/lib/oeqa/controllers/masterimage.py +++ b/meta/lib/oeqa/controllers/masterimage.py | |||
@@ -24,9 +24,7 @@ from oeqa.utils import CommandError | |||
24 | 24 | ||
25 | from abc import ABCMeta, abstractmethod | 25 | from abc import ABCMeta, abstractmethod |
26 | 26 | ||
27 | class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget): | 27 | class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta): |
28 | |||
29 | __metaclass__ = ABCMeta | ||
30 | 28 | ||
31 | supported_image_fstypes = ['tar.gz', 'tar.bz2'] | 29 | supported_image_fstypes = ['tar.gz', 'tar.bz2'] |
32 | 30 | ||
diff --git a/meta/lib/oeqa/oetest.py b/meta/lib/oeqa/oetest.py index 3ed5bb8c2b..869132273f 100644 --- a/meta/lib/oeqa/oetest.py +++ b/meta/lib/oeqa/oetest.py | |||
@@ -12,6 +12,7 @@ import unittest | |||
12 | import inspect | 12 | import inspect |
13 | import subprocess | 13 | import subprocess |
14 | import signal | 14 | import signal |
15 | import functools | ||
15 | try: | 16 | try: |
16 | import bb | 17 | import bb |
17 | except ImportError: | 18 | except ImportError: |
@@ -314,7 +315,14 @@ class TestContext(object): | |||
314 | for index, suite in enumerate(suites): | 315 | for index, suite in enumerate(suites): |
315 | set_suite_depth(suite) | 316 | set_suite_depth(suite) |
316 | suite.index = index | 317 | suite.index = index |
317 | suites.sort(cmp=lambda a,b: cmp((a.depth, a.index), (b.depth, b.index))) | 318 | |
319 | def cmp(a, b): | ||
320 | return (a > b) - (a < b) | ||
321 | |||
322 | def cmpfunc(a, b): | ||
323 | return cmp((a.depth, a.index), (b.depth, b.index)) | ||
324 | |||
325 | suites.sort(key=functools.cmp_to_key(cmpfunc)) | ||
318 | 326 | ||
319 | self.suite = testloader.suiteClass(suites) | 327 | self.suite = testloader.suiteClass(suites) |
320 | 328 | ||
diff --git a/meta/lib/oeqa/runtime/files/test.py b/meta/lib/oeqa/runtime/files/test.py index f3a2273c52..f389225d72 100644 --- a/meta/lib/oeqa/runtime/files/test.py +++ b/meta/lib/oeqa/runtime/files/test.py | |||
@@ -3,4 +3,4 @@ import os | |||
3 | os.system('touch /tmp/testfile.python') | 3 | os.system('touch /tmp/testfile.python') |
4 | 4 | ||
5 | a = 9.01e+21 - 9.01e+21 + 0.01 | 5 | a = 9.01e+21 - 9.01e+21 + 0.01 |
6 | print "the value of a is %s" % a | 6 | print("the value of a is %s" % a) |
diff --git a/meta/lib/oeqa/runtime/parselogs.py b/meta/lib/oeqa/runtime/parselogs.py index a93660131d..242cd8cdd5 100644 --- a/meta/lib/oeqa/runtime/parselogs.py +++ b/meta/lib/oeqa/runtime/parselogs.py | |||
@@ -238,7 +238,7 @@ class ParseLogsTest(oeRuntimeTest): | |||
238 | result = None | 238 | result = None |
239 | thegrep = self.build_grepcmd(errors, ignore_errors, log) | 239 | thegrep = self.build_grepcmd(errors, ignore_errors, log) |
240 | try: | 240 | try: |
241 | result = subprocess.check_output(thegrep, shell=True) | 241 | result = subprocess.check_output(thegrep, shell=True).decode("utf-8") |
242 | except: | 242 | except: |
243 | pass | 243 | pass |
244 | if (result is not None): | 244 | if (result is not None): |
@@ -246,7 +246,7 @@ class ParseLogsTest(oeRuntimeTest): | |||
246 | rez = result.splitlines() | 246 | rez = result.splitlines() |
247 | for xrez in rez: | 247 | for xrez in rez: |
248 | try: | 248 | try: |
249 | grep_output = subprocess.check_output(['grep', '-F', xrez, '-B', str(lines_before), '-A', str(lines_after), log]) | 249 | grep_output = subprocess.check_output(['grep', '-F', xrez, '-B', str(lines_before), '-A', str(lines_after), log]).decode("utf-8") |
250 | except: | 250 | except: |
251 | pass | 251 | pass |
252 | results[log.replace('target_logs/','')][xrez]=grep_output | 252 | results[log.replace('target_logs/','')][xrez]=grep_output |
diff --git a/meta/lib/oeqa/runtime/ping.py b/meta/lib/oeqa/runtime/ping.py index 80c460161b..0f27447926 100644 --- a/meta/lib/oeqa/runtime/ping.py +++ b/meta/lib/oeqa/runtime/ping.py | |||
@@ -14,7 +14,7 @@ class PingTest(oeRuntimeTest): | |||
14 | endtime = time.time() + 60 | 14 | endtime = time.time() + 60 |
15 | while count < 5 and time.time() < endtime: | 15 | while count < 5 and time.time() < endtime: |
16 | proc = subprocess.Popen("ping -c 1 %s" % self.target.ip, shell=True, stdout=subprocess.PIPE) | 16 | proc = subprocess.Popen("ping -c 1 %s" % self.target.ip, shell=True, stdout=subprocess.PIPE) |
17 | output += proc.communicate()[0] | 17 | output += proc.communicate()[0].decode("utf-8") |
18 | if proc.poll() == 0: | 18 | if proc.poll() == 0: |
19 | count += 1 | 19 | count += 1 |
20 | else: | 20 | else: |
diff --git a/meta/lib/oeqa/runtime/systemd.py b/meta/lib/oeqa/runtime/systemd.py index 2b2f10d71c..a96efa28dc 100644 --- a/meta/lib/oeqa/runtime/systemd.py +++ b/meta/lib/oeqa/runtime/systemd.py | |||
@@ -153,7 +153,7 @@ class SystemdJournalTests(SystemdTest): | |||
153 | if check_match: break | 153 | if check_match: break |
154 | # put the startup time in the test log | 154 | # put the startup time in the test log |
155 | if check_match: | 155 | if check_match: |
156 | print "%s" % check_match | 156 | print("%s" % check_match) |
157 | else: | 157 | else: |
158 | self.skipTest("Error at obtaining the boot time from journalctl") | 158 | self.skipTest("Error at obtaining the boot time from journalctl") |
159 | boot_time_sec = 0 | 159 | boot_time_sec = 0 |
@@ -174,5 +174,5 @@ class SystemdJournalTests(SystemdTest): | |||
174 | self.skipTest("Error when parsing time from boot string") | 174 | self.skipTest("Error when parsing time from boot string") |
175 | #Assert the target boot time against systemd's unit start timeout | 175 | #Assert the target boot time against systemd's unit start timeout |
176 | if boot_time_sec > systemd_TimeoutStartSec: | 176 | if boot_time_sec > systemd_TimeoutStartSec: |
177 | print "Target boot time %s exceeds systemd's TimeoutStartSec %s"\ | 177 | print("Target boot time %s exceeds systemd's TimeoutStartSec %s"\ |
178 | %(boot_time_sec, systemd_TimeoutStartSec) | 178 | %(boot_time_sec, systemd_TimeoutStartSec)) |
diff --git a/meta/lib/oeqa/selftest/_toaster.py b/meta/lib/oeqa/selftest/_toaster.py index c424659fdc..15ea9df9ef 100644 --- a/meta/lib/oeqa/selftest/_toaster.py +++ b/meta/lib/oeqa/selftest/_toaster.py | |||
@@ -2,7 +2,7 @@ import unittest | |||
2 | import os | 2 | import os |
3 | import sys | 3 | import sys |
4 | import shlex, subprocess | 4 | import shlex, subprocess |
5 | import urllib, commands, time, getpass, re, json, shlex | 5 | import urllib.request, urllib.parse, urllib.error, subprocess, time, getpass, re, json, shlex |
6 | 6 | ||
7 | import oeqa.utils.ftools as ftools | 7 | import oeqa.utils.ftools as ftools |
8 | from oeqa.selftest.base import oeSelfTest | 8 | from oeqa.selftest.base import oeSelfTest |
@@ -290,7 +290,7 @@ class Toaster_DB_Tests(ToasterSetup): | |||
290 | layers = Layer.objects.values('id', 'layer_index_url') | 290 | layers = Layer.objects.values('id', 'layer_index_url') |
291 | cnt_err = [] | 291 | cnt_err = [] |
292 | for layer in layers: | 292 | for layer in layers: |
293 | resp = urllib.urlopen(layer['layer_index_url']) | 293 | resp = urllib.request.urlopen(layer['layer_index_url']) |
294 | if (resp.getcode() != 200): | 294 | if (resp.getcode() != 200): |
295 | cnt_err.append(layer['id']) | 295 | cnt_err.append(layer['id']) |
296 | self.assertEqual(len(cnt_err), 0, msg = 'Errors for layer id: %s' % cnt_err) | 296 | self.assertEqual(len(cnt_err), 0, msg = 'Errors for layer id: %s' % cnt_err) |
diff --git a/meta/lib/oeqa/selftest/devtool.py b/meta/lib/oeqa/selftest/devtool.py index 132a73d0ec..d0421e7177 100644 --- a/meta/lib/oeqa/selftest/devtool.py +++ b/meta/lib/oeqa/selftest/devtool.py | |||
@@ -50,7 +50,7 @@ class DevtoolBase(oeSelfTest): | |||
50 | 50 | ||
51 | 51 | ||
52 | missingvars = {} | 52 | missingvars = {} |
53 | for var, value in checkvars.iteritems(): | 53 | for var, value in checkvars.items(): |
54 | if value is not None: | 54 | if value is not None: |
55 | missingvars[var] = value | 55 | missingvars[var] = value |
56 | self.assertEqual(missingvars, {}, 'Some expected variables not found in recipe: %s' % checkvars) | 56 | self.assertEqual(missingvars, {}, 'Some expected variables not found in recipe: %s' % checkvars) |
diff --git a/meta/lib/oeqa/selftest/pkgdata.py b/meta/lib/oeqa/selftest/pkgdata.py index 138b03aadb..5a63f89ff2 100644 --- a/meta/lib/oeqa/selftest/pkgdata.py +++ b/meta/lib/oeqa/selftest/pkgdata.py | |||
@@ -131,15 +131,15 @@ class OePkgdataUtilTests(oeSelfTest): | |||
131 | # Test recipe-space package name | 131 | # Test recipe-space package name |
132 | result = runCmd('oe-pkgdata-util list-pkg-files zlib-dev zlib-doc') | 132 | result = runCmd('oe-pkgdata-util list-pkg-files zlib-dev zlib-doc') |
133 | files = splitoutput(result.output) | 133 | files = splitoutput(result.output) |
134 | self.assertIn('zlib-dev', files.keys(), "listed pkgs. files: %s" %result.output) | 134 | self.assertIn('zlib-dev', list(files.keys()), "listed pkgs. files: %s" %result.output) |
135 | self.assertIn('zlib-doc', files.keys(), "listed pkgs. files: %s" %result.output) | 135 | self.assertIn('zlib-doc', list(files.keys()), "listed pkgs. files: %s" %result.output) |
136 | self.assertIn(os.path.join(includedir, 'zlib.h'), files['zlib-dev']) | 136 | self.assertIn(os.path.join(includedir, 'zlib.h'), files['zlib-dev']) |
137 | self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['zlib-doc']) | 137 | self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['zlib-doc']) |
138 | # Test runtime package name | 138 | # Test runtime package name |
139 | result = runCmd('oe-pkgdata-util list-pkg-files -r libz1 libz-dev') | 139 | result = runCmd('oe-pkgdata-util list-pkg-files -r libz1 libz-dev') |
140 | files = splitoutput(result.output) | 140 | files = splitoutput(result.output) |
141 | self.assertIn('libz1', files.keys(), "listed pkgs. files: %s" %result.output) | 141 | self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output) |
142 | self.assertIn('libz-dev', files.keys(), "listed pkgs. files: %s" %result.output) | 142 | self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output) |
143 | self.assertGreater(len(files['libz1']), 1) | 143 | self.assertGreater(len(files['libz1']), 1) |
144 | libspec = os.path.join(base_libdir, 'libz.so.1.*') | 144 | libspec = os.path.join(base_libdir, 'libz.so.1.*') |
145 | found = False | 145 | found = False |
@@ -152,12 +152,12 @@ class OePkgdataUtilTests(oeSelfTest): | |||
152 | # Test recipe | 152 | # Test recipe |
153 | result = runCmd('oe-pkgdata-util list-pkg-files -p zlib') | 153 | result = runCmd('oe-pkgdata-util list-pkg-files -p zlib') |
154 | files = splitoutput(result.output) | 154 | files = splitoutput(result.output) |
155 | self.assertIn('zlib-dbg', files.keys(), "listed pkgs. files: %s" %result.output) | 155 | self.assertIn('zlib-dbg', list(files.keys()), "listed pkgs. files: %s" %result.output) |
156 | self.assertIn('zlib-doc', files.keys(), "listed pkgs. files: %s" %result.output) | 156 | self.assertIn('zlib-doc', list(files.keys()), "listed pkgs. files: %s" %result.output) |
157 | self.assertIn('zlib-dev', files.keys(), "listed pkgs. files: %s" %result.output) | 157 | self.assertIn('zlib-dev', list(files.keys()), "listed pkgs. files: %s" %result.output) |
158 | self.assertIn('zlib-staticdev', files.keys(), "listed pkgs. files: %s" %result.output) | 158 | self.assertIn('zlib-staticdev', list(files.keys()), "listed pkgs. files: %s" %result.output) |
159 | self.assertIn('zlib', files.keys(), "listed pkgs. files: %s" %result.output) | 159 | self.assertIn('zlib', list(files.keys()), "listed pkgs. files: %s" %result.output) |
160 | self.assertNotIn('zlib-locale', files.keys(), "listed pkgs. files: %s" %result.output) | 160 | self.assertNotIn('zlib-locale', list(files.keys()), "listed pkgs. files: %s" %result.output) |
161 | # (ignore ptest, might not be there depending on config) | 161 | # (ignore ptest, might not be there depending on config) |
162 | self.assertIn(os.path.join(includedir, 'zlib.h'), files['zlib-dev']) | 162 | self.assertIn(os.path.join(includedir, 'zlib.h'), files['zlib-dev']) |
163 | self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['zlib-doc']) | 163 | self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['zlib-doc']) |
@@ -165,36 +165,36 @@ class OePkgdataUtilTests(oeSelfTest): | |||
165 | # Test recipe, runtime | 165 | # Test recipe, runtime |
166 | result = runCmd('oe-pkgdata-util list-pkg-files -p zlib -r') | 166 | result = runCmd('oe-pkgdata-util list-pkg-files -p zlib -r') |
167 | files = splitoutput(result.output) | 167 | files = splitoutput(result.output) |
168 | self.assertIn('libz-dbg', files.keys(), "listed pkgs. files: %s" %result.output) | 168 | self.assertIn('libz-dbg', list(files.keys()), "listed pkgs. files: %s" %result.output) |
169 | self.assertIn('libz-doc', files.keys(), "listed pkgs. files: %s" %result.output) | 169 | self.assertIn('libz-doc', list(files.keys()), "listed pkgs. files: %s" %result.output) |
170 | self.assertIn('libz-dev', files.keys(), "listed pkgs. files: %s" %result.output) | 170 | self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output) |
171 | self.assertIn('libz-staticdev', files.keys(), "listed pkgs. files: %s" %result.output) | 171 | self.assertIn('libz-staticdev', list(files.keys()), "listed pkgs. files: %s" %result.output) |
172 | self.assertIn('libz1', files.keys(), "listed pkgs. files: %s" %result.output) | 172 | self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output) |
173 | self.assertNotIn('libz-locale', files.keys(), "listed pkgs. files: %s" %result.output) | 173 | self.assertNotIn('libz-locale', list(files.keys()), "listed pkgs. files: %s" %result.output) |
174 | self.assertIn(os.path.join(includedir, 'zlib.h'), files['libz-dev']) | 174 | self.assertIn(os.path.join(includedir, 'zlib.h'), files['libz-dev']) |
175 | self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['libz-doc']) | 175 | self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['libz-doc']) |
176 | self.assertIn(os.path.join(libdir, 'libz.a'), files['libz-staticdev']) | 176 | self.assertIn(os.path.join(libdir, 'libz.a'), files['libz-staticdev']) |
177 | # Test recipe, unpackaged | 177 | # Test recipe, unpackaged |
178 | result = runCmd('oe-pkgdata-util list-pkg-files -p zlib -u') | 178 | result = runCmd('oe-pkgdata-util list-pkg-files -p zlib -u') |
179 | files = splitoutput(result.output) | 179 | files = splitoutput(result.output) |
180 | self.assertIn('zlib-dbg', files.keys(), "listed pkgs. files: %s" %result.output) | 180 | self.assertIn('zlib-dbg', list(files.keys()), "listed pkgs. files: %s" %result.output) |
181 | self.assertIn('zlib-doc', files.keys(), "listed pkgs. files: %s" %result.output) | 181 | self.assertIn('zlib-doc', list(files.keys()), "listed pkgs. files: %s" %result.output) |
182 | self.assertIn('zlib-dev', files.keys(), "listed pkgs. files: %s" %result.output) | 182 | self.assertIn('zlib-dev', list(files.keys()), "listed pkgs. files: %s" %result.output) |
183 | self.assertIn('zlib-staticdev', files.keys(), "listed pkgs. files: %s" %result.output) | 183 | self.assertIn('zlib-staticdev', list(files.keys()), "listed pkgs. files: %s" %result.output) |
184 | self.assertIn('zlib', files.keys(), "listed pkgs. files: %s" %result.output) | 184 | self.assertIn('zlib', list(files.keys()), "listed pkgs. files: %s" %result.output) |
185 | self.assertIn('zlib-locale', files.keys(), "listed pkgs. files: %s" %result.output) # this is the key one | 185 | self.assertIn('zlib-locale', list(files.keys()), "listed pkgs. files: %s" %result.output) # this is the key one |
186 | self.assertIn(os.path.join(includedir, 'zlib.h'), files['zlib-dev']) | 186 | self.assertIn(os.path.join(includedir, 'zlib.h'), files['zlib-dev']) |
187 | self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['zlib-doc']) | 187 | self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['zlib-doc']) |
188 | self.assertIn(os.path.join(libdir, 'libz.a'), files['zlib-staticdev']) | 188 | self.assertIn(os.path.join(libdir, 'libz.a'), files['zlib-staticdev']) |
189 | # Test recipe, runtime, unpackaged | 189 | # Test recipe, runtime, unpackaged |
190 | result = runCmd('oe-pkgdata-util list-pkg-files -p zlib -r -u') | 190 | result = runCmd('oe-pkgdata-util list-pkg-files -p zlib -r -u') |
191 | files = splitoutput(result.output) | 191 | files = splitoutput(result.output) |
192 | self.assertIn('libz-dbg', files.keys(), "listed pkgs. files: %s" %result.output) | 192 | self.assertIn('libz-dbg', list(files.keys()), "listed pkgs. files: %s" %result.output) |
193 | self.assertIn('libz-doc', files.keys(), "listed pkgs. files: %s" %result.output) | 193 | self.assertIn('libz-doc', list(files.keys()), "listed pkgs. files: %s" %result.output) |
194 | self.assertIn('libz-dev', files.keys(), "listed pkgs. files: %s" %result.output) | 194 | self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output) |
195 | self.assertIn('libz-staticdev', files.keys(), "listed pkgs. files: %s" %result.output) | 195 | self.assertIn('libz-staticdev', list(files.keys()), "listed pkgs. files: %s" %result.output) |
196 | self.assertIn('libz1', files.keys(), "listed pkgs. files: %s" %result.output) | 196 | self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output) |
197 | self.assertIn('libz-locale', files.keys(), "listed pkgs. files: %s" %result.output) # this is the key one | 197 | self.assertIn('libz-locale', list(files.keys()), "listed pkgs. files: %s" %result.output) # this is the key one |
198 | self.assertIn(os.path.join(includedir, 'zlib.h'), files['libz-dev']) | 198 | self.assertIn(os.path.join(includedir, 'zlib.h'), files['libz-dev']) |
199 | self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['libz-doc']) | 199 | self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['libz-doc']) |
200 | self.assertIn(os.path.join(libdir, 'libz.a'), files['libz-staticdev']) | 200 | self.assertIn(os.path.join(libdir, 'libz.a'), files['libz-staticdev']) |
diff --git a/meta/lib/oeqa/selftest/recipetool.py b/meta/lib/oeqa/selftest/recipetool.py index e72911b0aa..a93d18e275 100644 --- a/meta/lib/oeqa/selftest/recipetool.py +++ b/meta/lib/oeqa/selftest/recipetool.py | |||
@@ -1,7 +1,7 @@ | |||
1 | import os | 1 | import os |
2 | import logging | 2 | import logging |
3 | import tempfile | 3 | import tempfile |
4 | import urlparse | 4 | import urllib.parse |
5 | 5 | ||
6 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer | 6 | from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer |
7 | from oeqa.utils.decorators import testcase | 7 | from oeqa.utils.decorators import testcase |
@@ -471,7 +471,7 @@ class RecipetoolAppendsrcBase(RecipetoolBase): | |||
471 | '''Return the first file:// in SRC_URI for the specified recipe.''' | 471 | '''Return the first file:// in SRC_URI for the specified recipe.''' |
472 | src_uri = get_bb_var('SRC_URI', recipe).split() | 472 | src_uri = get_bb_var('SRC_URI', recipe).split() |
473 | for uri in src_uri: | 473 | for uri in src_uri: |
474 | p = urlparse.urlparse(uri) | 474 | p = urllib.parse.urlparse(uri) |
475 | if p.scheme == 'file': | 475 | if p.scheme == 'file': |
476 | return p.netloc + p.path | 476 | return p.netloc + p.path |
477 | 477 | ||
diff --git a/meta/lib/oeqa/selftest/sstatetests.py b/meta/lib/oeqa/selftest/sstatetests.py index a1e5d33580..cc64c6cb68 100644 --- a/meta/lib/oeqa/selftest/sstatetests.py +++ b/meta/lib/oeqa/selftest/sstatetests.py | |||
@@ -445,14 +445,14 @@ http_proxy = "http://example.com/" | |||
445 | files1 = get_files(topdir + "/tmp-sstatesamehash/stamps/") | 445 | files1 = get_files(topdir + "/tmp-sstatesamehash/stamps/") |
446 | files2 = get_files(topdir + "/tmp-sstatesamehash2/stamps/") | 446 | files2 = get_files(topdir + "/tmp-sstatesamehash2/stamps/") |
447 | # Remove items that are identical in both sets | 447 | # Remove items that are identical in both sets |
448 | for k,v in files1.viewitems() & files2.viewitems(): | 448 | for k,v in files1.items() & files2.items(): |
449 | del files1[k] | 449 | del files1[k] |
450 | del files2[k] | 450 | del files2[k] |
451 | if not files1 and not files2: | 451 | if not files1 and not files2: |
452 | # No changes, so we're done | 452 | # No changes, so we're done |
453 | return | 453 | return |
454 | 454 | ||
455 | for k in files1.viewkeys() | files2.viewkeys(): | 455 | for k in files1.keys() | files2.keys(): |
456 | if k in files1 and k in files2: | 456 | if k in files1 and k in files2: |
457 | print("%s differs:" % k) | 457 | print("%s differs:" % k) |
458 | print(subprocess.check_output(("bitbake-diffsigs", | 458 | print(subprocess.check_output(("bitbake-diffsigs", |
diff --git a/meta/lib/oeqa/targetcontrol.py b/meta/lib/oeqa/targetcontrol.py index 5422a617c4..1c57efaaef 100644 --- a/meta/lib/oeqa/targetcontrol.py +++ b/meta/lib/oeqa/targetcontrol.py | |||
@@ -43,9 +43,7 @@ def get_target_controller(d): | |||
43 | return controller(d) | 43 | return controller(d) |
44 | 44 | ||
45 | 45 | ||
46 | class BaseTarget(object): | 46 | class BaseTarget(object, metaclass=ABCMeta): |
47 | |||
48 | __metaclass__ = ABCMeta | ||
49 | 47 | ||
50 | supported_image_fstypes = [] | 48 | supported_image_fstypes = [] |
51 | 49 | ||
diff --git a/meta/lib/oeqa/utils/commands.py b/meta/lib/oeqa/utils/commands.py index 48f6441290..18fe39ecfe 100644 --- a/meta/lib/oeqa/utils/commands.py +++ b/meta/lib/oeqa/utils/commands.py | |||
@@ -41,7 +41,7 @@ class Command(object): | |||
41 | self.data = data | 41 | self.data = data |
42 | 42 | ||
43 | self.options = dict(self.defaultopts) | 43 | self.options = dict(self.defaultopts) |
44 | if isinstance(self.cmd, basestring): | 44 | if isinstance(self.cmd, str): |
45 | self.options["shell"] = True | 45 | self.options["shell"] = True |
46 | if self.data: | 46 | if self.data: |
47 | self.options['stdin'] = subprocess.PIPE | 47 | self.options['stdin'] = subprocess.PIPE |
@@ -78,7 +78,7 @@ class Command(object): | |||
78 | self.process.kill() | 78 | self.process.kill() |
79 | self.thread.join() | 79 | self.thread.join() |
80 | 80 | ||
81 | self.output = self.output.rstrip() | 81 | self.output = self.output.decode("utf-8").rstrip() |
82 | self.status = self.process.poll() | 82 | self.status = self.process.poll() |
83 | 83 | ||
84 | self.log.debug("Command '%s' returned %d as exit code." % (self.cmd, self.status)) | 84 | self.log.debug("Command '%s' returned %d as exit code." % (self.cmd, self.status)) |
@@ -123,7 +123,7 @@ def bitbake(command, ignore_status=False, timeout=None, postconfig=None, **optio | |||
123 | else: | 123 | else: |
124 | extra_args = "" | 124 | extra_args = "" |
125 | 125 | ||
126 | if isinstance(command, basestring): | 126 | if isinstance(command, str): |
127 | cmd = "bitbake " + extra_args + " " + command | 127 | cmd = "bitbake " + extra_args + " " + command |
128 | else: | 128 | else: |
129 | cmd = [ "bitbake" ] + [a for a in (command + extra_args.split(" ")) if a not in [""]] | 129 | cmd = [ "bitbake" ] + [a for a in (command + extra_args.split(" ")) if a not in [""]] |
diff --git a/meta/lib/oeqa/utils/decorators.py b/meta/lib/oeqa/utils/decorators.py index d52f326f1a..0b23565485 100644 --- a/meta/lib/oeqa/utils/decorators.py +++ b/meta/lib/oeqa/utils/decorators.py | |||
@@ -115,6 +115,8 @@ class NoParsingFilter(logging.Filter): | |||
115 | def filter(self, record): | 115 | def filter(self, record): |
116 | return record.levelno == 100 | 116 | return record.levelno == 100 |
117 | 117 | ||
118 | import inspect | ||
119 | |||
118 | def LogResults(original_class): | 120 | def LogResults(original_class): |
119 | orig_method = original_class.run | 121 | orig_method = original_class.run |
120 | 122 | ||
@@ -124,6 +126,19 @@ def LogResults(original_class): | |||
124 | logfile = os.path.join(os.getcwd(),'results-'+caller+'.'+timestamp+'.log') | 126 | logfile = os.path.join(os.getcwd(),'results-'+caller+'.'+timestamp+'.log') |
125 | linkfile = os.path.join(os.getcwd(),'results-'+caller+'.log') | 127 | linkfile = os.path.join(os.getcwd(),'results-'+caller+'.log') |
126 | 128 | ||
129 | def get_class_that_defined_method(meth): | ||
130 | if inspect.ismethod(meth): | ||
131 | for cls in inspect.getmro(meth.__self__.__class__): | ||
132 | if cls.__dict__.get(meth.__name__) is meth: | ||
133 | return cls | ||
134 | meth = meth.__func__ # fallback to __qualname__ parsing | ||
135 | if inspect.isfunction(meth): | ||
136 | cls = getattr(inspect.getmodule(meth), | ||
137 | meth.__qualname__.split('.<locals>', 1)[0].rsplit('.', 1)[0]) | ||
138 | if isinstance(cls, type): | ||
139 | return cls | ||
140 | return None | ||
141 | |||
127 | #rewrite the run method of unittest.TestCase to add testcase logging | 142 | #rewrite the run method of unittest.TestCase to add testcase logging |
128 | def run(self, result, *args, **kws): | 143 | def run(self, result, *args, **kws): |
129 | orig_method(self, result, *args, **kws) | 144 | orig_method(self, result, *args, **kws) |
@@ -135,7 +150,7 @@ def LogResults(original_class): | |||
135 | except AttributeError: | 150 | except AttributeError: |
136 | test_case = self._testMethodName | 151 | test_case = self._testMethodName |
137 | 152 | ||
138 | class_name = str(testMethod.im_class).split("'")[1] | 153 | class_name = str(get_class_that_defined_method(testMethod)).split("'")[1] |
139 | 154 | ||
140 | #create custom logging level for filtering. | 155 | #create custom logging level for filtering. |
141 | custom_log_level = 100 | 156 | custom_log_level = 100 |
@@ -215,7 +230,7 @@ def tag(*args, **kwargs): | |||
215 | def wrap_ob(ob): | 230 | def wrap_ob(ob): |
216 | for name in args: | 231 | for name in args: |
217 | setattr(ob, __tag_prefix + name, True) | 232 | setattr(ob, __tag_prefix + name, True) |
218 | for name, value in kwargs.iteritems(): | 233 | for name, value in kwargs.items(): |
219 | setattr(ob, __tag_prefix + name, value) | 234 | setattr(ob, __tag_prefix + name, value) |
220 | return ob | 235 | return ob |
221 | return wrap_ob | 236 | return wrap_ob |
diff --git a/meta/lib/oeqa/utils/dump.py b/meta/lib/oeqa/utils/dump.py index 63a591d366..71422a9aea 100644 --- a/meta/lib/oeqa/utils/dump.py +++ b/meta/lib/oeqa/utils/dump.py | |||
@@ -3,7 +3,7 @@ import sys | |||
3 | import errno | 3 | import errno |
4 | import datetime | 4 | import datetime |
5 | import itertools | 5 | import itertools |
6 | from commands import runCmd | 6 | from .commands import runCmd |
7 | 7 | ||
8 | def get_host_dumper(d): | 8 | def get_host_dumper(d): |
9 | cmds = d.getVar("testimage_dump_host", True) | 9 | cmds = d.getVar("testimage_dump_host", True) |
diff --git a/meta/lib/oeqa/utils/httpserver.py b/meta/lib/oeqa/utils/httpserver.py index 76518d8ef9..bd76f36468 100644 --- a/meta/lib/oeqa/utils/httpserver.py +++ b/meta/lib/oeqa/utils/httpserver.py | |||
@@ -1,8 +1,8 @@ | |||
1 | import SimpleHTTPServer | 1 | import http.server |
2 | import multiprocessing | 2 | import multiprocessing |
3 | import os | 3 | import os |
4 | 4 | ||
5 | class HTTPServer(SimpleHTTPServer.BaseHTTPServer.HTTPServer): | 5 | class HTTPServer(http.server.HTTPServer): |
6 | 6 | ||
7 | def server_start(self, root_dir): | 7 | def server_start(self, root_dir): |
8 | import signal | 8 | import signal |
@@ -10,7 +10,7 @@ class HTTPServer(SimpleHTTPServer.BaseHTTPServer.HTTPServer): | |||
10 | os.chdir(root_dir) | 10 | os.chdir(root_dir) |
11 | self.serve_forever() | 11 | self.serve_forever() |
12 | 12 | ||
13 | class HTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): | 13 | class HTTPRequestHandler(http.server.SimpleHTTPRequestHandler): |
14 | 14 | ||
15 | def log_message(self, format_str, *args): | 15 | def log_message(self, format_str, *args): |
16 | pass | 16 | pass |
diff --git a/meta/lib/oeqa/utils/logparser.py b/meta/lib/oeqa/utils/logparser.py index 87b50354cd..b377dcd271 100644 --- a/meta/lib/oeqa/utils/logparser.py +++ b/meta/lib/oeqa/utils/logparser.py | |||
@@ -3,7 +3,7 @@ | |||
3 | import sys | 3 | import sys |
4 | import os | 4 | import os |
5 | import re | 5 | import re |
6 | import ftools | 6 | from . import ftools |
7 | 7 | ||
8 | 8 | ||
9 | # A parser that can be used to identify weather a line is a test result or a section statement. | 9 | # A parser that can be used to identify weather a line is a test result or a section statement. |
diff --git a/meta/lib/oeqa/utils/qemurunner.py b/meta/lib/oeqa/utils/qemurunner.py index 4bede3421c..773cf588b1 100644 --- a/meta/lib/oeqa/utils/qemurunner.py +++ b/meta/lib/oeqa/utils/qemurunner.py | |||
@@ -22,9 +22,9 @@ import logging | |||
22 | logger = logging.getLogger("BitBake.QemuRunner") | 22 | logger = logging.getLogger("BitBake.QemuRunner") |
23 | 23 | ||
24 | # Get Unicode non printable control chars | 24 | # Get Unicode non printable control chars |
25 | control_range = range(0,32)+range(127,160) | 25 | control_range = list(range(0,32))+list(range(127,160)) |
26 | control_chars = [unichr(x) for x in control_range | 26 | control_chars = [chr(x) for x in control_range |
27 | if unichr(x) not in string.printable] | 27 | if chr(x) not in string.printable] |
28 | re_control_char = re.compile('[%s]' % re.escape("".join(control_chars))) | 28 | re_control_char = re.compile('[%s]' % re.escape("".join(control_chars))) |
29 | 29 | ||
30 | class QemuRunner: | 30 | class QemuRunner: |
@@ -71,7 +71,8 @@ class QemuRunner: | |||
71 | if self.logfile: | 71 | if self.logfile: |
72 | # It is needed to sanitize the data received from qemu | 72 | # It is needed to sanitize the data received from qemu |
73 | # because is possible to have control characters | 73 | # because is possible to have control characters |
74 | msg = re_control_char.sub('', unicode(msg, 'utf-8')) | 74 | msg = msg.decode("utf-8") |
75 | msg = re_control_char.sub('', msg) | ||
75 | with codecs.open(self.logfile, "a", encoding="utf-8") as f: | 76 | with codecs.open(self.logfile, "a", encoding="utf-8") as f: |
76 | f.write("%s" % msg) | 77 | f.write("%s" % msg) |
77 | 78 | ||
@@ -79,7 +80,7 @@ class QemuRunner: | |||
79 | import fcntl | 80 | import fcntl |
80 | fl = fcntl.fcntl(o, fcntl.F_GETFL) | 81 | fl = fcntl.fcntl(o, fcntl.F_GETFL) |
81 | fcntl.fcntl(o, fcntl.F_SETFL, fl | os.O_NONBLOCK) | 82 | fcntl.fcntl(o, fcntl.F_SETFL, fl | os.O_NONBLOCK) |
82 | return os.read(o.fileno(), 1000000) | 83 | return os.read(o.fileno(), 1000000).decode("utf-8") |
83 | 84 | ||
84 | 85 | ||
85 | def handleSIGCHLD(self, signum, frame): | 86 | def handleSIGCHLD(self, signum, frame): |
@@ -114,7 +115,7 @@ class QemuRunner: | |||
114 | try: | 115 | try: |
115 | threadsock, threadport = self.create_socket() | 116 | threadsock, threadport = self.create_socket() |
116 | self.server_socket, self.serverport = self.create_socket() | 117 | self.server_socket, self.serverport = self.create_socket() |
117 | except socket.error, msg: | 118 | except socket.error as msg: |
118 | logger.error("Failed to create listening socket: %s" % msg[1]) | 119 | logger.error("Failed to create listening socket: %s" % msg[1]) |
119 | return False | 120 | return False |
120 | 121 | ||
@@ -192,7 +193,7 @@ class QemuRunner: | |||
192 | else: | 193 | else: |
193 | self.ip = ips[0] | 194 | self.ip = ips[0] |
194 | self.server_ip = ips[1] | 195 | self.server_ip = ips[1] |
195 | except IndexError, ValueError: | 196 | except (IndexError, ValueError): |
196 | logger.info("Couldn't get ip from qemu process arguments! Here is the qemu command line used:\n%s\nand output from runqemu:\n%s" % (cmdline, self.getOutput(output))) | 197 | logger.info("Couldn't get ip from qemu process arguments! Here is the qemu command line used:\n%s\nand output from runqemu:\n%s" % (cmdline, self.getOutput(output))) |
197 | self._dump_host() | 198 | self._dump_host() |
198 | self.stop() | 199 | self.stop() |
@@ -219,6 +220,7 @@ class QemuRunner: | |||
219 | stopread = False | 220 | stopread = False |
220 | qemusock = None | 221 | qemusock = None |
221 | bootlog = '' | 222 | bootlog = '' |
223 | data = b'' | ||
222 | while time.time() < endtime and not stopread: | 224 | while time.time() < endtime and not stopread: |
223 | sread, swrite, serror = select.select(socklist, [], [], 5) | 225 | sread, swrite, serror = select.select(socklist, [], [], 5) |
224 | for sock in sread: | 226 | for sock in sread: |
@@ -229,14 +231,19 @@ class QemuRunner: | |||
229 | socklist.remove(self.server_socket) | 231 | socklist.remove(self.server_socket) |
230 | logger.info("Connection from %s:%s" % addr) | 232 | logger.info("Connection from %s:%s" % addr) |
231 | else: | 233 | else: |
232 | data = sock.recv(1024) | 234 | data = data + sock.recv(1024) |
233 | if data: | 235 | if data: |
234 | bootlog += data | 236 | try: |
235 | if re.search(".* login:", bootlog): | 237 | data = data.decode("utf-8") |
236 | self.server_socket = qemusock | 238 | bootlog += data |
237 | stopread = True | 239 | data = b'' |
238 | reachedlogin = True | 240 | if re.search(".* login:", bootlog): |
239 | logger.info("Reached login banner") | 241 | self.server_socket = qemusock |
242 | stopread = True | ||
243 | reachedlogin = True | ||
244 | logger.info("Reached login banner") | ||
245 | except UnicodeDecodeError: | ||
246 | continue | ||
240 | else: | 247 | else: |
241 | socklist.remove(sock) | 248 | socklist.remove(sock) |
242 | sock.close() | 249 | sock.close() |
@@ -277,13 +284,14 @@ class QemuRunner: | |||
277 | if hasattr(self, "origchldhandler"): | 284 | if hasattr(self, "origchldhandler"): |
278 | signal.signal(signal.SIGCHLD, self.origchldhandler) | 285 | signal.signal(signal.SIGCHLD, self.origchldhandler) |
279 | if self.runqemu: | 286 | if self.runqemu: |
280 | os.kill(self.monitorpid, signal.SIGKILL) | 287 | if hasattr(self, "monitorpid"): |
281 | logger.info("Sending SIGTERM to runqemu") | 288 | os.kill(self.monitorpid, signal.SIGKILL) |
282 | try: | 289 | logger.info("Sending SIGTERM to runqemu") |
283 | os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM) | 290 | try: |
284 | except OSError as e: | 291 | os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM) |
285 | if e.errno != errno.ESRCH: | 292 | except OSError as e: |
286 | raise | 293 | if e.errno != errno.ESRCH: |
294 | raise | ||
287 | endtime = time.time() + self.runqemutime | 295 | endtime = time.time() + self.runqemutime |
288 | while self.runqemu.poll() is None and time.time() < endtime: | 296 | while self.runqemu.poll() is None and time.time() < endtime: |
289 | time.sleep(1) | 297 | time.sleep(1) |
@@ -325,7 +333,7 @@ class QemuRunner: | |||
325 | # Walk the process tree from the process specified looking for a qemu-system. Return its [pid'cmd] | 333 | # Walk the process tree from the process specified looking for a qemu-system. Return its [pid'cmd] |
326 | # | 334 | # |
327 | ps = subprocess.Popen(['ps', 'axww', '-o', 'pid,ppid,command'], stdout=subprocess.PIPE).communicate()[0] | 335 | ps = subprocess.Popen(['ps', 'axww', '-o', 'pid,ppid,command'], stdout=subprocess.PIPE).communicate()[0] |
328 | processes = ps.split('\n') | 336 | processes = ps.decode("utf-8").split('\n') |
329 | nfields = len(processes[0].split()) - 1 | 337 | nfields = len(processes[0].split()) - 1 |
330 | pids = {} | 338 | pids = {} |
331 | commands = {} | 339 | commands = {} |
@@ -442,7 +450,7 @@ class LoggingThread(threading.Thread): | |||
442 | def stop(self): | 450 | def stop(self): |
443 | self.logger.info("Stopping logging thread") | 451 | self.logger.info("Stopping logging thread") |
444 | if self.running: | 452 | if self.running: |
445 | os.write(self.writepipe, "stop") | 453 | os.write(self.writepipe, bytes("stop", "utf-8")) |
446 | 454 | ||
447 | def teardown(self): | 455 | def teardown(self): |
448 | self.logger.info("Tearing down logging thread") | 456 | self.logger.info("Tearing down logging thread") |
diff --git a/meta/lib/oeqa/utils/qemutinyrunner.py b/meta/lib/oeqa/utils/qemutinyrunner.py index e3d8c669e0..f733258bce 100644 --- a/meta/lib/oeqa/utils/qemutinyrunner.py +++ b/meta/lib/oeqa/utils/qemutinyrunner.py | |||
@@ -13,7 +13,7 @@ import re | |||
13 | import socket | 13 | import socket |
14 | import select | 14 | import select |
15 | import bb | 15 | import bb |
16 | from qemurunner import QemuRunner | 16 | from .qemurunner import QemuRunner |
17 | 17 | ||
18 | class QemuTinyRunner(QemuRunner): | 18 | class QemuTinyRunner(QemuRunner): |
19 | 19 | ||
@@ -50,7 +50,7 @@ class QemuTinyRunner(QemuRunner): | |||
50 | self.server_socket.connect(self.socketfile) | 50 | self.server_socket.connect(self.socketfile) |
51 | bb.note("Created listening socket for qemu serial console.") | 51 | bb.note("Created listening socket for qemu serial console.") |
52 | tries = 0 | 52 | tries = 0 |
53 | except socket.error, msg: | 53 | except socket.error as msg: |
54 | self.server_socket.close() | 54 | self.server_socket.close() |
55 | bb.fatal("Failed to create listening socket.") | 55 | bb.fatal("Failed to create listening socket.") |
56 | tries -= 1 | 56 | tries -= 1 |
@@ -102,7 +102,7 @@ class QemuTinyRunner(QemuRunner): | |||
102 | bb.note("Qemu pid didn't appeared in %s seconds" % self.runqemutime) | 102 | bb.note("Qemu pid didn't appeared in %s seconds" % self.runqemutime) |
103 | output = self.runqemu.stdout | 103 | output = self.runqemu.stdout |
104 | self.stop() | 104 | self.stop() |
105 | bb.note("Output from runqemu:\n%s" % output.read()) | 105 | bb.note("Output from runqemu:\n%s" % output.read().decode("utf-8")) |
106 | return False | 106 | return False |
107 | 107 | ||
108 | return self.is_alive() | 108 | return self.is_alive() |
@@ -131,7 +131,7 @@ class QemuTinyRunner(QemuRunner): | |||
131 | # Walk the process tree from the process specified looking for a qemu-system. Return its [pid'cmd] | 131 | # Walk the process tree from the process specified looking for a qemu-system. Return its [pid'cmd] |
132 | # | 132 | # |
133 | ps = subprocess.Popen(['ps', 'axww', '-o', 'pid,ppid,command'], stdout=subprocess.PIPE).communicate()[0] | 133 | ps = subprocess.Popen(['ps', 'axww', '-o', 'pid,ppid,command'], stdout=subprocess.PIPE).communicate()[0] |
134 | processes = ps.split('\n') | 134 | processes = ps.decode("utf-8").split('\n') |
135 | nfields = len(processes[0].split()) - 1 | 135 | nfields = len(processes[0].split()) - 1 |
136 | pids = {} | 136 | pids = {} |
137 | commands = {} | 137 | commands = {} |
@@ -167,4 +167,4 @@ class QemuTinyRunner(QemuRunner): | |||
167 | basecmd = commands[p].split()[0] | 167 | basecmd = commands[p].split()[0] |
168 | basecmd = os.path.basename(basecmd) | 168 | basecmd = os.path.basename(basecmd) |
169 | if "qemu-system" in basecmd and "-serial unix" in commands[p]: | 169 | if "qemu-system" in basecmd and "-serial unix" in commands[p]: |
170 | return [int(p),commands[p]] \ No newline at end of file | 170 | return [int(p),commands[p]] |
diff --git a/meta/lib/oeqa/utils/sshcontrol.py b/meta/lib/oeqa/utils/sshcontrol.py index ff88d37bd9..f5d46e03cc 100644 --- a/meta/lib/oeqa/utils/sshcontrol.py +++ b/meta/lib/oeqa/utils/sshcontrol.py | |||
@@ -58,6 +58,7 @@ class SSHProcess(object): | |||
58 | self.process.stdout.close() | 58 | self.process.stdout.close() |
59 | eof = True | 59 | eof = True |
60 | else: | 60 | else: |
61 | data = data.decode("utf-8") | ||
61 | output += data | 62 | output += data |
62 | self.log(data) | 63 | self.log(data) |
63 | endtime = time.time() + timeout | 64 | endtime = time.time() + timeout |
diff --git a/meta/lib/oeqa/utils/targetbuild.py b/meta/lib/oeqa/utils/targetbuild.py index f850d78df1..d538f6b65f 100644 --- a/meta/lib/oeqa/utils/targetbuild.py +++ b/meta/lib/oeqa/utils/targetbuild.py | |||
@@ -10,9 +10,7 @@ import bb.utils | |||
10 | import subprocess | 10 | import subprocess |
11 | from abc import ABCMeta, abstractmethod | 11 | from abc import ABCMeta, abstractmethod |
12 | 12 | ||
13 | class BuildProject(): | 13 | class BuildProject(metaclass=ABCMeta): |
14 | |||
15 | __metaclass__ = ABCMeta | ||
16 | 14 | ||
17 | def __init__(self, d, uri, foldername=None, tmpdir="/tmp/"): | 15 | def __init__(self, d, uri, foldername=None, tmpdir="/tmp/"): |
18 | self.d = d | 16 | self.d = d |
diff --git a/meta/lib/oeqa/utils/testexport.py b/meta/lib/oeqa/utils/testexport.py index 4fbf4bdcb3..57be2ca449 100644 --- a/meta/lib/oeqa/utils/testexport.py +++ b/meta/lib/oeqa/utils/testexport.py | |||
@@ -6,7 +6,7 @@ | |||
6 | 6 | ||
7 | import os, re, glob as g, shutil as sh,sys | 7 | import os, re, glob as g, shutil as sh,sys |
8 | from time import sleep | 8 | from time import sleep |
9 | from commands import runCmd | 9 | from .commands import runCmd |
10 | from difflib import SequenceMatcher as SM | 10 | from difflib import SequenceMatcher as SM |
11 | 11 | ||
12 | try: | 12 | try: |
diff --git a/meta/recipes-devtools/apt/apt-native.inc b/meta/recipes-devtools/apt/apt-native.inc index 27cc9ff0b5..74db4a0a13 100644 --- a/meta/recipes-devtools/apt/apt-native.inc +++ b/meta/recipes-devtools/apt/apt-native.inc | |||
@@ -30,7 +30,7 @@ python do_install_config () { | |||
30 | 30 | ||
31 | outpath = oe.path.join(outdir, 'apt.conf.sample') | 31 | outpath = oe.path.join(outdir, 'apt.conf.sample') |
32 | if not os.path.exists(outpath): | 32 | if not os.path.exists(outpath): |
33 | outfile = file(outpath, 'w') | 33 | outfile = open(outpath, 'w') |
34 | outfile.write(data) | 34 | outfile.write(data) |
35 | outfile.close() | 35 | outfile.close() |
36 | } | 36 | } |
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc b/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc index 30307bc86d..fda3c93972 100644 --- a/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc +++ b/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc | |||
@@ -168,9 +168,10 @@ python populate_packages_prepend() { | |||
168 | } | 168 | } |
169 | p = subprocess.Popen(args="pkg-config --variable=%s xorg-server" % abis[name], | 169 | p = subprocess.Popen(args="pkg-config --variable=%s xorg-server" % abis[name], |
170 | shell=True, env=newenv, stdout=subprocess.PIPE) | 170 | shell=True, env=newenv, stdout=subprocess.PIPE) |
171 | output = p.communicate()[0] | 171 | stdout, stderr = p.communicate() |
172 | output = stdout.decode("latin-1").split(".")[0] | ||
172 | mlprefix = d.getVar('MLPREFIX', True) or '' | 173 | mlprefix = d.getVar('MLPREFIX', True) or '' |
173 | return "%sxorg-abi-%s-%s" % (mlprefix, name, output.split(".")[0]) | 174 | return "%sxorg-abi-%s-%s" % (mlprefix, name, output) |
174 | 175 | ||
175 | pn = d.getVar("PN", True) | 176 | pn = d.getVar("PN", True) |
176 | d.appendVar("RPROVIDES_" + pn, " " + get_abi("input")) | 177 | d.appendVar("RPROVIDES_" + pn, " " + get_abi("input")) |