23 files changed, 51 insertions, 51 deletions
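The change is mechanical: calls to deprecated top-level BitBake helpers are switched to their namespaced equivalents — bb.mkdirhier, bb.copyfile and bb.which become bb.utils.mkdirhier, bb.utils.copyfile and bb.utils.which, while bb.decodeurl and bb.encodeurl become bb.fetch.decodeurl and bb.fetch.encodeurl. A minimal sketch of the pattern in a recipe Python task follows; the task name and paths are illustrative only and not taken from this diff:

# Illustrative only: shows the old -> new API spelling used throughout this diff.
python do_example_task () {
    workdir = d.getVar('WORKDIR', True)

    # Old: bb.mkdirhier(...)    New: bb.utils.mkdirhier(...)
    bb.utils.mkdirhier(os.path.join(workdir, 'example-dir'))

    # Old: bb.which(...)        New: bb.utils.which(...)
    tar = bb.utils.which(d.getVar('PATH', True), 'tar')

    # Old: bb.decodeurl(...)    New: bb.fetch.decodeurl(...)
    for url in (d.getVar('SRC_URI', True) or '').split():
        scheme, host, path, user, pswd, parm = bb.fetch.decodeurl(url)
        bb.note("%s fetched via %s" % (path, scheme))
}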
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index e42c419372..66efe7d54b 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -99,7 +99,7 @@ def get_bb_inc(d):
     licenses = get_licenses(d)
     script_logs = os.path.join(work_dir, 'script-logs/'+ target_sys + '/' + licenses + '/' + pf + '/script-logs')
     bb_inc = os.path.join(script_logs, 'bb_inc')
-    bb.mkdirhier(bb_inc)
+    bb.utils.mkdirhier(bb_inc)

     def find_file(dir, file):
         for root, dirs, files in os.walk(dir):
@@ -139,7 +139,7 @@ def get_logs(d):
     script_logs = os.path.join(work_dir, 'script-logs/'+ target_sys + '/' + licenses + '/' + pf + '/script-logs')

     try:
-        bb.mkdirhier(os.path.join(script_logs, 'temp'))
+        bb.utils.mkdirhier(os.path.join(script_logs, 'temp'))
         oe.path.copytree(os.path.join(work_dir, 'temp'), os.path.join(script_logs, 'temp'))
     except (IOError, AttributeError):
         pass
@@ -158,7 +158,7 @@ def get_series(d):
     s = d.getVar('S', True)
     dest = os.path.join(work_dir, pf + '-series')
     shutil.rmtree(dest, ignore_errors=True)
-    bb.mkdirhier(dest)
+    bb.utils.mkdirhier(dest)

     src_uri = d.getVar('SRC_URI', True).split()
     fetch = bb.fetch2.Fetch(src_uri, d)
@@ -175,7 +175,7 @@ def get_series(d):
             shutil.copy(patch, dest)
         except IOError:
             if os.path.isdir(patch):
-                bb.mkdirhier(os.path.join(dest, patch))
+                bb.utils.mkdirhier(os.path.join(dest, patch))
                 oe.path.copytree(patch, os.path.join(dest, patch))
     return dest

@@ -190,11 +190,11 @@ def get_applying_patches(d):
     work_dir = d.getVar('WORKDIR', True)
     dest = os.path.join(work_dir, pf + '-patches')
     shutil.rmtree(dest, ignore_errors=True)
-    bb.mkdirhier(dest)
+    bb.utils.mkdirhier(dest)

     patches = src_patches(d)
     for patch in patches:
-        _, _, local, _, _, parm = bb.decodeurl(patch)
+        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
         if local:
             shutil.copy(local, dest)
     return dest
@@ -357,7 +357,7 @@ def move_tarball_deploy(d, tarball_list):
     work_dir = d.getVar('WORKDIR', True)
     tar_sources = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf
     if not os.path.exists(tar_sources):
-        bb.mkdirhier(tar_sources)
+        bb.utils.mkdirhier(tar_sources)
     for source in tarball_list:
         if source:
             if os.path.exists(os.path.join(tar_sources, source)):
@@ -459,7 +459,7 @@ def dumpdata(d):
     licenses = get_licenses(d)
     dumpdir = os.path.join(workdir, 'diffgz-envdata/'+ target_sys + '/' + licenses + '/' + pf )
     if not os.path.exists(dumpdir):
-        bb.mkdirhier(dumpdir)
+        bb.utils.mkdirhier(dumpdir)

     dumpfile = os.path.join(dumpdir, bb.data.expand("${P}-${PR}.showdata.dump", d))

@@ -499,7 +499,7 @@ def create_diff_gz(d):
     distro = d.getVar('DISTRO',True) or ""
     dest = s + '/' + distro + '/files'
     if not os.path.exists(dest):
-        bb.mkdirhier(dest)
+        bb.utils.mkdirhier(dest)
     for i in os.listdir(os.getcwd()):
         if os.path.isfile(i):
             try:
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index 98b823e7eb..dfa580c583 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -573,7 +573,7 @@ python () {
         d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

     # *.xz should depends on xz-native for unpacking
-    # Not endswith because of "*.patch.xz;patch=1". Need bb.decodeurl in future
+    # Not endswith because of "*.patch.xz;patch=1". Need bb.fetch.decodeurl in future
     if '.xz' in srcuri:
         d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

diff --git a/meta/classes/buildstats.bbclass b/meta/classes/buildstats.bbclass
index b98ba3bea7..72fff1167f 100644
--- a/meta/classes/buildstats.bbclass
+++ b/meta/classes/buildstats.bbclass
@@ -175,7 +175,7 @@ python run_buildstats () {
         # set the buildname
         ########################################################################
         try:
-            bb.mkdirhier(e.data.getVar('BUILDSTATS_BASE', True))
+            bb.utils.mkdirhier(e.data.getVar('BUILDSTATS_BASE', True))
         except:
             pass
         set_bn(e)
@@ -185,7 +185,7 @@ python run_buildstats () {

         bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
         try:
-            bb.mkdirhier(bsdir)
+            bb.utils.mkdirhier(bsdir)
         except:
             pass
         if device != "NoLogicalDevice":
@@ -236,7 +236,7 @@ python run_buildstats () {
         set_diskdata("__diskdata_task", device, e.data)
         set_timedata("__timedata_task", e.data)
         try:
-            bb.mkdirhier(taskdir)
+            bb.utils.mkdirhier(taskdir)
         except:
             pass
         # write into the task event file the name and start time
diff --git a/meta/classes/ccache.bbclass b/meta/classes/ccache.bbclass
index ca3ca43164..2cdce46932 100644
--- a/meta/classes/ccache.bbclass
+++ b/meta/classes/ccache.bbclass
@@ -1,4 +1,4 @@
-CCACHE = "${@bb.which(d.getVar('PATH', True), 'ccache') and 'ccache '}"
+CCACHE = "${@bb.utils.which(d.getVar('PATH', True), 'ccache') and 'ccache '}"
 export CCACHE_DIR ?= "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}"
 CCACHE_DISABLE[unexport] = "1"

diff --git a/meta/classes/copyleft_compliance.bbclass b/meta/classes/copyleft_compliance.bbclass
index 6b30b876f8..32aa7577f0 100644
--- a/meta/classes/copyleft_compliance.bbclass
+++ b/meta/classes/copyleft_compliance.bbclass
@@ -33,7 +33,7 @@ python do_prepare_copyleft_sources () {
     pf = d.getVar('PF', True)
     dest = os.path.join(sources_dir, pf)
     shutil.rmtree(dest, ignore_errors=True)
-    bb.mkdirhier(dest)
+    bb.utils.mkdirhier(dest)

     for u in ud.values():
         local = os.path.normpath(fetch.localpath(u.url))
@@ -51,7 +51,7 @@ python do_prepare_copyleft_sources () {

     patches = src_patches(d)
     for patch in patches:
-        _, _, local, _, _, parm = bb.decodeurl(patch)
+        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
         patchdir = parm.get('patchdir')
         if patchdir:
             series = os.path.join(dest, 'series.subdir.%s' % patchdir.replace('/', '_'))
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index aef7973945..085575a041 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -591,7 +591,7 @@ python do_checkpkg() {
     pupver = "N/A"
     pstatus = "ErrUnknown"

-    (type, host, path, user, pswd, parm) = bb.decodeurl(uri)
+    (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(uri)
     if type in ['http', 'https', 'ftp']:
         if d.getVar('PRSPV', True):
             pcurver = d.getVar('PRSPV', True)
@@ -621,7 +621,7 @@ python do_checkpkg() {
             dirver = m.group().strip("/")

             """use new path and remove param. for wget only param is md5sum"""
-            alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
+            alturi = bb.fetch.encodeurl([type, host, altpath, user, pswd, {}])
             my_uri = d.getVar('REGEX_URI', True)
             if my_uri:
                 if d.getVar('PRSPV', True):
@@ -647,7 +647,7 @@ python do_checkpkg() {

             chk_uri = d.getVar('REGEX_URI', True)
             if not chk_uri:
-                alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
+                alturi = bb.fetch.encodeurl([type, host, altpath, user, pswd, {}])
             else:
                 alturi = chk_uri
             newver = check_new_version(alturi, curname, d)
diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass
index ea59c36441..84f638c099 100644
--- a/meta/classes/image.bbclass
+++ b/meta/classes/image.bbclass
@@ -149,7 +149,7 @@ def get_devtable_list(d):
     if devtables == None:
         devtables = 'files/device_table-minimal.txt'
     for devtable in devtables.split():
-        str += " %s" % bb.which(d.getVar('BBPATH', True), devtable)
+        str += " %s" % bb.utils.which(d.getVar('BBPATH', True), devtable)
     return str

 IMAGE_CLASSES ?= "image_types"
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass
index aa02985f8d..524cdca244 100644
--- a/meta/classes/insane.bbclass
+++ b/meta/classes/insane.bbclass
@@ -594,7 +594,7 @@ def package_qa_check_license(workdir, d):
     srcdir = d.getVar('S', True)

     for url in lic_files.split():
-        (type, host, path, user, pswd, parm) = bb.decodeurl(url)
+        (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
         srclicfile = os.path.join(srcdir, path)
         if not os.path.isfile(srclicfile):
             raise bb.build.FuncFailed( pn + ": LIC_FILES_CHKSUM points to an invalid file: " + srclicfile)
diff --git a/meta/classes/kernel-yocto.bbclass b/meta/classes/kernel-yocto.bbclass
index fcc00e3d47..26d449acd7 100644
--- a/meta/classes/kernel-yocto.bbclass
+++ b/meta/classes/kernel-yocto.bbclass
@@ -9,7 +9,7 @@ def find_patches(d):
     patches = src_patches(d)
     patch_list=[]
     for p in patches:
-        _, _, local, _, _, _ = bb.decodeurl(p)
+        _, _, local, _, _, _ = bb.fetch.decodeurl(p)
         patch_list.append(local)

     return patch_list
diff --git a/meta/classes/license.bbclass b/meta/classes/license.bbclass
index 2ca47cc198..621c1b2f55 100644
--- a/meta/classes/license.bbclass
+++ b/meta/classes/license.bbclass
@@ -129,9 +129,9 @@ def add_package_and_files(d):
             d.setVar('RRECOMMENDS_' + pn, "%s" % (pn_lic))

 def copy_license_files(lic_files_paths, destdir):
-    bb.mkdirhier(destdir)
+    bb.utils.mkdirhier(destdir)
     for (basename, path) in lic_files_paths:
-        ret = bb.copyfile(path, os.path.join(destdir, basename))
+        ret = bb.utils.copyfile(path, os.path.join(destdir, basename))
         # If the copy didn't occur, something horrible went wrong and we fail out
         if not ret:
             bb.warn("%s could not be copied for some reason. It may not exist. WARN for now." % path)
@@ -188,7 +188,7 @@ def find_license_files(d):

     def find_license(license_type):
         try:
-            bb.mkdirhier(gen_lic_dest)
+            bb.utils.mkdirhier(gen_lic_dest)
         except:
             pass
         spdx_generic = None
@@ -227,7 +227,7 @@ def find_license_files(d):
         return lic_files_paths

     for url in lic_files.split():
-        (type, host, path, user, pswd, parm) = bb.decodeurl(url)
+        (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
         # We want the license filename and path
         srclicfile = os.path.join(srcdir, path)
         lic_files_paths.append((os.path.basename(path), srclicfile))
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass
index 068c11a535..765e894526 100644
--- a/meta/classes/package_deb.bbclass
+++ b/meta/classes/package_deb.bbclass
@@ -220,7 +220,7 @@ python do_package_deb () {
         basedir = os.path.join(os.path.dirname(root))

         pkgoutdir = os.path.join(outdir, localdata.getVar('PACKAGE_ARCH', True))
-        bb.mkdirhier(pkgoutdir)
+        bb.utils.mkdirhier(pkgoutdir)

         os.chdir(root)
         from glob import glob
@@ -236,7 +236,7 @@ python do_package_deb () {
             continue

         controldir = os.path.join(root, 'DEBIAN')
-        bb.mkdirhier(controldir)
+        bb.utils.mkdirhier(controldir)
         os.chmod(controldir, 0755)
         try:
             ctrlfile = open(os.path.join(controldir, 'control'), 'w')
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass
index 68ffc62583..a633cfcc76 100644
--- a/meta/classes/package_ipk.bbclass
+++ b/meta/classes/package_ipk.bbclass
@@ -253,7 +253,7 @@ python do_package_ipk () {
         basedir = os.path.join(os.path.dirname(root))
         arch = localdata.getVar('PACKAGE_ARCH', True)
         pkgoutdir = "%s/%s" % (outdir, arch)
-        bb.mkdirhier(pkgoutdir)
+        bb.utils.mkdirhier(pkgoutdir)
         os.chdir(root)
         from glob import glob
         g = glob('*')
@@ -268,7 +268,7 @@ python do_package_ipk () {
             continue

         controldir = os.path.join(root, 'CONTROL')
-        bb.mkdirhier(controldir)
+        bb.utils.mkdirhier(controldir)
         try:
             ctrlfile = open(os.path.join(controldir, 'control'), 'w')
         except OSError:
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass
index b29d3edb2f..cc77c5ea35 100644
--- a/meta/classes/package_rpm.bbclass
+++ b/meta/classes/package_rpm.bbclass
@@ -1069,7 +1069,7 @@ python do_package_rpm () {
         clean_licenses = get_licenses(d)
         pkgwritesrpmdir = bb.data.expand('${PKGWRITEDIRSRPM}/${PACKAGE_ARCH_EXTEND}', d)
         pkgwritesrpmdir = pkgwritesrpmdir + '/' + clean_licenses
-        bb.mkdirhier(pkgwritesrpmdir)
+        bb.utils.mkdirhier(pkgwritesrpmdir)
         os.chmod(pkgwritesrpmdir, 0755)
         return pkgwritesrpmdir

@@ -1123,7 +1123,7 @@ python do_package_rpm () {
     pkgwritedir = d.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}')
     pkgarch = d.expand('${PACKAGE_ARCH_EXTEND}${TARGET_VENDOR}-${TARGET_OS}')
     magicfile = d.expand('${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc')
-    bb.mkdirhier(pkgwritedir)
+    bb.utils.mkdirhier(pkgwritedir)
     os.chmod(pkgwritedir, 0755)

     cmd = rpmbuild
diff --git a/meta/classes/patch.bbclass b/meta/classes/patch.bbclass
index ed12802491..86c65b3b8d 100644
--- a/meta/classes/patch.bbclass
+++ b/meta/classes/patch.bbclass
@@ -44,7 +44,7 @@ def src_patches(d, all = False ):
         if patchdir:
             patchparm['patchdir'] = patchdir

-        localurl = bb.encodeurl(('file', '', local, '', '', patchparm))
+        localurl = bb.fetch.encodeurl(('file', '', local, '', '', patchparm))
         patches.append(localurl)

     if all:
@@ -147,7 +147,7 @@ python patch_do_patch() {
     os.environ['TMPDIR'] = process_tmpdir

     for patch in src_patches(d):
-        _, _, local, _, _, parm = bb.decodeurl(patch)
+        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)

         if "patchdir" in parm:
             patchdir = parm["patchdir"]
diff --git a/meta/classes/recipe_sanity.bbclass b/meta/classes/recipe_sanity.bbclass
index 2f90159d58..5dd4624f40 100644
--- a/meta/classes/recipe_sanity.bbclass
+++ b/meta/classes/recipe_sanity.bbclass
@@ -73,7 +73,7 @@ def can_delete_FILESPATH(cfgdata, d):

 def can_delete_FILESDIR(cfgdata, d):
     expected = cfgdata.get("FILESDIR")
-    #expected = "${@bb.which(d.getVar('FILESPATH', True), '.')}"
+    #expected = "${@bb.utils.which(d.getVar('FILESPATH', True), '.')}"
     unexpanded = d.getVar("FILESDIR", 0)
     if unexpanded is None:
         return False
diff --git a/meta/classes/spdx.bbclass b/meta/classes/spdx.bbclass
index bde6e496ff..55ce3aff4f 100644
--- a/meta/classes/spdx.bbclass
+++ b/meta/classes/spdx.bbclass
@@ -45,9 +45,9 @@ python do_spdx () {
     cur_ver_code = get_ver_code( info['sourcedir'] )
     cache_cur = False
     if not os.path.exists( spdx_sstate_dir ):
-        bb.mkdirhier( spdx_sstate_dir )
+        bb.utils.mkdirhier( spdx_sstate_dir )
     if not os.path.exists( info['spdx_temp_dir'] ):
-        bb.mkdirhier( info['spdx_temp_dir'] )
+        bb.utils.mkdirhier( info['spdx_temp_dir'] )
     if os.path.exists( sstatefile ):
         ## cache for this package exists. read it in
         cached_spdx = get_cached_spdx( sstatefile )
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass
index c86f393c6f..b088e58d98 100644
--- a/meta/classes/sstate.bbclass
+++ b/meta/classes/sstate.bbclass
@@ -129,7 +129,7 @@ def sstate_install(ss, d):

     sharedfiles = []
     shareddirs = []
-    bb.mkdirhier(d.expand("${SSTATE_MANIFESTS}"))
+    bb.utils.mkdirhier(d.expand("${SSTATE_MANIFESTS}"))

     d2 = d.createCopy()
     extrainf = d.getVarFlag("do_" + ss['task'], 'stamp-extra-info', True)
@@ -215,7 +215,7 @@ def sstate_installpkg(ss, d):
         # remove dir if it exists, ensure any parent directories do exist
         if os.path.exists(dir):
             oe.path.remove(dir)
-        bb.mkdirhier(dir)
+        bb.utils.mkdirhier(dir)
         oe.path.remove(dir)

     sstateinst = d.expand("${WORKDIR}/sstate-install-%s/" % ss['name'])
@@ -281,7 +281,7 @@ def sstate_installpkg(ss, d):
         workdir = d.getVar('WORKDIR', True)
         src = sstateinst + "/" + plain.replace(workdir, '')
         dest = plain
-        bb.mkdirhier(src)
+        bb.utils.mkdirhier(src)
         prepdir(dest)
         os.rename(src, dest)

@@ -456,8 +456,8 @@ def sstate_package(ss, d):
     sstatebuild = d.expand("${WORKDIR}/sstate-build-%s/" % ss['name'])
     sstatepkg = d.getVar('SSTATE_PKG', True) + '_'+ ss['name'] + ".tgz"
     bb.utils.remove(sstatebuild, recurse=True)
-    bb.mkdirhier(sstatebuild)
-    bb.mkdirhier(os.path.dirname(sstatepkg))
+    bb.utils.mkdirhier(sstatebuild)
+    bb.utils.mkdirhier(os.path.dirname(sstatepkg))
     for state in ss['dirs']:
         if not os.path.exists(state[1]):
             continue
@@ -477,8 +477,8 @@ def sstate_package(ss, d):
     workdir = d.getVar('WORKDIR', True)
     for plain in ss['plaindirs']:
         pdir = plain.replace(workdir, sstatebuild)
-        bb.mkdirhier(plain)
-        bb.mkdirhier(pdir)
+        bb.utils.mkdirhier(plain)
+        bb.utils.mkdirhier(pdir)
         oe.path.copyhardlinktree(plain, pdir)

     d.setVar('SSTATE_BUILDDIR', sstatebuild)
@@ -503,7 +503,7 @@ def pstaging_fetch(sstatefetch, sstatepkg, d):
     bb.data.update_data(localdata)

     dldir = localdata.expand("${SSTATE_DIR}")
-    bb.mkdirhier(dldir)
+    bb.utils.mkdirhier(dldir)

     localdata.delVar('MIRRORS')
     localdata.delVar('FILESPATH')
diff --git a/meta/classes/terminal.bbclass b/meta/classes/terminal.bbclass
index 591b4acf6d..efbc4eb9ae 100644
--- a/meta/classes/terminal.bbclass
+++ b/meta/classes/terminal.bbclass
@@ -20,7 +20,7 @@ def emit_terminal_func(command, envdata, d):
     runfmt = d.getVar('BB_RUNFMT', True) or "run.{func}.{pid}"
     runfile = runfmt.format(func=cmd_func, task=cmd_func, taskfunc=cmd_func, pid=os.getpid())
     runfile = os.path.join(d.getVar('T', True), runfile)
-    bb.mkdirhier(os.path.dirname(runfile))
+    bb.utils.mkdirhier(os.path.dirname(runfile))

     with open(runfile, 'w') as script:
         script.write('#!/bin/sh -e\n')
diff --git a/meta/conf/distro/include/tcmode-external-sourcery.inc b/meta/conf/distro/include/tcmode-external-sourcery.inc
index d6cac74d4d..5590f7a1e9 100644
--- a/meta/conf/distro/include/tcmode-external-sourcery.inc
+++ b/meta/conf/distro/include/tcmode-external-sourcery.inc
@@ -114,7 +114,7 @@ def populate_toolchain_links(d):
         bb.fatal("Unable to populate toolchain binary symlinks in %s" % pattern)

     bindir = d.getVar('STAGING_BINDIR_TOOLCHAIN', True)
-    bb.mkdirhier(bindir)
+    bb.utils.mkdirhier(bindir)
     for f in files:
         base = os.path.basename(f)
         newpath = os.path.join(bindir, base)
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py
index 244f6c5cf2..59abd0af19 100644
--- a/meta/lib/oe/patch.py
+++ b/meta/lib/oe/patch.py
@@ -331,7 +331,7 @@ class QuiltTree(PatchSet):
             patch = self.patches[kwargs["patch"]]
             if not patch:
                 raise PatchError("No patch found at index %s in patchset." % kwargs["patch"])
-            (type, host, path, user, pswd, parm) = bb.decodeurl(patch["remote"])
+            (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(patch["remote"])
             if type == "file":
                 import shutil
                 if not patch.get("file") and patch.get("remote"):
diff --git a/meta/recipes-devtools/automake/automake.inc b/meta/recipes-devtools/automake/automake.inc
index ae743cc58a..507c062e4a 100644
--- a/meta/recipes-devtools/automake/automake.inc
+++ b/meta/recipes-devtools/automake/automake.inc
@@ -13,6 +13,6 @@ do_configure() {
 	oe_runconf
 }

-export AUTOMAKE = "${@bb.which('automake', d.getVar('PATH', True))}"
+export AUTOMAKE = "${@bb.utils.which('automake', d.getVar('PATH', True))}"

 FILES_${PN} += "${datadir}/automake* ${datadir}/aclocal*"
diff --git a/meta/recipes-devtools/gcc/gcc-multilib-config.inc b/meta/recipes-devtools/gcc/gcc-multilib-config.inc
index f89aa8ea7f..005aa6b814 100644
--- a/meta/recipes-devtools/gcc/gcc-multilib-config.inc
+++ b/meta/recipes-devtools/gcc/gcc-multilib-config.inc
@@ -34,7 +34,7 @@ python gcc_multilib_setup() {
         rel_path = os.path.relpath(fn, src_conf_dir)
         parent_dir = os.path.dirname(rel_path)
         bb.utils.mkdirhier('%s/%s' % (build_conf_dir, parent_dir))
-        bb.copyfile(fn, '%s/%s' % (build_conf_dir, rel_path))
+        bb.utils.copyfile(fn, '%s/%s' % (build_conf_dir, rel_path))

     multilibs = (d.getVar('MULTILIB_VARIANTS', True) or '').split()
     if not multilibs:
diff --git a/meta/recipes-support/libpcre/libpcre_8.33.bb b/meta/recipes-support/libpcre/libpcre_8.33.bb
index 50930b42bd..caf877ff80 100644
--- a/meta/recipes-support/libpcre/libpcre_8.33.bb
+++ b/meta/recipes-support/libpcre/libpcre_8.33.bb
@@ -16,7 +16,7 @@ SRC_URI[sha256sum] = "c603957a4966811c04af5f6048c71cfb4966ec93312d7b3118116ed9f3

 S = "${WORKDIR}/pcre-${PV}"

-FILESPATH .= ":${@base_set_filespath([bb.which(BBPATH, 'recipes-support/libpcre/files', direction=True)], d)}"
+FILESPATH .= ":${@base_set_filespath([bb.utils.which(BBPATH, 'recipes-support/libpcre/files', direction=True)], d)}"

 PROVIDES += "pcre"
 DEPENDS += "bzip2 zlib"