author     Richard Purdie <richard.purdie@linuxfoundation.org>   2011-02-04 13:20:28 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>   2011-02-07 09:06:37 +0000
commit     984e90f4d71d866580131c4927b0a77baf1bb9bd (patch)
tree       adfe717341c87f2719990a962951492b65c03c1c /meta
parent     ca7adf75295c2a6041b891bfa61e0b4bc2f7c860 (diff)
download   poky-984e90f4d71d866580131c4927b0a77baf1bb9bd.tar.gz

meta/classes: Update classes to use new fetcher API

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>

Diffstat (limited to 'meta')
-rw-r--r--  meta/classes/base.bbclass    | 176
-rw-r--r--  meta/classes/patch.bbclass   |   3
-rw-r--r--  meta/classes/sstate.bbclass  |  56
-rw-r--r--  meta/classes/utils.bbclass   |  11
-rw-r--r--  meta/lib/oe/patch.py         |   6

5 files changed, 63 insertions(+), 189 deletions(-)
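
For orientation before the diff itself: the classes below move from the old bb.fetch API (bb.fetch.init() followed by bb.fetch.go()/bb.fetch.download(), plus the local oe_unpack_file() helper) to the bb.fetch2 API, where a single Fetch object handles download and unpack and raises one exception base class. A minimal sketch of that pattern, assuming it runs as a BitBake python task with the datastore d in scope (the task name here is illustrative, not part of the commit):

python example_do_fetch_and_unpack() {
    # Collect the SRC_URI entries; nothing to do if the recipe fetches nothing.
    src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
    if len(src_uri) == 0:
        return

    # Work on a copy of the datastore, as the tasks in the diff do.
    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)
    rootdir = bb.data.getVar('WORKDIR', localdata, True)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.download()        # replaces bb.fetch.init() + bb.fetch.go()/download()
        fetcher.unpack(rootdir)   # replaces the oe_unpack_file() helper removed below
    except bb.fetch2.BBFetchException, e:
        # one exception base class instead of NoMethodError/FetchError/MD5SumError/...
        raise bb.build.FuncFailed(e)
}
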
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index d8efcc0f8c..edb65eb96b 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -116,163 +116,38 @@ addtask setscene before do_fetch
 addtask fetch
 do_fetch[dirs] = "${DL_DIR}"
 python base_do_fetch() {
-    import sys
+
+    src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
+    if len(src_uri) == 0:
+        return
 
     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
 
-    src_uri = bb.data.getVar('SRC_URI', localdata, 1)
-    if not src_uri:
-        return 1
-
-    try:
-        bb.fetch.init(src_uri.split(),d)
-    except bb.fetch.NoMethodError:
-        (type, value, traceback) = sys.exc_info()
-        raise bb.build.FuncFailed("No method: %s" % value)
-    except bb.MalformedUrl:
-        (type, value, traceback) = sys.exc_info()
-        raise bb.build.FuncFailed("Malformed URL: %s" % value)
-
     try:
-        if bb.fetch.__version__ == "1":
-            bb.fetch.go(localdata)
-        else:
-            bb.fetch.download(localdata)
-    except bb.fetch.MissingParameterError:
-        (type, value, traceback) = sys.exc_info()
-        raise bb.build.FuncFailed("Missing parameters: %s" % value)
-    except bb.fetch.FetchError:
-        (type, value, traceback) = sys.exc_info()
-        raise bb.build.FuncFailed("Fetch failed: %s" % value)
-    except bb.fetch.MD5SumError:
-        (type, value, traceback) = sys.exc_info()
-        raise bb.build.FuncFailed("MD5 failed: %s" % value)
-    except:
-        (type, value, traceback) = sys.exc_info()
-        raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
+        fetcher = bb.fetch2.Fetch(src_uri, localdata)
+        fetcher.download()
+    except bb.fetch2.BBFetchException, e:
+        raise bb.build.FuncFailed(e)
 }
 
-def subprocess_setup():
-    import signal
-    # Python installs a SIGPIPE handler by default. This is usually not what
-    # non-Python subprocesses expect.
-    # SIGPIPE errors are known issues with gzip/bash
-    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
-
-def oe_unpack_file(file, data, url = None):
-    import subprocess
-    if not url:
-        url = "file://%s" % file
-    dots = file.split(".")
-    if dots[-1] in ['gz', 'bz2', 'Z']:
-        efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
-    else:
-        efile = file
-    cmd = None
-    if file.endswith('.tar'):
-        cmd = 'tar x --no-same-owner -f %s' % file
-    elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
-        cmd = 'tar xz --no-same-owner -f %s' % file
-    elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
-        cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
-    elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
-        cmd = 'gzip -dc %s > %s' % (file, efile)
-    elif file.endswith('.bz2'):
-        cmd = 'bzip2 -dc %s > %s' % (file, efile)
-    elif file.endswith('.tar.xz'):
-        cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
-    elif file.endswith('.xz'):
-        cmd = 'xz -dc %s > %s' % (file, efile)
-    elif file.endswith('.zip') or file.endswith('.jar'):
-        cmd = 'unzip -q -o'
-        (type, host, path, user, pswd, parm) = bb.decodeurl(url)
-        if 'dos' in parm:
-            cmd = '%s -a' % cmd
-        cmd = "%s '%s'" % (cmd, file)
-    elif os.path.isdir(file):
-        filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, 1))
-        destdir = "."
-        if file[0:len(filesdir)] == filesdir:
-            destdir = file[len(filesdir):file.rfind('/')]
-            destdir = destdir.strip('/')
-            if len(destdir) < 1:
-                destdir = "."
-            elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
-                os.makedirs("%s/%s" % (os.getcwd(), destdir))
-        cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
-    else:
-        (type, host, path, user, pswd, parm) = bb.decodeurl(url)
-        if not 'patch' in parm:
-            # The "destdir" handling was specifically done for FILESPATH
-            # items. So, only do so for file:// entries.
-            if type == "file" and path.find("/") != -1:
-                destdir = path.rsplit("/", 1)[0]
-            else:
-                destdir = "."
-            bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
-            cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
-
-    if not cmd:
-        return True
-
-    dest = os.path.join(os.getcwd(), os.path.basename(file))
-    if os.path.exists(dest):
-        if os.path.samefile(file, dest):
-            return True
-
-    # Change to subdir before executing command
-    save_cwd = os.getcwd();
-    parm = bb.decodeurl(url)[5]
-    if 'subdir' in parm:
-        newdir = ("%s/%s" % (os.getcwd(), parm['subdir']))
-        bb.mkdirhier(newdir)
-        os.chdir(newdir)
-
-    cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
-    bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
-    ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
-
-    os.chdir(save_cwd)
-
-    return ret == 0
-
 addtask unpack after do_fetch
 do_unpack[dirs] = "${WORKDIR}"
 python base_do_unpack() {
-    import re
+    src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
+    if len(src_uri) == 0:
+        return
 
     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
 
-    urldata = bb.fetch.init([], localdata)
+    rootdir = bb.data.getVar('WORKDIR', localdata, True)
 
-    src_uri = bb.data.getVar('SRC_URI', localdata, True)
-    if not src_uri:
-        return
-    for url in src_uri.split():
-        try:
-            local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
-        except bb.MalformedUrl, e:
-            raise FuncFailed('Unable to generate local path for malformed uri: %s' % e)
-        if local is None:
-            continue
-        local = os.path.realpath(local)
-        lockfile = urldata[url].lockfile
-        if lockfile:
-            lf = bb.utils.lockfile(urldata[url].lockfile)
-
-        if bb.fetch.__version__ == "1":
-            ret = oe_unpack_file(local, localdata, url)
-        else:
-            # use bb.fetch2 unpack API
-            ud = urldata[url]
-            rootdir = bb.data.getVar('WORKDIR', localdata, True)
-            ret = ud.method.unpack(ud, rootdir, localdata)
-        if lockfile:
-            bb.utils.unlockfile(lf)
-        if not ret:
-            raise bb.build.FuncFailed("oe_unpack_file failed with return value %s" % ret)
+    try:
+        fetcher = bb.fetch2.Fetch(src_uri, localdata)
+        fetcher.unpack(rootdir)
+    except bb.fetch2.BBFetchException, e:
+        raise bb.build.FuncFailed(e)
 }
 
 GIT_CONFIG = "${STAGING_DIR_NATIVE}/usr/etc/gitconfig"
@@ -550,7 +425,8 @@ python () {
         for s in srcuri.split():
             if not s.startswith("file://"):
                 continue
-            local = bb.data.expand(bb.fetch.localpath(s, d), d)
+            fetcher = bb.fetch2.Fetch([s], d)
+            local = fetcher.localpath(s)
             for mp in paths:
                 if local.startswith(mp):
                     #bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch))
@@ -594,14 +470,12 @@ python do_cleanall() {
     dl_dir = bb.data.getVar('DL_DIR', localdata, True)
     dl_dir = os.path.realpath(dl_dir)
 
-    src_uri = bb.data.getVar('SRC_URI', localdata, True)
-    if not src_uri:
+    src_uri = (bb.data.getVar('SRC_URI', localdata, True) or "").split()
+    if len(src_uri) == 0:
         return
-    for url in src_uri.split():
-        try:
-            local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
-        except bb.MalformedUrl, e:
-            raise FuncFailed('Unable to generate local path for malformed uri: %s' % e)
+    fetcher = bb.fetch2.Fetch(src_uri, localdata)
+    for url in src_uri:
+        local = fetcher.localpath(url)
         if local is None:
             continue
         local = os.path.realpath(local)
diff --git a/meta/classes/patch.bbclass b/meta/classes/patch.bbclass
index ee8a2026fc..80fd45f0e3 100644
--- a/meta/classes/patch.bbclass
+++ b/meta/classes/patch.bbclass
@@ -58,9 +58,8 @@ python patch_do_patch() {
             continue
 
         if not local:
-            bb.fetch.init([url],d)
             url = bb.encodeurl((type, host, path, user, pswd, []))
-            local = os.path.join('/', bb.fetch.localpath(url, d))
+            local = os.path.join('/', bb.fetch2.localpath(url, d))
         local = bb.data.expand(local, d)
 
         if "striplevel" in parm:
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass
index a754821c08..e4564e4b07 100644
--- a/meta/classes/sstate.bbclass
+++ b/meta/classes/sstate.bbclass
@@ -326,40 +326,40 @@ def sstate_package(ss, d):
         return
 
 def pstaging_fetch(sstatepkg, d):
-    import bb.fetch
-
-    # only try and fetch if the user has configured a mirror
-
-    mirrors = bb.data.getVar('SSTATE_MIRRORS', d, True)
-    if mirrors:
-        # Copy the data object and override DL_DIR and SRC_URI
-        localdata = bb.data.createCopy(d)
-        bb.data.update_data(localdata)
-
-        dldir = bb.data.expand("${SSTATE_DIR}", localdata)
-        srcuri = "file://" + os.path.basename(sstatepkg)
-
-        bb.mkdirhier(dldir)
-
-        bb.data.setVar('DL_DIR', dldir, localdata)
-        bb.data.setVar('PREMIRRORS', mirrors, localdata)
-        bb.data.setVar('SRC_URI', srcuri, localdata)
-
-        # Try a fetch from the sstate mirror, if it fails just return and
-        # we will build the package
-        try:
-            bb.fetch.init([srcuri], localdata)
-            if bb.fetch.__version__ == "1":
-                bb.fetch.go(localdata, [srcuri])
-            else:
-                bb.fetch.download(localdata, [srcuri])
-            # Need to optimise this, if using file:// urls, the fetcher just changes the local path
-            # For now work around by symlinking
-            localpath = bb.data.expand(bb.fetch.localpath(srcuri, localdata), localdata)
-            if localpath != sstatepkg and os.path.exists(localpath):
-                os.symlink(localpath, sstatepkg)
-        except:
-            pass
+
+    # Only try and fetch if the user has configured a mirror
+    mirrors = bb.data.getVar('SSTATE_MIRRORS', d, True)
+    if not mirrors:
+        return
+
+    import bb.fetch2
+    # Copy the data object and override DL_DIR and SRC_URI
+    localdata = bb.data.createCopy(d)
+    bb.data.update_data(localdata)
+
+    dldir = bb.data.expand("${SSTATE_DIR}", localdata)
+    srcuri = "file://" + os.path.basename(sstatepkg)
+
+    bb.mkdirhier(dldir)
+
+    bb.data.setVar('DL_DIR', dldir, localdata)
+    bb.data.setVar('PREMIRRORS', mirrors, localdata)
+    bb.data.setVar('SRC_URI', srcuri, localdata)
+
+    # Try a fetch from the sstate mirror, if it fails just return and
+    # we will build the package
+    try:
+        fetcher = bb.fetch2.Fetch([srcuri], localdata)
+        fetcher.download()
+
+        # Need to optimise this, if using file:// urls, the fetcher just changes the local path
+        # For now work around by symlinking
+        localpath = bb.data.expand(fetcher.localpath(srcuri), localdata)
+        if localpath != sstatepkg and os.path.exists(localpath) and not os.path.exists(sstatepkg):
+            os.symlink(localpath, sstatepkg)
+
+    except bb.fetch2.BBFetchException:
+        pass
 
 def sstate_setscene(d):
     shared_state = sstate_state_fromvars(d)
diff --git a/meta/classes/utils.bbclass b/meta/classes/utils.bbclass
index 4d4b9953e2..455b49d54a 100644
--- a/meta/classes/utils.bbclass
+++ b/meta/classes/utils.bbclass
@@ -51,11 +51,12 @@ def machine_paths(d):
 def is_machine_specific(d):
     """Determine whether the current recipe is machine specific"""
     machinepaths = set(machine_paths(d))
-    urldatadict = bb.fetch.init(d.getVar("SRC_URI", True).split(), d, True)
-    for urldata in (urldata for urldata in urldatadict.itervalues()
-                    if urldata.type == "file"):
-        if any(urldata.localpath.startswith(mp + "/") for mp in machinepaths):
-            return True
+    srcuri = d.getVar("SRC_URI", True).split()
+    for url in srcuri:
+        fetcher = bb.fetch2.Fetch([srcuri], d)
+        if url.startswith("file://"):
+            if any(fetcher.localpath(url).startswith(mp + "/") for mp in machinepaths):
+                return True
 
 def oe_popen_env(d):
     env = d.getVar("__oe_popen_env", False)
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py
index f203d683da..c8eeb8bc4c 100644
--- a/meta/lib/oe/patch.py
+++ b/meta/lib/oe/patch.py
@@ -72,14 +72,14 @@ class PatchSet(object):
         if not patch.get("remote"):
             raise PatchError("Patch file must be specified in patch import.")
         else:
-            patch["file"] = bb.fetch.localpath(patch["remote"], self.d)
+            patch["file"] = bb.fetch2.localpath(patch["remote"], self.d)
 
         for param in PatchSet.defaults:
             if not patch.get(param):
                 patch[param] = PatchSet.defaults[param]
 
         if patch.get("remote"):
-            patch["file"] = bb.data.expand(bb.fetch.localpath(patch["remote"], self.d), self.d)
+            patch["file"] = bb.data.expand(bb.fetch2.localpath(patch["remote"], self.d), self.d)
 
         patch["filemd5"] = bb.utils.md5_file(patch["file"])
 
@@ -293,7 +293,7 @@ class QuiltTree(PatchSet):
         if type == "file":
             import shutil
             if not patch.get("file") and patch.get("remote"):
-                patch["file"] = bb.fetch.localpath(patch["remote"], self.d)
+                patch["file"] = bb.fetch2.localpath(patch["remote"], self.d)
 
             shutil.copyfile(patch["quiltfile"], patch["file"])
         else:
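
As a closing usage note (not part of the commit): the hunks above resolve local paths in two ways under the new API, through the module-level helper and through a Fetch object. A rough sketch, with the URL purely illustrative and d assumed to be a BitBake datastore:

# The file:// URL below is a made-up example, not taken from the commit.
url = "file://example.patch"

# Module-level helper, as used in patch.bbclass and meta/lib/oe/patch.py:
local = bb.fetch2.localpath(url, d)

# Per-Fetch-object form, as used in base.bbclass and sstate.bbclass:
fetcher = bb.fetch2.Fetch([url], d)
local = fetcher.localpath(url)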