author     Richard Purdie <richard.purdie@linuxfoundation.org>   2011-02-03 18:22:06 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>   2011-02-03 18:22:06 +0000
commit     fa9fda05e1d269446b51050195b891346482e8bb
tree       cca4e265a2726f89a652fe64c55b1b07487ed0a5 /bitbake
parent     7f99605562119a13a2510a3c990e3cf577ad764e
download   poky-fa9fda05e1d269446b51050195b891346482e8bb.tar.gz
bitbake/fetch2: Ensure that mirror fetches are symlinked from the download directory
When files are fetched from a mirror source that happens to be local, ensure links are created for the file, since subsequent fetch calls can then follow the links to find the files. Any other approach, such as the existing manipulation of localpath internally to the fetcher, is prone to errors, races and other issues.

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
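A minimal, self-contained sketch of the behaviour described above (not part of the patch): a file found on a local premirror is linked into the download directory under its usual name, so a later fetch sees it there. The directory and file names below are invented for illustration.

    import os
    import tempfile

    # Hypothetical stand-ins for a local premirror and the download directory.
    mirror_dir = tempfile.mkdtemp(prefix="mirror-")
    dl_dir = tempfile.mkdtemp(prefix="downloads-")

    mirror_copy = os.path.join(mirror_dir, "zlib-1.2.5.tar.gz")
    expected = os.path.join(dl_dir, "zlib-1.2.5.tar.gz")
    open(mirror_copy, "w").close()   # pretend the premirror already holds the file

    # Link the mirror copy to the expected location, as the patch's
    # download_update() helper does when the fetch result is local.
    if not os.path.exists(expected) and os.path.exists(mirror_copy):
        os.symlink(mirror_copy, expected)

    assert os.path.exists(expected)  # a subsequent fetch now finds the file here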
Diffstat (limited to 'bitbake')
 bitbake/lib/bb/fetch2/__init__.py | 20
 1 file changed, 17 insertions(+), 3 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 43ba772dbf..2e8dab9f57 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -263,6 +263,15 @@ def subprocess_setup():
     # SIGPIPE errors are known issues with gzip/bash
     signal.signal(signal.SIGPIPE, signal.SIG_DFL)
 
+def download_update(result, target):
+    if os.path.exists(target):
+        return
+    if not result or not os.path.exists(result):
+        return
+    if target != result:
+        os.symlink(result, target)
+    return
+
 def download(d, urls = None):
     """
     Fetch all urls
@@ -289,6 +298,8 @@ def download(d, urls = None):
         elif os.path.exists(ud.localfile):
             localpath = ud.localfile
 
+        download_update(localpath, ud.localpath)
+
         # Need to re-test forcefetch() which will return true if our copy is too old
         if m.forcefetch(u, ud, d) or not localpath:
             # Next try fetching from the original uri, u
@@ -297,16 +308,19 @@
                 if hasattr(m, "build_mirror_data"):
                     m.build_mirror_data(u, ud, d)
                 localpath = ud.localpath
+                download_update(localpath, ud.localpath)
+
             except FetchError:
                 # Remove any incomplete file
                 bb.utils.remove(ud.localpath)
                 # Finally, try fetching uri, u, from MIRRORS
                 mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
                 localpath = try_mirrors (d, u, mirrors)
-                if not localpath or not os.path.exists(localpath):
-                    raise FetchError("Unable to fetch URL %s from any source." % u)
 
-            ud.localpath = localpath
+        if not localpath or not os.path.exists(localpath):
+            raise FetchError("Unable to fetch URL %s from any source." % u)
+
+        download_update(localpath, ud.localpath)
 
         if os.path.exists(ud.md5):
             # Touch the md5 file to show active use of the download
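For reference, the logic of the new helper with editorial comments added; the code matches the hunk above, only the import and the comments are new here.

    import os

    def download_update(result, target):
        # The expected download-directory path is already present: nothing to do.
        if os.path.exists(target):
            return
        # No usable fetch result (e.g. the mirror attempt failed), so there is
        # nothing to link.
        if not result or not os.path.exists(result):
            return
        # The file was fetched to some other local path: expose it at the
        # expected location via a symlink so later fetch calls can find it.
        if target != result:
            os.symlink(result, target)
        return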