| author | Richard Purdie <richard.purdie@linuxfoundation.org> | 2011-02-04 13:14:03 +0000 |
|---|---|---|
| committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2011-02-07 09:06:37 +0000 |
| commit | ca7adf75295c2a6041b891bfa61e0b4bc2f7c860 (patch) | |
| tree | 97eda1b92047b68973b95c12e907be6366b5cc0c /bitbake/lib/bb | |
| parent | 08a9fef4fa0036708f29bd150d7ecf66354e5475 (diff) | |
| download | poky-ca7adf75295c2a6041b891bfa61e0b4bc2f7c860.tar.gz | |
bitbake/fetch2: Define a new interface to the fetcher code through the 'Fetch' class
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
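
The new entry point replaces the module-level `init()`/`download()`/`localpaths()` functions removed below. A minimal usage sketch of the class this commit adds (the datastore `d`, the URL, and the workdir are illustrative, not from the patch):

```python
# Sketch only: 'd' is a BitBake datastore; URL and workdir are made up.
fetcher = bb.fetch2.Fetch(["http://example.com/source.tar.gz"], d)
fetcher.download()              # premirrors, then upstream, then mirrors
paths = fetcher.localpaths()    # local filenames after a successful fetch
fetcher.unpack("/tmp/workdir")  # extract each URL into the given root
```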
Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r-- | bitbake/lib/bb/fetch2/__init__.py | 313
1 file changed, 169 insertions(+), 144 deletions(-)
```diff
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index f1b87f9ee2..43ee1562cd 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -60,6 +60,13 @@ class FetchError(BBFetchException):
         self.url = url
         Exception.__init__(self, self.msg)
 
+class UnpackError(BBFetchException):
+    """General fetcher exception when something happens incorrectly when unpacking"""
+    def __init__(self, message, url):
+        self.msg = "Unpack failure for URL: '%s'. %s" % (url, message)
+        self.url = url
+        Exception.__init__(self, self.msg)
+
 class NoMethodError(BBFetchException):
     """Exception raised when there is no method to obtain a supplied url or set of urls"""
     def __init__(self, url):
```
```diff
@@ -231,26 +238,6 @@ def fetcher_compare_revisions(d):
             logger.debug(2, "%s did not change", key)
     return False
 
-# Function call order is usually:
-#   1. init
-#   2. go
-#   3. localpaths
-# localpath can be called at any time
-
-def init(urls, d):
-    urldata = {}
-
-    fn = bb.data.getVar('FILE', d, 1)
-    if fn in urldata_cache:
-        urldata = urldata_cache[fn]
-
-    for url in urls:
-        if url not in urldata:
-            urldata[url] = FetchData(url, d)
-
-    urldata_cache[fn] = urldata
-    return urldata
-
 def mirror_from_string(data):
     return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
 
```
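
For reference, `mirror_from_string()` (kept as context above) turns a PREMIRRORS/MIRRORS-style string into a list of `[regex, substitution]` pairs. A quick illustration with made-up mirror values:

```python
# '\\n' in the variable value is the literal two-character escape BitBake stores.
data = "git://.*/.* http://mirror.example.com/sources/ \\n ftp://.*/.* http://mirror.example.com/sources/"
print(mirror_from_string(data))
# [['git://.*/.*', 'http://mirror.example.com/sources/'],
#  ['ftp://.*/.*', 'http://mirror.example.com/sources/']]
```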
```diff
@@ -304,118 +291,6 @@ def download_update(result, target):
         os.symlink(result, target)
     return
 
-def download(d, urls = None):
-    """
-    Fetch all urls
-    init must have previously been called
-    """
-    if not urls:
-        urls = d.getVar("SRC_URI", 1).split()
-    urldata = init(urls, d)
-
-    for u in urls:
-        urldata[u].setup_localpath(d)
-
-    for u in urls:
-        ud = urldata[u]
-        m = ud.method
-        localpath = ""
-
-        if not ud.localfile:
-            continue
-
-        lf = bb.utils.lockfile(ud.lockfile)
-
-        if m.try_premirror(u, ud, d):
-            # First try fetching uri, u, from PREMIRRORS
-            mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
-            localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))
-        elif os.path.exists(ud.localfile):
-            localpath = ud.localfile
-
-        download_update(localpath, ud.localpath)
-
-        # Need to re-test forcefetch() which will return true if our copy is too old
-        if m.forcefetch(u, ud, d) or not localpath:
-            # Next try fetching from the original uri, u
-            try:
-                m.download(u, ud, d)
-                if hasattr(m, "build_mirror_data"):
-                    m.build_mirror_data(u, ud, d)
-                localpath = ud.localpath
-                download_update(localpath, ud.localpath)
-
-            except FetchError:
-                # Remove any incomplete file
-                bb.utils.remove(ud.localpath)
-                # Finally, try fetching uri, u, from MIRRORS
-                mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
-                localpath = try_mirrors (d, u, mirrors)
-
-        if not localpath or not os.path.exists(localpath):
-            raise FetchError("Unable to fetch URL %s from any source." % u, u)
-
-        download_update(localpath, ud.localpath)
-
-        if os.path.exists(ud.donestamp):
-            # Touch the done stamp file to show active use of the download
-            try:
-                os.utime(ud.donestamp, None)
-            except:
-                # Errors aren't fatal here
-                pass
-        else:
-            # Only check the checksums if we've not seen this item before, then create the stamp
-            verify_checksum(u, ud, d)
-            open(ud.donestamp, 'w').close()
-
-
-        bb.utils.unlockfile(lf)
-
-def checkstatus(d, urls = None):
-    """
-    Check all urls exist upstream
-    init must have previously been called
-    """
-    urldata = init([], d)
-
-    if not urls:
-        urls = urldata
-
-    for u in urls:
-        ud = urldata[u]
-        ud.setup_localpath(d)
-        m = ud.method
-        logger.debug(1, "Testing URL %s", u)
-        # First try checking uri, u, from PREMIRRORS
-        mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
-        ret = try_mirrors(d, u, mirrors, True)
-        if not ret:
-            # Next try checking from the original uri, u
-            try:
-                ret = m.checkstatus(u, ud, d)
-            except:
-                # Finally, try checking uri, u, from MIRRORS
-                mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
-                ret = try_mirrors (d, u, mirrors, True)
-
-        if not ret:
-            raise FetchError("URL %s doesn't work" % u, u)
-
-def localpaths(d):
-    """
-    Return a list of the local filenames, assuming successful fetch
-    """
-    local = []
-    urldata = init([], d)
-
-    for u in urldata:
-        ud = urldata[u]
-        ud.setup_localpath(d)
-        local.append(ud.localpath)
-
-    return local
-
 def get_autorev(d):
     # only not cache src rev in autorev case
     if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
```
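
Everything deleted in this hunk reappears below as methods on the new class. A sketch of the API mapping (names exactly as in the patch; `urls` and `d` are the caller's):

```python
# Old module-level interface (removed above):
urldata = bb.fetch2.init(urls, d)
bb.fetch2.download(d, urls)
paths = bb.fetch2.localpaths(d)
bb.fetch2.checkstatus(d, urls)

# New class-based interface (added at the bottom of this patch):
fetcher = bb.fetch2.Fetch(urls, d)
fetcher.download()
paths = fetcher.localpaths()
fetcher.checkstatus()
```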
```diff
@@ -432,7 +307,8 @@ def get_srcrev(d):
     """
 
     scms = []
-    urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d)
+    fetcher = Fetch(bb.data.getVar('SRC_URI', d, True).split(), d)
+    urldata = fetcher.ud
     for u in urldata:
         if urldata[u].method.supports_srcrev():
             scms.append(u)
```
```diff
@@ -459,14 +335,8 @@
     return format
 
 def localpath(url, d):
-    """
-    Called from public code, e.g. classes in OE e.g. patch.bbclass
-    """
-    ud = init([url], d)
-    if ud[url].method:
-        ud[url].setup_localpath(d)
-        return ud[url].localpath
-    return url
+    fetcher = bb.fetch2.Fetch([url], d)
+    return fetcher.localpath(url)
 
 def runfetchcmd(cmd, d, quiet = False, cleanup = []):
     """
```
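
The `localpath()` wrapper is kept for external callers (the removed docstring named OE's patch.bbclass) but now simply delegates to the class; e.g. (URL illustrative):

```python
# One-off lookup of where a single SRC_URI entry lands locally.
path = bb.fetch2.localpath("file://defconfig", d)
```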
```diff
@@ -796,12 +666,12 @@ class FetchMethod(object):
             cmd = 'cp %s %s/%s/' % (file, rootdir, destdir)
 
         if not cmd:
-            return True
+            return
 
         dest = os.path.join(rootdir, os.path.basename(file))
         if os.path.exists(dest):
             if os.path.samefile(file, dest):
-                return True
+                return
 
         # Change to subdir before executing command
         save_cwd = os.getcwd();
```
```diff
@@ -817,7 +687,10 @@ class FetchMethod(object):
 
         os.chdir(save_cwd)
 
-        return ret == 0
+        if ret != 0:
+            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
+
+        return
 
     def try_premirror(self, url, urldata, d):
         """
```
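
With these two hunks, `FetchMethod.unpack()` no longer signals failure through its return value; callers that checked the boolean result should instead expect an exception. A hedged sketch of handling it (the fatal message is illustrative):

```python
try:
    fetcher.unpack(rootdir)
except bb.fetch2.UnpackError as exc:
    bb.fatal("Unpack failed: %s" % exc.msg)
```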
```diff
@@ -915,6 +788,158 @@ class FetchMethod(object):
         key = self._revision_key(url, ud, d, name)
         return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
 
+class Fetch(object):
+    def __init__(self, urls, d):
+        if len(urls) == 0:
+            urls = d.getVar("SRC_URI", 1).split()
+        self.urls = urls
+        self.d = d
+        self.ud = {}
+
+        fn = bb.data.getVar('FILE', d, 1)
+        if fn in urldata_cache:
+            self.ud = urldata_cache[fn]
+
+        for url in urls:
+            if url not in self.ud:
+                self.ud[url] = FetchData(url, d)
+
+        urldata_cache[fn] = self.ud
+
+    def localpath(self, url):
+        if url not in self.urls:
+            self.ud[url] = FetchData(url, self.d)
+
+        self.ud[url].setup_localpath(self.d)
+        return bb.data.expand(self.ud[url].localpath, self.d)
+
+    def localpaths(self):
+        """
+        Return a list of the local filenames, assuming successful fetch
+        """
+        local = []
+
+        for u in self.urls:
+            ud = self.ud[u]
+            ud.setup_localpath(self.d)
+            local.append(ud.localpath)
+
+        return local
+
+    def download(self, urls = []):
+        """
+        Fetch all urls
+        """
+        if len(urls) == 0:
+            urls = self.urls
+
+        for u in urls:
+            ud = self.ud[u]
+            ud.setup_localpath(self.d)
+            m = ud.method
+            localpath = ""
+
+            if not ud.localfile:
+                continue
+
+            lf = bb.utils.lockfile(ud.lockfile)
+
+            if m.try_premirror(u, ud, self.d):
+                # First try fetching uri, u, from PREMIRRORS
+                mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
+                localpath = try_mirrors(self.d, u, mirrors, False, m.forcefetch(u, ud, self.d))
+            elif os.path.exists(ud.localfile):
+                localpath = ud.localfile
+
+            download_update(localpath, ud.localpath)
+
+            # Need to re-test forcefetch() which will return true if our copy is too old
+            if m.forcefetch(u, ud, self.d) or not localpath:
+                # Next try fetching from the original uri, u
+                try:
+                    m.download(u, ud, self.d)
+                    if hasattr(m, "build_mirror_data"):
+                        m.build_mirror_data(u, ud, self.d)
+                    localpath = ud.localpath
+                    download_update(localpath, ud.localpath)
+
+                except FetchError:
+                    # Remove any incomplete file
+                    bb.utils.remove(ud.localpath)
+                    # Finally, try fetching uri, u, from MIRRORS
+                    mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
+                    localpath = try_mirrors (self.d, u, mirrors)
+
+            if not localpath or not os.path.exists(localpath):
+                raise FetchError("Unable to fetch URL %s from any source." % u, u)
+
+            download_update(localpath, ud.localpath)
+
+            if os.path.exists(ud.donestamp):
+                # Touch the done stamp file to show active use of the download
+                try:
+                    os.utime(ud.donestamp, None)
+                except:
+                    # Errors aren't fatal here
+                    pass
+            else:
+                # Only check the checksums if we've not seen this item before, then create the stamp
+                verify_checksum(u, ud, self.d)
+                open(ud.donestamp, 'w').close()
+
+            bb.utils.unlockfile(lf)
+
+    def checkstatus(self, urls = []):
+        """
+        Check all urls exist upstream
+        """
+
+        if len(urls) == 0:
+            urls = self.urls
+
+        for u in urls:
+            ud = self.ud[u]
+            ud.setup_localpath(self.d)
+            m = ud.method
+            logger.debug(1, "Testing URL %s", u)
+            # First try checking uri, u, from PREMIRRORS
+            mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
+            ret = try_mirrors(self.d, u, mirrors, True)
+            if not ret:
+                # Next try checking from the original uri, u
+                try:
+                    ret = m.checkstatus(u, ud, self.d)
+                except:
+                    # Finally, try checking uri, u, from MIRRORS
+                    mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
+                    ret = try_mirrors (self.d, u, mirrors, True)
+
+            if not ret:
+                raise FetchError("URL %s doesn't work" % u, u)
+
+    def unpack(self, root, urls = []):
+        """
+        Unpack all urls into a root directory
+        """
+
+        if len(urls) == 0:
+            urls = self.urls
+
+        for u in urls:
+            ud = self.ud[u]
+            ud.setup_localpath(self.d)
+
+            if self.localpath(u) is None:
+                continue
+
+            if ud.lockfile:
+                lf = bb.utils.lockfile(ud.lockfile)
+
+            ud.method.unpack(ud, root, self.d)
+
+            if ud.lockfile:
+                bb.utils.unlockfile(lf)
+
 from . import cvs
 from . import git
 from . import local
```
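
Each of the new methods accepts an optional URL subset and defaults to everything passed to the constructor, so callers can operate on a single entry; e.g. (URLs and workdir made up):

```python
fetcher = bb.fetch2.Fetch(["git://example.org/repo.git", "http://example.org/fix.patch"], d)
fetcher.download(["http://example.org/fix.patch"])               # fetch just one URL
fetcher.unpack("/tmp/workdir", ["http://example.org/fix.patch"]) # unpack just that URL
```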