 bitbake/lib/bb/fetch2/__init__.py | 112 ++++++++++++++++++++++++++++++++----
 1 file changed, 102 insertions(+), 10 deletions(-)

diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 599ea8c822..b004dae0d4 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -45,6 +45,13 @@ _checksum_cache = bb.checksum.FileChecksumCache()
 
 logger = logging.getLogger("BitBake.Fetcher")
 
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+    logger.info("Importing cPickle failed. "
+                "Falling back to a very slow implementation.")
+
 class BBFetchException(Exception):
     """Class all fetch exceptions inherit from"""
     def __init__(self, message):
@@ -525,7 +532,7 @@ def fetcher_compare_revisions(d):
 def mirror_from_string(data):
     return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
 
-def verify_checksum(ud, d):
+def verify_checksum(ud, d, precomputed={}):
     """
     verify the MD5 and SHA256 checksum for downloaded src
 
@@ -533,13 +540,28 @@ def verify_checksum(ud, d):
     the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
     checksums specified.
 
+    Returns a dict of checksums that can be stored in a done stamp file and
+    passed in as precomputed parameter in a later call to avoid re-computing
+    the checksums from the file. This allows verifying the checksums of the
+    file against those in the recipe each time, rather than only after
+    downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
     """
 
+    _MD5_KEY = "md5"
+    _SHA256_KEY = "sha256"
+
     if ud.ignore_checksums or not ud.method.supports_checksum(ud):
-        return
+        return {}
 
-    md5data = bb.utils.md5_file(ud.localpath)
-    sha256data = bb.utils.sha256_file(ud.localpath)
+    if _MD5_KEY in precomputed:
+        md5data = precomputed[_MD5_KEY]
+    else:
+        md5data = bb.utils.md5_file(ud.localpath)
+
+    if _SHA256_KEY in precomputed:
+        sha256data = precomputed[_SHA256_KEY]
+    else:
+        sha256data = bb.utils.sha256_file(ud.localpath)
 
     if ud.method.recommends_checksum(ud):
         # If strict checking enabled and neither sum defined, raise error
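The precomputed parameter added above effectively turns verify_checksum() into a read-through cache: a digest is computed from the file only when the caller has no stored value for it. A minimal, self-contained sketch of the same pattern follows; the helper names and the sha256-only scope are illustrative, not BitBake API:

    import hashlib

    def sha256_file(path):
        # Hash in fixed-size chunks so large downloads are never fully in memory.
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(65536), b""):
                h.update(chunk)
        return h.hexdigest()

    def verify(path, expected, precomputed={}):
        # Reuse a stored digest when present, mirroring verify_checksum().
        actual = precomputed.get("sha256") or sha256_file(path)
        if actual != expected:
            raise ValueError("sha256 mismatch: got %s, expected %s"
                             % (actual, expected))
        # Return the digests so the caller can persist them for next time.
        return {"sha256": actual}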
@@ -589,6 +611,72 @@ def verify_checksum(ud, d):
         if len(msg):
             raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)
 
+    return {
+        _MD5_KEY: md5data,
+        _SHA256_KEY: sha256data
+    }
+
+
+def verify_donestamp(ud, d):
+    """
+    Check whether the done stamp file has the right checksums (if the fetch
+    method supports them). If it doesn't, delete the done stamp and force
+    a re-download.
+
+    Returns True if the donestamp exists and is valid, False otherwise. When
+    returning False, any existing done stamps are removed.
+    """
+    if not os.path.exists(ud.donestamp):
+        return False
+
+    if not ud.method.supports_checksum(ud):
+        # done stamp exists, checksums not supported; assume the local file is
+        # current
+        return True
+
+    if not os.path.exists(ud.localpath):
+        # done stamp exists, but the downloaded file does not; the done stamp
+        # must be incorrect, re-trigger the download
+        bb.utils.remove(ud.donestamp)
+        return False
+
+    precomputed_checksums = {}
+    # Only re-use the precomputed checksums if the donestamp is newer than the
+    # file. Do not rely on the mtime of directories, though. If ud.localpath is
+    # a directory, there will probably not be any checksums anyway.
+    if (os.path.isdir(ud.localpath) or
+            os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
+        try:
+            with open(ud.donestamp, "rb") as cachefile:
+                pickled = pickle.Unpickler(cachefile)
+                precomputed_checksums.update(pickled.load())
+        except Exception as e:
+            # Avoid the warnings on the upgrade path from empty done stamp
+            # files to those containing the checksums.
+            if not isinstance(e, EOFError):
+                # Ignore errors, they aren't fatal
+                logger.warn("Couldn't load checksums from donestamp %s: %s "
+                            "(msg: %s)" % (ud.donestamp, type(e).__name__,
+                                           str(e)))
+
+    try:
+        checksums = verify_checksum(ud, d, precomputed_checksums)
+        # If the cache file did not have the checksums, compute and store them
+        # as an upgrade path from the previous done stamp file format.
+        if checksums != precomputed_checksums:
+            with open(ud.donestamp, "wb") as cachefile:
+                p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
+                p.dump(checksums)
+        return True
+    except ChecksumError as e:
+        # Checksums failed to verify, trigger re-download and remove the
+        # incorrect stamp file.
+        logger.warn("Checksum mismatch for local file %s\n"
+                    "Cleaning and trying again." % ud.localpath)
+        rename_bad_checksum(ud, e.checksum)
+        bb.utils.remove(ud.donestamp)
+        return False
+
 
 def update_stamp(ud, d):
     """
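The done stamp that verify_donestamp() reads is nothing more than a pickled dict of checksums stored beside the download. A round-trip sketch of that format, including the EOFError tolerance for zero-byte old-format stamps (the stamp path and digest values here are purely illustrative; the digests are those of an empty file):

    import pickle

    stamp = "/tmp/example.tar.gz.done"  # hypothetical stamp path

    # Write: what a new-format stamp contains after verification succeeds.
    checksums = {"md5": "d41d8cd98f00b204e9800998ecf8427e",
                 "sha256": "e3b0c44298fc1c149afbf4c8996fb924"
                           "27ae41e4649b934ca495991b7852b855"}
    with open(stamp, "wb") as cachefile:
        pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL).dump(checksums)

    # Read: what verify_donestamp() attempts on the next fetch. A zero-byte
    # stamp from the old format raises EOFError, which is deliberately
    # treated as "no precomputed checksums" rather than a warning.
    precomputed = {}
    try:
        with open(stamp, "rb") as cachefile:
            precomputed.update(pickle.Unpickler(cachefile).load())
    except EOFError:
        pass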
@@ -603,8 +691,11 @@ def update_stamp(ud, d):
             # Errors aren't fatal here
             pass
     else:
-        verify_checksum(ud, d)
-        open(ud.donestamp, 'w').close()
+        checksums = verify_checksum(ud, d)
+        # Store the checksums for later re-verification against the recipe
+        with open(ud.donestamp, "wb") as cachefile:
+            p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
+            p.dump(checksums)
 
 def subprocess_setup():
     # Python installs a SIGPIPE handler by default. This is usually not what
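Note also the freshness guard in verify_donestamp() above: cached checksums are only trusted when the stamp is newer than the downloaded file, since a payload modified after stamping may no longer match what was recorded. A standalone restatement of that guard, with hypothetical parameter names rather than the BitBake objects:

    import os

    def stamp_is_fresh(localpath, donestamp):
        # Directories don't carry a meaningful content mtime here, and they
        # won't have cached checksums anyway, so accept the stamp as-is.
        if os.path.isdir(localpath):
            return True
        # For regular files, the stamp must postdate the payload it describes.
        return os.path.getmtime(localpath) < os.path.getmtime(donestamp)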
@@ -805,7 +896,7 @@ def try_mirror_url(origud, ud, ld, check = False):
 
         os.chdir(ld.getVar("DL_DIR", True))
 
-        if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld):
+        if not verify_donestamp(ud, ld) or ud.method.need_update(ud, ld):
             ud.method.download(ud, ld)
             if hasattr(ud.method,"build_mirror_data"):
                 ud.method.build_mirror_data(ud, ld)
@@ -821,12 +912,13 @@
         dldir = ld.getVar("DL_DIR", True)
         if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                 and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
+            # Create donestamp in old format to avoid triggering a re-download
             bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
             open(ud.donestamp, 'w').close()
             dest = os.path.join(dldir, os.path.basename(ud.localpath))
             if not os.path.exists(dest):
                 os.symlink(ud.localpath, dest)
-            if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld):
+            if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
                 origud.method.download(origud, ld)
                 if hasattr(origud.method,"build_mirror_data"):
                     origud.method.build_mirror_data(origud, ld)
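The mirror-tarball branch above intentionally writes an empty, old-format stamp instead of pickled checksums: the checksums in the recipe describe the final artifact, not the intermediate mirror tarball, so there is nothing meaningful to store. An empty stamp is safe precisely because unpickling zero bytes raises EOFError, the case verify_donestamp() swallows. For example (path hypothetical):

    import os
    import pickle

    stamp = "/tmp/mirror-tarball.done"  # hypothetical
    open(stamp, 'w').close()            # old-format stamp: zero bytes

    try:
        with open(stamp, "rb") as f:
            pickle.Unpickler(f).load()
    except EOFError:
        # verify_donestamp()'s upgrade path: no cached checksums yet, so
        # they are recomputed from the file and written back on success.
        print("empty stamp; checksums will be recomputed and stored")

    os.remove(stamp)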
@@ -1422,7 +1514,7 @@ class Fetch(object):
             try:
                 self.d.setVar("BB_NO_NETWORK", network)
 
-                if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d):
+                if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
                     localpath = ud.localpath
                 elif m.try_premirror(ud, self.d):
                     logger.debug(1, "Trying PREMIRRORS")
@@ -1435,7 +1527,7 @@
                 os.chdir(self.d.getVar("DL_DIR", True))
 
                 firsterr = None
-                if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)):
+                if not localpath and ((not verify_donestamp(ud, self.d)) or m.need_update(ud, self.d)):
                     try:
                         logger.debug(1, "Trying Upstream")
                         m.download(ud, self.d)
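Taken together, these call-site changes replace the old short-circuit test "does a done stamp exist" with "does a done stamp exist and do its recorded checksums still match the recipe". A condensed model of the resulting flow; all names are hypothetical stand-ins for BitBake's FetchData and fetch-method objects, not real API:

    def fetch(localpath, stamp_valid, need_update, download, update_stamp):
        # stamp_valid models verify_donestamp(): it has already re-verified
        # the stored checksums against the recipe, unlike the old
        # os.path.exists(ud.donestamp) test it replaces.
        if stamp_valid and not need_update:
            return localpath
        download()       # premirror / upstream / mirror attempts
        update_stamp()   # verify checksums and pickle them into the stamp
        return localpath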
