 bitbake/lib/bb/fetch2/__init__.py | 59
 bitbake/lib/bb/fetch2/bzr.py      |  4
 bitbake/lib/bb/fetch2/git.py      | 66
 bitbake/lib/bb/fetch2/hg.py       |  4
 bitbake/lib/bb/fetch2/svn.py      |  4
 5 files changed, 73 insertions(+), 64 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index d62ba81b36..41848edd29 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -394,8 +394,8 @@ def get_srcrev(d):
         logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
         raise ParameterError
 
-    if len(scms) == 1:
-        return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)
+    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
+        return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d, urldata[scms[0]].names[0])
 
     #
     # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
@@ -406,9 +406,9 @@ def get_srcrev(d):
         raise ParameterError
 
     for scm in scms:
-        if 'name' in urldata[scm].parm:
-            name = urldata[scm].parm["name"]
-            rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d)
+        ud = urldata[scm]
+        for name in ud.names:
+            rev = ud.method.sortable_revision(scm, ud, d, name)
             format = format.replace(name, rev)
 
     return format
@@ -550,16 +550,29 @@ class FetchData(object):
         self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
         self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)
 
+        self.names = self.parm.get("name",'').split(',')
         for m in methods:
             if m.supports(url, self, d):
                 self.method = m
                 if hasattr(m,"urldata_init"):
                     m.urldata_init(self, d)
                 if m.supports_srcrev():
-                    self.revision = Fetch.srcrev_internal_helper(self, d);
+                    self.setup_srcrevs(d)
                 return
         raise NoMethodError("Missing implementation for url %s" % url)
 
+    def setup_srcrevs(self, d):
+        if not self.method.supports_srcrev():
+            return
+
+        self.revisions = {}
+        for name in self.names:
+            self.revisions[name] = Fetch.srcrev_internal_helper(self, d, name)
+
+        # add compatibility code for non name specified case
+        if len(self.names) == 1:
+            self.revision = self.revisions[self.names[0]]
+
     def setup_localpath(self, d):
         self.setup = True
         if "localpath" in self.parm:
@@ -757,7 +770,7 @@ class Fetch(object):
         return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
     getSRCDate = staticmethod(getSRCDate)
 
-    def srcrev_internal_helper(ud, d):
+    def srcrev_internal_helper(ud, d, name):
         """
         Return:
             a) a source revision if specified
@@ -772,25 +785,25 @@ class Fetch(object):
             return ud.parm['tag']
 
         rev = None
-        if 'name' in ud.parm:
+        if name != '':
             pn = data.getVar("PN", d, 1)
-            rev = data.getVar("SRCREV_%s_pn-%s" % (ud.parm['name'], pn), d, 1)
+            rev = data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, 1)
             if not rev:
-                rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1)
+                rev = data.getVar("SRCREV_pn-%s_%s" % (pn, name), d, 1)
             if not rev:
-                rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1)
+                rev = data.getVar("SRCREV_%s" % name, d, 1)
         if not rev:
             rev = data.getVar("SRCREV", d, 1)
         if rev == "INVALID":
             raise InvalidSRCREV("Please set SRCREV to a valid value")
         if rev == "AUTOINC":
-            rev = ud.method.latest_revision(ud.url, ud, d)
+            rev = ud.method.latest_revision(ud.url, ud, d, name)
 
         return rev
 
     srcrev_internal_helper = staticmethod(srcrev_internal_helper)
 
-    def localcount_internal_helper(ud, d):
+    def localcount_internal_helper(ud, d, name):
         """
         Return:
             a) a locked localcount if specified
@@ -798,9 +811,9 @@ class Fetch(object):
         """
 
         localcount = None
-        if 'name' in ud.parm:
+        if name != '':
             pn = data.getVar("PN", d, 1)
-            localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
+            localcount = data.getVar("LOCALCOUNT_" + name, d, 1)
         if not localcount:
             localcount = data.getVar("LOCALCOUNT", d, 1)
         return localcount
@@ -829,7 +842,7 @@ class Fetch(object):
         md5out.close()
     write_md5sum = staticmethod(write_md5sum)
 
-    def latest_revision(self, url, ud, d):
+    def latest_revision(self, url, ud, d, name):
         """
         Look in the cache for the latest revision, if not present ask the SCM.
         """
@@ -838,15 +851,15 @@ class Fetch(object):
 
         pd = persist_data.persist(d)
         revs = pd['BB_URI_HEADREVS']
-        key = self.generate_revision_key(url, ud, d)
+        key = self.generate_revision_key(url, ud, d, name)
         rev = revs[key]
         if rev != None:
             return str(rev)
 
-        revs[key] = rev = self._latest_revision(url, ud, d)
+        revs[key] = rev = self._latest_revision(url, ud, d, name)
         return rev
 
-    def sortable_revision(self, url, ud, d):
+    def sortable_revision(self, url, ud, d, name):
         """
 
         """
@@ -855,9 +868,9 @@ class Fetch(object):
 
         pd = persist_data.persist(d)
         localcounts = pd['BB_URI_LOCALCOUNT']
-        key = self.generate_revision_key(url, ud, d)
+        key = self.generate_revision_key(url, ud, d, name)
 
-        latest_rev = self._build_revision(url, ud, d)
+        latest_rev = self._build_revision(url, ud, d, name)
         last_rev = localcounts[key + '_rev']
         uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
         count = None
@@ -885,8 +898,8 @@ class Fetch(object):
 
         return str(count + "+" + latest_rev)
 
-    def generate_revision_key(self, url, ud, d):
-        key = self._revision_key(url, ud, d)
+    def generate_revision_key(self, url, ud, d, name):
+        key = self._revision_key(url, ud, d, name)
         return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
 
 from . import cvs
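
The changes above replace the single optional "name" parameter with a list of names, so SRCREV_FORMAT substitution can combine one revision per name. Below is a minimal sketch of the lookup order the new srcrev_internal_helper uses; a plain dict stands in for the BitBake datastore, and the recipe name and revision values are hypothetical.

    # Sketch of the per-name SRCREV lookup order; sample values are made up.
    def srcrev_for_name(d, pn, name):
        candidates = [
            "SRCREV_%s_pn-%s" % (name, pn),   # most specific: name plus recipe
            "SRCREV_pn-%s_%s" % (pn, name),
            "SRCREV_%s" % name,
            "SRCREV",                          # shared fallback for all names
        ]
        for var in candidates:
            rev = d.get(var)
            if rev:
                return rev
        return None

    d = {"SRCREV_meta": "a1b2c3d", "SRCREV_mcu": "e4f5a6b", "SRCREV_FORMAT": "meta_mcu"}
    fmt = d["SRCREV_FORMAT"]
    for name in ("meta", "mcu"):
        fmt = fmt.replace(name, srcrev_for_name(d, "example-recipe", name))
    print(fmt)  # -> "a1b2c3d_e4f5a6b"
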
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py
index 6e1970b715..80c02f835b 100644
--- a/bitbake/lib/bb/fetch2/bzr.py
+++ b/bitbake/lib/bb/fetch2/bzr.py
@@ -120,13 +120,13 @@ class Bzr(Fetch):
     def supports_srcrev(self):
         return True
 
-    def _revision_key(self, url, ud, d):
+    def _revision_key(self, url, ud, d, name):
         """
         Return a unique key for the url
         """
         return "bzr:" + ud.pkgdir
 
-    def _latest_revision(self, url, ud, d):
+    def _latest_revision(self, url, ud, d, name):
         """
         Return the latest upstream revision number
         """
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index 07af02f061..c54d826a01 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -57,7 +57,13 @@ class Git(Fetch):
         if 'nocheckout' in ud.parm:
             ud.nocheckout = True
 
-        ud.branch = ud.parm.get("branch", "master")
+        branches = ud.parm.get("branch", "master").split(',')
+        if len(branches) != len(ud.names):
+            raise bb.fetch2.ParameterError("SRC_URI (%) name and branch number mismatch" % ud.url)
+        ud.branches = {}
+        for name in ud.names:
+            branch = branches[ud.names.index(name)]
+            ud.branches[name] = branch
 
         gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
         ud.mirrortarball = 'git_%s.tar.gz' % (gitsrcname)
@@ -66,25 +72,18 @@ class Git(Fetch):
         ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
 
     def localpath(self, url, ud, d):
-        ud.tag = ud.revision
-        if not ud.tag or ud.tag == "master":
-            ud.tag = self.latest_revision(url, ud, d)
+        for name in ud.names:
+            if not ud.revisions[name] or ud.revisions[name] == "master":
+                ud.revisions[name] = self.latest_revision(url, ud, d, name)
 
         ud.localfile = ud.mirrortarball
 
-        if 'noclone' in ud.parm:
-            ud.localfile = None
-            return None
-
         return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
 
     def forcefetch(self, url, ud, d):
-        if 'fullclone' in ud.parm:
-            return True
-        if 'noclone' in ud.parm:
-            return False
-        if not self._contains_ref(ud.tag, d):
-            return True
+        for name in ud.names:
+            if not self._contains_ref(ud.revisions[name], d):
+                return True
         return False
 
     def try_premirror(self, u, ud, d):
@@ -122,18 +121,15 @@ class Git(Fetch):
 
         os.chdir(ud.clonedir)
         # Update the checkout if needed
-        if not self._contains_ref(ud.tag, d) or 'fullclone' in ud.parm:
-            # Remove all but the .git directory
-            bb.fetch2.check_network_access(d, "git fetch %s%s" %(ud.host, ud.path))
-            runfetchcmd("rm * -Rf", d)
-            if 'fullclone' in ud.parm:
-                runfetchcmd("%s fetch --all" % (ud.basecmd), d)
-            else:
-                runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d)
-            runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
-            runfetchcmd("%s prune-packed" % ud.basecmd, d)
-            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
-            ud.repochanged = True
+        for name in ud.names:
+            if not self._contains_ref(ud.revisions[name], d):
+                # Remove all but the .git directory
+                bb.fetch2.check_network_access(d, "git fetch %s%s" %(ud.host, ud.path))
+                runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branches[name]), d)
+                runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
+                runfetchcmd("%s prune-packed" % ud.basecmd, d)
+                runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
+                ud.repochanged = True
 
     def build_mirror_data(self, url, ud, d):
         # Generate a mirror tarball if needed
@@ -141,7 +137,7 @@ class Git(Fetch):
 
         os.chdir(ud.clonedir)
         mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
-        if (mirror_tarballs != "0" or 'fullclone' in ud.parm) and ud.repochanged:
+        if mirror_tarballs != "0" and ud.repochanged:
             logger.info("Creating tarball of git repository")
             runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
 
@@ -165,7 +161,7 @@ class Git(Fetch):
         runfetchcmd("cp -af %s/.git/packed-refs %s/.git/" %(ud.clonedir, destdir), d)
         if not ud.nocheckout:
             os.chdir(destdir)
-            runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
+            runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
             runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
         return True
 
@@ -177,13 +173,13 @@ class Git(Fetch):
         output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True)
         return output.split()[0] != "0"
 
-    def _revision_key(self, url, ud, d):
+    def _revision_key(self, url, ud, d, name):
         """
         Return a unique key for the url
         """
-        return "git:" + ud.host + ud.path.replace('/', '.') + ud.branch
+        return "git:" + ud.host + ud.path.replace('/', '.') + ud.branches[name]
 
-    def _latest_revision(self, url, ud, d):
+    def _latest_revision(self, url, ud, d, name):
         """
         Compute the HEAD revision for the url
         """
@@ -192,16 +188,16 @@ class Git(Fetch):
         else:
             username = ""
 
-        bb.fetch2.check_network_access(d, "git ls-remote %s%s %s" % (ud.host, ud.path, ud.branch))
+        bb.fetch2.check_network_access(d, "git ls-remote %s%s %s" % (ud.host, ud.path, ud.branches[name]))
         basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
-        cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branch)
+        cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branches[name])
         output = runfetchcmd(cmd, d, True)
         if not output:
             raise bb.fetch2.FetchError("Fetch command %s gave empty output\n" % (cmd))
         return output.split()[0]
 
-    def _build_revision(self, url, ud, d):
-        return ud.tag
+    def _build_revision(self, url, ud, d, name):
+        return ud.revisions[name]
 
     def _sortable_buildindex_disabled(self, url, ud, d, rev):
         """
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
index 4ba28c7100..13e9b8673b 100644
--- a/bitbake/lib/bb/fetch2/hg.py
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -163,7 +163,7 @@ class Hg(Fetch):
     def supports_srcrev(self):
         return True
 
-    def _latest_revision(self, url, ud, d):
+    def _latest_revision(self, url, ud, d, name):
         """
         Compute tip revision for the url
         """
@@ -174,7 +174,7 @@ class Hg(Fetch):
     def _build_revision(self, url, ud, d):
         return ud.revision
 
-    def _revision_key(self, url, ud, d):
+    def _revision_key(self, url, ud, d, name):
         """
         Return a unique key for the url
         """
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
index 8d768026be..96d5b1683a 100644
--- a/bitbake/lib/bb/fetch2/svn.py
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -172,13 +172,13 @@ class Svn(Fetch):
     def supports_srcrev(self):
         return True
 
-    def _revision_key(self, url, ud, d):
+    def _revision_key(self, url, ud, d, name):
         """
         Return a unique key for the url
         """
         return "svn:" + ud.moddir
 
-    def _latest_revision(self, url, ud, d):
+    def _latest_revision(self, url, ud, d, name):
         """
         Return the latest upstream revision number
         """
