 bitbake/lib/bb/fetch2/gitsm.py | 83
 1 file changed, 75 insertions(+), 8 deletions(-)

diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
index 0aff1008e5..86773094dc 100644
--- a/bitbake/lib/bb/fetch2/gitsm.py
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -98,7 +98,7 @@ class GitSM(Git):
                 for line in lines:
                     f.write(line)
 
-    def update_submodules(self, ud, d):
+    def update_submodules(self, ud, d, allow_network):
         # We have to convert bare -> full repo, do the submodule bit, then convert back
         tmpclonedir = ud.clonedir + ".tmp"
         gitdir = tmpclonedir + os.sep + ".git"
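The added allow_network parameter lets update_submodules() serve two callers: download() passes True and fetches submodule history over the network as before, while the new need_update() (added in the next hunk) passes False so the same routine doubles as an offline probe of whether the already-fetched history is sufficient. Stripped of the fetcher plumbing, the probe amounts to running 'git submodule update' with --no-fetch and treating failure as "needs update". A minimal standalone sketch of that idea (the helper name and direct subprocess usage are illustrative only, not part of the patch):

    import subprocess

    def submodule_history_complete(workdir):
        # Try to populate the submodules without touching the network; a
        # non-zero exit means the local clone lacks the objects the
        # gitlinks point at, i.e. an update/fetch is still needed.
        result = subprocess.run(
            ["git", "submodule", "update", "--init", "--recursive", "--no-fetch"],
            cwd=workdir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        return result.returncode == 0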
@@ -108,11 +108,46 @@ class GitSM(Git):
         runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
         runfetchcmd(ud.basecmd + " reset --hard", d, workdir=tmpclonedir)
         runfetchcmd(ud.basecmd + " checkout -f " + ud.revisions[ud.names[0]], d, workdir=tmpclonedir)
-        runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=tmpclonedir)
-        self._set_relative_paths(tmpclonedir)
-        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d, workdir=tmpclonedir)
-        os.rename(gitdir, ud.clonedir,)
-        bb.utils.remove(tmpclonedir, True)
+
+        try:
+            if allow_network:
+                fetch_flags = ""
+            else:
+                fetch_flags = "--no-fetch"
+
+            # The 'git submodule sync' sandwiched between two successive 'git submodule update' commands is
+            # intentional. See the notes on the similar construction in download() for an explanation.
+            runfetchcmd("%(basecmd)s submodule update --init --recursive %(fetch_flags)s || (%(basecmd)s submodule sync --recursive && %(basecmd)s submodule update --init --recursive %(fetch_flags)s)" % {'basecmd': ud.basecmd, 'fetch_flags': fetch_flags}, d, workdir=tmpclonedir)
+        except bb.fetch.FetchError:
+            if allow_network:
+                raise
+            else:
+                # This method was called as a probe to see whether the submodule history
+                # is complete enough to allow the current working copy to have its
+                # modules filled in. It's not, so swallow up the exception and report
+                # the negative result.
+                return False
+        finally:
+            self._set_relative_paths(tmpclonedir)
+            runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d, workdir=tmpclonedir)
+            os.rename(gitdir, ud.clonedir,)
+            bb.utils.remove(tmpclonedir, True)
+
+        return True
+
+    def need_update(self, ud, d):
+        main_repo_needs_update = Git.need_update(self, ud, d)
+
+        # First check that the main repository has enough history fetched. If it doesn't, then we don't
+        # even have the .gitmodules and gitlinks for the submodules to attempt asking whether the
+        # submodules' histories are recent enough.
+        if main_repo_needs_update:
+            return True
+
+        # Now check that the submodule histories are new enough. The git-submodule command doesn't have
+        # any clean interface for doing this aside from just attempting the checkout (with network
+        # fetching disabled).
+        return not self.update_submodules(ud, d, allow_network=False)
 
     def download(self, ud, d):
         Git.download(self, ud, d)
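The compound command added above (and repeated in the unpack-side hunk further down) encodes a retry rather than a plain invocation: the first 'git submodule update' can fail when the submodules' .git/config files still record remote URLs from another location, 'git submodule sync --recursive' rewrites those URLs from .gitmodules, and the update is then attempted once more. Expressed as explicit control flow instead of a shell one-liner, the logic is roughly the following (the function and its parameters are illustrative; the real code drives everything through runfetchcmd()):

    def submodule_update_with_sync_retry(run, git, workdir, fetch_flags=""):
        # 'run' stands in for any helper that executes a shell command in
        # 'workdir' and raises on failure, in the spirit of runfetchcmd().
        update_cmd = "%s submodule update --init --recursive %s" % (git, fetch_flags)
        try:
            run(update_cmd, workdir)
        except Exception:
            # Re-point the submodule remotes at the URLs from .gitmodules,
            # then retry the update once.
            run("%s submodule sync --recursive" % git, workdir)
            run(update_cmd, workdir)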
@@ -120,7 +155,7 @@ class GitSM(Git):
         if not ud.shallow or ud.localpath != ud.fullshallow:
             submodules = self.uses_submodules(ud, d, ud.clonedir)
             if submodules:
-                self.update_submodules(ud, d)
+                self.update_submodules(ud, d, allow_network=True)
 
     def clone_shallow_local(self, ud, dest, d):
         super(GitSM, self).clone_shallow_local(ud, dest, d)
@@ -132,4 +167,36 @@ class GitSM(Git):
 
         if self.uses_submodules(ud, d, ud.destdir):
             runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=ud.destdir)
-            runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=ud.destdir)
+
+            # Copy over the submodules' fetched histories too.
+            if ud.bareclone:
+                repo_conf = ud.destdir
+            else:
+                repo_conf = os.path.join(ud.destdir, '.git')
+
+            if os.path.exists(ud.clonedir):
+                # This is not a copy unpacked from a shallow mirror clone. So
+                # the manual intervention to populate the .git/modules done
+                # in clone_shallow_local() won't have been done yet.
+                runfetchcmd("cp -fpPRH %s %s" % (os.path.join(ud.clonedir, 'modules'), repo_conf), d)
+                fetch_flags = "--no-fetch"
+            elif os.path.exists(os.path.join(repo_conf, 'modules')):
+                # Unpacked from a shallow mirror clone. Manual population of
+                # .git/modules is already done.
+                fetch_flags = "--no-fetch"
+            else:
+                # This isn't fatal; git-submodule will just fetch it
+                # during do_unpack().
+                fetch_flags = ""
+                bb.error("submodule history not retrieved during do_fetch()")
+
+            # Careful not to hit the network during unpacking; all history should already
+            # be fetched.
+            #
+            # The repeated attempts to do the submodule initialization sandwiched around a sync to
+            # install the correct remote URLs into the submodules' .git/config metadata are deliberate.
+            # Bad remote URLs are leftover in the modules' .git/config files from the unpack of bare
+            # clone tarballs and an initial 'git submodule update' is necessary to prod them back to
+            # enough life so that the 'git submodule sync' realizes the existing module .git/config
+            # files exist to be updated.
+            runfetchcmd("%(basecmd)s submodule update --init --recursive %(fetch_flags)s || (%(basecmd)s submodule sync --recursive && %(basecmd)s submodule update --init --recursive %(fetch_flags)s)" % {'basecmd': ud.basecmd, 'fetch_flags': fetch_flags}, d, workdir=ud.destdir)
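During unpack, the submodule objects fetched into the bare mirror's modules directory by do_fetch() have to end up under the unpacked tree's .git/ (or the tree itself for a bare clone) so that the final 'git submodule update --no-fetch' finds them without going to the network. A condensed sketch of that flow for the common non-bare case (paths and the helper name are illustrative; the patch itself routes both commands through runfetchcmd()):

    import os
    import subprocess

    def populate_submodules_offline(clonedir, destdir):
        # Mirror the submodule histories fetched during do_fetch() into the
        # unpacked tree, then initialize the submodules without network access.
        repo_conf = os.path.join(destdir, '.git')
        subprocess.run(["cp", "-fpPRH", os.path.join(clonedir, "modules"), repo_conf],
                       check=True)
        subprocess.run(["git", "submodule", "update", "--init", "--recursive", "--no-fetch"],
                       cwd=destdir, check=True)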