author     Mark Hatle <mark.hatle@windriver.com>                2019-01-15 16:31:35 -0500
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2019-01-16 15:35:08 +0000
commit     cd1430e379c9c63bacd4d0ccce0fc567d98a1e4a
tree       ad4b3cd621244bf714f9aafbb00c50cf8bbe29d0 /bitbake/lib
parent     80f41edbf7a3d2da1625b5dcc15902054ed014de
download   poky-cd1430e379c9c63bacd4d0ccce0fc567d98a1e4a.tar.gz
bitbake: gitsm.py: revise unpack
Greatly simplify the unpack rule by copying the general functionality of
update_submodules as unpack_submodules. This will recursively construct
a set of urls and unpack them using the standard system behaviors.
The overall code may be slightly bigger, but this ensures that all of the
standard locks are in place, so the code doesn't change out from under
the unpack function. (This could have happened before due to using
'cp' instead of further unpacks on submodules. This may still happen in
shallow clones.)
(Bitbake rev: 7d7ee630f1c65e7dd234f945edf5e3b3bcb0fc30)
Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'bitbake/lib')
-rw-r--r--  bitbake/lib/bb/fetch2/gitsm.py | 116
1 file changed, 78 insertions(+), 38 deletions(-)
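The heart of the change is rewriting each .gitmodules URL into a gitsm:// SRC_URI that the normal fetcher machinery can handle. The standalone sketch below restates that rewriting outside the class; the helper name and the example URL are hypothetical, and only the rewriting rules mirror the patch that follows.

```python
# Hypothetical helper (not part of the patch) illustrating how a .gitmodules
# URL is rewritten into a gitsm:// SRC_URI entry, mirroring the logic below.
def submodule_to_gitsm_url(uri, name, path):
    if "://" not in uri:
        # No scheme: an scp-style "host:path" URL means ssh, otherwise treat
        # the entry as a plain local path and fall back to the file protocol.
        if ":" in uri:
            proto = "ssh"
            sep = ':/' if ':/' in uri else ':'
            url = "gitsm://" + uri.replace(sep, '/', 1)
        else:
            proto = "file"
            url = "gitsm://" + uri
    else:
        proto = uri.split(':', 1)[0]
        url = uri.replace('%s:' % proto, 'gitsm:', 1)

    # Bare, branch-less clone restricted to the submodule's subpath, named
    # after the submodule so a per-module SRCREV can be attached to it.
    return url + ";protocol=%s;name=%s;bareclone=1;nobranch=1;subpath=%s" % (proto, name, path)

# Example (hypothetical URL):
#   submodule_to_gitsm_url("git@host.example:org/lib.git", "lib", "ext/lib")
#   -> "gitsm://git@host.example/org/lib.git;protocol=ssh;name=lib;bareclone=1;nobranch=1;subpath=ext/lib"
```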
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
index 11bfa6684f..c172ab1660 100644
--- a/bitbake/lib/bb/fetch2/gitsm.py
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -213,61 +213,101 @@ class GitSM(Git):
             submodules = self.parse_gitmodules(gitmodules)
             self.copy_submodules(submodules, ud, name, dest, d)
 
-    def unpack(self, ud, destdir, d):
-        Git.unpack(self, ud, destdir, d)
-
-        # Copy over the submodules' fetched histories too.
-        if ud.bareclone:
-            repo_conf = ud.destdir
-        else:
-            repo_conf = os.path.join(ud.destdir, '.git')
-
-        update_submodules = False
+    def unpack_submodules(self, repo_conf, ud, d):
+        submodules = []
         paths = {}
+        revision = {}
         uris = {}
         local_paths = {}
+
         for name in ud.names:
             try:
-                gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
+                gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=ud.destdir)
             except:
                 # No submodules to update
                 continue
 
-            submodules = self.parse_gitmodules(gitmodules)
-            self.copy_submodules(submodules, ud, name, ud.destdir, d)
+            for m, md in self.parse_gitmodules(gitmodules).items():
+                submodules.append(m)
+                paths[m] = md['path']
+                revision[m] = ud.revisions[name]
+                uris[m] = md['url']
+                if uris[m].startswith('..'):
+                    newud = copy.copy(ud)
+                    newud.path = os.path.realpath(os.path.join(newud.path, md['url']))
+                    uris[m] = Git._get_repo_url(self, newud)
 
-        submodules_queue = [(module, os.path.join(repo_conf, 'modules', md['path'])) for module, md in submodules.items()]
-        while len(submodules_queue) != 0:
-            module, modpath = submodules_queue.pop()
+        modules_updated = False
 
-            # add submodule children recursively
-            try:
-                gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=modpath)
-                for m, md in self.parse_gitmodules(gitmodules).items():
-                    submodules_queue.append([m, os.path.join(modpath, 'modules', md['path'])])
-            except:
-                # no children
-                pass
+        for module in submodules:
+            try:
+                module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, revision[module], paths[module]), d, quiet=True, workdir=ud.destdir)
+            except:
+                # If the command fails, we don't have a valid file to check. If it doesn't
+                # fail -- it still might be a failure, see next check...
+                module_hash = ""
 
+            if not module_hash:
+                logger.debug(1, "submodule %s is defined, but is not initialized in the repository. Skipping", module)
+                continue
 
-            # There are submodules to update
-            update_submodules = True
+            modules_updated = True
 
-            # Determine (from the submodule) the correct url to reference
-            try:
-                output = runfetchcmd("%(basecmd)s config remote.origin.url" % {'basecmd': ud.basecmd}, d, workdir=modpath)
-            except bb.fetch2.FetchError as e:
-                # No remote url defined in this submodule
-                continue
+            module_hash = module_hash.split()[2]
 
-            local_paths[module] = output
+            # Build new SRC_URI
+            if "://" not in uris[module]:
+                # It's ssh if the format does NOT have "://", but has a ':'
+                if ":" in uris[module]:
+                    proto = "ssh"
+                    if ":/" in uris[module]:
+                        url = "gitsm://" + uris[module].replace(':/', '/', 1)
+                    else:
+                        url = "gitsm://" + uris[module].replace(':', '/', 1)
+                else: # Fall back to 'file' if there is no ':'
+                    proto = "file"
+                    url = "gitsm://" + uris[module]
+            else:
+                proto = uris[module].split(':', 1)[0]
+                url = uris[module].replace('%s:' % proto, 'gitsm:', 1)
 
-            # Setup the local URL properly (like git submodule init or sync would do...)
-            runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_paths[module]}, d, workdir=ud.destdir)
+            url += ';protocol=%s' % proto
+            url += ";name=%s" % module
+            url += ";bareclone=1;nobranch=1;subpath=%s" % paths[module]
+
+            ld = d.createCopy()
+            # Not necessary to set SRC_URI, since we're passing the URI to
+            # Fetch.
+            #ld.setVar('SRC_URI', url)
+            ld.setVar('SRCREV_%s' % module, module_hash)
 
-            # Ensure the submodule repository is NOT set to bare, since we're checking it out...
-            runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=modpath)
+            # Workaround for issues with SRCPV/SRCREV_FORMAT errors
+            # error refer to 'multiple' repositories. Only the repository
+            # in the original SRC_URI actually matters...
+            ld.setVar('SRCPV', d.getVar('SRCPV'))
+            ld.setVar('SRCREV_FORMAT', module)
+
+            newfetch = Fetch([url], ld, cache=False)
+            newfetch.unpack(root=os.path.join(repo_conf, 'modules'))
+            local_paths[module] = newfetch.localpath(url)
+
+            # Correct the submodule references to the local download version...
+            runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_paths[module]}, d, workdir=ud.destdir)
+
+            # Ensure the submodule repository is NOT set to bare, since we're checking it out...
+            runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=os.path.join(repo_conf, 'modules', paths[module]))
+
+        return modules_updated
+
+    def unpack(self, ud, destdir, d):
+        Git.unpack(self, ud, destdir, d)
+
+        # Copy over the submodules' fetched histories too.
+        if ud.bareclone:
+            repo_conf = ud.destdir
+        else:
+            repo_conf = os.path.join(ud.destdir, '.git')
 
-        if update_submodules:
+        if self.unpack_submodules(repo_conf, ud, d):
             # Run submodule update, this sets up the directories -- without touching the config
             runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
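For reference, the per-submodule unpack cycle introduced above (copy the datastore, pin SRCREV_<name> to the commit recorded in the parent tree, then hand the rewritten URL to a fresh Fetch) can be condensed into the short sketch below. The helper name is assumed and it only runs inside a BitBake fetcher environment; the individual calls are the same ones the patch uses.

```python
import os
from bb.fetch2 import Fetch  # BitBake's generic fetcher front-end

# Hypothetical helper condensing the per-module logic from unpack_submodules().
def unpack_one_submodule(d, repo_conf, module, url, module_hash):
    ld = d.createCopy()
    # Pin the submodule to the commit recorded in the parent repository's tree.
    ld.setVar('SRCREV_%s' % module, module_hash)
    # Keep SRCPV/SRCREV_FORMAT tied to the repository in the original SRC_URI.
    ld.setVar('SRCPV', d.getVar('SRCPV'))
    ld.setVar('SRCREV_FORMAT', module)

    # A fresh Fetch instance unpacks the bare clone under <repo_conf>/modules,
    # which is where "git submodule update" expects to find it.
    newfetch = Fetch([url], ld, cache=False)
    newfetch.unpack(root=os.path.join(repo_conf, 'modules'))
    return newfetch.localpath(url)
```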