author     Richard Purdie <rpurdie@linux.intel.com>    2010-10-19 12:30:21 +0100
committer  Richard Purdie <rpurdie@linux.intel.com>    2010-10-19 12:47:45 +0100
commit     05c8ee60f164bec5a34e432312c0e65e927b40cf
tree       9a4ac23d7e58259ecaaebd11eded27c36c4604be /bitbake/lib/bb/fetch
parent     1077021f7010388e889d85630b13c4c7901d7718
download   poky-05c8ee60f164bec5a34e432312c0e65e927b40cf.tar.gz
bitbake/fetcher: Deal with a ton of different bugs
The more we try to patch up the fetcher code, the more things break. The
code blocks in question are practically unreadable and full of corner
cases where fetching could fail. In summary, the issues noticed included:
a) Always fetching strange, broken URLs from the premirror for "noclone"
   git repositories
b) Not creating or rewriting .md5 stamp files consistently
c) Always fetching git source mirror tarballs from the premirror even
   if they already exist but the checkout directory does not
d) Passing "None" values to os.access() and os.path.exists() checks under
   certain circumstances
e) Not using fetched git mirror tarballs if they preexist, and always
   trying to fetch them again.
This patch rewrites the sections of code in question to be simpler and
more readable, fixing the above problems and most likely other odd
corner cases.
Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
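
For orientation, here is a minimal sketch of the per-URL flow in fetch/__init__.py's go() as it looks after this patch. It paraphrases the new code in the diff below rather than reproducing it verbatim, elides the main fetch and the MIRRORS fallback, and assumes the names already present in the fetcher module (urldata, d, bb.utils, Fetch, mirror_from_string, try_mirrors):

```python
# Sketch only: condensed from the reworked go() loop shown in the diff below.
# urldata, d, bb, Fetch, mirror_from_string, try_mirrors and FetchError are
# the existing names in bitbake/lib/bb/fetch/__init__.py.
for u in urls:
    ud = urldata[u]
    m = ud.method
    localpath = ""

    if not ud.localfile:
        continue

    # Everything below happens under the per-download lock
    lf = bb.utils.lockfile(ud.lockfile)

    if m.try_premirror(u, ud, d):
        # First try fetching uri, u, from PREMIRRORS
        mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
        localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))

    # ... the normal fetch and the MIRRORS fallback are elided here ...

    if not localpath or not os.path.exists(localpath):
        raise FetchError("Unable to fetch URL %s from any source." % u)

    ud.localpath = localpath
    if os.path.exists(ud.md5):
        # Touch the md5 stamp to show active use of the download
        try:
            os.utime(ud.md5, None)
        except:
            # Errors aren't fatal here
            pass
    else:
        Fetch.write_md5sum(u, ud, d)

    bb.utils.unlockfile(lf)
```

The key point is that the md5 stamp is now touched or written in exactly one place, after the download has succeeded and while the lock is still held, which addresses issue (b) above.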
Diffstat (limited to 'bitbake/lib/bb/fetch')
-rw-r--r--  bitbake/lib/bb/fetch/__init__.py  46
-rw-r--r--  bitbake/lib/bb/fetch/git.py       28
2 files changed, 38 insertions, 36 deletions
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index 55ffdb84c9..5fcb9b83e2 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -237,32 +237,14 @@ def go(d, urls = None):
     for u in urls:
         ud = urldata[u]
         m = ud.method
-        premirror_fetch = True
         localpath = ""
 
-        if ud.localfile:
-            if not m.try_premirror(u, ud, d):
-                premirror_fetch = False
-                # File already present along with md5 stamp file
-                # Touch md5 file to show activity
-                try:
-                    os.utime(ud.md5, None)
-                except:
-                    # Errors aren't fatal here
-                    pass
-
-            lf = bb.utils.lockfile(ud.lockfile)
-            if not m.try_premirror(u, ud, d):
-                premirror_fetch = False
-                # If someone else fetched this before we got the lock,
-                # notice and don't try again
-                try:
-                    os.utime(ud.md5, None)
-                except:
-                    # Errors aren't fatal here
-                    pass
+        if not ud.localfile:
+            continue
 
-        if premirror_fetch:
+        lf = bb.utils.lockfile(ud.lockfile)
+
+        if m.try_premirror(u, ud, d):
             # First try fetching uri, u, from PREMIRRORS
             mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
             localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))
@@ -282,14 +264,18 @@ def go(d, urls = None):
         if not localpath or not os.path.exists(localpath):
             raise FetchError("Unable to fetch URL %s from any source." % u)
 
-        if localpath:
-            ud.localpath = localpath
-
-        if ud.localfile:
-            if not m.forcefetch(u, ud, d):
-                Fetch.write_md5sum(u, ud, d)
-            bb.utils.unlockfile(lf)
+        ud.localpath = localpath
+        if os.path.exists(ud.md5):
+            # Touch the md5 file to show active use of the download
+            try:
+                os.utime(ud.md5, None)
+            except:
+                # Errors aren't fatal here
+                pass
+        else:
+            Fetch.write_md5sum(u, ud, d)
 
+        bb.utils.unlockfile(lf)
 
 def checkstatus(d):
     """
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py
index 449f1e4dba..33e9a95baf 100644
--- a/bitbake/lib/bb/fetch/git.py
+++ b/bitbake/lib/bb/fetch/git.py
@@ -88,15 +88,21 @@ class Git(Fetch):
     def forcefetch(self, url, ud, d):
         if 'fullclone' in ud.parm:
             return True
-        if os.path.exists(self.localpath(url, ud, d)):
+        if 'noclone' in ud.parm:
+            return False
+        if os.path.exists(ud.localpath):
             return False
         if not self._contains_ref(ud.tag, d):
             return True
         return False
 
     def try_premirror(self, u, ud, d):
+        if 'noclone' in ud.parm:
+            return False
         if os.path.exists(ud.clonedir):
             return False
+        if os.path.exists(ud.localpath):
+            return False
 
         return True
 
@@ -113,16 +119,25 @@ class Git(Fetch):
         coname = '%s' % (ud.tag)
         codir = os.path.join(ud.clonedir, coname)
 
-        if not os.path.exists(ud.clonedir):
+        # If we have no existing clone and no mirror tarball, try and obtain one
+        if not os.path.exists(ud.clonedir) and not os.path.exists(repofile):
             try:
                 Fetch.try_mirrors(ud.mirrortarball)
-                bb.mkdirhier(ud.clonedir)
-                os.chdir(ud.clonedir)
-                runfetchcmd("tar -xzf %s" % (repofile), d)
             except:
-                runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)
+                pass
+
+        # If the checkout doesn't exist and the mirror tarball does, extract it
+        if not os.path.exists(ud.clonedir) and os.path.exists(repofile):
+            bb.mkdirhier(ud.clonedir)
+            os.chdir(ud.clonedir)
+            runfetchcmd("tar -xzf %s" % (repofile), d)
+
+        # If the repo still doesn't exist, fallback to cloning it
+        if not os.path.exists(ud.clonedir):
+            runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)
 
         os.chdir(ud.clonedir)
+        # Update the checkout if needed
         if not self._contains_ref(ud.tag, d) or 'fullclone' in ud.parm:
             # Remove all but the .git directory
             runfetchcmd("rm * -Rf", d)
@@ -131,6 +146,7 @@ class Git(Fetch):
             runfetchcmd("%s prune-packed" % ud.basecmd, d)
             runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
 
+        # Generate a mirror tarball if needed
         os.chdir(ud.clonedir)
         mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
         if mirror_tarballs != "0" or 'fullclone' in ud.parm:
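
To summarize the git.py change, the nested try/except that mixed mirror download, unpacking and cloning is replaced by a straight-line cascade. The following condensed sketch paraphrases the hunks above using the same names from the fetcher; repofile is the local mirror tarball path built earlier in go() (its definition is not shown in this diff):

```python
# Sketch only: the three-step fallback now used by Git.go() (see the hunks above).
# ud, d, repofile, username, runfetchcmd, Fetch.try_mirrors and bb.mkdirhier are
# the existing names in bitbake/lib/bb/fetch/git.py.

# 1) No clone and no mirror tarball yet: try to obtain a tarball from the mirrors
if not os.path.exists(ud.clonedir) and not os.path.exists(repofile):
    try:
        Fetch.try_mirrors(ud.mirrortarball)
    except:
        # A missing mirror tarball is not fatal; we can still clone below
        pass

# 2) Tarball present but no checkout: unpack it into the clone directory
if not os.path.exists(ud.clonedir) and os.path.exists(repofile):
    bb.mkdirhier(ud.clonedir)
    os.chdir(ud.clonedir)
    runfetchcmd("tar -xzf %s" % (repofile), d)

# 3) Still no repository: fall back to a fresh clone
if not os.path.exists(ud.clonedir):
    runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username,
                                                ud.host, ud.path, ud.clonedir), d)
```

Because each step re-checks the state on disk rather than assuming what the previous step did, a preexisting mirror tarball is actually used (issue e), and together with the new checks in forcefetch() and try_premirror() the premirror is no longer consulted when a clone, checkout or tarball already exists (issues a and c).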