summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorMatt Madison <matt@madison.systems>2016-08-10 10:08:16 -0700
committerRichard Purdie <richard.purdie@linuxfoundation.org>2016-08-20 16:08:59 +0100
commitab09541d5517da9b1a23923ea8f5c26ddf745084 (patch)
treeb0b81a809ec783b7481c012b430b9f6618e87a73
parenteefb4b66c8628fbf366ebc5c23cfe013c8fa3756 (diff)
downloadpoky-ab09541d5517da9b1a23923ea8f5c26ddf745084.tar.gz
bitbake: fetch2: preserve current working directory
Fix the methods in all fetchers so they don't change the current working directory of the calling process, which could lead to "changed cwd" warnings from bitbake. (Bitbake rev: 6aa78bf3bd1f75728209e2d01faef31cb8887333) Signed-off-by: Matt Madison <matt@madison.systems> Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-rw-r--r--bitbake/lib/bb/fetch2/__init__.py21
-rw-r--r--bitbake/lib/bb/fetch2/bzr.py11
-rw-r--r--bitbake/lib/bb/fetch2/clearcase.py6
-rw-r--r--bitbake/lib/bb/fetch2/cvs.py15
-rw-r--r--bitbake/lib/bb/fetch2/git.py45
-rw-r--r--bitbake/lib/bb/fetch2/gitannex.py25
-rw-r--r--bitbake/lib/bb/fetch2/gitsm.py23
-rw-r--r--bitbake/lib/bb/fetch2/hg.py28
-rw-r--r--bitbake/lib/bb/fetch2/npm.py22
-rw-r--r--bitbake/lib/bb/fetch2/osc.py10
-rw-r--r--bitbake/lib/bb/fetch2/perforce.py6
-rw-r--r--bitbake/lib/bb/fetch2/repo.py13
-rw-r--r--bitbake/lib/bb/fetch2/svn.py13
13 files changed, 101 insertions, 137 deletions
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 9054b2ec18..7a3eb3c5ab 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -779,7 +779,7 @@ def localpath(url, d):
779 fetcher = bb.fetch2.Fetch([url], d) 779 fetcher = bb.fetch2.Fetch([url], d)
780 return fetcher.localpath(url) 780 return fetcher.localpath(url)
781 781
782def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None): 782def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
783 """ 783 """
784 Run cmd returning the command output 784 Run cmd returning the command output
785 Raise an error if interrupted or cmd fails 785 Raise an error if interrupted or cmd fails
@@ -821,7 +821,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None):
821 error_message = "" 821 error_message = ""
822 822
823 try: 823 try:
824 (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE) 824 (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
825 success = True 825 success = True
826 except bb.process.NotFoundError as e: 826 except bb.process.NotFoundError as e:
827 error_message = "Fetch command %s" % (e.command) 827 error_message = "Fetch command %s" % (e.command)
@@ -1436,17 +1436,11 @@ class FetchMethod(object):
1436 if not cmd: 1436 if not cmd:
1437 return 1437 return
1438 1438
1439 # Change to unpackdir before executing command
1440 save_cwd = os.getcwd();
1441 os.chdir(unpackdir)
1442
1443 path = data.getVar('PATH', True) 1439 path = data.getVar('PATH', True)
1444 if path: 1440 if path:
1445 cmd = "PATH=\"%s\" %s" % (path, cmd) 1441 cmd = "PATH=\"%s\" %s" % (path, cmd)
1446 bb.note("Unpacking %s to %s/" % (file, os.getcwd())) 1442 bb.note("Unpacking %s to %s/" % (file, unpackdir))
1447 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True) 1443 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=unpackdir)
1448
1449 os.chdir(save_cwd)
1450 1444
1451 if ret != 0: 1445 if ret != 0:
1452 raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url) 1446 raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
@@ -1559,6 +1553,8 @@ class Fetch(object):
1559 network = self.d.getVar("BB_NO_NETWORK", True) 1553 network = self.d.getVar("BB_NO_NETWORK", True)
1560 premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1") 1554 premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
1561 1555
1556 save_cwd = os.getcwd()
1557
1562 for u in urls: 1558 for u in urls:
1563 ud = self.ud[u] 1559 ud = self.ud[u]
1564 ud.setup_localpath(self.d) 1560 ud.setup_localpath(self.d)
@@ -1633,6 +1629,7 @@ class Fetch(object):
1633 raise 1629 raise
1634 1630
1635 finally: 1631 finally:
1632 os.chdir(save_cwd)
1636 if ud.lockfile: 1633 if ud.lockfile:
1637 bb.utils.unlockfile(lf) 1634 bb.utils.unlockfile(lf)
1638 1635
@@ -1641,6 +1638,8 @@ class Fetch(object):
1641 Check all urls exist upstream 1638 Check all urls exist upstream
1642 """ 1639 """
1643 1640
1641 save_cwd = os.getcwd()
1642
1644 if not urls: 1643 if not urls:
1645 urls = self.urls 1644 urls = self.urls
1646 1645
@@ -1664,6 +1663,8 @@ class Fetch(object):
1664 if not ret: 1663 if not ret:
1665 raise FetchError("URL %s doesn't work" % u, u) 1664 raise FetchError("URL %s doesn't work" % u, u)
1666 1665
1666 os.chdir(save_cwd)
1667
1667 def unpack(self, root, urls=None): 1668 def unpack(self, root, urls=None):
1668 """ 1669 """
1669 Check all urls exist upstream 1670 Check all urls exist upstream
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py
index ce11ee7c20..72264afb5a 100644
--- a/bitbake/lib/bb/fetch2/bzr.py
+++ b/bitbake/lib/bb/fetch2/bzr.py
@@ -88,19 +88,15 @@ class Bzr(FetchMethod):
88 bzrcmd = self._buildbzrcommand(ud, d, "update") 88 bzrcmd = self._buildbzrcommand(ud, d, "update")
89 logger.debug(1, "BZR Update %s", ud.url) 89 logger.debug(1, "BZR Update %s", ud.url)
90 bb.fetch2.check_network_access(d, bzrcmd, ud.url) 90 bb.fetch2.check_network_access(d, bzrcmd, ud.url)
91 os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) 91 runfetchcmd(bzrcmd, d, workdir=os.path.join(ud.pkgdir, os.path.basename(ud.path)))
92 runfetchcmd(bzrcmd, d)
93 else: 92 else:
94 bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) 93 bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
95 bzrcmd = self._buildbzrcommand(ud, d, "fetch") 94 bzrcmd = self._buildbzrcommand(ud, d, "fetch")
96 bb.fetch2.check_network_access(d, bzrcmd, ud.url) 95 bb.fetch2.check_network_access(d, bzrcmd, ud.url)
97 logger.debug(1, "BZR Checkout %s", ud.url) 96 logger.debug(1, "BZR Checkout %s", ud.url)
98 bb.utils.mkdirhier(ud.pkgdir) 97 bb.utils.mkdirhier(ud.pkgdir)
99 os.chdir(ud.pkgdir)
100 logger.debug(1, "Running %s", bzrcmd) 98 logger.debug(1, "Running %s", bzrcmd)
101 runfetchcmd(bzrcmd, d) 99 runfetchcmd(bzrcmd, d, workdir=ud.pkgdir)
102
103 os.chdir(ud.pkgdir)
104 100
105 scmdata = ud.parm.get("scmdata", "") 101 scmdata = ud.parm.get("scmdata", "")
106 if scmdata == "keep": 102 if scmdata == "keep":
@@ -109,7 +105,8 @@ class Bzr(FetchMethod):
109 tar_flags = "--exclude='.bzr' --exclude='.bzrtags'" 105 tar_flags = "--exclude='.bzr' --exclude='.bzrtags'"
110 106
111 # tar them up to a defined filename 107 # tar them up to a defined filename
112 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath]) 108 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)),
109 d, cleanup=[ud.localpath], workdir=ud.pkgdir)
113 110
114 def supports_srcrev(self): 111 def supports_srcrev(self):
115 return True 112 return True
diff --git a/bitbake/lib/bb/fetch2/clearcase.py b/bitbake/lib/bb/fetch2/clearcase.py
index ba83e7cb60..70e280a8dd 100644
--- a/bitbake/lib/bb/fetch2/clearcase.py
+++ b/bitbake/lib/bb/fetch2/clearcase.py
@@ -202,11 +202,10 @@ class ClearCase(FetchMethod):
202 202
203 def _remove_view(self, ud, d): 203 def _remove_view(self, ud, d):
204 if os.path.exists(ud.viewdir): 204 if os.path.exists(ud.viewdir):
205 os.chdir(ud.ccasedir)
206 cmd = self._build_ccase_command(ud, 'rmview'); 205 cmd = self._build_ccase_command(ud, 'rmview');
207 logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname) 206 logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
208 bb.fetch2.check_network_access(d, cmd, ud.url) 207 bb.fetch2.check_network_access(d, cmd, ud.url)
209 output = runfetchcmd(cmd, d) 208 output = runfetchcmd(cmd, d, workdir=ud.ccasedir)
210 logger.info("rmview output: %s", output) 209 logger.info("rmview output: %s", output)
211 210
212 def need_update(self, ud, d): 211 def need_update(self, ud, d):
@@ -241,11 +240,10 @@ class ClearCase(FetchMethod):
241 raise e 240 raise e
242 241
243 # Set configspec: Setting the configspec effectively fetches the files as defined in the configspec 242 # Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
244 os.chdir(ud.viewdir)
245 cmd = self._build_ccase_command(ud, 'setcs'); 243 cmd = self._build_ccase_command(ud, 'setcs');
246 logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname) 244 logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
247 bb.fetch2.check_network_access(d, cmd, ud.url) 245 bb.fetch2.check_network_access(d, cmd, ud.url)
248 output = runfetchcmd(cmd, d) 246 output = runfetchcmd(cmd, d, workdir=ud.viewdir)
249 logger.info("%s", output) 247 logger.info("%s", output)
250 248
251 # Copy the configspec to the viewdir so we have it in our source tarball later 249 # Copy the configspec to the viewdir so we have it in our source tarball later
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
index 43a234f590..5ff70ba921 100644
--- a/bitbake/lib/bb/fetch2/cvs.py
+++ b/bitbake/lib/bb/fetch2/cvs.py
@@ -123,22 +123,23 @@ class Cvs(FetchMethod):
123 pkg = d.getVar('PN', True) 123 pkg = d.getVar('PN', True)
124 pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg) 124 pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
125 moddir = os.path.join(pkgdir, localdir) 125 moddir = os.path.join(pkgdir, localdir)
126 workdir = None
126 if os.access(os.path.join(moddir, 'CVS'), os.R_OK): 127 if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
127 logger.info("Update " + ud.url) 128 logger.info("Update " + ud.url)
128 bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url) 129 bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
129 # update sources there 130 # update sources there
130 os.chdir(moddir) 131 workdir = moddir
131 cmd = cvsupdatecmd 132 cmd = cvsupdatecmd
132 else: 133 else:
133 logger.info("Fetch " + ud.url) 134 logger.info("Fetch " + ud.url)
134 # check out sources there 135 # check out sources there
135 bb.utils.mkdirhier(pkgdir) 136 bb.utils.mkdirhier(pkgdir)
136 os.chdir(pkgdir) 137 workdir = pkgdir
137 logger.debug(1, "Running %s", cvscmd) 138 logger.debug(1, "Running %s", cvscmd)
138 bb.fetch2.check_network_access(d, cvscmd, ud.url) 139 bb.fetch2.check_network_access(d, cvscmd, ud.url)
139 cmd = cvscmd 140 cmd = cvscmd
140 141
141 runfetchcmd(cmd, d, cleanup = [moddir]) 142 runfetchcmd(cmd, d, cleanup=[moddir], workdir=workdir)
142 143
143 if not os.access(moddir, os.R_OK): 144 if not os.access(moddir, os.R_OK):
144 raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url) 145 raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url)
@@ -150,15 +151,15 @@ class Cvs(FetchMethod):
150 tar_flags = "--exclude='CVS'" 151 tar_flags = "--exclude='CVS'"
151 152
152 # tar them up to a defined filename 153 # tar them up to a defined filename
154 workdir = None
153 if 'fullpath' in ud.parm: 155 if 'fullpath' in ud.parm:
154 os.chdir(pkgdir) 156 workdir = pkgdir
155 cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir) 157 cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
156 else: 158 else:
157 os.chdir(moddir) 159 workdir = os.path.dirname(os.path.realpath(moddir))
158 os.chdir('..')
159 cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir)) 160 cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))
160 161
161 runfetchcmd(cmd, d, cleanup = [ud.localpath]) 162 runfetchcmd(cmd, d, cleanup=[ud.localpath], workdir=workdir)
162 163
163 def clean(self, ud, d): 164 def clean(self, ud, d):
164 """ Clean CVS Files and tarballs """ 165 """ Clean CVS Files and tarballs """
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index fd8f3fdf4d..1bec60ab71 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -219,9 +219,8 @@ class Git(FetchMethod):
219 def need_update(self, ud, d): 219 def need_update(self, ud, d):
220 if not os.path.exists(ud.clonedir): 220 if not os.path.exists(ud.clonedir):
221 return True 221 return True
222 os.chdir(ud.clonedir)
223 for name in ud.names: 222 for name in ud.names:
224 if not self._contains_ref(ud, d, name): 223 if not self._contains_ref(ud, d, name, ud.clonedir):
225 return True 224 return True
226 if ud.write_tarballs and not os.path.exists(ud.fullmirror): 225 if ud.write_tarballs and not os.path.exists(ud.fullmirror):
227 return True 226 return True
@@ -242,8 +241,7 @@ class Git(FetchMethod):
242 # If the checkout doesn't exist and the mirror tarball does, extract it 241 # If the checkout doesn't exist and the mirror tarball does, extract it
243 if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror): 242 if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
244 bb.utils.mkdirhier(ud.clonedir) 243 bb.utils.mkdirhier(ud.clonedir)
245 os.chdir(ud.clonedir) 244 runfetchcmd("tar -xzf %s" % (ud.fullmirror), d, workdir=ud.clonedir)
246 runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
247 245
248 repourl = self._get_repo_url(ud) 246 repourl = self._get_repo_url(ud)
249 247
@@ -258,34 +256,32 @@ class Git(FetchMethod):
258 progresshandler = GitProgressHandler(d) 256 progresshandler = GitProgressHandler(d)
259 runfetchcmd(clone_cmd, d, log=progresshandler) 257 runfetchcmd(clone_cmd, d, log=progresshandler)
260 258
261 os.chdir(ud.clonedir)
262 # Update the checkout if needed 259 # Update the checkout if needed
263 needupdate = False 260 needupdate = False
264 for name in ud.names: 261 for name in ud.names:
265 if not self._contains_ref(ud, d, name): 262 if not self._contains_ref(ud, d, name, ud.clonedir):
266 needupdate = True 263 needupdate = True
267 if needupdate: 264 if needupdate:
268 try: 265 try:
269 runfetchcmd("%s remote rm origin" % ud.basecmd, d) 266 runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
270 except bb.fetch2.FetchError: 267 except bb.fetch2.FetchError:
271 logger.debug(1, "No Origin") 268 logger.debug(1, "No Origin")
272 269
273 runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d) 270 runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
274 fetch_cmd = "LANG=C %s fetch -f --prune --progress %s refs/*:refs/*" % (ud.basecmd, repourl) 271 fetch_cmd = "LANG=C %s fetch -f --prune --progress %s refs/*:refs/*" % (ud.basecmd, repourl)
275 if ud.proto.lower() != 'file': 272 if ud.proto.lower() != 'file':
276 bb.fetch2.check_network_access(d, fetch_cmd, ud.url) 273 bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
277 progresshandler = GitProgressHandler(d) 274 progresshandler = GitProgressHandler(d)
278 runfetchcmd(fetch_cmd, d, log=progresshandler) 275 runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
279 runfetchcmd("%s prune-packed" % ud.basecmd, d) 276 runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
280 runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d) 277 runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d, workdir=ud.clonedir)
281 try: 278 try:
282 os.unlink(ud.fullmirror) 279 os.unlink(ud.fullmirror)
283 except OSError as exc: 280 except OSError as exc:
284 if exc.errno != errno.ENOENT: 281 if exc.errno != errno.ENOENT:
285 raise 282 raise
286 os.chdir(ud.clonedir)
287 for name in ud.names: 283 for name in ud.names:
288 if not self._contains_ref(ud, d, name): 284 if not self._contains_ref(ud, d, name, ud.clonedir):
289 raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name])) 285 raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
290 286
291 def build_mirror_data(self, ud, d): 287 def build_mirror_data(self, ud, d):
@@ -295,10 +291,9 @@ class Git(FetchMethod):
295 if os.path.islink(ud.fullmirror): 291 if os.path.islink(ud.fullmirror):
296 os.unlink(ud.fullmirror) 292 os.unlink(ud.fullmirror)
297 293
298 os.chdir(ud.clonedir)
299 logger.info("Creating tarball of git repository") 294 logger.info("Creating tarball of git repository")
300 runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d) 295 runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d, workdir=ud.clonedir)
301 runfetchcmd("touch %s.done" % (ud.fullmirror), d) 296 runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=ud.clonedir)
302 297
303 def unpack(self, ud, destdir, d): 298 def unpack(self, ud, destdir, d):
304 """ unpack the downloaded src to destdir""" 299 """ unpack the downloaded src to destdir"""
@@ -321,21 +316,21 @@ class Git(FetchMethod):
321 cloneflags += " --mirror" 316 cloneflags += " --mirror"
322 317
323 runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, ud.clonedir, destdir), d) 318 runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, ud.clonedir, destdir), d)
324 os.chdir(destdir)
325 repourl = self._get_repo_url(ud) 319 repourl = self._get_repo_url(ud)
326 runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d) 320 runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)
327 if not ud.nocheckout: 321 if not ud.nocheckout:
328 if subdir != "": 322 if subdir != "":
329 runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d) 323 runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
330 runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d) 324 workdir=destdir)
325 runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
331 elif not ud.nobranch: 326 elif not ud.nobranch:
332 branchname = ud.branches[ud.names[0]] 327 branchname = ud.branches[ud.names[0]]
333 runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \ 328 runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
334 ud.revisions[ud.names[0]]), d) 329 ud.revisions[ud.names[0]]), d, workdir=destdir)
335 runfetchcmd("%s branch --set-upstream %s origin/%s" % (ud.basecmd, branchname, \ 330 runfetchcmd("%s branch --set-upstream %s origin/%s" % (ud.basecmd, branchname, \
336 branchname), d) 331 branchname), d, workdir=destdir)
337 else: 332 else:
338 runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d) 333 runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir)
339 334
340 return True 335 return True
341 336
@@ -349,7 +344,7 @@ class Git(FetchMethod):
349 def supports_srcrev(self): 344 def supports_srcrev(self):
350 return True 345 return True
351 346
352 def _contains_ref(self, ud, d, name): 347 def _contains_ref(self, ud, d, name, wd):
353 cmd = "" 348 cmd = ""
354 if ud.nobranch: 349 if ud.nobranch:
355 cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % ( 350 cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
@@ -358,7 +353,7 @@ class Git(FetchMethod):
358 cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % ( 353 cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
359 ud.basecmd, ud.revisions[name], ud.branches[name]) 354 ud.basecmd, ud.revisions[name], ud.branches[name])
360 try: 355 try:
361 output = runfetchcmd(cmd, d, quiet=True) 356 output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
362 except bb.fetch2.FetchError: 357 except bb.fetch2.FetchError:
363 return False 358 return False
364 if len(output.split()) > 1: 359 if len(output.split()) > 1:
diff --git a/bitbake/lib/bb/fetch2/gitannex.py b/bitbake/lib/bb/fetch2/gitannex.py
index e4527f1c75..4937a10891 100644
--- a/bitbake/lib/bb/fetch2/gitannex.py
+++ b/bitbake/lib/bb/fetch2/gitannex.py
@@ -34,43 +34,42 @@ class GitANNEX(Git):
34 """ 34 """
35 return ud.type in ['gitannex'] 35 return ud.type in ['gitannex']
36 36
37 def uses_annex(self, ud, d): 37 def uses_annex(self, ud, d, wd):
38 for name in ud.names: 38 for name in ud.names:
39 try: 39 try:
40 runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True) 40 runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True, workdir=wd)
41 return True 41 return True
42 except bb.fetch.FetchError: 42 except bb.fetch.FetchError:
43 pass 43 pass
44 44
45 return False 45 return False
46 46
47 def update_annex(self, ud, d): 47 def update_annex(self, ud, d, wd):
48 try: 48 try:
49 runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True) 49 runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True, workdir=wd)
50 except bb.fetch.FetchError: 50 except bb.fetch.FetchError:
51 return False 51 return False
52 runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True) 52 runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True, workdir=wd)
53 53
54 return True 54 return True
55 55
56 def download(self, ud, d): 56 def download(self, ud, d):
57 Git.download(self, ud, d) 57 Git.download(self, ud, d)
58 58
59 os.chdir(ud.clonedir) 59 annex = self.uses_annex(ud, d, ud.clonedir)
60 annex = self.uses_annex(ud, d)
61 if annex: 60 if annex:
62 self.update_annex(ud, d) 61 self.update_annex(ud, d, ud.clonedir)
63 62
64 def unpack(self, ud, destdir, d): 63 def unpack(self, ud, destdir, d):
65 Git.unpack(self, ud, destdir, d) 64 Git.unpack(self, ud, destdir, d)
66 65
67 os.chdir(ud.destdir)
68 try: 66 try:
69 runfetchcmd("%s annex init" % (ud.basecmd), d) 67 runfetchcmd("%s annex init" % (ud.basecmd), d, workdir=ud.destdir)
70 except bb.fetch.FetchError: 68 except bb.fetch.FetchError:
71 pass 69 pass
72 70
73 annex = self.uses_annex(ud, d) 71 annex = self.uses_annex(ud, d, ud.destdir)
74 if annex: 72 if annex:
75 runfetchcmd("%s annex get" % (ud.basecmd), d) 73 runfetchcmd("%s annex get" % (ud.basecmd), d, workdir=ud.destdir)
76 runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True) 74 runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True, workdir=ud.destdir)
75
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
index 752f1d3c15..6613762048 100644
--- a/bitbake/lib/bb/fetch2/gitsm.py
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -43,10 +43,10 @@ class GitSM(Git):
43 """ 43 """
44 return ud.type in ['gitsm'] 44 return ud.type in ['gitsm']
45 45
46 def uses_submodules(self, ud, d): 46 def uses_submodules(self, ud, d, wd):
47 for name in ud.names: 47 for name in ud.names:
48 try: 48 try:
49 runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True) 49 runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=wd)
50 return True 50 return True
51 except bb.fetch.FetchError: 51 except bb.fetch.FetchError:
52 pass 52 pass
@@ -107,28 +107,25 @@ class GitSM(Git):
107 os.mkdir(tmpclonedir) 107 os.mkdir(tmpclonedir)
108 os.rename(ud.clonedir, gitdir) 108 os.rename(ud.clonedir, gitdir)
109 runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d) 109 runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
110 os.chdir(tmpclonedir) 110 runfetchcmd(ud.basecmd + " reset --hard", d, workdir=tmpclonedir)
111 runfetchcmd(ud.basecmd + " reset --hard", d) 111 runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=tmpclonedir)
112 runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d) 112 runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=tmpclonedir)
113 runfetchcmd(ud.basecmd + " submodule update --init --recursive", d)
114 self._set_relative_paths(tmpclonedir) 113 self._set_relative_paths(tmpclonedir)
115 runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d) 114 runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d, workdir=tmpclonedir)
116 os.rename(gitdir, ud.clonedir,) 115 os.rename(gitdir, ud.clonedir,)
117 bb.utils.remove(tmpclonedir, True) 116 bb.utils.remove(tmpclonedir, True)
118 117
119 def download(self, ud, d): 118 def download(self, ud, d):
120 Git.download(self, ud, d) 119 Git.download(self, ud, d)
121 120
122 os.chdir(ud.clonedir) 121 submodules = self.uses_submodules(ud, d, ud.clonedir)
123 submodules = self.uses_submodules(ud, d)
124 if submodules: 122 if submodules:
125 self.update_submodules(ud, d) 123 self.update_submodules(ud, d)
126 124
127 def unpack(self, ud, destdir, d): 125 def unpack(self, ud, destdir, d):
128 Git.unpack(self, ud, destdir, d) 126 Git.unpack(self, ud, destdir, d)
129 127
130 os.chdir(ud.destdir) 128 submodules = self.uses_submodules(ud, d, ud.destdir)
131 submodules = self.uses_submodules(ud, d)
132 if submodules: 129 if submodules:
133 runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d) 130 runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=ud.destdir)
134 runfetchcmd(ud.basecmd + " submodule update --init --recursive", d) 131 runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=ud.destdir)
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
index 3b743ff51d..20df8016da 100644
--- a/bitbake/lib/bb/fetch2/hg.py
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -169,25 +169,22 @@ class Hg(FetchMethod):
169 # If the checkout doesn't exist and the mirror tarball does, extract it 169 # If the checkout doesn't exist and the mirror tarball does, extract it
170 if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror): 170 if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
171 bb.utils.mkdirhier(ud.pkgdir) 171 bb.utils.mkdirhier(ud.pkgdir)
172 os.chdir(ud.pkgdir) 172 runfetchcmd("tar -xzf %s" % (ud.fullmirror), d, workdir=ud.pkgdir)
173 runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
174 173
175 if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): 174 if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
176 # Found the source, check whether need pull 175 # Found the source, check whether need pull
177 updatecmd = self._buildhgcommand(ud, d, "update") 176 updatecmd = self._buildhgcommand(ud, d, "update")
178 os.chdir(ud.moddir)
179 logger.debug(1, "Running %s", updatecmd) 177 logger.debug(1, "Running %s", updatecmd)
180 try: 178 try:
181 runfetchcmd(updatecmd, d) 179 runfetchcmd(updatecmd, d, workdir=ud.moddir)
182 except bb.fetch2.FetchError: 180 except bb.fetch2.FetchError:
183 # Runnning pull in the repo 181 # Runnning pull in the repo
184 pullcmd = self._buildhgcommand(ud, d, "pull") 182 pullcmd = self._buildhgcommand(ud, d, "pull")
185 logger.info("Pulling " + ud.url) 183 logger.info("Pulling " + ud.url)
186 # update sources there 184 # update sources there
187 os.chdir(ud.moddir)
188 logger.debug(1, "Running %s", pullcmd) 185 logger.debug(1, "Running %s", pullcmd)
189 bb.fetch2.check_network_access(d, pullcmd, ud.url) 186 bb.fetch2.check_network_access(d, pullcmd, ud.url)
190 runfetchcmd(pullcmd, d) 187 runfetchcmd(pullcmd, d, workdir=ud.moddir)
191 try: 188 try:
192 os.unlink(ud.fullmirror) 189 os.unlink(ud.fullmirror)
193 except OSError as exc: 190 except OSError as exc:
@@ -200,17 +197,15 @@ class Hg(FetchMethod):
200 logger.info("Fetch " + ud.url) 197 logger.info("Fetch " + ud.url)
201 # check out sources there 198 # check out sources there
202 bb.utils.mkdirhier(ud.pkgdir) 199 bb.utils.mkdirhier(ud.pkgdir)
203 os.chdir(ud.pkgdir)
204 logger.debug(1, "Running %s", fetchcmd) 200 logger.debug(1, "Running %s", fetchcmd)
205 bb.fetch2.check_network_access(d, fetchcmd, ud.url) 201 bb.fetch2.check_network_access(d, fetchcmd, ud.url)
206 runfetchcmd(fetchcmd, d) 202 runfetchcmd(fetchcmd, d, workdir=ud.pkgdir)
207 203
208 # Even when we clone (fetch), we still need to update as hg's clone 204 # Even when we clone (fetch), we still need to update as hg's clone
209 # won't checkout the specified revision if its on a branch 205 # won't checkout the specified revision if its on a branch
210 updatecmd = self._buildhgcommand(ud, d, "update") 206 updatecmd = self._buildhgcommand(ud, d, "update")
211 os.chdir(ud.moddir)
212 logger.debug(1, "Running %s", updatecmd) 207 logger.debug(1, "Running %s", updatecmd)
213 runfetchcmd(updatecmd, d) 208 runfetchcmd(updatecmd, d, workdir=ud.moddir)
214 209
215 def clean(self, ud, d): 210 def clean(self, ud, d):
216 """ Clean the hg dir """ 211 """ Clean the hg dir """
@@ -246,10 +241,9 @@ class Hg(FetchMethod):
246 if os.path.islink(ud.fullmirror): 241 if os.path.islink(ud.fullmirror):
247 os.unlink(ud.fullmirror) 242 os.unlink(ud.fullmirror)
248 243
249 os.chdir(ud.pkgdir)
250 logger.info("Creating tarball of hg repository") 244 logger.info("Creating tarball of hg repository")
251 runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d) 245 runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d, workdir=ud.pkgdir)
252 runfetchcmd("touch %s.done" % (ud.fullmirror), d) 246 runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=ud.pkgdir)
253 247
254 def localpath(self, ud, d): 248 def localpath(self, ud, d):
255 return ud.pkgdir 249 return ud.pkgdir
@@ -269,10 +263,8 @@ class Hg(FetchMethod):
269 logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'") 263 logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
270 runfetchcmd("%s init %s" % (ud.basecmd, codir), d) 264 runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
271 logger.debug(2, "Unpack: updating source in '" + codir + "'") 265 logger.debug(2, "Unpack: updating source in '" + codir + "'")
272 os.chdir(codir) 266 runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d, workdir=codir)
273 runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d) 267 runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d, workdir=codir)
274 runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d)
275 else: 268 else:
276 logger.debug(2, "Unpack: extracting source to '" + codir + "'") 269 logger.debug(2, "Unpack: extracting source to '" + codir + "'")
277 os.chdir(ud.moddir) 270 runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d, workdir=ud.moddir)
278 runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d)
diff --git a/bitbake/lib/bb/fetch2/npm.py b/bitbake/lib/bb/fetch2/npm.py
index 2fd43034ba..b26ac22eff 100644
--- a/bitbake/lib/bb/fetch2/npm.py
+++ b/bitbake/lib/bb/fetch2/npm.py
@@ -113,16 +113,13 @@ class Npm(FetchMethod):
113 bb.fatal("NPM package %s downloaded not a tarball!" % file) 113 bb.fatal("NPM package %s downloaded not a tarball!" % file)
114 114
115 # Change to subdir before executing command 115 # Change to subdir before executing command
116 save_cwd = os.getcwd()
117 if not os.path.exists(destdir): 116 if not os.path.exists(destdir):
118 os.makedirs(destdir) 117 os.makedirs(destdir)
119 os.chdir(destdir)
120 path = d.getVar('PATH', True) 118 path = d.getVar('PATH', True)
121 if path: 119 if path:
122 cmd = "PATH=\"%s\" %s" % (path, cmd) 120 cmd = "PATH=\"%s\" %s" % (path, cmd)
123 bb.note("Unpacking %s to %s/" % (file, os.getcwd())) 121 bb.note("Unpacking %s to %s/" % (file, destdir))
124 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True) 122 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=destdir)
125 os.chdir(save_cwd)
126 123
127 if ret != 0: 124 if ret != 0:
128 raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url) 125 raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)
@@ -239,10 +236,7 @@ class Npm(FetchMethod):
239 if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror): 236 if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
240 dest = d.getVar("DL_DIR", True) 237 dest = d.getVar("DL_DIR", True)
241 bb.utils.mkdirhier(dest) 238 bb.utils.mkdirhier(dest)
242 save_cwd = os.getcwd() 239 runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
243 os.chdir(dest)
244 runfetchcmd("tar -xJf %s" % (ud.fullmirror), d)
245 os.chdir(save_cwd)
246 return 240 return
247 241
248 shwrf = d.getVar('NPM_SHRINKWRAP', True) 242 shwrf = d.getVar('NPM_SHRINKWRAP', True)
@@ -275,10 +269,8 @@ class Npm(FetchMethod):
275 if os.path.islink(ud.fullmirror): 269 if os.path.islink(ud.fullmirror):
276 os.unlink(ud.fullmirror) 270 os.unlink(ud.fullmirror)
277 271
278 save_cwd = os.getcwd() 272 dldir = d.getVar("DL_DIR", True)
279 os.chdir(d.getVar("DL_DIR", True))
280 logger.info("Creating tarball of npm data") 273 logger.info("Creating tarball of npm data")
281 runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d) 274 runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d,
282 runfetchcmd("touch %s.done" % (ud.fullmirror), d) 275 workdir=dldir)
283 os.chdir(save_cwd) 276 runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=dldir)
284
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
index d051dfdafa..295abf953b 100644
--- a/bitbake/lib/bb/fetch2/osc.py
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -88,23 +88,21 @@ class Osc(FetchMethod):
88 oscupdatecmd = self._buildosccommand(ud, d, "update") 88 oscupdatecmd = self._buildosccommand(ud, d, "update")
89 logger.info("Update "+ ud.url) 89 logger.info("Update "+ ud.url)
90 # update sources there 90 # update sources there
91 os.chdir(ud.moddir)
92 logger.debug(1, "Running %s", oscupdatecmd) 91 logger.debug(1, "Running %s", oscupdatecmd)
93 bb.fetch2.check_network_access(d, oscupdatecmd, ud.url) 92 bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
94 runfetchcmd(oscupdatecmd, d) 93 runfetchcmd(oscupdatecmd, d, workdir=ud.moddir)
95 else: 94 else:
96 oscfetchcmd = self._buildosccommand(ud, d, "fetch") 95 oscfetchcmd = self._buildosccommand(ud, d, "fetch")
97 logger.info("Fetch " + ud.url) 96 logger.info("Fetch " + ud.url)
98 # check out sources there 97 # check out sources there
99 bb.utils.mkdirhier(ud.pkgdir) 98 bb.utils.mkdirhier(ud.pkgdir)
100 os.chdir(ud.pkgdir)
101 logger.debug(1, "Running %s", oscfetchcmd) 99 logger.debug(1, "Running %s", oscfetchcmd)
102 bb.fetch2.check_network_access(d, oscfetchcmd, ud.url) 100 bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
103 runfetchcmd(oscfetchcmd, d) 101 runfetchcmd(oscfetchcmd, d, workdir=ud.pkgdir)
104 102
105 os.chdir(os.path.join(ud.pkgdir + ud.path))
106 # tar them up to a defined filename 103 # tar them up to a defined filename
107 runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath]) 104 runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d,
105 cleanup=[ud.localpath], workdir=os.path.join(ud.pkgdir + ud.path))
108 106
109 def supports_srcrev(self): 107 def supports_srcrev(self):
110 return False 108 return False
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
index b8169f2cc9..50cb479096 100644
--- a/bitbake/lib/bb/fetch2/perforce.py
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -168,15 +168,13 @@ class Perforce(FetchMethod):
168 168
169 bb.utils.remove(ud.pkgdir, True) 169 bb.utils.remove(ud.pkgdir, True)
170 bb.utils.mkdirhier(ud.pkgdir) 170 bb.utils.mkdirhier(ud.pkgdir)
171 os.chdir(ud.pkgdir)
172 171
173 for afile in filelist: 172 for afile in filelist:
174 p4fetchcmd = self._buildp4command(ud, d, 'print', afile) 173 p4fetchcmd = self._buildp4command(ud, d, 'print', afile)
175 bb.fetch2.check_network_access(d, p4fetchcmd) 174 bb.fetch2.check_network_access(d, p4fetchcmd)
176 runfetchcmd(p4fetchcmd, d) 175 runfetchcmd(p4fetchcmd, d, workdir=ud.pkgdir)
177 176
178 os.chdir(ud.pkgdir) 177 runfetchcmd('tar -czf %s p4' % (ud.localpath), d, cleanup=[ud.localpath], workdir=ud.pkgdir)
179 runfetchcmd('tar -czf %s p4' % (ud.localpath), d, cleanup = [ud.localpath])
180 178
181 def clean(self, ud, d): 179 def clean(self, ud, d):
182 """ Cleanup p4 specific files and dirs""" 180 """ Cleanup p4 specific files and dirs"""
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
index fd64b7e626..ecc6e68e97 100644
--- a/bitbake/lib/bb/fetch2/repo.py
+++ b/bitbake/lib/bb/fetch2/repo.py
@@ -69,15 +69,14 @@ class Repo(FetchMethod):
69 else: 69 else:
70 username = "" 70 username = ""
71 71
72 bb.utils.mkdirhier(os.path.join(codir, "repo")) 72 repodir = os.path.join(codir, "repo")
73 os.chdir(os.path.join(codir, "repo")) 73 bb.utils.mkdirhier(repodir)
74 if not os.path.exists(os.path.join(codir, "repo", ".repo")): 74 if not os.path.exists(os.path.join(repodir, ".repo")):
75 bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url) 75 bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
76 runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d) 76 runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d, workdir=repodir)
77 77
78 bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url) 78 bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
79 runfetchcmd("repo sync", d) 79 runfetchcmd("repo sync", d, workdir=repodir)
80 os.chdir(codir)
81 80
82 scmdata = ud.parm.get("scmdata", "") 81 scmdata = ud.parm.get("scmdata", "")
83 if scmdata == "keep": 82 if scmdata == "keep":
@@ -86,7 +85,7 @@ class Repo(FetchMethod):
86 tar_flags = "--exclude='.repo' --exclude='.git'" 85 tar_flags = "--exclude='.repo' --exclude='.git'"
87 86
88 # Create a cache 87 # Create a cache
89 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d) 88 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d, workdir=codir)
90 89
91 def supports_srcrev(self): 90 def supports_srcrev(self):
92 return False 91 return False
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
index 968ca79b30..9ff94108ed 100644
--- a/bitbake/lib/bb/fetch2/svn.py
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -126,25 +126,22 @@ class Svn(FetchMethod):
126 if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK): 126 if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
127 svnupdatecmd = self._buildsvncommand(ud, d, "update") 127 svnupdatecmd = self._buildsvncommand(ud, d, "update")
128 logger.info("Update " + ud.url) 128 logger.info("Update " + ud.url)
129 # update sources there
130 os.chdir(ud.moddir)
131 # We need to attempt to run svn upgrade first in case its an older working format 129 # We need to attempt to run svn upgrade first in case its an older working format
132 try: 130 try:
133 runfetchcmd(ud.basecmd + " upgrade", d) 131 runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
134 except FetchError: 132 except FetchError:
135 pass 133 pass
136 logger.debug(1, "Running %s", svnupdatecmd) 134 logger.debug(1, "Running %s", svnupdatecmd)
137 bb.fetch2.check_network_access(d, svnupdatecmd, ud.url) 135 bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
138 runfetchcmd(svnupdatecmd, d) 136 runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
139 else: 137 else:
140 svnfetchcmd = self._buildsvncommand(ud, d, "fetch") 138 svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
141 logger.info("Fetch " + ud.url) 139 logger.info("Fetch " + ud.url)
142 # check out sources there 140 # check out sources there
143 bb.utils.mkdirhier(ud.pkgdir) 141 bb.utils.mkdirhier(ud.pkgdir)
144 os.chdir(ud.pkgdir)
145 logger.debug(1, "Running %s", svnfetchcmd) 142 logger.debug(1, "Running %s", svnfetchcmd)
146 bb.fetch2.check_network_access(d, svnfetchcmd, ud.url) 143 bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
147	runfetchcmd(svnfetchcmd, d)	144	runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)
148 145
149 scmdata = ud.parm.get("scmdata", "") 146 scmdata = ud.parm.get("scmdata", "")
150 if scmdata == "keep": 147 if scmdata == "keep":
@@ -152,9 +149,9 @@ class Svn(FetchMethod):
152 else: 149 else:
153 tar_flags = "--exclude='.svn'" 150 tar_flags = "--exclude='.svn'"
154 151
155 os.chdir(ud.pkgdir)
156 # tar them up to a defined filename 152 # tar them up to a defined filename
157 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d, cleanup = [ud.localpath]) 153 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
154 cleanup=[ud.localpath], workdir=ud.pkgdir)
158 155
159 def clean(self, ud, d): 156 def clean(self, ud, d):
160 """ Clean SVN specific files and dirs """ 157 """ Clean SVN specific files and dirs """