| author | Ulf Magnusson <Ulf.Magnusson@bmw.de> | 2018-11-29 14:21:34 +0200 |
|---|---|---|
| committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2018-12-01 11:32:08 +0000 |
| commit | db20fe6f0ad360cf5f5db32bc599554ad404ae97 | |
| tree | ff4587bd6f9bc1259ec3cb0417d99bc8877ccf1b | |
| parent | c5ca140d3813dcac6606090fde7298bf275b0c5d | |
| download | poky-db20fe6f0ad360cf5f5db32bc599554ad404ae97.tar.gz | |
bitbake: fetch2/svn: Fix SVN repository concurrent update race
The ${DL_DIR}/svn directory is used by BitBake to keep checked-out SVN
repositories from which tarballs are generated. These repositories were
protected from concurrent update with a lock on the tarballs. However,
the tarballs are specific to the SRCREV and module checked out (many
tarballs can come from the same repository), meaning a repository could
be modified concurrently if two recipes checked out two different
SRCREVs or modules from it in parallel. This caused errors like the
following:
ERROR: Fetcher failure: Fetch command failed with exit code 1, output:
svn: E155004: Run 'svn cleanup' to remove locks (type 'svn help cleanup' for details)
svn: E155004: Working copy '/home/foo/downloads/svn/repo/trunk' locked.
svn: E155004: '/home/foo/downloads/svn/repo/trunk' is already locked.
Fix it by adding a per-repository lock that's independent of the module
and SRCREV.
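The shape of the fix, as a minimal sketch (the helpers `update_or_checkout` and `pack_tarball` are placeholders for illustration, not functions from the patch): `bb.utils.lockfile()` blocks until the per-repository lock file can be taken, and the whole update/checkout plus tarball step runs inside a `try`/`finally` so the lock is always released, even if the fetch fails.

```python
# Sketch only, assuming BitBake's bb.utils locking helpers; the actual change
# is in the diff below.
lf = bb.utils.lockfile(ud.svnlock)   # one lock file per SVN repository
try:
    update_or_checkout(ud, d)        # placeholder for the svn update/fetch logic
    pack_tarball(ud, d)              # placeholder for the tarball step
finally:
    bb.utils.unlockfile(lf)          # always release, even on a fetch error
```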
(Bitbake rev: 3f1f183a17bf3580da8a4ffd6dab30b62c2654a8)
Signed-off-by: Ulf Magnusson <Ulf.Magnusson@bmw.de>
Signed-off-by: Michael Ho <Michael.Ho@bmw.de>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
| -rw-r--r-- | bitbake/lib/bb/fetch2/svn.py | 64 |
1 file changed, 36 insertions(+), 28 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
index ed70bcf8fb..9dcf3eb090 100644
--- a/bitbake/lib/bb/fetch2/svn.py
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -63,6 +63,9 @@ class Svn(FetchMethod):
         relpath = self._strip_leading_slashes(ud.path)
         ud.pkgdir = os.path.join(svndir, ud.host, relpath)
         ud.moddir = os.path.join(ud.pkgdir, ud.module)
+        # Protects the repository from concurrent updates, e.g. from two
+        # recipes fetching different revisions at the same time
+        ud.svnlock = os.path.join(ud.pkgdir, "svn.lock")
 
         ud.setup_revisions(d)
 
@@ -123,35 +126,40 @@ class Svn(FetchMethod):
 
         logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
 
-        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
-            svnupdatecmd = self._buildsvncommand(ud, d, "update")
-            logger.info("Update " + ud.url)
-            # We need to attempt to run svn upgrade first in case its an older working format
-            try:
-                runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
-            except FetchError:
-                pass
-            logger.debug(1, "Running %s", svnupdatecmd)
-            bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
-            runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
-        else:
-            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
-            logger.info("Fetch " + ud.url)
-            # check out sources there
-            bb.utils.mkdirhier(ud.pkgdir)
-            logger.debug(1, "Running %s", svnfetchcmd)
-            bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
-            runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)
-
-        scmdata = ud.parm.get("scmdata", "")
-        if scmdata == "keep":
-            tar_flags = ""
-        else:
-            tar_flags = "--exclude='.svn'"
+        lf = bb.utils.lockfile(ud.svnlock)
+
+        try:
+            if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
+                svnupdatecmd = self._buildsvncommand(ud, d, "update")
+                logger.info("Update " + ud.url)
+                # We need to attempt to run svn upgrade first in case its an older working format
+                try:
+                    runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
+                except FetchError:
+                    pass
+                logger.debug(1, "Running %s", svnupdatecmd)
+                bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
+                runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
+            else:
+                svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
+                logger.info("Fetch " + ud.url)
+                # check out sources there
+                bb.utils.mkdirhier(ud.pkgdir)
+                logger.debug(1, "Running %s", svnfetchcmd)
+                bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
+                runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)
+
+            scmdata = ud.parm.get("scmdata", "")
+            if scmdata == "keep":
+                tar_flags = ""
+            else:
+                tar_flags = "--exclude='.svn'"
 
-        # tar them up to a defined filename
-        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
-                    cleanup=[ud.localpath], workdir=ud.pkgdir)
+            # tar them up to a defined filename
+            runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
+                        cleanup=[ud.localpath], workdir=ud.pkgdir)
+        finally:
+            bb.utils.unlockfile(lf)
 
     def clean(self, ud, d):
         """ Clean SVN specific files and dirs """
