| Field | Value | Date |
|---|---|---|
| author | Richard Purdie <richard.purdie@linuxfoundation.org> | 2011-11-25 14:57:53 +0000 |
| committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2011-11-27 10:35:30 +0000 |
| commit | 4cd96710785eb05abeff1f281878655118d4a7dd (patch) | |
| tree | f2e15210fa7057df398c6e20ecc51f1d747a12ab /bitbake/lib/bb/fetch | |
| parent | 0a434ac10158e2011d41a1189e65e9474b1672be (diff) | |
| download | poky-4cd96710785eb05abeff1f281878655118d4a7dd.tar.gz | |
bitbake: Update users of getVar/setVar to use the data store functions directly
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
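
The change itself is mechanical: every call that went through the module-level helpers in `bb.data` (`bb.data.getVar`, `bb.data.setVar`, `bb.data.getVarFlag`) now calls the equivalent accessor directly on the datastore object `d`, dropping `d` from the argument list. Below is a minimal sketch of the pattern, for illustration only: `FakeDataStore` and `legacy_getVar` are hypothetical stand-ins for BitBake's datastore and the old `bb.data` wrapper, which is assumed here to simply forward to the datastore.

```python
# Sketch of the call-site change this commit applies across bb/fetch.
# FakeDataStore is a hypothetical stand-in for BitBake's datastore `d`;
# legacy_getVar mimics the old bb.data.getVar wrapper (assumed to forward
# to the datastore). Only the call-site pattern is the point here.

class FakeDataStore:
    def __init__(self, values=None):
        self._values = dict(values or {})

    def getVar(self, name, expand=False):
        # The real datastore can expand ${...} references when `expand` is
        # true; this stand-in just returns the stored value.
        return self._values.get(name)

    def setVar(self, name, value):
        self._values[name] = value


def legacy_getVar(name, d, expand=False):
    # Old module-level helper style: the datastore is passed as an argument.
    return d.getVar(name, expand)


d = FakeDataStore({"BB_SRCREV_POLICY": "cache"})

# Before this commit:
old = legacy_getVar("BB_SRCREV_POLICY", d, 1) or "clear"
# After this commit:
new = d.getVar("BB_SRCREV_POLICY", 1) or "clear"

assert old == new == "cache"
```

The same substitution covers `setVar` and `getVarFlag` in the hunks below; variable names and expansion flags are unchanged, only the receiver moves.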
Diffstat (limited to 'bitbake/lib/bb/fetch')
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | bitbake/lib/bb/fetch/__init__.py | 32 |
| -rw-r--r-- | bitbake/lib/bb/fetch/git.py | 6 |

2 files changed, 19 insertions, 19 deletions
```diff
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index 5be4c9caea..ea98019207 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -154,7 +154,7 @@ def fetcher_init(d):
     Calls before this must not hit the cache.
     """
     # When to drop SCM head revisions controlled by user policy
-    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
+    srcrev_policy = d.getVar('BB_SRCREV_POLICY', 1) or "clear"
     if srcrev_policy == "cache":
         logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
@@ -200,7 +200,7 @@ def fetcher_compare_revisions(d):
 def init(urls, d, setup = True):
     urldata = {}
 
-    fn = bb.data.getVar('FILE', d, 1)
+    fn = d.getVar('FILE', 1)
     if fn in urldata_cache:
         urldata = urldata_cache[fn]
 
@@ -243,7 +243,7 @@ def verify_checksum(u, ud, d):
                     'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
                     ud.localpath, ud.md5_name, md5data,
                     ud.sha256_name, sha256data)
-        if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
+        if d.getVar("BB_STRICT_CHECKSUM", True) == "1":
             raise FetchError("No checksum specified for %s." % u)
         return
 
@@ -276,7 +276,7 @@ def go(d, urls = None):
 
         if m.try_premirror(u, ud, d):
             # First try fetching uri, u, from PREMIRRORS
-            mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
+            mirrors = mirror_from_string(d.getVar('PREMIRRORS', True))
             localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))
         elif os.path.exists(ud.localfile):
             localpath = ud.localfile
@@ -291,7 +291,7 @@ def go(d, urls = None):
                 # Remove any incomplete file
                 bb.utils.remove(ud.localpath)
                 # Finally, try fetching uri, u, from MIRRORS
-                mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
+                mirrors = mirror_from_string(d.getVar('MIRRORS', True))
                 localpath = try_mirrors (d, u, mirrors)
             if not localpath or not os.path.exists(localpath):
                 raise FetchError("Unable to fetch URL %s from any source." % u)
@@ -327,7 +327,7 @@ def checkstatus(d, urls = None):
         m = ud.method
         logger.debug(1, "Testing URL %s", u)
         # First try checking uri, u, from PREMIRRORS
-        mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
+        mirrors = mirror_from_string(d.getVar('PREMIRRORS', True))
         ret = try_mirrors(d, u, mirrors, True)
         if not ret:
             # Next try checking from the original uri, u
@@ -335,7 +335,7 @@ def checkstatus(d, urls = None):
                 ret = m.checkstatus(u, ud, d)
             except:
                 # Finally, try checking uri, u, from MIRRORS
-                mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
+                mirrors = mirror_from_string(d.getVar('MIRRORS', True))
                 ret = try_mirrors (d, u, mirrors, True)
 
         if not ret:
@@ -383,7 +383,7 @@ def get_srcrev(d):
     scms = []
 
     # Only call setup_localpath on URIs which supports_srcrev()
-    urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
+    urldata = init(d.getVar('SRC_URI', 1).split(), d, False)
     for u in urldata:
         ud = urldata[u]
         if ud.method.supports_srcrev():
@@ -395,8 +395,8 @@ def get_srcrev(d):
         logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
         raise ParameterError
 
-    if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
-        bb.data.setVar('__BB_DONT_CACHE', '1', d)
+    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
+        d.setVar('__BB_DONT_CACHE', '1')
 
     if len(scms) == 1:
         return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)
@@ -404,7 +404,7 @@ def get_srcrev(d):
     #
     # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
     #
-    format = bb.data.getVar('SRCREV_FORMAT', d, 1)
+    format = d.getVar('SRCREV_FORMAT', 1)
     if not format:
         logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
         raise ParameterError
@@ -539,8 +539,8 @@ class FetchData(object):
         else:
             self.md5_name = "md5sum"
             self.sha256_name = "sha256sum"
-        self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
-        self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)
+        self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
+        self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
 
         for m in methods:
             if m.supports(url, self, d):
@@ -555,7 +555,7 @@ class FetchData(object):
             self.localpath = self.parm["localpath"]
             self.basename = os.path.basename(self.localpath)
         else:
-            premirrors = bb.data.getVar('PREMIRRORS', d, True)
+            premirrors = d.getVar('PREMIRRORS', True)
             local = ""
             if premirrors and self.url:
                 aurl = self.url.split(";")[0]
@@ -775,7 +775,7 @@ class Fetch(object):
 
         latest_rev = self._build_revision(url, ud, d)
         last_rev = localcounts.get(key + '_rev')
-        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
+        uselocalcount = d.getVar("BB_LOCALCOUNT_OVERRIDE", True) or False
         count = None
         if uselocalcount:
             count = Fetch.localcount_internal_helper(ud, d)
@@ -803,7 +803,7 @@ class Fetch(object):
 
     def generate_revision_key(self, url, ud, d):
         key = self._revision_key(url, ud, d)
-        return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
+        return "%s-%s" % (key, d.getVar("PN", True) or "")
 
 from . import cvs
 from . import git
```
```diff
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py
index 49c1cfe8f9..7160919d5a 100644
--- a/bitbake/lib/bb/fetch/git.py
+++ b/bitbake/lib/bb/fetch/git.py
@@ -34,7 +34,7 @@ class Git(Fetch):
         #
         # Only enable _sortable revision if the key is set
         #
-        if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
+        if d.getVar("BB_GIT_CLONE_FOR_SRCREV", True):
             self._sortable_buildindex = self._sortable_buildindex_disabled
     def supports(self, url, ud, d):
         """
@@ -220,7 +220,7 @@ class Git(Fetch):
 
     def generate_revision_key(self, url, ud, d, branch=False):
         key = self._revision_key(url, ud, d, branch)
-        return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
+        return "%s-%s" % (key, d.getVar("PN", True) or "")
 
     def _latest_revision(self, url, ud, d):
         """
@@ -276,7 +276,7 @@ class Git(Fetch):
             del localcounts[oldkey + '_rev']
             localcounts[key + '_rev'] = last_rev
 
-        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
+        uselocalcount = d.getVar("BB_LOCALCOUNT_OVERRIDE", True) or False
         count = None
         if uselocalcount:
             count = Fetch.localcount_internal_helper(ud, d)
```
