| field | value | date |
|---|---|---|
| author | Richard Purdie <richard.purdie@linuxfoundation.org> | 2011-11-25 14:57:53 +0000 |
| committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2011-11-27 10:35:30 +0000 |
| commit | 4cd96710785eb05abeff1f281878655118d4a7dd | |
| tree | f2e15210fa7057df398c6e20ecc51f1d747a12ab | |
| parent | 0a434ac10158e2011d41a1189e65e9474b1672be | |
| download | poky-4cd96710785eb05abeff1f281878655118d4a7dd.tar.gz | |
bitbake: Update users of getVar/setVar to use the data store functions directly
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
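The conversion is mechanical and uniform: module-level helpers in `bb.data` that took the datastore `d` as an extra argument become method calls on the datastore object itself. A minimal before/after sketch built from call sites in this diff (the trailing `True` requests variable expansion; `md5_name` here stands in for the checksum flag name used at the real call site):

```python
# Before: module-level helpers in bb.data, passing the datastore d explicitly
srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear"
bb.data.setVar('__BB_DONT_CACHE', '1', d)
md5_expected = bb.data.getVarFlag("SRC_URI", md5_name, d)
basepath = bb.data.expand("${DL_DIR}/somefile", d)

# After: the same operations as methods on the datastore itself
srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
d.setVar('__BB_DONT_CACHE', '1')
md5_expected = d.getVarFlag("SRC_URI", md5_name)
basepath = d.expand("${DL_DIR}/somefile")
```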
Diffstat (limited to 'bitbake/lib/bb/fetch2')
| mode | file | lines |
|---|---|---|
| -rw-r--r-- | bitbake/lib/bb/fetch2/__init__.py | 78 |
| -rw-r--r-- | bitbake/lib/bb/fetch2/git.py | 4 |

2 files changed, 41 insertions, 41 deletions
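A substitution this regular lends itself to scripting. Below is a hypothetical sketch (an assumption about tooling, not necessarily how this commit was produced) that rewrites the simple call shapes; first arguments containing their own commas, such as `"SRCREV_%s_pn-%s" % (name, pn)`, escape the pattern and still need hand-editing:

```python
# Hypothetical conversion helper -- illustrative only, not from the commit.
# Rewrites bb.data.getVar(X, d, True) -> d.getVar(X, True) and
# bb.data.setVar(X, Y, d) -> d.setVar(X, Y) for simple argument shapes.
import re
import sys

# The datastore is named d, data, or self.d at different call sites.
GETVAR = re.compile(r"bb\.data\.getVar\(([^,]+), (d|data|self\.d), True\)")
SETVAR = re.compile(r"bb\.data\.setVar\(([^,]+), ([^,]+), (d|data|self\.d)\)")

def convert(source):
    source = GETVAR.sub(r"\2.getVar(\1, True)", source)
    source = SETVAR.sub(r"\3.setVar(\1, \2)", source)
    return source

for path in sys.argv[1:]:
    with open(path) as f:
        text = f.read()
    with open(path, "w") as f:
        f.write(convert(text))
```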
```diff
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index d3c761d00d..19a79fdbfe 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -28,7 +28,7 @@ from __future__ import absolute_import
 from __future__ import print_function
 import os, re
 import logging
-import bb.data, bb.persist_data, bb.utils
+import bb.persist_data, bb.utils
 from bb import data
 
 __version__ = "2"
@@ -211,7 +211,7 @@ def fetcher_init(d):
     Calls before this must not hit the cache.
     """
     # When to drop SCM head revisions controlled by user policy
-    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear"
+    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
     if srcrev_policy == "cache":
         logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
@@ -271,7 +271,7 @@ def verify_checksum(u, ud, d):
     sha256data = bb.utils.sha256_file(ud.localpath)
 
     # If strict checking enabled and neither sum defined, raise error
-    strict = bb.data.getVar("BB_STRICT_CHECKSUM", d, True) or None
+    strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
     if (strict and ud.md5_expected == None and ud.sha256_expected == None):
         raise FetchError('No checksum specified for %s, please add at least one to the recipe:\n'
                          'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"', u,
@@ -336,8 +336,8 @@ def subprocess_setup():
 
 def get_autorev(d):
     # only not cache src rev in autorev case
-    if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
-        bb.data.setVar('__BB_DONT_CACHE', '1', d)
+    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
+        d.setVar('__BB_DONT_CACHE', '1')
     return "AUTOINC"
 
 def get_srcrev(d):
@@ -350,7 +350,7 @@ def get_srcrev(d):
     """
 
     scms = []
-    fetcher = Fetch(bb.data.getVar('SRC_URI', d, True).split(), d)
+    fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
     urldata = fetcher.ud
     for u in urldata:
         if urldata[u].method.supports_srcrev():
@@ -365,7 +365,7 @@ def get_srcrev(d):
     #
     # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
     #
-    format = bb.data.getVar('SRCREV_FORMAT', d, True)
+    format = d.getVar('SRCREV_FORMAT', True)
     if not format:
         raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
 
@@ -400,7 +400,7 @@ def runfetchcmd(cmd, d, quiet = False, cleanup = []):
                   'GIT_PROXY_IGNORE', 'SOCKS5_USER', 'SOCKS5_PASSWD']
 
     for var in exportvars:
-        val = bb.data.getVar(var, d, True)
+        val = d.getVar(var, True)
         if val:
             cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
 
@@ -440,7 +440,7 @@ def check_network_access(d, info = "", url = None):
     """
     log remote network access, and error if BB_NO_NETWORK is set
    """
-    if bb.data.getVar("BB_NO_NETWORK", d, True) == "1":
+    if d.getVar("BB_NO_NETWORK", True) == "1":
         raise NetworkAccess(url, info)
     else:
         logger.debug(1, "Fetcher accessed the network with the command %s" % info)
@@ -526,15 +526,15 @@ def srcrev_internal_helper(ud, d, name):
         return ud.parm['tag']
 
     rev = None
-    pn = bb.data.getVar("PN", d, True)
+    pn = d.getVar("PN", True)
     if name != '':
-        rev = bb.data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, True)
+        rev = d.getVar("SRCREV_%s_pn-%s" % (name, pn), True)
         if not rev:
-            rev = bb.data.getVar("SRCREV_%s" % name, d, True)
+            rev = d.getVar("SRCREV_%s" % name, True)
     if not rev:
-        rev = bb.data.getVar("SRCREV_pn-%s" % pn, d, True)
+        rev = d.getVar("SRCREV_pn-%s" % pn, True)
     if not rev:
-        rev = bb.data.getVar("SRCREV", d, True)
+        rev = d.getVar("SRCREV", True)
     if rev == "INVALID":
         raise FetchError("Please set SRCREV to a valid value", ud.url)
     if rev == "AUTOINC":
@@ -572,11 +572,11 @@ class FetchData(object):
         if self.md5_name in self.parm:
             self.md5_expected = self.parm[self.md5_name]
         else:
-            self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
+            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
         if self.sha256_name in self.parm:
             self.sha256_expected = self.parm[self.sha256_name]
         else:
-            self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)
+            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
 
         self.names = self.parm.get("name",'default').split(',')
 
@@ -600,7 +600,7 @@ class FetchData(object):
             self.localpath = self.method.localpath(self.url, self, d)
 
         # Note: These files should always be in DL_DIR whereas localpath may not be.
-        basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath or self.basename), d)
+        basepath = d.expand("${DL_DIR}/%s" % os.path.basename(self.localpath or self.basename))
         self.donestamp = basepath + '.done'
         self.lockfile = basepath + '.lock'
 
@@ -626,12 +626,12 @@ class FetchData(object):
         if "srcdate" in self.parm:
             return self.parm['srcdate']
 
-        pn = bb.data.getVar("PN", d, True)
+        pn = d.getVar("PN", True)
 
         if pn:
-            return bb.data.getVar("SRCDATE_%s" % pn, d, True) or bb.data.getVar("SRCDATE", d, True) or bb.data.getVar("DATE", d, True)
+            return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
 
-        return bb.data.getVar("SRCDATE", d, True) or bb.data.getVar("DATE", d, True)
+        return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
 
 class FetchMethod(object):
     """Base class for 'fetch'ing data"""
@@ -703,7 +703,7 @@ class FetchMethod(object):
 
         dots = file.split(".")
         if dots[-1] in ['gz', 'bz2', 'Z']:
-            efile = os.path.join(bb.data.getVar('WORKDIR', data, True),os.path.basename('.'.join(dots[0:-1])))
+            efile = os.path.join(data.getVar('WORKDIR', True),os.path.basename('.'.join(dots[0:-1])))
         else:
             efile = file
         cmd = None
@@ -747,7 +747,7 @@ class FetchMethod(object):
             dest = os.path.join(rootdir, os.path.basename(file))
             if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
                 if os.path.isdir(file):
-                    filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, True))
+                    filesdir = os.path.realpath(data.getVar("FILESDIR", True))
                     destdir = "."
                     if file[0:len(filesdir)] == filesdir:
                         destdir = file[len(filesdir):file.rfind('/')]
@@ -779,7 +779,7 @@ class FetchMethod(object):
             bb.utils.mkdirhier(newdir)
             os.chdir(newdir)
 
-        cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, True), cmd)
+        cmd = "PATH=\"%s\" %s" % (data.getVar('PATH', True), cmd)
         bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
         ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
 
@@ -824,10 +824,10 @@ class FetchMethod(object):
 
         localcount = None
         if name != '':
-            pn = bb.data.getVar("PN", d, True)
-            localcount = bb.data.getVar("LOCALCOUNT_" + name, d, True)
+            pn = d.getVar("PN", True)
+            localcount = d.getVar("LOCALCOUNT_" + name, True)
         if not localcount:
-            localcount = bb.data.getVar("LOCALCOUNT", d, True)
+            localcount = d.getVar("LOCALCOUNT", True)
         return localcount
 
     localcount_internal_helper = staticmethod(localcount_internal_helper)
@@ -859,7 +859,7 @@ class FetchMethod(object):
 
         latest_rev = self._build_revision(url, ud, d, name)
         last_rev = localcounts.get(key + '_rev')
-        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
+        uselocalcount = d.getVar("BB_LOCALCOUNT_OVERRIDE", True) or False
         count = None
         if uselocalcount:
             count = FetchMethod.localcount_internal_helper(ud, d, name)
@@ -887,7 +887,7 @@ class FetchMethod(object):
 
     def generate_revision_key(self, url, ud, d, name):
         key = self._revision_key(url, ud, d, name)
-        return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
+        return "%s-%s" % (key, d.getVar("PN", True) or "")
 
 class Fetch(object):
     def __init__(self, urls, d, cache = True):
@@ -897,7 +897,7 @@ class Fetch(object):
         self.d = d
         self.ud = {}
 
-        fn = bb.data.getVar('FILE', d, True)
+        fn = d.getVar('FILE', True)
         if cache and fn in urldata_cache:
             self.ud = urldata_cache[fn]
 
@@ -913,7 +913,7 @@ class Fetch(object):
             self.ud[url] = FetchData(url, self.d)
 
         self.ud[url].setup_localpath(self.d)
-        return bb.data.expand(self.ud[url].localpath, self.d)
+        return self.d.expand(self.ud[url].localpath)
 
     def localpaths(self):
         """
@@ -935,8 +935,8 @@ class Fetch(object):
         if len(urls) == 0:
             urls = self.urls
 
-        network = bb.data.getVar("BB_NO_NETWORK", self.d, True)
-        premirroronly = (bb.data.getVar("BB_FETCH_PREMIRRORONLY", self.d, True) == "1")
+        network = self.d.getVar("BB_NO_NETWORK", True)
+        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
 
         for u in urls:
             ud = self.ud[u]
@@ -947,17 +947,17 @@ class Fetch(object):
             lf = bb.utils.lockfile(ud.lockfile)
 
             try:
-                bb.data.setVar("BB_NO_NETWORK", network, self.d)
+                self.d.setVar("BB_NO_NETWORK", network)
 
                 if not m.need_update(u, ud, self.d):
                     localpath = ud.localpath
                 elif m.try_premirror(u, ud, self.d):
                     logger.debug(1, "Trying PREMIRRORS")
-                    mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
+                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
                     localpath = try_mirrors(self.d, ud, mirrors, False)
 
                 if premirroronly:
-                    bb.data.setVar("BB_NO_NETWORK", "1", self.d)
+                    self.d.setVar("BB_NO_NETWORK", "1")
 
                 if not localpath and m.need_update(u, ud, self.d):
                     try:
@@ -979,7 +979,7 @@ class Fetch(object):
                     if os.path.isfile(ud.localpath):
                         bb.utils.remove(ud.localpath)
                     logger.debug(1, "Trying MIRRORS")
-                    mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
+                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                     localpath = try_mirrors (self.d, ud, mirrors)
 
                 if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
@@ -1004,7 +1004,7 @@ class Fetch(object):
             m = ud.method
             logger.debug(1, "Testing URL %s", u)
             # First try checking uri, u, from PREMIRRORS
-            mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
+            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
             ret = try_mirrors(self.d, ud, mirrors, True)
             if not ret:
                 # Next try checking from the original uri, u
@@ -1012,7 +1012,7 @@ class Fetch(object):
                     ret = m.checkstatus(u, ud, self.d)
                 except:
                     # Finally, try checking uri, u, from MIRRORS
-                    mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
+                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                     ret = try_mirrors (self.d, ud, mirrors, True)
 
             if not ret:
@@ -1030,7 +1030,7 @@ class Fetch(object):
             ud = self.ud[u]
             ud.setup_localpath(self.d)
 
-            if bb.data.expand(self.localpath, self.d) is None:
+            if self.d.expand(self.localpath) is None:
                 continue
 
             if ud.lockfile:
```
```diff
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index 87a35d97a3..f203c5abb1 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -68,7 +68,7 @@ class Git(FetchMethod):
         #
         # Only enable _sortable revision if the key is set
         #
-        if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
+        if d.getVar("BB_GIT_CLONE_FOR_SRCREV", True):
             self._sortable_buildindex = self._sortable_buildindex_disabled
     def supports(self, url, ud, d):
         """
@@ -146,7 +146,7 @@ class Git(FetchMethod):
     def try_premirror(self, u, ud, d):
         # If we don't do this, updating an existing checkout with only premirrors
         # is not possible
-        if bb.data.getVar("BB_FETCH_PREMIRRORONLY", d, True) is not None:
+        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
             return True
         if os.path.exists(ud.clonedir):
             return False
```