author     Richard Purdie <richard.purdie@linuxfoundation.org>  2011-02-04 14:40:41 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2011-02-07 09:06:37 +0000
commit     7202a77134029cb37540c785ce0161a4dd574853 (patch)
tree       e9b8d7d3c180260317b661ce0f325a5492d9a6db /bitbake
parent     8f2abf4a9f000d42e98c4936e393bd5033a1af48 (diff)
download   poky-7202a77134029cb37540c785ce0161a4dd574853.tar.gz
bitbake/fetch2: Use True instead of integer values
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
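
The change across all of the fetch2 backends is the same one-liner: the third argument to getVar()/bb.data.getVar() is the boolean "expand" flag, and the literal True is now passed instead of the integer 1. Behaviour is unchanged; the call site simply reads as what it is. A minimal sketch of the pattern, assuming a BitBake environment providing bb.data and a populated datastore d (the helper name read_srcrev_policy is illustrative only):

    # Minimal sketch, assuming a BitBake environment with bb.data and a datastore d.
    import bb.data

    def read_srcrev_policy(d):
        # Old style: integer 1 passed where a boolean "expand" flag is expected.
        old = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
        # New style after this commit: True states the intent explicitly.
        new = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear"
        assert old == new  # identical behaviour, clearer call site
        return new
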
Diffstat (limited to 'bitbake')
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py  | 30
-rw-r--r--  bitbake/lib/bb/fetch2/cvs.py       |  4
-rw-r--r--  bitbake/lib/bb/fetch2/git.py       |  4
-rw-r--r--  bitbake/lib/bb/fetch2/local.py     |  4
-rw-r--r--  bitbake/lib/bb/fetch2/perforce.py  | 10
-rw-r--r--  bitbake/lib/bb/fetch2/ssh.py       |  2
-rw-r--r--  bitbake/lib/bb/fetch2/svk.py       |  2
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py      |  6
8 files changed, 31 insertions(+), 31 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index c7e058d0d6..cf9456b758 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -201,7 +201,7 @@ def fetcher_init(d):
     """
     pd = persist_data.persist(d)
     # When to drop SCM head revisions controlled by user policy
-    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
+    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear"
     if srcrev_policy == "cache":
         logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
@@ -322,7 +322,7 @@ def get_srcrev(d):
     #
     # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
     #
-    format = bb.data.getVar('SRCREV_FORMAT', d, 1)
+    format = bb.data.getVar('SRCREV_FORMAT', d, True)
     if not format:
         raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")

@@ -410,7 +410,7 @@ def try_mirrors(d, uri, mirrors, check = False, force = False):
     uri is the original uri we're trying to download
     mirrors is the list of mirrors we're going to try
     """
-    fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri))
+    fpath = os.path.join(data.getVar("DL_DIR", d, True), os.path.basename(uri))
     if not check and os.access(fpath, os.R_OK) and not force:
         logger.debug(1, "%s already exists, skipping checkout.", fpath)
         return fpath
@@ -463,12 +463,12 @@ def srcrev_internal_helper(ud, d, name):

     rev = None
     if name != '':
-        pn = data.getVar("PN", d, 1)
-        rev = data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, 1)
+        pn = data.getVar("PN", d, True)
+        rev = data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, True)
     if not rev:
-        rev = data.getVar("SRCREV_%s" % name, d, 1)
+        rev = data.getVar("SRCREV_%s" % name, d, True)
     if not rev:
-        rev = data.getVar("SRCREV", d, 1)
+        rev = data.getVar("SRCREV", d, True)
     if rev == "INVALID":
         raise FetchError("Please set SRCREV to a valid value", ud.url)
     if rev == "AUTOINC":
@@ -618,7 +618,7 @@ class FetchMethod(object):
         file = urldata.localpath
         dots = file.split(".")
         if dots[-1] in ['gz', 'bz2', 'Z']:
-            efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
+            efile = os.path.join(bb.data.getVar('WORKDIR', data, True),os.path.basename('.'.join(dots[0:-1])))
         else:
             efile = file
         cmd = None
@@ -642,7 +642,7 @@ class FetchMethod(object):
                 cmd = '%s -a' % cmd
             cmd = "%s '%s'" % (cmd, file)
         elif os.path.isdir(file):
-            filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, 1))
+            filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, True))
             destdir = "."
             if file[0:len(filesdir)] == filesdir:
                 destdir = file[len(filesdir):file.rfind('/')]
@@ -679,7 +679,7 @@ class FetchMethod(object):
             bb.mkdirhier(newdir)
             os.chdir(newdir)

-        cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
+        cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, True), cmd)
         bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
         ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)

@@ -718,10 +718,10 @@ class FetchMethod(object):

         localcount = None
         if name != '':
-            pn = data.getVar("PN", d, 1)
-            localcount = data.getVar("LOCALCOUNT_" + name, d, 1)
+            pn = data.getVar("PN", d, True)
+            localcount = data.getVar("LOCALCOUNT_" + name, d, True)
         if not localcount:
-            localcount = data.getVar("LOCALCOUNT", d, 1)
+            localcount = data.getVar("LOCALCOUNT", d, True)
         return localcount

     localcount_internal_helper = staticmethod(localcount_internal_helper)
@@ -789,12 +789,12 @@ class FetchMethod(object):
 class Fetch(object):
     def __init__(self, urls, d):
         if len(urls) == 0:
-            urls = d.getVar("SRC_URI", 1).split()
+            urls = d.getVar("SRC_URI", True).split()
         self.urls = urls
         self.d = d
         self.ud = {}

-        fn = bb.data.getVar('FILE', d, 1)
+        fn = bb.data.getVar('FILE', d, True)
         if fn in urldata_cache:
             self.ud = urldata_cache[fn]

diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
index b440ed7b17..b77e742c33 100644
--- a/bitbake/lib/bb/fetch2/cvs.py
+++ b/bitbake/lib/bb/fetch2/cvs.py
@@ -115,8 +115,8 @@ class Cvs(FetchMethod):
         data.setVar('CVSROOT', cvsroot, localdata)
         data.setVar('CVSCOOPTS', " ".join(options), localdata)
         data.setVar('CVSMODULE', ud.module, localdata)
-        cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
-        cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)
+        cvscmd = data.getVar('FETCHCOMMAND', localdata, True)
+        cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, True)

         if cvs_rsh:
             cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index 38e2c93be4..35031d28c4 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -101,7 +101,7 @@ class Git(FetchMethod):
         else:
             username = ""

-        repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)
+        repofile = os.path.join(data.getVar("DL_DIR", d, True), ud.mirrortarball)

         ud.repochanged = not os.path.exists(repofile)

@@ -138,7 +138,7 @@ class Git(FetchMethod):

     def build_mirror_data(self, url, ud, d):
         # Generate a mirror tarball if needed
-        repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)
+        repofile = os.path.join(data.getVar("DL_DIR", d, True), ud.mirrortarball)

         os.chdir(ud.clonedir)
         mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
index 7b840a46d3..d77d39375e 100644
--- a/bitbake/lib/bb/fetch2/local.py
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -50,11 +50,11 @@ class Local(FetchMethod):
         path = path.split(";")[0]
         newpath = path
         if path[0] != "/":
-            filespath = data.getVar('FILESPATH', d, 1)
+            filespath = data.getVar('FILESPATH', d, True)
             if filespath:
                 newpath = bb.utils.which(filespath, path)
             if not newpath:
-                filesdir = data.getVar('FILESDIR', d, 1)
+                filesdir = data.getVar('FILESDIR', d, True)
                 if filesdir:
                     newpath = os.path.join(filesdir, path)
         return newpath
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
index 583dfb93bf..e3e9c71da0 100644
--- a/bitbake/lib/bb/fetch2/perforce.py
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -79,7 +79,7 @@ class Perforce(FetchMethod):
         if host:
             p4opt += " -p %s" % (host)

-        p4date = data.getVar("P4DATE", d, 1)
+        p4date = data.getVar("P4DATE", d, True)
         if "revision" in parm:
             depot += "#%s" % (parm["revision"])
         elif "label" in parm:
@@ -87,7 +87,7 @@ class Perforce(FetchMethod):
         elif p4date:
             depot += "@%s" % (p4date)

-        p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
+        p4cmd = data.getVar('FETCHCOMMAND_p4', d, True)
         logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
         p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
         cset = p4file.readline().strip()
@@ -105,7 +105,7 @@ class Perforce(FetchMethod):

         if "label" in parm:
             ud.localfile = "%s.tar.gz" % (parm["label"])
-            return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
+            return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

         base = path
         which = path.find('/...')
@@ -147,13 +147,13 @@ class Perforce(FetchMethod):
         if host:
             p4opt += " -p %s" % (host)

-        p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)
+        p4cmd = data.getVar('FETCHCOMMAND', localdata, True)

         # create temp directory
         logger.debug(2, "Fetch: creating temporary directory")
         bb.mkdirhier(data.expand('${WORKDIR}', localdata))
         data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
-        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
+        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
         tmpfile = tmppipe.readline().strip()
         if not tmpfile:
             raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
index 8b07b3a710..081fe1335c 100644
--- a/bitbake/lib/bb/fetch2/ssh.py
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -75,7 +75,7 @@ class SSH(FetchMethod):
         return lpath

     def download(self, url, urldata, d):
-        dldir = data.getVar('DL_DIR', d, 1)
+        dldir = data.getVar('DL_DIR', d, True)

         m = __pattern__.match(url)
         path = m.group('path')
diff --git a/bitbake/lib/bb/fetch2/svk.py b/bitbake/lib/bb/fetch2/svk.py
index 213d0d3ec4..70f72c80ab 100644
--- a/bitbake/lib/bb/fetch2/svk.py
+++ b/bitbake/lib/bb/fetch2/svk.py
@@ -72,7 +72,7 @@ class Svk(FetchMethod):
         logger.debug(2, "Fetch: creating temporary directory")
         bb.mkdirhier(data.expand('${WORKDIR}', localdata))
         data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
-        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
+        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
         tmpfile = tmppipe.readline().strip()
         if not tmpfile:
             logger.error()
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index 8e34b0c2bd..5a15147e67 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -50,12 +50,12 @@ class Wget(FetchMethod):

         def fetch_uri(uri, ud, d):
             if checkonly:
-                fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
+                fetchcmd = data.getVar("CHECKCOMMAND", d, True)
             elif os.path.exists(ud.localpath):
                 # file exists, but we didnt complete it.. trying again..
-                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
+                fetchcmd = data.getVar("RESUMECOMMAND", d, True)
             else:
-                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
+                fetchcmd = data.getVar("FETCHCOMMAND", d, True)

             uri = uri.split(";")[0]
             uri_decoded = list(decodeurl(uri))