author     Aníbal Limón <anibal.limon@linux.intel.com>          2016-06-10 10:12:10 -0500
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2016-06-12 23:47:19 +0100
commit     db84521aacaf71b20aa12009f38c057a6bf416bc (patch)
tree       290b9bce75827b8d894d2d3cc7a775762c4ed263 /meta/lib/oe
parent     4c38798cae14182433f2729d957e370fa3d56c51 (diff)
download   poky-db84521aacaf71b20aa12009f38c057a6bf416bc.tar.gz
oe/distro_check.py: Fixes for python3
create_socket: Use urllib, because urllib2 has become urllib in Python 3 and
passing proxies as an argument is deprecated; export them into the
environment instead.

get_links_from_url: Stop using sgmllib for parsing HTML, because it is no
longer available in Python 3; use bs4 instead, which is already bundled in
the bitbake tree.

[YOCTO #9744]

(From OE-Core rev: ee26ecf58277560459dd01992bb3f486f92c1531)

Signed-off-by: Aníbal Limón <anibal.limon@linux.intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
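To make the get_links_from_url change concrete, here is a minimal, self-contained sketch of the bs4 approach the message describes (not part of the patch): the helper name extract_links and the sample HTML are invented for illustration, and it assumes BeautifulSoup 4 is importable as bs4, as it is from the copy bundled with bitbake.

# Illustrative sketch only: extract_links and the sample HTML are invented.
from bs4 import BeautifulSoup, SoupStrainer

def extract_links(html):
    # Restrict parsing to <a> tags, then collect the href values,
    # mirroring the strip('/') normalisation used in distro_check.py.
    soup = BeautifulSoup(html, "html.parser", parse_only=SoupStrainer("a"))
    return [a['href'].strip('/') for a in soup.find_all('a', href=True)]

print(extract_links('<a href="/2.1/">2.1</a> <a href="/2.2/">2.2</a>'))
# prints: ['2.1', '2.2']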
Diffstat (limited to 'meta/lib/oe')
-rw-r--r--  meta/lib/oe/distro_check.py | 82
1 file changed, 34 insertions(+), 48 deletions(-)
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py
index f1f1fbbb28..87c52fae9c 100644
--- a/meta/lib/oe/distro_check.py
+++ b/meta/lib/oe/distro_check.py
@@ -1,53 +1,35 @@
 from contextlib import contextmanager
-@contextmanager
+
+from bb.utils import export_proxies
+
 def create_socket(url, d):
-    import urllib.request, urllib.parse, urllib.error
-    socket = urllib.request.urlopen(url, proxies=get_proxies(d))
+    import urllib
+
+    socket = None
     try:
-        yield socket
-    finally:
-        socket.close()
+        export_proxies(d)
+        socket = urllib.request.urlopen(url)
+    except:
+        bb.warn("distro_check: create_socket url %s can't access" % url)
 
-def get_proxies(d):
-    proxies = {}
-    for key in ['http', 'https', 'ftp', 'ftps', 'no', 'all']:
-        proxy = d.getVar(key + '_proxy', True)
-        if proxy:
-            proxies[key] = proxy
-    return proxies
+    return socket
 
 def get_links_from_url(url, d):
     "Return all the href links found on the web location"
 
-    import sgmllib
-
-    class LinksParser(sgmllib.SGMLParser):
-        def parse(self, s):
-            "Parse the given string 's'."
-            self.feed(s)
-            self.close()
-
-        def __init__(self, verbose=0):
-            "Initialise an object passing 'verbose' to the superclass."
-            sgmllib.SGMLParser.__init__(self, verbose)
-            self.hyperlinks = []
-
-        def start_a(self, attributes):
-            "Process a hyperlink and its 'attributes'."
-            for name, value in attributes:
-                if name == "href":
-                    self.hyperlinks.append(value.strip('/'))
-
-        def get_hyperlinks(self):
-            "Return the list of hyperlinks."
-            return self.hyperlinks
+    from bs4 import BeautifulSoup, SoupStrainer
 
-    with create_socket(url,d) as sock:
+    hyperlinks = []
+
+    webpage = ''
+    sock = create_socket(url,d)
+    if sock:
         webpage = sock.read()
 
-    linksparser = LinksParser()
-    linksparser.parse(webpage)
-    return linksparser.get_hyperlinks()
+    soup = BeautifulSoup(webpage, "html.parser", parse_only=SoupStrainer("a"))
+    for line in soup.find_all('a', href=True):
+        hyperlinks.append(line['href'].strip('/'))
+    return hyperlinks
 
 def find_latest_numeric_release(url, d):
     "Find the latest listed numeric release on the given url"
@@ -162,14 +144,18 @@ def find_latest_debian_release(url, d):
 
 def get_debian_style_source_package_list(url, section, d):
     "Return the list of package-names stored in the debian style Sources.gz file"
-    with create_socket(url,d) as sock:
-        webpage = sock.read()
-        import tempfile
-        tmpfile = tempfile.NamedTemporaryFile(mode='wb', prefix='oecore.', suffix='.tmp', delete=False)
-        tmpfilename=tmpfile.name
-        tmpfile.write(sock.read())
-        tmpfile.close()
+    import tempfile
     import gzip
+
+    webpage = ''
+    sock = create_socket(url,d)
+    if sock:
+        webpage = sock.read()
+
+    tmpfile = tempfile.NamedTemporaryFile(mode='wb', prefix='oecore.', suffix='.tmp', delete=False)
+    tmpfilename=tmpfile.name
+    tmpfile.write(sock.read())
+    tmpfile.close()
     bb.note("Reading %s: %s" % (url, section))
 
     f = gzip.open(tmpfilename)
@@ -266,9 +252,9 @@ def update_distro_data(distro_check_dir, datetime, d):
     import fcntl
     try:
         if not os.path.exists(datetime_file):
-            open(datetime_file, 'w+b').close() # touch the file so that the next open won't fail
+            open(datetime_file, 'w+').close() # touch the file so that the next open won't fail
 
-        f = open(datetime_file, "r+b")
+        f = open(datetime_file, "r+")
         fcntl.lockf(f, fcntl.LOCK_EX)
         saved_datetime = f.read()
         if saved_datetime[0:8] != datetime[0:8]:
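
As a closing note on the create_socket change, the sketch below approximates the "export the proxies into the environment, then call urlopen" pattern outside of bitbake: there is no datastore or bb.utils.export_proxies here, so the helper name open_url_with_proxies is invented, a plain dict stands in for the proxy variables, and the URL and proxy values are placeholders.

# Rough approximation of the pattern; all values below are placeholders.
import os
import urllib.request

def open_url_with_proxies(url, proxies):
    # Python 3's urllib.request.urlopen() accepts no proxies= argument;
    # it reads http_proxy/https_proxy/no_proxy from the environment,
    # so export the settings before opening the URL.
    for key, value in proxies.items():
        if value:
            os.environ[key] = value
    try:
        return urllib.request.urlopen(url)
    except OSError:
        print("can't access %s" % url)
        return None

sock = open_url_with_proxies("http://example.com/",
                             {"http_proxy": "http://proxy.example.com:8080/"})
if sock:
    webpage = sock.read()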