author     Leonardo Sandoval <leonardo.sandoval.gonzalez@linux.intel.com>  2015-09-02 08:59:43 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2015-10-01 15:07:48 +0100
commit     12cd705b28412c1342c1c022340c13b1ef2c8a71 (patch)
tree       605515e411259ce02b09d0edb21a6e47d7dc8ef8
parent     f047ee8c953526e4b571745fb91da3ce8522d758 (diff)
download   poky-12cd705b28412c1342c1c022340c13b1ef2c8a71.tar.gz
distrodata: Take proxies into account in distrodata tasks
Proxies defined in the environment were not taken into account by the
distrodata tasks. This commit passes the datastore into the distro_check
library and wraps urllib.urlopen in a context manager so that the
configured proxies are used for every fetch.
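In essence, the change boils down to the pattern sketched below. This is a
condensed illustration rather than the library code verbatim: the FakeData
class is only a stand-in for the BitBake datastore so the snippet runs on its
own, the proxy dictionary here is keyed by scheme as urllib expects (the
library passes the environment-style variable names through unchanged), and
the URL in the example is just one of the mirrors already used by the library.

    # Python 2 sketch of the proxy-aware fetch pattern (illustration only).
    import os
    import urllib
    from contextlib import contextmanager

    class FakeData(object):
        """Stand-in for the BitBake datastore 'd'."""
        def getVar(self, key, expand=True):
            return os.environ.get(key)

    def get_proxies(d):
        # Build the {scheme: proxy_url} mapping that urllib.urlopen expects,
        # reading the values from the datastore rather than os.environ directly.
        proxies = {}
        for scheme in ('http', 'https', 'ftp'):
            value = d.getVar(scheme + '_proxy', True)
            if value:
                proxies[scheme] = value
        return proxies

    @contextmanager
    def create_socket(url, d):
        # Open the URL through the datastore-provided proxies and make sure
        # the socket is closed even if the caller raises.
        sock = urllib.urlopen(url, proxies=get_proxies(d))
        try:
            yield sock
        finally:
            sock.close()

    if __name__ == '__main__':
        with create_socket("http://ftp.debian.org/debian/dists/", FakeData()) as sock:
            print(sock.read()[:200])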
One way to run the distrodata tasks is to use 'universe' as the target
together with the 'all' variants of the tasks:
$ bitbake universe -c distrodataall
$ bitbake universe -c distro_checkall
$ bitbake universe -c checklicenseall
Logs are located under TMPDIR/log
[YOCTO #7567]
(From OE-Core rev: 7d1c3470bb06e43245ccb7f067121de606506430)
Signed-off-by: Leonardo Sandoval <leonardo.sandoval.gonzalez@linux.intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-rw-r--r--  meta/classes/distrodata.bbclass |   6
-rw-r--r--  meta/lib/oe/distro_check.py     | 113
2 files changed, 69 insertions, 50 deletions
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index e6eb3f32b3..5a4c1b6faf 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -33,7 +33,7 @@ python do_distrodata_np() {
     tmpdir = d.getVar('TMPDIR', True)
     distro_check_dir = os.path.join(tmpdir, "distro_check")
     datetime = localdata.getVar('DATETIME', True)
-    dist_check.update_distro_data(distro_check_dir, datetime)
+    dist_check.update_distro_data(distro_check_dir, datetime, localdata)

     if pn.find("-native") != -1:
         pnstripped = pn.split("-native")
@@ -118,7 +118,7 @@ python do_distrodata() {
     tmpdir = d.getVar('TMPDIR', True)
     distro_check_dir = os.path.join(tmpdir, "distro_check")
     datetime = localdata.getVar('DATETIME', True)
-    dist_check.update_distro_data(distro_check_dir, datetime)
+    dist_check.update_distro_data(distro_check_dir, datetime, localdata)

     pn = d.getVar("PN", True)
     bb.note("Package Name: %s" % pn)
@@ -406,7 +406,7 @@ python do_distro_check() {
     bb.utils.mkdirhier(logpath)
     result_file = os.path.join(logpath, "distrocheck.csv")
     datetime = localdata.getVar('DATETIME', True)
-    dc.update_distro_data(distro_check_dir, datetime)
+    dc.update_distro_data(distro_check_dir, datetime, localdata)

     # do the comparison
     result = dc.compare_in_distro_packages_list(distro_check_dir, d)
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py
index 8ed5b0ec80..b3419ce03a 100644
--- a/meta/lib/oe/distro_check.py
+++ b/meta/lib/oe/distro_check.py
@@ -1,7 +1,28 @@
-def get_links_from_url(url):
+from contextlib import contextmanager
+@contextmanager
+def create_socket(url, d):
+    import urllib
+    socket = urllib.urlopen(url, proxies=get_proxies(d))
+    try:
+        yield socket
+    finally:
+        socket.close()
+
+def get_proxies(d):
+    import os
+    proxykeys = ['HTTP_PROXY', 'http_proxy',
+                 'HTTPS_PROXY', 'https_proxy',
+                 'FTP_PROXY', 'ftp_proxy',
+                 'FTPS_PROXY', 'ftps_proxy',
+                 'NO_PROXY', 'no_proxy',
+                 'ALL_PROXY', 'all_proxy']
+    proxyvalues = map(lambda key: d.getVar(key, True), proxykeys)
+    return dict(zip(proxykeys, proxyvalues))
+
+def get_links_from_url(url, d):
     "Return all the href links found on the web location"

-    import urllib, sgmllib
+    import sgmllib

     class LinksParser(sgmllib.SGMLParser):
         def parse(self, s):
@@ -24,19 +45,18 @@ def get_links_from_url(url):
             "Return the list of hyperlinks."
             return self.hyperlinks

-    sock = urllib.urlopen(url)
-    webpage = sock.read()
-    sock.close()
+    with create_socket(url,d) as sock:
+        webpage = sock.read()

     linksparser = LinksParser()
     linksparser.parse(webpage)
     return linksparser.get_hyperlinks()

-def find_latest_numeric_release(url):
+def find_latest_numeric_release(url, d):
     "Find the latest listed numeric release on the given url"
     max=0
     maxstr=""
-    for link in get_links_from_url(url):
+    for link in get_links_from_url(url, d):
         try:
             release = float(link)
         except:
@@ -70,7 +90,7 @@ def clean_package_list(package_list):
     return set.keys()


-def get_latest_released_meego_source_package_list():
+def get_latest_released_meego_source_package_list(d):
     "Returns list of all the name os packages in the latest meego distro"

     package_names = []
@@ -82,11 +102,11 @@ def get_latest_released_meego_source_package_list():
     package_list=clean_package_list(package_names)
     return "1.0", package_list

-def get_source_package_list_from_url(url, section):
+def get_source_package_list_from_url(url, section, d):
     "Return a sectioned list of package names from a URL list"

     bb.note("Reading %s: %s" % (url, section))
-    links = get_links_from_url(url)
+    links = get_links_from_url(url, d)
     srpms = filter(is_src_rpm, links)
     names_list = map(package_name_from_srpm, srpms)

@@ -96,44 +116,44 @@ def get_source_package_list_from_url(url, section):

     return new_pkgs

-def get_latest_released_fedora_source_package_list():
+def get_latest_released_fedora_source_package_list(d):
     "Returns list of all the name os packages in the latest fedora distro"
-    latest = find_latest_numeric_release("http://archive.fedoraproject.org/pub/fedora/linux/releases/")
+    latest = find_latest_numeric_release("http://archive.fedoraproject.org/pub/fedora/linux/releases/", d)

-    package_names = get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/releases/%s/Fedora/source/SRPMS/" % latest, "main")
+    package_names = get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/releases/%s/Fedora/source/SRPMS/" % latest, "main", d)

 #    package_names += get_source_package_list_from_url("http://download.fedora.redhat.com/pub/fedora/linux/releases/%s/Everything/source/SPRMS/" % latest, "everything")
-    package_names += get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/updates/%s/SRPMS/" % latest, "updates")
+    package_names += get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/updates/%s/SRPMS/" % latest, "updates", d)

     package_list=clean_package_list(package_names)

     return latest, package_list

-def get_latest_released_opensuse_source_package_list():
+def get_latest_released_opensuse_source_package_list(d):
     "Returns list of all the name os packages in the latest opensuse distro"
-    latest = find_latest_numeric_release("http://download.opensuse.org/source/distribution/")
+    latest = find_latest_numeric_release("http://download.opensuse.org/source/distribution/",d)

-    package_names = get_source_package_list_from_url("http://download.opensuse.org/source/distribution/%s/repo/oss/suse/src/" % latest, "main")
-    package_names += get_source_package_list_from_url("http://download.opensuse.org/update/%s/rpm/src/" % latest, "updates")
+    package_names = get_source_package_list_from_url("http://download.opensuse.org/source/distribution/%s/repo/oss/suse/src/" % latest, "main", d)
+    package_names += get_source_package_list_from_url("http://download.opensuse.org/update/%s/rpm/src/" % latest, "updates", d)

     package_list=clean_package_list(package_names)
     return latest, package_list

-def get_latest_released_mandriva_source_package_list():
+def get_latest_released_mandriva_source_package_list(d):
     "Returns list of all the name os packages in the latest mandriva distro"
-    latest = find_latest_numeric_release("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/")
-    package_names = get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/release/" % latest, "main")
+    latest = find_latest_numeric_release("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/", d)
+    package_names = get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/release/" % latest, "main", d)
 #    package_names += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/contrib/release/" % latest, "contrib")
-    package_names += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/updates/" % latest, "updates")
+    package_names += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/updates/" % latest, "updates", d)

     package_list=clean_package_list(package_names)
     return latest, package_list

-def find_latest_debian_release(url):
+def find_latest_debian_release(url, d):
     "Find the latest listed debian release on the given url"

     releases = []
-    for link in get_links_from_url(url):
+    for link in get_links_from_url(url, d):
         if link[:6] == "Debian":
             if ';' not in link:
                 releases.append(link)
@@ -143,16 +163,15 @@ def find_latest_debian_release(url):
     except:
         return "_NotFound_"

-def get_debian_style_source_package_list(url, section):
+def get_debian_style_source_package_list(url, section, d):
     "Return the list of package-names stored in the debian style Sources.gz file"
-    import urllib
-    sock = urllib.urlopen(url)
+    with create_socket(url,d) as sock:
+        webpage = sock.read()
     import tempfile
     tmpfile = tempfile.NamedTemporaryFile(mode='wb', prefix='oecore.', suffix='.tmp', delete=False)
     tmpfilename=tmpfile.name
     tmpfile.write(sock.read())
-    sock.close()
-    tmpfile.close()
+    tmpfile.close()
     import gzip
     bb.note("Reading %s: %s" % (url, section))

@@ -165,41 +184,41 @@

     return package_names

-def get_latest_released_debian_source_package_list():
+def get_latest_released_debian_source_package_list(d):
     "Returns list of all the name os packages in the latest debian distro"
-    latest = find_latest_debian_release("http://ftp.debian.org/debian/dists/")
+    latest = find_latest_debian_release("http://ftp.debian.org/debian/dists/", d)
     url = "http://ftp.debian.org/debian/dists/stable/main/source/Sources.gz"
-    package_names = get_debian_style_source_package_list(url, "main")
+    package_names = get_debian_style_source_package_list(url, "main", d)
 #    url = "http://ftp.debian.org/debian/dists/stable/contrib/source/Sources.gz"
 #    package_names += get_debian_style_source_package_list(url, "contrib")
     url = "http://ftp.debian.org/debian/dists/stable-proposed-updates/main/source/Sources.gz"
-    package_names += get_debian_style_source_package_list(url, "updates")
+    package_names += get_debian_style_source_package_list(url, "updates", d)
     package_list=clean_package_list(package_names)
     return latest, package_list

-def find_latest_ubuntu_release(url):
+def find_latest_ubuntu_release(url, d):
     "Find the latest listed ubuntu release on the given url"
     url += "?C=M;O=D" # Descending Sort by Last Modified
-    for link in get_links_from_url(url):
+    for link in get_links_from_url(url, d):
         if link[-8:] == "-updates":
             return link[:-8]
     return "_NotFound_"

-def get_latest_released_ubuntu_source_package_list():
+def get_latest_released_ubuntu_source_package_list(d):
     "Returns list of all the name os packages in the latest ubuntu distro"
-    latest = find_latest_ubuntu_release("http://archive.ubuntu.com/ubuntu/dists/")
+    latest = find_latest_ubuntu_release("http://archive.ubuntu.com/ubuntu/dists/", d)
     url = "http://archive.ubuntu.com/ubuntu/dists/%s/main/source/Sources.gz" % latest
-    package_names = get_debian_style_source_package_list(url, "main")
+    package_names = get_debian_style_source_package_list(url, "main", d)
 #    url = "http://archive.ubuntu.com/ubuntu/dists/%s/multiverse/source/Sources.gz" % latest
 #    package_names += get_debian_style_source_package_list(url, "multiverse")
 #    url = "http://archive.ubuntu.com/ubuntu/dists/%s/universe/source/Sources.gz" % latest
 #    package_names += get_debian_style_source_package_list(url, "universe")
     url = "http://archive.ubuntu.com/ubuntu/dists/%s-updates/main/source/Sources.gz" % latest
-    package_names += get_debian_style_source_package_list(url, "updates")
+    package_names += get_debian_style_source_package_list(url, "updates", d)
     package_list=clean_package_list(package_names)
     return latest, package_list

-def create_distro_packages_list(distro_check_dir):
+def create_distro_packages_list(distro_check_dir, d):
     pkglst_dir = os.path.join(distro_check_dir, "package_lists")
     if not os.path.isdir (pkglst_dir):
         os.makedirs(pkglst_dir)
@@ -220,7 +239,7 @@ def create_distro_packages_list(distro_check_dir):
     begin = datetime.now()
     for distro in per_distro_functions:
         name = distro[0]
-        release, package_list = distro[1]()
+        release, package_list = distro[1](d)
         bb.note("Distro: %s, Latest Release: %s, # src packages: %d" % (name, release, len(package_list)))
         package_list_file = os.path.join(pkglst_dir, name + "-" + release)
         f = open(package_list_file, "w+b")
@@ -231,7 +250,7 @@ def create_distro_packages_list(distro_check_dir):
     delta = end - begin
     bb.note("package_list generatiosn took this much time: %d seconds" % delta.seconds)

-def update_distro_data(distro_check_dir, datetime):
+def update_distro_data(distro_check_dir, datetime, d):
     """
     If distro packages list data is old then rebuild it.
     The operations has to be protected by a lock so that
@@ -258,7 +277,7 @@ def update_distro_data(distro_check_dir, datetime):
         if saved_datetime[0:8] != datetime[0:8]:
             bb.note("The build datetime did not match: saved:%s current:%s" % (saved_datetime, datetime))
             bb.note("Regenerating distro package lists")
-            create_distro_packages_list(distro_check_dir)
+            create_distro_packages_list(distro_check_dir, d)
             f.seek(0)
             f.write(datetime)
