Diffstat (limited to 'meta/lib/oe/distro_check.py')
-rw-r--r--   meta/lib/oe/distro_check.py   281
1 file changed, 110 insertions, 171 deletions
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py
index 87c52fae9c..00c827e92c 100644
--- a/meta/lib/oe/distro_check.py
+++ b/meta/lib/oe/distro_check.py
@@ -1,32 +1,17 @@
-from contextlib import contextmanager
-
-from bb.utils import export_proxies
-
 def create_socket(url, d):
     import urllib
+    from bb.utils import export_proxies
 
-    socket = None
-    try:
-        export_proxies(d)
-        socket = urllib.request.urlopen(url)
-    except:
-        bb.warn("distro_check: create_socket url %s can't access" % url)
-
-    return socket
+    export_proxies(d)
+    return urllib.request.urlopen(url)
 
 def get_links_from_url(url, d):
     "Return all the href links found on the web location"
 
     from bs4 import BeautifulSoup, SoupStrainer
 
+    soup = BeautifulSoup(create_socket(url,d), "html.parser", parse_only=SoupStrainer("a"))
     hyperlinks = []
-
-    webpage = ''
-    sock = create_socket(url,d)
-    if sock:
-        webpage = sock.read()
-
-    soup = BeautifulSoup(webpage, "html.parser", parse_only=SoupStrainer("a"))
     for line in soup.find_all('a', href=True):
         hyperlinks.append(line['href'].strip('/'))
     return hyperlinks
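
The rewritten create_socket() simply exports the proxy settings and returns the urlopen() response, letting failures propagate to the caller, and get_links_from_url() now feeds that file-like response straight to BeautifulSoup, restricted to anchor tags via SoupStrainer. A minimal standalone sketch of the same pattern outside BitBake (no datastore, so export_proxies() is dropped; bs4 must be installed):

    import urllib.request
    from bs4 import BeautifulSoup, SoupStrainer

    def get_links(url):
        # urlopen() returns a file-like response; BeautifulSoup can read it directly.
        # SoupStrainer("a") limits parsing to anchor tags, which is all we need here.
        response = urllib.request.urlopen(url)
        soup = BeautifulSoup(response, "html.parser", parse_only=SoupStrainer("a"))
        return [a["href"].strip("/") for a in soup.find_all("a", href=True)]

    links = get_links("http://archive.fedoraproject.org/pub/fedora/linux/releases/")
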
@@ -37,6 +22,7 @@ def find_latest_numeric_release(url, d):
     maxstr=""
     for link in get_links_from_url(url, d):
         try:
+            # TODO use LooseVersion
             release = float(link)
         except:
             release = 0
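
The new TODO suggests replacing float() with LooseVersion when picking the newest release directory, since float("8.10") parses as 8.1 and would sort below 8.2. A sketch of what that could look like; this is not part of the commit, and latest_numeric_release and the sample link list are hypothetical:

    import re
    from distutils.version import LooseVersion  # what the TODO names; deprecated in newer Python

    def latest_numeric_release(links):
        # LooseVersion compares dotted components numerically, so "8.10" beats "8.2".
        numeric = [link for link in links if re.fullmatch(r"[0-9]+(\.[0-9]+)*", link)]
        return max(numeric, key=LooseVersion, default="")

    print(latest_numeric_release(["7", "8.2", "8.10", "development"]))  # -> 8.10
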
@@ -47,144 +33,116 @@ def find_latest_numeric_release(url, d):
 
 def is_src_rpm(name):
     "Check if the link is pointing to a src.rpm file"
-    if name[-8:] == ".src.rpm":
-        return True
-    else:
-        return False
+    return name.endswith(".src.rpm")
 
 def package_name_from_srpm(srpm):
     "Strip out the package name from the src.rpm filename"
-    strings = srpm.split('-')
-    package_name = strings[0]
-    for i in range(1, len (strings) - 1):
-        str = strings[i]
-        if not str[0].isdigit():
-            package_name += '-' + str
-    return package_name
-
-def clean_package_list(package_list):
-    "Removes multiple entries of packages and sorts the list"
-    set = {}
-    map(set.__setitem__, package_list, [])
-    return set.keys()
 
+    # ca-certificates-2016.2.7-1.0.fc24.src.rpm
+    #  ^name          ^ver     ^release^removed
+    (name, version, release) = srpm.replace(".src.rpm", "").rsplit("-", 2)
+    return name
 
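
The new package_name_from_srpm() assumes the usual name-version-release.src.rpm layout, so rsplit("-", 2) peels off exactly the last two dash-separated fields and dashes inside the package name survive. For example (the second filename is made up for illustration):

    def package_name_from_srpm(srpm):
        # "ca-certificates-2016.2.7-1.0.fc24.src.rpm"
        #   -> ("ca-certificates", "2016.2.7", "1.0.fc24") after rsplit("-", 2)
        name, version, release = srpm.replace(".src.rpm", "").rsplit("-", 2)
        return name

    assert package_name_from_srpm("ca-certificates-2016.2.7-1.0.fc24.src.rpm") == "ca-certificates"
    # a made-up filename, showing that dashes inside the name are preserved:
    assert package_name_from_srpm("perl-File-Slurp-9999.19-10.fc24.src.rpm") == "perl-File-Slurp"
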
 def get_latest_released_meego_source_package_list(d):
     "Returns list of all the name os packages in the latest meego distro"
 
-    package_names = []
-    try:
-        f = open("/tmp/Meego-1.1", "r")
+    package_names = set()
+    with open("/tmp/Meego-1.1", "r") as f:
         for line in f:
-            package_names.append(line[:-1] + ":" + "main") # Also strip the '\n' at the end
-    except IOError: pass
-    package_list=clean_package_list(package_names)
-    return "1.0", package_list
+            package_names.add(line.strip() + ":" + "main")
+    return "1.1", package_names
 
 def get_source_package_list_from_url(url, section, d):
     "Return a sectioned list of package names from a URL list"
 
     bb.note("Reading %s: %s" % (url, section))
     links = get_links_from_url(url, d)
-    srpms = list(filter(is_src_rpm, links))
-    names_list = list(map(package_name_from_srpm, srpms))
+    srpms = filter(is_src_rpm, links)
+    names_list = map(package_name_from_srpm, srpms)
 
-    new_pkgs = []
+    new_pkgs = set()
     for pkgs in names_list:
-        new_pkgs.append(pkgs + ":" + section)
-
+        new_pkgs.add(pkgs + ":" + section)
     return new_pkgs
 
+def get_source_package_list_from_url_by_letter(url, section, d):
+    import string
+    from urllib.error import HTTPError
+    packages = set()
+    for letter in (string.ascii_lowercase + string.digits):
+        # Not all subfolders may exist, so silently handle 404
+        try:
+            packages |= get_source_package_list_from_url(url + "/" + letter, section, d)
+        except HTTPError as e:
+            if e.code != 404: raise
+    return packages
+
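
get_source_package_list_from_url_by_letter() walks the per-letter subdirectories used by the Fedora Everything/source/tree/Packages/ layout and tolerates missing letters by swallowing HTTP 404 while re-raising anything else. The same pattern in isolation (the base URL is hypothetical):

    import string
    import urllib.request
    from urllib.error import HTTPError

    def fetch_optional_listing(url):
        "Return the page body, or None if the subdirectory does not exist (HTTP 404)."
        try:
            return urllib.request.urlopen(url).read()
        except HTTPError as e:
            if e.code != 404:
                raise              # real errors still surface
            return None

    base = "http://example.org/Packages"   # hypothetical, same shape as the Fedora tree
    pages = [fetch_optional_listing("%s/%s/" % (base, letter))
             for letter in string.ascii_lowercase + string.digits]
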
 def get_latest_released_fedora_source_package_list(d):
     "Returns list of all the name os packages in the latest fedora distro"
     latest = find_latest_numeric_release("http://archive.fedoraproject.org/pub/fedora/linux/releases/", d)
-
-    package_names = get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/releases/%s/Fedora/source/SRPMS/" % latest, "main", d)
-
-    # package_names += get_source_package_list_from_url("http://download.fedora.redhat.com/pub/fedora/linux/releases/%s/Everything/source/SPRMS/" % latest, "everything")
-    package_names += get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/updates/%s/SRPMS/" % latest, "updates", d)
-
-    package_list=clean_package_list(package_names)
-
-    return latest, package_list
+    package_names = get_source_package_list_from_url_by_letter("http://archive.fedoraproject.org/pub/fedora/linux/releases/%s/Everything/source/tree/Packages/" % latest, "main", d)
+    package_names |= get_source_package_list_from_url_by_letter("http://archive.fedoraproject.org/pub/fedora/linux/updates/%s/SRPMS/" % latest, "updates", d)
+    return latest, package_names
 
 def get_latest_released_opensuse_source_package_list(d):
     "Returns list of all the name os packages in the latest opensuse distro"
     latest = find_latest_numeric_release("http://download.opensuse.org/source/distribution/",d)
 
     package_names = get_source_package_list_from_url("http://download.opensuse.org/source/distribution/%s/repo/oss/suse/src/" % latest, "main", d)
-    package_names += get_source_package_list_from_url("http://download.opensuse.org/update/%s/rpm/src/" % latest, "updates", d)
-
-    package_list=clean_package_list(package_names)
-    return latest, package_list
+    package_names |= get_source_package_list_from_url("http://download.opensuse.org/update/%s/src/" % latest, "updates", d)
+    return latest, package_names
 
 def get_latest_released_mandriva_source_package_list(d):
     "Returns list of all the name os packages in the latest mandriva distro"
     latest = find_latest_numeric_release("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/", d)
     package_names = get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/release/" % latest, "main", d)
-    # package_names += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/contrib/release/" % latest, "contrib")
-    package_names += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/updates/" % latest, "updates", d)
-
-    package_list=clean_package_list(package_names)
-    return latest, package_list
+    package_names |= get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/updates/" % latest, "updates", d)
+    return latest, package_names
 
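
Because every fetcher now returns a set of "package:section" strings, combining the release and updates feeds is a plain set union and the old clean_package_list() de-duplication step is no longer needed; sorting happens only when the list is written out. A small illustration with made-up package names:

    # With sets, merging sections and dropping duplicates is a single union;
    # sorted() is only applied when the list is written to disk.
    main = {"bash:main", "coreutils:main", "gcc:main"}
    updates = {"bash:updates", "openssl:updates"}

    package_names = main | updates      # what "package_names |= ..." does in the fetchers
    for entry in sorted(package_names):
        print(entry)
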
 def find_latest_debian_release(url, d):
     "Find the latest listed debian release on the given url"
 
-    releases = []
-    for link in get_links_from_url(url, d):
-        if link[:6] == "Debian":
-            if ';' not in link:
-                releases.append(link)
+    releases = [link.replace("Debian", "")
+                for link in get_links_from_url(url, d)
+                if link.startswith("Debian")]
     releases.sort()
     try:
-        return releases.pop()[6:]
+        return releases[-1]
     except:
         return "_NotFound_"
 
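
find_latest_debian_release() now builds the release list with a comprehension that strips the "Debian" prefix, then relies on a plain string sort and takes the last element. A sketch with illustrative link names; note the sort is lexicographic, which is adequate as long as the version strings compare sensibly as text:

    def find_latest_debian_release(links):
        # entries in the dists/ index look like "Debian8.6"; strip the prefix
        releases = [link.replace("Debian", "") for link in links if link.startswith("Debian")]
        releases.sort()                     # plain string sort, as in the module
        return releases[-1] if releases else "_NotFound_"

    print(find_latest_debian_release(["Debian7.11", "Debian8.6", "stable", "sid"]))  # -> 8.6
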
 def get_debian_style_source_package_list(url, section, d):
     "Return the list of package-names stored in the debian style Sources.gz file"
-    import tempfile
     import gzip
 
-    webpage = ''
-    sock = create_socket(url,d)
-    if sock:
-        webpage = sock.read()
-
-    tmpfile = tempfile.NamedTemporaryFile(mode='wb', prefix='oecore.', suffix='.tmp', delete=False)
-    tmpfilename=tmpfile.name
-    tmpfile.write(sock.read())
-    tmpfile.close()
-    bb.note("Reading %s: %s" % (url, section))
-
-    f = gzip.open(tmpfilename)
-    package_names = []
-    for line in f:
-        if line[:9] == "Package: ":
-            package_names.append(line[9:-1] + ":" + section) # Also strip the '\n' at the end
-    os.unlink(tmpfilename)
-
+    package_names = set()
+    for line in gzip.open(create_socket(url, d), mode="rt"):
+        if line.startswith("Package:"):
+            pkg = line.split(":", 1)[1].strip()
+            package_names.add(pkg + ":" + section)
     return package_names
 
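
get_debian_style_source_package_list() no longer spools the download into a temporary file: gzip.open() accepts any file-like object, so the HTTP response is decompressed on the fly, and mode="rt" yields text lines that can be matched with startswith(). The same idea as a standalone sketch (plain urllib in place of create_socket(), no BitBake datastore):

    import gzip
    import urllib.request

    def debian_source_package_names(url):
        names = set()
        with urllib.request.urlopen(url) as response:
            # gzip.open() decompresses the response stream directly; mode="rt"
            # gives decoded text lines, so no temporary file is needed.
            for line in gzip.open(response, mode="rt"):
                if line.startswith("Package:"):
                    names.add(line.split(":", 1)[1].strip())
        return names

    pkgs = debian_source_package_names(
        "http://ftp.debian.org/debian/dists/stable/main/source/Sources.gz")
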
 def get_latest_released_debian_source_package_list(d):
-    "Returns list of all the name os packages in the latest debian distro"
+    "Returns list of all the name of packages in the latest debian distro"
     latest = find_latest_debian_release("http://ftp.debian.org/debian/dists/", d)
     url = "http://ftp.debian.org/debian/dists/stable/main/source/Sources.gz"
     package_names = get_debian_style_source_package_list(url, "main", d)
-    # url = "http://ftp.debian.org/debian/dists/stable/contrib/source/Sources.gz"
-    # package_names += get_debian_style_source_package_list(url, "contrib")
-    url = "http://ftp.debian.org/debian/dists/stable-proposed-updates/main/source/Sources.gz"
-    package_names += get_debian_style_source_package_list(url, "updates", d)
-    package_list=clean_package_list(package_names)
-    return latest, package_list
+    url = "http://ftp.debian.org/debian/dists/stable-proposed-updates/main/source/Sources.gz"
+    package_names |= get_debian_style_source_package_list(url, "updates", d)
+    return latest, package_names
 
 def find_latest_ubuntu_release(url, d):
-    "Find the latest listed ubuntu release on the given url"
+    """
+    Find the latest listed Ubuntu release on the given ubuntu/dists/ URL.
+
+    To avoid matching development releases look for distributions that have
+    updates, so the resulting distro could be any supported release.
+    """
     url += "?C=M;O=D" # Descending Sort by Last Modified
     for link in get_links_from_url(url, d):
-        if link[-8:] == "-updates":
-            return link[:-8]
+        if "-updates" in link:
+            distro = link.replace("-updates", "")
+            return distro
     return "_NotFound_"
 
 def get_latest_released_ubuntu_source_package_list(d):
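
find_latest_ubuntu_release() appends "?C=M;O=D" so the Apache index is sorted by modification time, newest first, and the first link containing "-updates" names a supported (non-development) release, as the new docstring explains. Reduced to a sketch over a pre-fetched link list (the sample names are illustrative):

    def find_latest_ubuntu_release(links):
        # A development release has no -updates pocket yet, so the first link with
        # "-updates" in the modification-time-sorted listing is a supported release.
        for link in links:
            if "-updates" in link:
                return link.replace("-updates", "")
        return "_NotFound_"

    print(find_latest_ubuntu_release(["yakkety", "xenial-updates", "xenial"]))  # -> xenial
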
@@ -192,52 +150,45 @@ def get_latest_released_ubuntu_source_package_list(d):
     latest = find_latest_ubuntu_release("http://archive.ubuntu.com/ubuntu/dists/", d)
     url = "http://archive.ubuntu.com/ubuntu/dists/%s/main/source/Sources.gz" % latest
     package_names = get_debian_style_source_package_list(url, "main", d)
-    # url = "http://archive.ubuntu.com/ubuntu/dists/%s/multiverse/source/Sources.gz" % latest
-    # package_names += get_debian_style_source_package_list(url, "multiverse")
-    # url = "http://archive.ubuntu.com/ubuntu/dists/%s/universe/source/Sources.gz" % latest
-    # package_names += get_debian_style_source_package_list(url, "universe")
     url = "http://archive.ubuntu.com/ubuntu/dists/%s-updates/main/source/Sources.gz" % latest
-    package_names += get_debian_style_source_package_list(url, "updates", d)
-    package_list=clean_package_list(package_names)
-    return latest, package_list
+    package_names |= get_debian_style_source_package_list(url, "updates", d)
+    return latest, package_names
 
 def create_distro_packages_list(distro_check_dir, d):
+    import shutil
+
     pkglst_dir = os.path.join(distro_check_dir, "package_lists")
-    if not os.path.isdir (pkglst_dir):
-        os.makedirs(pkglst_dir)
-    # first clear old stuff
-    for file in os.listdir(pkglst_dir):
-        os.unlink(os.path.join(pkglst_dir, file))
-
-    per_distro_functions = [
-                            ["Debian", get_latest_released_debian_source_package_list],
-                            ["Ubuntu", get_latest_released_ubuntu_source_package_list],
-                            ["Fedora", get_latest_released_fedora_source_package_list],
-                            ["OpenSuSE", get_latest_released_opensuse_source_package_list],
-                            ["Mandriva", get_latest_released_mandriva_source_package_list],
-                            ["Meego", get_latest_released_meego_source_package_list]
-                           ]
-
-    from datetime import datetime
-    begin = datetime.now()
-    for distro in per_distro_functions:
-        name = distro[0]
-        release, package_list = distro[1](d)
+    bb.utils.remove(pkglst_dir, True)
+    bb.utils.mkdirhier(pkglst_dir)
+
+    per_distro_functions = (
+                            ("Debian", get_latest_released_debian_source_package_list),
+                            ("Ubuntu", get_latest_released_ubuntu_source_package_list),
+                            ("Fedora", get_latest_released_fedora_source_package_list),
+                            ("OpenSuSE", get_latest_released_opensuse_source_package_list),
+                            ("Mandriva", get_latest_released_mandriva_source_package_list),
+                            ("Meego", get_latest_released_meego_source_package_list)
+                           )
+
+    for name, fetcher_func in per_distro_functions:
+        try:
+            release, package_list = fetcher_func(d)
+        except Exception as e:
+            bb.warn("Cannot fetch packages for %s: %s" % (name, e))
         bb.note("Distro: %s, Latest Release: %s, # src packages: %d" % (name, release, len(package_list)))
+        if len(package_list) == 0:
+            bb.error("Didn't fetch any packages for %s %s" % (name, release))
+
         package_list_file = os.path.join(pkglst_dir, name + "-" + release)
-        f = open(package_list_file, "w+b")
-        for pkg in package_list:
-            f.write(pkg + "\n")
-        f.close()
-    end = datetime.now()
-    delta = end - begin
-    bb.note("package_list generatiosn took this much time: %d seconds" % delta.seconds)
+        with open(package_list_file, 'w') as f:
+            for pkg in sorted(package_list):
+                f.write(pkg + "\n")
 
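
create_distro_packages_list() now drives the fetchers from a tuple of (name, function) pairs and wraps each call in try/except so one unreachable mirror only produces a warning. The dispatch-table pattern, reduced to a self-contained sketch with dummy fetchers (names and behaviour are invented for illustration):

    def fetch_a(d):
        return "1.0", {"pkg-a:main", "pkg-b:updates"}

    def fetch_b(d):
        raise IOError("mirror unreachable")

    per_distro_functions = (("DistroA", fetch_a), ("DistroB", fetch_b))

    for name, fetcher_func in per_distro_functions:
        try:
            release, package_list = fetcher_func(None)
        except Exception as e:
            print("Cannot fetch packages for %s: %s" % (name, e))
            continue                      # this sketch skips failed distros entirely
        print("%s %s: %d packages" % (name, release, len(package_list)))
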
 def update_distro_data(distro_check_dir, datetime, d):
     """
         If distro packages list data is old then rebuild it.
         The operations has to be protected by a lock so that
         only one thread performes it at a time.
     """
     if not os.path.isdir (distro_check_dir):
         try:
@@ -264,25 +215,22 @@ def update_distro_data(distro_check_dir, datetime, d):
             f.seek(0)
             f.write(datetime)
 
-    except OSError:
-        raise Exception('Unable to read/write this file: %s' % (datetime_file))
+    except OSError as e:
+        raise Exception('Unable to open timestamp: %s' % e)
     finally:
         fcntl.lockf(f, fcntl.LOCK_UN)
         f.close()
 
 def compare_in_distro_packages_list(distro_check_dir, d):
     if not os.path.isdir(distro_check_dir):
         raise Exception("compare_in_distro_packages_list: invalid distro_check_dir passed")
 
     localdata = bb.data.createCopy(d)
     pkglst_dir = os.path.join(distro_check_dir, "package_lists")
     matching_distros = []
-    pn = d.getVar('PN', True)
-    recipe_name = d.getVar('PN', True)
+    pn = recipe_name = d.getVar('PN', True)
     bb.note("Checking: %s" % pn)
 
-    trim_dict = dict({"-native":"-native", "-cross":"-cross", "-initial":"-initial"})
-
     if pn.find("-native") != -1:
         pnstripped = pn.split("-native")
         localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
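
The surrounding update_distro_data() (context above) guards the timestamp file with an fcntl lock so only one task refreshes the package lists at a time; this hunk only changes the error report to include the original OSError. A reduced sketch of the lock/compare/rewrite pattern, without the package-list rebuild:

    import fcntl

    def update_timestamp(path, stamp):
        # Lock, compare, rewrite: only one process at a time refreshes the stamp.
        with open(path, "a+") as f:
            fcntl.lockf(f, fcntl.LOCK_EX)
            try:
                f.seek(0)
                if f.read() != stamp:
                    f.seek(0)
                    f.truncate()
                    f.write(stamp)  # the real function would also rebuild the lists here
            finally:
                fcntl.lockf(f, fcntl.LOCK_UN)
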
@@ -308,27 +256,22 @@ def compare_in_distro_packages_list(distro_check_dir, d):
             recipe_name = pnstripped[0]
 
     bb.note("Recipe: %s" % recipe_name)
-    tmp = localdata.getVar('DISTRO_PN_ALIAS', True)
 
     distro_exceptions = dict({"OE-Core":'OE-Core', "OpenedHand":'OpenedHand', "Intel":'Intel', "Upstream":'Upstream', "Windriver":'Windriver', "OSPDT":'OSPDT Approved', "Poky":'poky'})
-
-    if tmp:
-        list = tmp.split(' ')
-        for str in list:
-            if str and str.find("=") == -1 and distro_exceptions[str]:
-                matching_distros.append(str)
+    tmp = localdata.getVar('DISTRO_PN_ALIAS', True) or ""
+    for str in tmp.split():
+        if str and str.find("=") == -1 and distro_exceptions[str]:
+            matching_distros.append(str)
 
     distro_pn_aliases = {}
-    if tmp:
-        list = tmp.split(' ')
-        for str in list:
-            if str.find("=") != -1:
-                (dist, pn_alias) = str.split('=')
-                distro_pn_aliases[dist.strip().lower()] = pn_alias.strip()
-
+    for str in tmp.split():
+        if "=" in str:
+            (dist, pn_alias) = str.split('=')
+            distro_pn_aliases[dist.strip().lower()] = pn_alias.strip()
+
     for file in os.listdir(pkglst_dir):
         (distro, distro_release) = file.split("-")
-        f = open(os.path.join(pkglst_dir, file), "rb")
+        f = open(os.path.join(pkglst_dir, file), "r")
         for line in f:
             (pkg, section) = line.split(":")
             if distro.lower() in distro_pn_aliases:
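
DISTRO_PN_ALIAS is a space-separated list where bare tokens mark special upstream states (checked against the distro_exceptions table) and Distro=alias tokens map a distro name to the package name it uses there. A simplified sketch of the token parsing on a made-up value (the real code also consults distro_exceptions for the bare tokens):

    # A made-up DISTRO_PN_ALIAS value:
    tmp = "Fedora=pkgconfig Ubuntu=pkg-config OpenedHand"

    distro_pn_aliases = {}
    matching_distros = []
    for token in tmp.split():
        if "=" in token:
            dist, pn_alias = token.split("=")
            distro_pn_aliases[dist.strip().lower()] = pn_alias.strip()
        else:
            matching_distros.append(token)   # bare tokens are the exception markers

    print(distro_pn_aliases)   # {'fedora': 'pkgconfig', 'ubuntu': 'pkg-config'}
    print(matching_distros)    # ['OpenedHand']
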
@@ -341,16 +284,12 @@ def compare_in_distro_packages_list(distro_check_dir, d):
                     break
         f.close()
 
-
-    if tmp != None:
-        list = tmp.split(' ')
-        for item in list:
-            matching_distros.append(item)
+    for item in tmp.split():
+        matching_distros.append(item)
     bb.note("Matching: %s" % matching_distros)
     return matching_distros
 
 def create_log_file(d, logname):
-    import subprocess
     logpath = d.getVar('LOG_DIR', True)
     bb.utils.mkdirhier(logpath)
     logfn, logsuffix = os.path.splitext(logname)
@@ -359,7 +298,7 @@ def create_log_file(d, logname):
     slogfile = os.path.join(logpath, logname)
     if os.path.exists(slogfile):
         os.remove(slogfile)
-    subprocess.call("touch %s" % logfile, shell=True)
+    open(logfile, 'w+').close()
     os.symlink(logfile, slogfile)
     d.setVar('LOG_FILE', logfile)
     return logfile
@@ -371,8 +310,8 @@ def save_distro_check_result(result, datetime, result_file, d):
     if not logdir:
         bb.error("LOG_DIR variable is not defined, can't write the distro_check results")
         return
-    if not os.path.isdir(logdir):
-        os.makedirs(logdir)
+    bb.utils.mkdirhier(logdir)
+
     line = pn
     for i in result:
         line = line + "," + i