author     Nitin A Kamble <nitin.a.kamble@intel.com>    2010-05-27 12:18:23 -0700
committer  Richard Purdie <rpurdie@linux.intel.com>     2010-06-02 14:06:02 +0100
commit     5ae3f6553141aecce3b98e0f06e4b99f36764917 (patch)
tree       2fe1765ea1bcf00c112794514b62cf1cfa2c76fa /meta/lib/oe
parent     8514bcf5f857db19789b6f0f88300609622a98fe (diff)
download   poky-5ae3f6553141aecce3b98e0f06e4b99f36764917.tar.gz
do_distro_check: Recipe exists in other distros?
This adds a new task (distro_check) for each recipe.
The task generates the source package lists for the Fedora, OpenSuSE,
Ubuntu, Debian and Mandriva Linux distros.
As one recipe or source package can generate multiple target packages,
the recipe name is compared with the source package name lists of these
Linux distributions.
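For the RPM-based distros the version and release fields are first
stripped from each src.rpm filename before the comparison; the filename
below is only a made-up example, the behaviour being that of
package_name_from_srpm() in the patch:
    >>> package_name_from_srpm("libpng-1.2.43-1.fc13.src.rpm")
    'libpng'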
File locking (fcntl) is used so that multiple build tasks do not race
to update the package lists.
The recipe name (PN) is then checked against the package list of each
distro. If DISTRO_PN_ALIAS is set, that alias is compared against the
package names instead of the PN variable. For example, DISTRO_PN_ALIAS
can be defined in a recipe (.bb) file like this (from xset_1.0.4.bb):
DISTRO_PN_ALIAS = "Fedora=xorg-x11-server-utils;\
Ubuntu=x11-xserver-utils; Debian=x11-xserver-utils;Opensuse=xorg-x11"
The final results are stored in the file
tmp/log/distro_check-${DATETIME}.results.
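For illustration, each recipe ends up as one line in that file, in the
format written by save_distro_check_result() below; the matching distros
shown here are only an example:
    xset : Fedora, OpenSuSE, Ubuntu, Debian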
FYI, this command will generate the results for all recipes:
bitbake world -f -c distro_check
Signed-off-by: Nitin A Kamble <nitin.a.kamble@intel.com>
Diffstat (limited to 'meta/lib/oe')
-rw-r--r--    meta/lib/oe/distro_check.py    298
1 file changed, 298 insertions, 0 deletions
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py
new file mode 100644
index 0000000000..189f5ef20d
--- /dev/null
+++ b/meta/lib/oe/distro_check.py
@@ -0,0 +1,298 @@
def get_links_from_url(url):
    "Return all the href links found on the web location"

    import urllib, sgmllib

    class LinksParser(sgmllib.SGMLParser):
        def parse(self, s):
            "Parse the given string 's'."
            self.feed(s)
            self.close()

        def __init__(self, verbose=0):
            "Initialise an object passing 'verbose' to the superclass."
            sgmllib.SGMLParser.__init__(self, verbose)
            self.hyperlinks = []

        def start_a(self, attributes):
            "Process a hyperlink and its 'attributes'."
            for name, value in attributes:
                if name == "href":
                    self.hyperlinks.append(value.strip('/'))

        def get_hyperlinks(self):
            "Return the list of hyperlinks."
            return self.hyperlinks

    sock = urllib.urlopen(url)
    webpage = sock.read()
    sock.close()

    linksparser = LinksParser()
    linksparser.parse(webpage)
    return linksparser.get_hyperlinks()

def find_latest_numeric_release(url):
    "Find the latest listed numeric release on the given url"
    max=0
    maxstr=""
    for link in get_links_from_url(url):
        try:
            release = float(link)
        except:
            release = 0
        if release > max:
            max = release
            maxstr = link
    return maxstr

def is_src_rpm(name):
    "Check if the link is pointing to a src.rpm file"
    if name[-8:] == ".src.rpm":
        return True
    else:
        return False

def package_name_from_srpm(srpm):
    "Strip out the package name from the src.rpm filename"
    strings = srpm.split('-')
    package_name = strings[0]
    for i in range(1, len (strings) - 1):
        str = strings[i]
        if not str[0].isdigit():
            package_name += '-' + str
    return package_name

def clean_package_list(package_list):
    "Removes multiple entries of packages and sorts the list"
    set = {}
    map(set.__setitem__, package_list, [])
    return set.keys()

def get_latest_released_fedora_source_package_list():
    "Returns a list of the names of all packages in the latest fedora distro"
    latest = find_latest_numeric_release("http://download.fedora.redhat.com/pub/fedora/linux/releases/")

    url = "http://download.fedora.redhat.com/pub/fedora/linux/releases/%s/Fedora/source/SRPMS/" % latest
    links = get_links_from_url(url)
    url = "http://download.fedora.redhat.com/pub/fedora/linux/updates/%s/SRPMS/" % latest
    links += get_links_from_url(url)

    srpms = filter(is_src_rpm, links)

    package_names = map(package_name_from_srpm, srpms)
    package_list=clean_package_list(package_names)

    return latest, package_list

def get_latest_released_opensuse_source_package_list():
    "Returns a list of the names of all packages in the latest opensuse distro"
    latest = find_latest_numeric_release("http://download.opensuse.org/source/distribution/")

    url = "http://download.opensuse.org/source/distribution/%s/repo/oss/suse/src/" % latest
    links = get_links_from_url(url)
    url = "http://download.opensuse.org/update/%s/rpm/src/" % latest
    links += get_links_from_url(url)
    srpms = filter(is_src_rpm, links)

    package_names = map(package_name_from_srpm, srpms)
    package_list=clean_package_list(package_names)
    return latest, package_list

def get_latest_released_mandriva_source_package_list():
    "Returns a list of the names of all packages in the latest mandriva distro"
    latest = find_latest_numeric_release("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/")
    url = "http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/release/" % latest
    links = get_links_from_url(url)
    url = "http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/updates/" % latest
    links += get_links_from_url(url)

    srpms = filter(is_src_rpm, links)

    package_names = map(package_name_from_srpm, srpms)
    package_list=clean_package_list(package_names)
    return latest, package_list

def find_latest_debian_release(url):
    "Find the latest listed debian release on the given url"

    releases = []
    for link in get_links_from_url(url):
        if link[:6] == "Debian":
            if ';' not in link:
                releases.append(link)
    releases.sort()
    try:
        return releases.pop()[6:]
    except:
        return "_NotFound_"

def get_debian_style_source_package_list(url):
    "Return the list of package-names stored in the debian style Sources.gz file"
    import urllib
    sock = urllib.urlopen(url)
    import tempfile
    tmpfile = tempfile.NamedTemporaryFile(mode='wb', prefix='poky.', suffix='.tmp', delete=False)
    tmpfilename=tmpfile.name
    tmpfile.write(sock.read())
    sock.close()
    tmpfile.close()
    import gzip

    f = gzip.open(tmpfilename)
    package_names = []
    for line in f:
        if line[:9] == "Package: ":
            package_names.append(line[9:-1]) # Also strip the '\n' at the end
    os.unlink(tmpfilename)

    return package_names

def get_latest_released_debian_source_package_list():
    "Returns a list of the names of all packages in the latest debian distro"
    latest = find_latest_debian_release("ftp://ftp.debian.org/debian/dists/")
    url = "ftp://ftp.debian.org/debian/dists/stable/main/source/Sources.gz"
    package_names = get_debian_style_source_package_list(url)
    url = "ftp://ftp.debian.org/debian/dists/stable-proposed-updates/main/source/Sources.gz"
    package_names += get_debian_style_source_package_list(url)
    package_list=clean_package_list(package_names)
    return latest, package_list

def find_latest_ubuntu_release(url):
    "Find the latest listed ubuntu release on the given url"
    url += "?C=M;O=D" # Descending Sort by Last Modified
    for link in get_links_from_url(url):
        if link[-8:] == "-updates":
            return link[:-8]
    return "_NotFound_"

def get_latest_released_ubuntu_source_package_list():
    "Returns a list of the names of all packages in the latest ubuntu distro"
    latest = find_latest_ubuntu_release("http://archive.ubuntu.com/ubuntu/dists/")
    url = "http://archive.ubuntu.com/ubuntu/dists/%s/main/source/Sources.gz" % latest
    package_names = get_debian_style_source_package_list(url)
    url = "http://archive.ubuntu.com/ubuntu/dists/%s-updates/main/source/Sources.gz" % latest
    package_names += get_debian_style_source_package_list(url)
    package_list=clean_package_list(package_names)
    return latest, package_list

def create_distro_packages_list(distro_check_dir):
    pkglst_dir = os.path.join(distro_check_dir, "package_lists")
    if not os.path.isdir (pkglst_dir):
        os.makedirs(pkglst_dir)
    # first clear old stuff
    for file in os.listdir(pkglst_dir):
        os.unlink(os.path.join(pkglst_dir, file))

    per_distro_functions = [["Fedora", get_latest_released_fedora_source_package_list],
                            ["OpenSuSE", get_latest_released_opensuse_source_package_list],
                            ["Ubuntu", get_latest_released_ubuntu_source_package_list],
                            ["Debian", get_latest_released_debian_source_package_list],
                            ["Mandriva", get_latest_released_mandriva_source_package_list]]

    from datetime import datetime
    begin = datetime.now()
    for distro in per_distro_functions:
        name = distro[0]
        release, package_list = distro[1]()
        bb.note("Distro: %s, Latest Release: %s, # src packages: %d" % (name, release, len(package_list)))
        package_list_file = os.path.join(pkglst_dir, name + "-" + release)
        f = open(package_list_file, "w+b")
        for pkg in package_list:
            f.write(pkg + "\n")
        f.close()
    end = datetime.now()
    delta = end - begin
    bb.note("package_list generation took this much time: %d seconds" % delta.seconds)

def update_distro_data(distro_check_dir, datetime):
    """
    If the distro package list data is old then rebuild it.
    The operation has to be protected by a lock so that
    only one thread performs it at a time.
    """
    if not os.path.isdir (distro_check_dir):
        try:
            bb.note ("Making new directory: %s" % distro_check_dir)
            os.makedirs (distro_check_dir)
        except OSError:
            raise Exception('Unable to create directory %s' % (distro_check_dir))


    datetime_file = os.path.join(distro_check_dir, "build_datetime")
    saved_datetime = "_invalid_"
    import fcntl
    try:
        if not os.path.exists(datetime_file):
            open(datetime_file, 'w+b').close() # touch the file so that the next open won't fail

        f = open(datetime_file, "r+b")
        fcntl.lockf(f, fcntl.LOCK_EX)
        saved_datetime = f.read()
        if saved_datetime != datetime:
            bb.note("The build datetime did not match: saved:%s current:%s" % (saved_datetime, datetime))
            bb.note("Regenerating distro package lists")
            create_distro_packages_list(distro_check_dir)
            f.seek(0)
            f.write(datetime)

    except OSError:
        raise Exception('Unable to read/write this file: %s' % (datetime_file))
    finally:
        fcntl.lockf(f, fcntl.LOCK_UN)
        f.close()

def compare_in_distro_packages_list(distro_check_dir, d):
    if not os.path.isdir(distro_check_dir):
        raise Exception("compare_in_distro_packages_list: invalid distro_check_dir passed")

    pkglst_dir = os.path.join(distro_check_dir, "package_lists")
    matching_distros = []
    recipe_name = bb.data.getVar('PN', d, True)
    tmp = bb.data.getVar('DISTRO_PN_ALIAS', d, True)
    distro_pn_aliases = {}
    if tmp:
        list = tmp.split(';')
        for str in list:
            (dist, pn_alias) = str.split('=')
            distro_pn_aliases[dist.strip().lower()] = pn_alias.strip()

    for file in os.listdir(pkglst_dir):
        (distro, distro_release) = file.split("-")
        f = open(os.path.join(pkglst_dir, file), "rb")
        for pkg in f:
            if distro.lower() in distro_pn_aliases:
                pn = distro_pn_aliases[distro.lower()]
            else:
                pn = recipe_name
            if pn == pkg[:-1]: # strip the \n at the end
                matching_distros.append(distro)
                f.close()
                break
        f.close()
    return matching_distros

def save_distro_check_result(result, datetime, d):
    pn = bb.data.getVar('PN', d, True)
    logdir = bb.data.getVar('LOG_DIR', d, True)
    if not logdir:
        bb.error("LOG_DIR variable is not defined, can't write the distro_check results")
        return
    if not os.path.isdir(logdir):
        os.makedirs(logdir)
    result_file = os.path.join(logdir, "distro_check-" + datetime + ".results")
    line = pn + " : "
    for i in result:
        line = line + i + ", "
    if result:
        line = line[:-2] # Take out the comma at the end of line
    if not os.path.exists(result_file):
        open(result_file, 'w+b').close() # touch the file so that the next open won't fail
    f = open(result_file, "a+b")
    import fcntl
    fcntl.lockf(f, fcntl.LOCK_EX)
    f.seek(0, os.SEEK_END) # seek to the end of file
    f.write(line + "\n")
    fcntl.lockf(f, fcntl.LOCK_UN)
    f.close()
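
Not part of this commit, but for context, a minimal sketch of how a
distro_check task in a class file could drive this module. The task body,
the location chosen for distro_check_dir and the use of DATETIME are
assumptions here, not something this patch defines:

python do_distro_check() {
    from oe import distro_check

    datetime = bb.data.getVar('DATETIME', d, True)
    # assumed location for the shared per-distro package lists
    distro_check_dir = os.path.join(bb.data.getVar('TMPDIR', d, True), "distro_check")

    # rebuild the per-distro package lists if they are stale (protected by the fcntl lock)
    distro_check.update_distro_data(distro_check_dir, datetime)

    # distros whose package list contains PN (or its DISTRO_PN_ALIAS)
    matches = distro_check.compare_in_distro_packages_list(distro_check_dir, d)

    # append a "PN : distro, distro, ..." line to ${LOG_DIR}/distro_check-${DATETIME}.results
    distro_check.save_distro_check_result(matches, datetime, d)
}
addtask distro_check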