path: root/meta/classes/utility-tasks.bbclass
author     Kevin Tian <kevin.tian@intel.com>        2010-05-30 19:14:24 +0800
committer  Richard Purdie <rpurdie@linux.intel.com> 2010-06-03 12:23:22 +0100
commit     3161c9b15a39e552b383c704851edead8435365c (patch)
tree       f04cfa1b1e91eb7b7779f3920f4b692ffae3bf59 /meta/classes/utility-tasks.bbclass
parent     50224b5afa19c9e68fc36d9a44f3f39e8f0da225 (diff)
download   poky-3161c9b15a39e552b383c704851edead8435365c.tar.gz
utility-tasks.bbclass: automatic upstream version check
Automatic upstream version checking can be triggered with "bitbake <recipe> -c checkpkg". The rationale is to provide an automated way to compare the version currently in use against upstream progress. The tricky part is extracting a useful version string from the mass of diverse upstream information. This now mostly works for wget-based protocols, except for sites (e.g. sourceforge) that do not provide a directory service. Repo-based protocols (git/svn/cvs) are handled in a very simple manner, by always tagging them as requiring an update; this will be improved later. Use the 'world' target to run a full version check over all active recipes. Along with the version check, other package information is collected from existing fields in the .bb file.

Signed-off-by: Kevin Tian <kevin.tian@intel.com>
Diffstat (limited to 'meta/classes/utility-tasks.bbclass')
-rw-r--r--  meta/classes/utility-tasks.bbclass  331
1 file changed, 331 insertions, 0 deletions
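
The heart of the patch is the tarball-name parsing and version comparison described in the commit message. Below is a minimal standalone sketch of that idea (an editor's illustration, not part of the patch): it reuses the same regular expressions that appear in the diff, but substitutes a naive dotted-number comparison for bb.utils.vercmp so it can run outside bitbake.

    import re

    prefix1 = r"[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*[\-_]"   # "name-" style prefix, e.g. gnome-common-
    prefix2 = r"[a-zA-Z]+"                                       # loose prefix, e.g. unzip552
    prefix = "(%s|%s)" % (prefix1, prefix2)
    suffix = r"(tar\.gz|tgz|tar\.bz2|zip)"
    sdirstr = r"(?P<name>%s)(?P<ver>.*)\.(?P<type>%s$)" % (prefix, suffix)

    def parse_dir(s):
        # split "<name><ver>.<type>" the same way the patch does
        m = re.search(sdirstr, s)
        return (m.group('name'), m.group('ver'), m.group('type')) if m else None

    def naive_vercmp(old, new):
        # stand-in for bb.utils.vercmp(("0", old, ""), ("0", new, ""));
        # a negative result means 'new' is the newer version
        digits = lambda v: [int(x) for x in re.split(r"[._\-]", v) if x.isdigit()]
        to, tn = digits(old), digits(new)
        return (to > tn) - (to < tn)

    print(parse_dir("gnome-common-2.20.0.tar.gz"))   # ('gnome-common-', '2.20.0', 'tar.gz')
    print(parse_dir("blktool_4.orig.tar.gz"))        # ('blktool_', '4.orig', 'tar.gz')
    print(naive_vercmp("2.20.0", "2.22.3"))          # -1 -> upstream 2.22.3 is newer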
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes/utility-tasks.bbclass
index f87c9d50d8..c3001ecfc8 100644
--- a/meta/classes/utility-tasks.bbclass
+++ b/meta/classes/utility-tasks.bbclass
@@ -44,6 +44,337 @@ python do_rebuild() {
#    bb.build.exec_func('do_clean', d)
#}

addtask checkpkg
do_checkpkg[nostamp] = "1"
python do_checkpkg() {
    import sys
    import re
    import tempfile

    """
    sanity check to ensure same name and type. Match as many patterns as possible
    such as:
        gnome-common-2.20.0.tar.gz (most common format)
        gtk+-2.90.1.tar.gz
        xf86-input-synaptics-12.6.9.tar.gz
        dri2proto-2.3.tar.gz
        blktool_4.orig.tar.gz
        libid3tag-0.15.1b.tar.gz
        unzip552.tar.gz
        icu4c-3_6-src.tgz
        genext2fs_1.3.orig.tar.gz
        gst-fluendo-mp3
    """
    prefix1 = "[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*[\-_]"    # match most patterns, which use "-" as the separator before the version digits
    prefix2 = "[a-zA-Z]+"                                        # a loose pattern, e.g. for unzip552.tar.gz
    prefix = "(%s|%s)" % (prefix1, prefix2)
    suffix = "(tar\.gz|tgz|tar\.bz2|zip)"
    suffixtuple = ("tar.gz", "tgz", "zip", "tar.bz2")

    sinterstr = "(?P<name>%s?)(?P<ver>.*)" % prefix
    sdirstr = "(?P<name>%s)(?P<ver>.*)\.(?P<type>%s$)" % (prefix, suffix)

    def parse_inter(s):
        m = re.search(sinterstr, s)
        if not m:
            return None
        else:
            return (m.group('name'), m.group('ver'), "")

    def parse_dir(s):
        m = re.search(sdirstr, s)
        if not m:
            return None
        else:
            return (m.group('name'), m.group('ver'), m.group('type'))

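    # Editor's note (worked example, not in the original patch):
    #   parse_dir("icu4c-3_6-src.tgz")  -> ("icu4c-", "3_6-src", "tgz")
    #   parse_dir("unzip552.tar.gz")    -> ("unzip", "552", "tar.gz")    (loose prefix2 case)
    #   parse_inter("2.20")             -> ("", "2.20", "")              (bare version directory)
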
    """
    Check whether 'new' is a newer version than 'old'. We reuse the existing vercmp()
    for the comparison. PE is cleared in the comparison as this is not for building,
    and PR is cleared too for simplicity, as it's somewhat difficult to derive from
    the various upstream formats.
    """
    def __vercmp(old, new):
        (on, ov, ot) = old
        (en, ev, et) = new
        if on != en or (et and et not in suffixtuple):
            return 0

        ov = re.search("\d+[^a-zA-Z]+", ov).group()
        ev = re.search("\d+[^a-zA-Z]+", ev).group()
        return bb.utils.vercmp(("0", ov, ""), ("0", ev, ""))

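    # Editor's note (illustration, not in the original patch): for
    # old = ("gnome-common-", "2.20.0", "tar.gz") and
    # new = ("gnome-common-", "2.22.3", "tar.gz"), the two re.search() calls
    # reduce the versions to "2.20.0" and "2.22.3", and
    # bb.utils.vercmp(("0", "2.20.0", ""), ("0", "2.22.3", "")) returns a
    # negative value, i.e. the upstream candidate is newer.
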
    """
    wrapper for fetching upstream directory info
        'url'  - upstream link customized by regular expression
        'd'    - datastore
        'tmpf' - tmpfile for fetcher output
    We don't want to abort the whole build because of one recipe's error, so handle
    all exceptions gracefully without letting them leak out.
    """
    def internal_fetch_wget(url, d, tmpf):
        status = "ErrFetchUnknown"
        try:
            """
            Clear the internal url cache as this is a temporary check. Not doing so would
            have bitbake check the url multiple times when looping through a single url
            """
            fn = bb.data.getVar('FILE', d, 1)
            bb.fetch.urldata_cache[fn] = {}
            bb.fetch.init([url], d)
        except bb.fetch.NoMethodError:
            status = "ErrFetchNoMethod"
        except:
            status = "ErrInitUrlUnknown"
        else:
            """
            To avoid impacting the bitbake build engine, this trick is required to reuse
            the bitbake interfaces. bb.fetch.go() is not applicable as it checks the
            downloaded content in ${DL_DIR}, which we don't want to pollute. So
            bb.fetch.checkstatus() is borrowed here; it is designed for checking only,
            but we override its check command for our own purpose.
            """
            ld = bb.data.createCopy(d)
            bb.data.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s '${URI}'" \
                            % tmpf.name, d)
            bb.data.update_data(ld)

            try:
                bb.fetch.checkstatus(ld)
            except bb.fetch.MissingParameterError:
                status = "ErrMissParam"
            except bb.fetch.FetchError:
                status = "ErrFetch"
            except bb.fetch.MD5SumError:
                status = "ErrMD5Sum"
            except:
                status = "ErrFetchUnknown"
            else:
                status = "SUCC"
        return status

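    # Editor's note (not in the original patch): the override above makes
    # bb.fetch.checkstatus() effectively run something like
    #   /usr/bin/env wget -t 1 --passive-ftp -O <tmpfile> '<alturi>'
    # so the directory index lands in the temp file instead of ${DL_DIR},
    # and the callers below can grep it for version strings.
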
    """
    Check on a middle version directory such as "2.4/" in "http://xxx/2.4/pkg-2.4.1.tar.gz"
        'url'    - upstream link customized by regular expression
        'd'      - datastore
        'curver' - current version
    Return the new version on success, otherwise an error in "Errxxxx" style
    """
    def check_new_dir(url, curver, d):
        pn = bb.data.getVar('PN', d, 1)
        f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
        status = internal_fetch_wget(url, d, f)
        fhtml = f.read()

        if status == "SUCC" and len(fhtml):
            newver = parse_inter(curver)

            """
            match "*4.1/">*4.1/ where '*' matches arbitrary chars
            N.B. prepend the package name when present; otherwise match digits only
            """
            m = re.search("^%s" % prefix, curver)
            if m:
                s = "%s[^\d\"]*?(\d+[\.\-_])+\d+/?" % m.group()
            else:
                s = "(\d+[\.\-_])+\d+/?"

            searchstr = "[hH][rR][eE][fF]=\"%s\">" % s
            reg = re.compile(searchstr)

            valid = 0
            for line in fhtml.split("\n"):
                if line.find(curver) >= 0:
                    valid = 1

                m = reg.search(line)
                if m:
                    ver = m.group().split("\"")[1]
                    ver = ver.strip("/")
                    ver = parse_inter(ver)
                    if ver and __vercmp(newver, ver) < 0:
                        newver = ver

            """Expect a match for curver in the directory listing; otherwise the format is unknown"""
            if not valid:
                status = "ErrParseInterDir"
            else:
                """rejoin the path name"""
                status = newver[0] + newver[1]
        elif not len(fhtml):
            status = "ErrHostNoDir"

        f.close()
        if status != "ErrHostNoDir" and re.match("Err", status):
            logpath = bb.data.getVar('LOG_DIR', d, 1)
            os.system("cp %s %s/" % (f.name, logpath))
        os.unlink(f.name)
        return status

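    # Editor's note (worked example, not in the original patch): for a GNOME-style
    # index page line such as
    #   <a href="2.20/">2.20/</a>
    # the searchstr built above matches 'href="2.20/">', the version is recovered
    # with .split("\"")[1].strip("/") -> "2.20", and parse_inter("2.20") is then
    # compared against the current directory version via __vercmp().
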
    """
    Check on the last directory component to search '2.4.1' in "http://xxx/2.4/pkg-2.4.1.tar.gz"
        'url'     - upstream link customized by regular expression
        'd'       - datastore
        'curname' - current package name
    Return the new version on success, otherwise an error in "Errxxxx" style
    """
    def check_new_version(url, curname, d):
        pn = bb.data.getVar('PN', d, 1)
        f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
        status = internal_fetch_wget(url, d, f)
        fhtml = f.read()

        if status == "SUCC" and len(fhtml):
            newver = parse_dir(curname)

            """match "{PN}-5.21.1.tar.gz">{PN}-5.21.1.tar.gz """
            pn1 = re.search("^%s" % prefix, curname).group()
            s = "[^\"]*%s[^\d\"]*?(\d+[\.\-_])+[^\"]*" % pn1
            searchstr = "[hH][rR][eE][fF]=\"%s\">" % s
            reg = re.compile(searchstr)

            valid = 0
            for line in fhtml.split("\n"):
                m = reg.search(line)
                if m:
                    valid = 1
                    ver = m.group().split("\"")[1].split("/")[-1]
                    ver = parse_dir(ver)
                    if ver and __vercmp(newver, ver) < 0:
                        newver = ver

            """Expect at least one match in the directory listing; otherwise the format is unknown"""
            if not valid:
                status = "ErrParseDir"
            else:
                """newver still contains a full package name string"""
                status = re.search("(\d+[.\-_])*\d+", newver[1]).group()
        elif not len(fhtml):
            status = "ErrHostNoDir"

        f.close()
        """if the host has no directory listing, there is no need to save the tmp file"""
        if status != "ErrHostNoDir" and re.match("Err", status):
            logpath = bb.data.getVar('LOG_DIR', d, 1)
            os.system("cp %s %s/" % (f.name, logpath))
        os.unlink(f.name)
        return status

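    # Editor's note (worked example, not in the original patch): for an index line
    #   <a href="gnome-common-2.22.0.tar.gz">gnome-common-2.22.0.tar.gz</a>
    # with curname "gnome-common-2.20.0.tar.gz", the searchstr matches the href,
    # parse_dir() turns it into ("gnome-common-", "2.22.0", "tar.gz"), and the
    # final re.search("(\d+[.\-_])*\d+", ...) strips it down to "2.22.0".
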
    """first check whether a uri is provided"""
    src_uri = bb.data.getVar('SRC_URI', d, 1)
    if not src_uri:
        return

    """initialize log files"""
    logpath = bb.data.getVar('LOG_DIR', d, 1)
    bb.utils.mkdirhier(logpath)
    logfile = os.path.join(logpath, "poky_pkg_info.log.%s" % bb.data.getVar('DATETIME', d, 1))
    if not os.path.exists(logfile):
        slogfile = os.path.join(logpath, "poky_pkg_info.log")
        if os.path.exists(slogfile):
            os.remove(slogfile)
        os.system("touch %s" % logfile)
        os.symlink(logfile, slogfile)

    """generate package information from the .bb file"""
    pname = bb.data.getVar('PN', d, 1)
    pcurver = bb.data.getVar('PV', d, 1)
    pdesc = bb.data.getVar('DESCRIPTION', d, 1)
    pgrp = bb.data.getVar('SECTION', d, 1)

    found = 0
    for uri in src_uri.split():
        m = re.compile('(?P<type>[^:]*)').match(uri)
        if not m:
            raise bb.fetch.MalformedUrl(uri)
        elif m.group('type') in ('http', 'https', 'ftp', 'cvs', 'svn', 'git'):
            found = 1
            pproto = m.group('type')
            break
    if not found:
        pproto = "file"
    pupver = "N/A"
    pstatus = "ErrUnknown"

    (type, host, path, user, pswd, parm) = bb.decodeurl(uri)
    if type in ['http', 'https', 'ftp']:
        newver = pcurver
        altpath = path
        dirver = "-"
        curname = "-"

        """
        match a version number amid the path, such as "5.7" in:
            http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
        N.B. what about something like "../5.7/5.8/..."? No such example found so far :-P
        """
        m = re.search(r"[^/]*(\d+\.)+\d+([\-_]r\d+)*/", path)
        if m:
            altpath = path.split(m.group())[0]
            dirver = m.group().strip("/")

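        # Editor's note (worked example, not in the original patch): for
        #   path = "/sources/gnome-common/5.7/gnome-common-5.7.0.tar.gz"
        # the regex above matches "5.7/", so altpath becomes "/sources/gnome-common/"
        # and dirver becomes "5.7"; check_new_dir() then rescans that directory.
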
        """use the new path and drop the URL parameters; for wget the only parameter is md5sum"""
        alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])

        newver = check_new_dir(alturi, dirver, d)
        altpath = path
        if not re.match("Err", newver) and dirver != newver:
            altpath = altpath.replace(dirver, newver, 1)

        """Now try to acquire all remote files in the current directory"""
        if not re.match("Err", newver):
            curname = altpath.split("/")[-1]

            """get the remote directory name by stripping the package file name"""
            m = re.search(r"/.*/", altpath)
            if not m:
                altpath = "/"
            else:
                altpath = m.group()

            alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
            newver = check_new_version(alturi, curname, d)
            if not re.match("Err", newver):
                pupver = newver
                if pupver != pcurver:
                    pstatus = "UPDATE"
                else:
                    pstatus = "MATCH"

        if re.match("Err", newver):
            pstatus = newver + ":" + altpath + ":" + dirver + ":" + curname
    elif type == 'git':
        """N.B. hardcode UPDATE for git/svn/cvs for now."""
        pupver = "master"
        pstatus = "UPDATE"
    elif type == 'svn':
        pupver = "HEAD"
        pstatus = "UPDATE"
    elif type == 'cvs':
        pupver = "HEAD"
        pstatus = "UPDATE"
    elif type == 'file':
        """a local file is always up-to-date"""
        pupver = pcurver
        pstatus = "MATCH"
    else:
        pstatus = "ErrUnsupportedProto"

    if re.match("Err", pstatus):
        pstatus += ":%s%s" % (host, path)
    lf = bb.utils.lockfile(logfile + ".lock")
    f = open(logfile, "a")
    f.write("\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % \
            (pname, pgrp, pproto, pcurver, pupver, pstatus, pdesc))
    f.close()
    bb.utils.unlockfile(lf)
}
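# Editor's note (not in the original patch): each run appends one tab-separated
# record per recipe to ${LOG_DIR}/poky_pkg_info.log, in the order
#   PN  SECTION  protocol  PV  upstream-version  status  DESCRIPTION
# where status is "MATCH", "UPDATE" or one of the "Errxxxx" codes above.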

addtask checkpkgall after do_checkpkg
do_checkpkgall[recrdeptask] = "do_checkpkg"
do_checkpkgall[nostamp] = "1"
do_checkpkgall() {
    :
}
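# Editor's note: as the commit message says, "bitbake <recipe> -c checkpkg" checks a
# single recipe; to cover all active recipes, the 'world' target is used together with
# this checkpkgall task (presumably something like "bitbake world -c checkpkgall").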

addtask checkuri
do_checkuri[nostamp] = "1"
python do_checkuri() {