summaryrefslogtreecommitdiffstats
path: root/meta/classes/utility-tasks.bbclass
diff options
context:
space:
mode:
authorDongxiao Xu <dongxiao.xu@intel.com>2010-11-22 14:02:37 +0800
committerSaul Wold <sgw@linux.intel.com>2010-11-22 10:38:15 -0800
commit53aff7d6775eb1c2c8f419f325b91c062d85eed5 (patch)
tree295959086735ab12391444999e8a5dd9ca68d44a /meta/classes/utility-tasks.bbclass
parent836b290732b67ff3de27229f85290c953327f345 (diff)
downloadpoky-53aff7d6775eb1c2c8f419f325b91c062d85eed5.tar.gz
utility-tasks.bbclass: Move distro related tasks to distrodata.bbclass
Most of the d.keys() used in file parsing are variables from distro_tracking_fields.inc, which are not used in a normal build. Therefore remove the inclusion of distro_tracking_fields.inc from poky.conf. In addition, move the distro-related tasks to distrodata.bbclass, which includes that tracking-field file. With this change, file parsing time is reduced by about 25%. Signed-off-by: Dongxiao Xu <dongxiao.xu@intel.com>
Diffstat (limited to 'meta/classes/utility-tasks.bbclass')
-rw-r--r--meta/classes/utility-tasks.bbclass442
1 files changed, 0 insertions, 442 deletions
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes/utility-tasks.bbclass
index 205a206baf..3ab37fa6b9 100644
--- a/meta/classes/utility-tasks.bbclass
+++ b/meta/classes/utility-tasks.bbclass
@@ -47,396 +47,6 @@ python do_rebuild() {
47# bb.build.exec_func('do_clean', d) 47# bb.build.exec_func('do_clean', d)
48#} 48#}
49 49
# do_checkpkg: probe each recipe's upstream location (http/https/ftp, git,
# svn, cvs, or local file) for a version newer than the recipe's current one,
# and append a single tab-separated result line per recipe to
# ${LOG_DIR}/poky_pkg_info.log.<DATETIME>.  Errors are reported inline as
# "Errxxxx"-style status strings rather than aborting the build.
addtask checkpkg
do_checkpkg[nostamp] = "1"
python do_checkpkg() {
    import sys
    import re
    import tempfile

    """
    sanity check to ensure same name and type. Match as many patterns as possible
    such as:
            gnome-common-2.20.0.tar.gz (most common format)
            gtk+-2.90.1.tar.gz
            xf86-input-synaptics-12.6.9.tar.gz
            dri2proto-2.3.tar.gz
            blktool_4.orig.tar.gz
            libid3tag-0.15.1b.tar.gz
            unzip552.tar.gz
            icu4c-3_6-src.tgz
            genext2fs_1.3.orig.tar.gz
            gst-fluendo-mp3
    """
    prefix1 = "[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*[\-_]"    # match most patterns which uses "-" as separator to version digits
    prefix2 = "[a-zA-Z]+"    # a loose pattern such as for unzip552.tar.gz
    prefix = "(%s|%s)" % (prefix1, prefix2)
    suffix = "(tar\.gz|tgz|tar\.bz2|zip)"
    suffixtuple = ("tar.gz", "tgz", "zip", "tar.bz2")

    # 'inter' patterns match an intermediate path component such as "2.4/";
    # 'dir' patterns match a full tarball name with a recognized suffix.
    sinterstr = "(?P<name>%s?)(?P<ver>.*)" % prefix
    sdirstr = "(?P<name>%s)(?P<ver>.*)\.(?P<type>%s$)" % (prefix, suffix)

    # Split an intermediate path component into (name, version, "") or None.
    def parse_inter(s):
        m = re.search(sinterstr, s)
        if not m:
            return None
        else:
            return (m.group('name'), m.group('ver'), "")

    # Split a tarball file name into (name, version, archive-type) or None.
    def parse_dir(s):
        m = re.search(sdirstr, s)
        if not m:
            return None
        else:
            return (m.group('name'), m.group('ver'), m.group('type'))

    """
    Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
    purpose. PE is cleared in comparison as it's not for build, and PV is cleared too
    for simplicity as it's somehow difficult to get from various upstream format
    """
    def __vercmp(old, new):
        (on, ov, ot) = old
        (en, ev, et) = new
        # Different names, or an archive suffix we don't recognize: treat as
        # "not newer" so the candidate is ignored.
        if on != en or (et and et not in suffixtuple):
            return 0

        # Reduce both versions to their leading digits-plus-separators run.
        # NOTE(review): re.search(...).group() raises AttributeError when the
        # version string has no digit run -- presumably the parse_* guards
        # upstream make that impossible; confirm.
        ov = re.search("\d+[^a-zA-Z]+", ov).group()
        ev = re.search("\d+[^a-zA-Z]+", ev).group()
        return bb.utils.vercmp(("0", ov, ""), ("0", ev, ""))

    """
    wrapper for fetch upstream directory info
    'url'  - upstream link customized by regular expression
    'd'    - database
    'tmpf' - tmpfile for fetcher output
    We don't want to exit whole build due to one recipe error. So handle all exceptions
    gracefully w/o leaking to outer.
    """
    def internal_fetch_wget(url, d, tmpf):
        status = "ErrFetchUnknown"
        try:
            """
            Clear internal url cache as it's a temporary check. Not doing so will have
            bitbake check url multiple times when looping through a single url
            """
            fn = bb.data.getVar('FILE', d, 1)
            bb.fetch.urldata_cache[fn] = {}
            bb.fetch.init([url], d)
        except bb.fetch.NoMethodError:
            status = "ErrFetchNoMethod"
        except:
            # deliberately broad: one bad recipe must not abort the whole run
            status = "ErrInitUrlUnknown"
        else:
            """
            To avoid impacting bitbake build engine, this trick is required for reusing bitbake
            interfaces. bb.fetch.go() is not applicable as it checks downloaded content in ${DL_DIR}
            while we don't want to pollute that place. So bb.fetch.checkstatus() is borrowed here
            which is designed for check purpose but we override check command for our own purpose
            """
            ld = bb.data.createCopy(d)
            # NOTE(review): the variable is set on 'd', not on the copy 'ld';
            # this appears to rely on createCopy()'s copy-on-write read-through
            # so that update_data(ld) still sees it -- confirm against bb.data.
            bb.data.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s '${URI}'" \
                        % tmpf.name, d)
            bb.data.update_data(ld)

            try:
                bb.fetch.checkstatus(ld)
            except bb.fetch.MissingParameterError:
                status = "ErrMissParam"
            except bb.fetch.FetchError:
                status = "ErrFetch"
            except bb.fetch.MD5SumError:
                status = "ErrMD5Sum"
            except:
                status = "ErrFetchUnknown"
            else:
                status = "SUCC"
        return status

    """
    Check on middle version directory such as "2.4/" in "http://xxx/2.4/pkg-2.4.1.tar.gz",
    'url'    - upstream link customized by regular expression
    'd'      - database
    'curver' - current version
    Return new version if success, or else error in "Errxxxx" style
    """
    def check_new_dir(url, curver, d):
        pn = bb.data.getVar('PN', d, 1)
        # delete=False so the listing survives close() for the error-log copy below
        f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
        status = internal_fetch_wget(url, d, f)
        fhtml = f.read()

        if status == "SUCC" and len(fhtml):
            newver = parse_inter(curver)

            """
            match "*4.1/">*4.1/ where '*' matches chars
            N.B. add package name, only match for digits
            """
            m = re.search("^%s" % prefix, curver)
            if m:
                s = "%s[^\d\"]*?(\d+[\.\-_])+\d+/?" % m.group()
            else:
                s = "(\d+[\.\-_])+\d+/?"

            searchstr = "[hH][rR][eE][fF]=\"%s\">" % s
            reg = re.compile(searchstr)

            valid = 0
            for line in fhtml.split("\n"):
                # seeing the current version in the listing proves we can
                # parse this host's directory format at all
                if line.find(curver) >= 0:
                    valid = 1

                m = reg.search(line)
                if m:
                    ver = m.group().split("\"")[1]
                    ver = ver.strip("/")
                    ver = parse_inter(ver)
                    if ver and __vercmp(newver, ver) < 0:
                        newver = ver

            """Expect a match for curver in directory list, or else it indicates unknown format"""
            if not valid:
                status = "ErrParseInterDir"
            else:
                """rejoin the path name"""
                status = newver[0] + newver[1]
        elif not len(fhtml):
            status = "ErrHostNoDir"

        f.close()
        # on parse/fetch errors keep the fetched listing in LOG_DIR for diagnosis
        if status != "ErrHostNoDir" and re.match("Err", status):
            logpath = bb.data.getVar('LOG_DIR', d, 1)
            os.system("cp %s %s/" % (f.name, logpath))
        os.unlink(f.name)
        return status

    """
    Check on the last directory to search '2.4.1' in "http://xxx/2.4/pkg-2.4.1.tar.gz",
    'url'     - upstream link customized by regular expression
    'd'       - database
    'curname' - current package name
    Return new version if success, or else error in "Errxxxx" style
    """
    def check_new_version(url, curname, d):
        """possible to have no version in pkg name, such as spectrum-fw"""
        if not re.search("\d+", curname):
            # NOTE(review): 'pcurver' comes from the enclosing task body via
            # closure; it is assigned there before this helper is called --
            # confirm that ordering holds for all branches.
            return pcurver
        pn = bb.data.getVar('PN', d, 1)
        f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
        status = internal_fetch_wget(url, d, f)
        fhtml = f.read()

        if status == "SUCC" and len(fhtml):
            newver = parse_dir(curname)

            """match "{PN}-5.21.1.tar.gz">{PN}-5.21.1.tar.gz """
            pn1 = re.search("^%s" % prefix, curname).group()
            s = "[^\"]*%s[^\d\"]*?(\d+[\.\-_])+[^\"]*" % pn1
            searchstr = "[hH][rR][eE][fF]=\"%s\">" % s
            reg = re.compile(searchstr)

            valid = 0
            for line in fhtml.split("\n"):
                m = reg.search(line)
                if m:
                    valid = 1
                    ver = m.group().split("\"")[1].split("/")[-1]
                    ver = parse_dir(ver)
                    if ver and __vercmp(newver, ver) < 0:
                        newver = ver

            """Expect a match for curver in directory list, or else it indicates unknown format"""
            if not valid:
                status = "ErrParseDir"
            else:
                """newver still contains a full package name string"""
                status = re.search("(\d+[.\-_])*\d+", newver[1]).group()
        elif not len(fhtml):
            status = "ErrHostNoDir"

        f.close()
        """if host hasn't directory information, no need to save tmp file"""
        if status != "ErrHostNoDir" and re.match("Err", status):
            logpath = bb.data.getVar('LOG_DIR', d, 1)
            os.system("cp %s %s/" % (f.name, logpath))
        os.unlink(f.name)
        return status

    """first check whether a uri is provided"""
    src_uri = bb.data.getVar('SRC_URI', d, 1)
    if not src_uri:
        return

    """initialize log files."""
    logpath = bb.data.getVar('LOG_DIR', d, 1)
    bb.utils.mkdirhier(logpath)
    logfile = os.path.join(logpath, "poky_pkg_info.log.%s" % bb.data.getVar('DATETIME', d, 1))
    if not os.path.exists(logfile):
        # keep a stable "poky_pkg_info.log" symlink pointing at the newest run
        slogfile = os.path.join(logpath, "poky_pkg_info.log")
        if os.path.exists(slogfile):
            os.remove(slogfile)
        os.system("touch %s" % logfile)
        os.symlink(logfile, slogfile)

    """generate package information from .bb file"""
    pname = bb.data.getVar('PN', d, 1)
    pdesc = bb.data.getVar('DESCRIPTION', d, 1)
    pgrp = bb.data.getVar('SECTION', d, 1)

    # pick the first network URI out of SRC_URI; fall back to "file"
    found = 0
    for uri in src_uri.split():
        m = re.compile('(?P<type>[^:]*)').match(uri)
        if not m:
            raise MalformedUrl(uri)
        elif m.group('type') in ('http', 'https', 'ftp', 'cvs', 'svn', 'git'):
            found = 1
            pproto = m.group('type')
            break
    if not found:
        pproto = "file"
    pupver = "N/A"
    pstatus = "ErrUnknown"

    (type, host, path, user, pswd, parm) = bb.decodeurl(uri)
    # release tarballs are tracked by PV; SCM checkouts by SRCREV
    if type in ['http', 'https', 'ftp']:
        pcurver = bb.data.getVar('PV', d, 1)
    else:
        pcurver = bb.data.getVar("SRCREV", d, 1)

    if type in ['http', 'https', 'ftp']:
        newver = pcurver
        altpath = path
        dirver = "-"
        curname = "-"

        """
        match version number amid the path, such as "5.7" in:
                http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
        N.B. how about sth. like "../5.7/5.8/..."? Not find such example so far :-P
        """
        m = re.search(r"[^/]*(\d+\.)+\d+([\-_]r\d+)*/", path)
        if m:
            altpath = path.split(m.group())[0]
            dirver = m.group().strip("/")

        """use new path and remove param. for wget only param is md5sum"""
        alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])

        # step 1: find the newest intermediate version directory (e.g. "2.4/")
        newver = check_new_dir(alturi, dirver, d)
        altpath = path
        if not re.match("Err", newver) and dirver != newver:
            altpath = altpath.replace(dirver, newver, 1)

        """Now try to acquire all remote files in current directory"""
        if not re.match("Err", newver):
            curname = altpath.split("/")[-1]

            """get remote name by skipping package name"""
            m = re.search(r"/.*/", altpath)
            if not m:
                altpath = "/"
            else:
                altpath = m.group()

            # step 2: scan that directory's listing for a newer tarball
            alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
            newver = check_new_version(alturi, curname, d)
            if not re.match("Err", newver):
                pupver = newver
                if pupver != pcurver:
                    pstatus = "UPDATE"
                else:
                    pstatus = "MATCH"

        # on failure, record the full lookup context for the log line
        if re.match("Err", newver):
            pstatus = newver + ":" + altpath + ":" + dirver + ":" + curname
    elif type == 'git':
        if user:
            gituser = user + '@'
        else:
            gituser = ""

        if 'protocol' in parm:
            gitproto = parm['protocol']
        else:
            gitproto = "rsync"

        # ask the remote for its current HEAD revision
        gitcmd = "git ls-remote %s://%s%s%s HEAD 2>&1" % (gitproto, gituser, host, path)
        print gitcmd
        ver = os.popen(gitcmd).read()
        if ver and re.search("HEAD", ver):
            pupver = ver.split("\t")[0]
            if pcurver == pupver:
                pstatus = "MATCH"
            else:
                pstatus = "UPDATE"
        else:
            pstatus = "ErrGitAccess"
    elif type == 'svn':
        options = []
        if user:
            options.append("--username %s" % user)
        if pswd:
            options.append("--password %s" % pswd)
        svnproto = 'svn'
        if 'proto' in parm:
            svnproto = parm['proto']
        if 'rev' in parm:
            pcurver = parm['rev']

        # "svn info" reports the last-changed revision of the module
        svncmd = "svn info %s %s://%s%s/%s/ 2>&1" % (" ".join(options), svnproto, host, path, parm["module"])
        print svncmd
        svninfo = os.popen(svncmd).read()
        for line in svninfo.split("\n"):
            if re.search("^Last Changed Rev:", line):
                pupver = line.split(" ")[-1]
                if pcurver == pupver:
                    pstatus = "MATCH"
                else:
                    pstatus = "UPDATE"

        if re.match("Err", pstatus):
            pstatus = "ErrSvnAccess"
    elif type == 'cvs':
        pupver = "HEAD"
        pstatus = "UPDATE"
    elif type == 'file':
        """local file is always up-to-date"""
        pupver = pcurver
        pstatus = "MATCH"
    else:
        pstatus = "ErrUnsupportedProto"

    if re.match("Err", pstatus):
        pstatus += ":%s%s" % (host, path)

    """Read from manual distro tracking fields as alternative"""
    pmver = bb.data.getVar("RECIPE_LATEST_VERSION", d, 1)
    if not pmver:
        pmver = "N/A"
        pmstatus = "ErrNoRecipeData"
    else:
        if pmver == pcurver:
            pmstatus = "MATCH"
        else:
            pmstatus = "UPDATE"

    # serialize writers from parallel tasks with a lockfile around the append
    lf = bb.utils.lockfile(logfile + ".lock")
    f = open(logfile, "a")
    f.write("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % \
            (pname, pgrp, pproto, pcurver, pmver, pupver, pmstatus, pstatus, pdesc))
    f.close()
    bb.utils.unlockfile(lf)
}
432
# do_checkpkgall: run do_checkpkg across the entire dependency tree.
# The [recrdeptask] flag makes bitbake schedule do_checkpkg for every recipe
# this target (recursively) depends on; the task body itself is a no-op.
addtask checkpkgall after do_checkpkg
do_checkpkgall[recrdeptask] = "do_checkpkg"
do_checkpkgall[nostamp] = "1"
do_checkpkgall() {
	:
}
439
440addtask checkuri 50addtask checkuri
441do_checkuri[nostamp] = "1" 51do_checkuri[nostamp] = "1"
442python do_checkuri() { 52python do_checkuri() {
@@ -487,55 +97,3 @@ do_buildall[recrdeptask] = "do_build"
487do_buildall() { 97do_buildall() {
488 : 98 :
489} 99}
490
491#addhandler check_eventhandler
492python check_eventhandler() {
493 from bb.event import Handled, NotHandled
494 # if bb.event.getName(e) == "TaskStarted":
495
496 if bb.event.getName(e) == "BuildStarted":
497 import oe.distro_check as dc
498 tmpdir = bb.data.getVar('TMPDIR', e.data, 1)
499 distro_check_dir = os.path.join(tmpdir, "distro_check")
500 datetime = bb.data.getVar('DATETIME', e.data, 1)
501 """initialize log files."""
502 logpath = bb.data.getVar('LOG_DIR', e.data, 1)
503 bb.utils.mkdirhier(logpath)
504 logfile = os.path.join(logpath, "distrocheck.%s.csv" % bb.data.getVar('DATETIME', e.data, 1))
505 if not os.path.exists(logfile):
506 slogfile = os.path.join(logpath, "distrocheck.csv")
507 if os.path.exists(slogfile):
508 os.remove(slogfile)
509 os.system("touch %s" % logfile)
510 os.symlink(logfile, slogfile)
511 bb.data.setVar('LOG_FILE', logfile, e.data)
512
513 return NotHandled
514}
515
addtask distro_check
do_distro_check[nostamp] = "1"
python do_distro_check() {
    """checks if the package is present in other public Linux distros"""
    import oe.distro_check as dc
    # copy the datastore so DATETIME is read with overrides fully applied
    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)
    tmpdir = bb.data.getVar('TMPDIR', d, 1)
    # per-build cache of other distros' package lists lives under TMPDIR
    distro_check_dir = os.path.join(tmpdir, "distro_check")
    datetime = bb.data.getVar('DATETIME', localdata, 1)
    # refresh the cached distro package data if this build's stamp is newer
    dc.update_distro_data(distro_check_dir, datetime)

    # do the comparison
    result = dc.compare_in_distro_packages_list(distro_check_dir, d)

    # save the results
    dc.save_distro_check_result(result, datetime, d)
}
534
# do_distro_checkall: run do_distro_check across the entire dependency tree
# via [recrdeptask]; the task body itself is a no-op placeholder.
addtask distro_checkall after do_distro_check
do_distro_checkall[recrdeptask] = "do_distro_check"
do_distro_checkall[nostamp] = "1"
do_distro_checkall() {
	:
}
541