author	Dongxiao Xu <dongxiao.xu@intel.com>	2010-11-22 14:02:37 +0800
committer	Saul Wold <sgw@linux.intel.com>	2010-11-22 10:38:15 -0800
commit	53aff7d6775eb1c2c8f419f325b91c062d85eed5 (patch)
tree	295959086735ab12391444999e8a5dd9ca68d44a /meta/classes/distrodata.bbclass
parent	836b290732b67ff3de27229f85290c953327f345 (diff)
download	poky-53aff7d6775eb1c2c8f419f325b91c062d85eed5.tar.gz
utility-tasks.bbclass: Move distro related tasks to distrodata.bbclass
Most of the d.keys() touched during file parsing are variables defined in distro_tracking_fields.inc, which are not used in a normal build. Therefore remove the inclusion of distro_tracking_fields.inc from poky.conf and move the distro-related tasks into distrodata.bbclass, which includes that tracking-fields file itself. This change cuts file parsing time by about 25%.

Signed-off-by: Dongxiao Xu <dongxiao.xu@intel.com>
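The relocation described above would look roughly like the sketch below; the exact include path and the original poky.conf line are assumptions about the tree layout, not text taken from this commit.

    # conf/distro/poky.conf (before) -- assumed line dropped by this change:
    #   require conf/distro/include/distro_tracking_fields.inc
    # meta/classes/distrodata.bbclass (after) -- the class pulls the fields in itself:
    include conf/distro/include/distro_tracking_fields.inc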
Diffstat (limited to 'meta/classes/distrodata.bbclass')
-rw-r--r--	meta/classes/distrodata.bbclass	440
1 file changed, 440 insertions, 0 deletions
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index f6642f0f13..221dfae9f3 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -211,3 +211,443 @@ do_distrodataall() {
211 :
212}
213
214addtask checkpkg
215do_checkpkg[nostamp] = "1"
216python do_checkpkg() {
217 import sys
218 import re
219 import tempfile
220
221 """
222 sanity check to ensure same name and type. Match as many patterns as possible
223 such as:
224 gnome-common-2.20.0.tar.gz (most common format)
225 gtk+-2.90.1.tar.gz
226 xf86-input-synaptics-12.6.9.tar.gz
227 dri2proto-2.3.tar.gz
228 blktool_4.orig.tar.gz
229 libid3tag-0.15.1b.tar.gz
230 unzip552.tar.gz
231 icu4c-3_6-src.tgz
232 genext2fs_1.3.orig.tar.gz
233 gst-fluendo-mp3
234 """
235 prefix1 = "[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*[\-_]"	# match most patterns, which use "-" or "_" to separate the name from the version digits
236 prefix2 = "[a-zA-Z]+" # a loose pattern such as for unzip552.tar.gz
237 prefix = "(%s|%s)" % (prefix1, prefix2)
238 suffix = "(tar\.gz|tgz|tar\.bz2|zip)"
239 suffixtuple = ("tar.gz", "tgz", "zip", "tar.bz2")
240
241 sinterstr = "(?P<name>%s?)(?P<ver>.*)" % prefix
242 sdirstr = "(?P<name>%s)(?P<ver>.*)\.(?P<type>%s$)" % (prefix, suffix)
243
244 def parse_inter(s):
245 m = re.search(sinterstr, s)
246 if not m:
247 return None
248 else:
249 return (m.group('name'), m.group('ver'), "")
250
251 def parse_dir(s):
252 m = re.search(sdirstr, s)
253 if not m:
254 return None
255 else:
256 return (m.group('name'), m.group('ver'), m.group('type'))
257
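	# Illustrative only, not part of the patch: with the patterns above one would
	# expect, for example,
	#   parse_dir("gnome-common-2.20.0.tar.gz") -> ("gnome-common-", "2.20.0", "tar.gz")
	#   parse_inter("2.20")                     -> ("", "2.20", "")
	# i.e. parse_dir() splits a full tarball name into name/version/suffix, while
	# parse_inter() handles bare version directories that carry no name prefix.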
258 """
259 Check whether the 'new' version is newer than the 'old' one. We reuse the existing
260 vercmp() for this purpose. PE is cleared in the comparison as it is not part of the
261 upstream version, and PR is cleared too for simplicity, as it is hard to derive from the various upstream formats.
262 """
263 def __vercmp(old, new):
264 (on, ov, ot) = old
265 (en, ev, et) = new
266 if on != en or (et and et not in suffixtuple):
267 return 0
268
269 ov = re.search("\d+[^a-zA-Z]+", ov).group()
270 ev = re.search("\d+[^a-zA-Z]+", ev).group()
271 return bb.utils.vercmp(("0", ov, ""), ("0", ev, ""))
272
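	# Illustrative only, not part of the patch: with epoch and revision cleared,
	# __vercmp(("foo-", "1.2.3", "tar.gz"), ("foo-", "1.2.10", "tar.gz")) is expected
	# to be negative (the candidate is newer), since it reduces to
	# bb.utils.vercmp(("0", "1.2.3", ""), ("0", "1.2.10", "")); mismatched names or
	# an unrecognised suffix short-circuit to 0 ("not newer").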
273 """
274 wrapper for fetching upstream directory info
275 'url' - upstream link customized by regular expression
276 'd' - datastore
277 'tmpf' - tmpfile for fetcher output
278 We don't want to abort the whole build because of one recipe's error, so handle all
279 exceptions gracefully without letting them leak to the caller.
280 """
281 def internal_fetch_wget(url, d, tmpf):
282 status = "ErrFetchUnknown"
283 try:
284 """
285 Clear the internal url cache, as this is only a temporary check. Not doing so
286 would have bitbake check the same url multiple times.
287 """
288 fn = bb.data.getVar('FILE', d, 1)
289 bb.fetch.urldata_cache[fn] = {}
290 bb.fetch.init([url], d)
291 except bb.fetch.NoMethodError:
292 status = "ErrFetchNoMethod"
293 except:
294 status = "ErrInitUrlUnknown"
295 else:
296 """
297 To avoid impacting the bitbake build engine, this trick is needed to reuse the bitbake
298 interfaces. bb.fetch.go() is not applicable, as it checks for downloaded content in ${DL_DIR},
299 which we don't want to pollute. So bb.fetch.checkstatus() is borrowed here instead: it is
300 designed for checking, and we override its check command for our own purpose.
301 """
302 ld = bb.data.createCopy(d)
303 bb.data.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s '${URI}'" \
304 % tmpf.name, d)
305 bb.data.update_data(ld)
306
307 try:
308 bb.fetch.checkstatus(ld)
309 except bb.fetch.MissingParameterError:
310 status = "ErrMissParam"
311 except bb.fetch.FetchError:
312 status = "ErrFetch"
313 except bb.fetch.MD5SumError:
314 status = "ErrMD5Sum"
315 except:
316 status = "ErrFetchUnknown"
317 else:
318 status = "SUCC"
319 return status
320
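	# Illustrative only, not part of the patch: with the override above the status
	# check effectively runs something like
	#   /usr/bin/env wget -t 1 --passive-ftp -O <tmpf.name> '<url>'
	# so the fetched directory listing lands in the temporary file rather than in
	# ${DL_DIR}, and "SUCC" simply means the wget command exited successfully.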
321 """
322 Check the intermediate version directory, such as "2.4/" in "http://xxx/2.4/pkg-2.4.1.tar.gz".
323 'url' - upstream link customized by regular expression
324 'd' - datastore
325 'curver' - current version
326 Return the new version on success, otherwise an error in "Errxxxx" style
327 """
328 def check_new_dir(url, curver, d):
329 pn = bb.data.getVar('PN', d, 1)
330 f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
331 status = internal_fetch_wget(url, d, f)
332 fhtml = f.read()
333
334 if status == "SUCC" and len(fhtml):
335 newver = parse_inter(curver)
336
337 """
338 match "*4.1/">*4.1/ where '*' matches arbitrary chars
339 N.B. prepend the package name when it is known; otherwise match digits only
340 """
341 m = re.search("^%s" % prefix, curver)
342 if m:
343 s = "%s[^\d\"]*?(\d+[\.\-_])+\d+/?" % m.group()
344 else:
345 s = "(\d+[\.\-_])+\d+/?"
346
347 searchstr = "[hH][rR][eE][fF]=\"%s\">" % s
348 reg = re.compile(searchstr)
349
350 valid = 0
351 for line in fhtml.split("\n"):
352 if line.find(curver) >= 0:
353 valid = 1
354
355 m = reg.search(line)
356 if m:
357 ver = m.group().split("\"")[1]
358 ver = ver.strip("/")
359 ver = parse_inter(ver)
360 if ver and __vercmp(newver, ver) < 0:
361 newver = ver
362
363 """Expect a match for curver in directory list, or else it indicates unknown format"""
364 if not valid:
365 status = "ErrParseInterDir"
366 else:
367 """rejoin the path name"""
368 status = newver[0] + newver[1]
369 elif not len(fhtml):
370 status = "ErrHostNoDir"
371
372 f.close()
373 if status != "ErrHostNoDir" and re.match("Err", status):
374 logpath = bb.data.getVar('LOG_DIR', d, 1)
375 os.system("cp %s %s/" % (f.name, logpath))
376 os.unlink(f.name)
377 return status
378
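	# Illustrative only, not part of the patch: for a GNOME-style listing, a line
	# such as '<a href="2.6/">2.6/</a>' matches the href pattern built above, the
	# version "2.6" is extracted and compared against the current version via
	# __vercmp(), and the newest directory found (e.g. "2.6") is returned as the
	# status string.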
379 """
380 Check the final directory, searching for '2.4.1' in "http://xxx/2.4/pkg-2.4.1.tar.gz".
381 'url' - upstream link customized by regular expression
382 'd' - datastore
383 'curname' - current package name
384 Return the new version on success, otherwise an error in "Errxxxx" style
385 """
386 def check_new_version(url, curname, d):
387 """possible to have no version in pkg name, such as spectrum-fw"""
388 if not re.search("\d+", curname):
389 return pcurver
390 pn = bb.data.getVar('PN', d, 1)
391 f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
392 status = internal_fetch_wget(url, d, f)
393 fhtml = f.read()
394
395 if status == "SUCC" and len(fhtml):
396 newver = parse_dir(curname)
397
398 """match "{PN}-5.21.1.tar.gz">{PN}-5.21.1.tar.gz """
399 pn1 = re.search("^%s" % prefix, curname).group()
400 s = "[^\"]*%s[^\d\"]*?(\d+[\.\-_])+[^\"]*" % pn1
401 searchstr = "[hH][rR][eE][fF]=\"%s\">" % s
402 reg = re.compile(searchstr)
403
404 valid = 0
405 for line in fhtml.split("\n"):
406 m = reg.search(line)
407 if m:
408 valid = 1
409 ver = m.group().split("\"")[1].split("/")[-1]
410 ver = parse_dir(ver)
411 if ver and __vercmp(newver, ver) < 0:
412 newver = ver
413
414 """Expect a match for curver in directory list, or else it indicates unknown format"""
415 if not valid:
416 status = "ErrParseDir"
417 else:
418 """newver still contains a full package name string"""
419 status = re.search("(\d+[.\-_])*\d+", newver[1]).group()
420 elif not len(fhtml):
421 status = "ErrHostNoDir"
422
423 f.close()
424 """if host hasn't directory information, no need to save tmp file"""
425 if status != "ErrHostNoDir" and re.match("Err", status):
426 logpath = bb.data.getVar('LOG_DIR', d, 1)
427 os.system("cp %s %s/" % (f.name, logpath))
428 os.unlink(f.name)
429 return status
430
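	# Illustrative only, not part of the patch: given curname "dri2proto-2.3.tar.gz"
	# and a listing line like '<a href="dri2proto-2.4.tar.gz">', the href pattern
	# above matches, parse_dir() yields ("dri2proto-", "2.4", "tar.gz"), and the
	# final status returned is just the bare version string, e.g. "2.4".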
431 """first check whether a uri is provided"""
432 src_uri = bb.data.getVar('SRC_URI', d, 1)
433 if not src_uri:
434 return
435
436 """initialize log files."""
437 logpath = bb.data.getVar('LOG_DIR', d, 1)
438 bb.utils.mkdirhier(logpath)
439 logfile = os.path.join(logpath, "poky_pkg_info.log.%s" % bb.data.getVar('DATETIME', d, 1))
440 if not os.path.exists(logfile):
441 slogfile = os.path.join(logpath, "poky_pkg_info.log")
442 if os.path.exists(slogfile):
443 os.remove(slogfile)
444 os.system("touch %s" % logfile)
445 os.symlink(logfile, slogfile)
446
447 """generate package information from .bb file"""
448 pname = bb.data.getVar('PN', d, 1)
449 pdesc = bb.data.getVar('DESCRIPTION', d, 1)
450 pgrp = bb.data.getVar('SECTION', d, 1)
451
452 found = 0
453 for uri in src_uri.split():
454 m = re.compile('(?P<type>[^:]*)').match(uri)
455 if not m:
456 raise bb.fetch.MalformedUrl(uri)
457 elif m.group('type') in ('http', 'https', 'ftp', 'cvs', 'svn', 'git'):
458 found = 1
459 pproto = m.group('type')
460 break
461 if not found:
462 pproto = "file"
463 pupver = "N/A"
464 pstatus = "ErrUnknown"
465
466 (type, host, path, user, pswd, parm) = bb.decodeurl(uri)
467 if type in ['http', 'https', 'ftp']:
468 pcurver = bb.data.getVar('PV', d, 1)
469 else:
470 pcurver = bb.data.getVar("SRCREV", d, 1)
471
472 if type in ['http', 'https', 'ftp']:
473 newver = pcurver
474 altpath = path
475 dirver = "-"
476 curname = "-"
477
478 """
479 match a version number within the path, such as "5.7" in:
480 http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
481 N.B. what about something like "../5.7/5.8/..."? No such example has been seen so far.
482 """
483 m = re.search(r"[^/]*(\d+\.)+\d+([\-_]r\d+)*/", path)
484 if m:
485 altpath = path.split(m.group())[0]
486 dirver = m.group().strip("/")
487
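	# Illustrative only, not part of the patch: for a path such as
	#   /sources/libart_lgpl/2.3/libart_lgpl-2.3.21.tar.bz2
	# the regex above matches "2.3/", so dirver becomes "2.3" and altpath is
	# truncated to "/sources/libart_lgpl/" before the directory check below.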
488 """use new path and remove param. for wget only param is md5sum"""
489 alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
490
491 newver = check_new_dir(alturi, dirver, d)
492 altpath = path
493 if not re.match("Err", newver) and dirver != newver:
494 altpath = altpath.replace(dirver, newver, 1)
495
496 """Now try to acquire all remote files in current directory"""
497 if not re.match("Err", newver):
498 curname = altpath.split("/")[-1]
499
500 """get remote name by skipping pacakge name"""
501 m = re.search(r"/.*/", altpath)
502 if not m:
503 altpath = "/"
504 else:
505 altpath = m.group()
506
507 alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
508 newver = check_new_version(alturi, curname, d)
509 if not re.match("Err", newver):
510 pupver = newver
511 if pupver != pcurver:
512 pstatus = "UPDATE"
513 else:
514 pstatus = "MATCH"
515
516 if re.match("Err", newver):
517 pstatus = newver + ":" + altpath + ":" + dirver + ":" + curname
518 elif type == 'git':
519 if user:
520 gituser = user + '@'
521 else:
522 gituser = ""
523
524 if 'protocol' in parm:
525 gitproto = parm['protocol']
526 else:
527 gitproto = "rsync"
528
529 gitcmd = "git ls-remote %s://%s%s%s HEAD 2>&1" % (gitproto, gituser, host, path)
530 print gitcmd
531 ver = os.popen(gitcmd).read()
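	# Illustrative only, not part of the patch: "git ls-remote <url> HEAD" prints a
	# line of the form "<sha1>\tHEAD", so splitting on the tab below yields the
	# remote HEAD commit id, which is then compared against the recipe's SRCREV.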
532 if ver and re.search("HEAD", ver):
533 pupver = ver.split("\t")[0]
534 if pcurver == pupver:
535 pstatus = "MATCH"
536 else:
537 pstatus = "UPDATE"
538 else:
539 pstatus = "ErrGitAccess"
540 elif type == 'svn':
541 options = []
542 if user:
543 options.append("--username %s" % user)
544 if pswd:
545 options.append("--password %s" % pswd)
546 svnproto = 'svn'
547 if 'proto' in parm:
548 svnproto = parm['proto']
549 if 'rev' in parm:
550 pcurver = parm['rev']
551
552 svncmd = "svn info %s %s://%s%s/%s/ 2>&1" % (" ".join(options), svnproto, host, path, parm["module"])
553 print svncmd
554 svninfo = os.popen(svncmd).read()
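	# Illustrative only, not part of the patch: "svn info" output contains a line
	# like "Last Changed Rev: 1234"; the loop below takes its last field as the
	# upstream revision and compares it with the recipe's revision.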
555 for line in svninfo.split("\n"):
556 if re.search("^Last Changed Rev:", line):
557 pupver = line.split(" ")[-1]
558 if pcurver == pupver:
559 pstatus = "MATCH"
560 else:
561 pstatus = "UPDATE"
562
563 if re.match("Err", pstatus):
564 pstatus = "ErrSvnAccess"
565 elif type == 'cvs':
566 pupver = "HEAD"
567 pstatus = "UPDATE"
568 elif type == 'file':
569 """local file is always up-to-date"""
570 pupver = pcurver
571 pstatus = "MATCH"
572 else:
573 pstatus = "ErrUnsupportedProto"
574
575 if re.match("Err", pstatus):
576 pstatus += ":%s%s" % (host, path)
577
578 """Read from manual distro tracking fields as alternative"""
579 pmver = bb.data.getVar("RECIPE_LATEST_VERSION", d, 1)
580 if not pmver:
581 pmver = "N/A"
582 pmstatus = "ErrNoRecipeData"
583 else:
584 if pmver == pcurver:
585 pmstatus = "MATCH"
586 else:
587 pmstatus = "UPDATE"
588
589 lf = bb.utils.lockfile(logfile + ".lock")
590 f = open(logfile, "a")
591 f.write("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % \
592 (pname, pgrp, pproto, pcurver, pmver, pupver, pmstatus, pstatus, pdesc))
593 f.close()
594 bb.utils.unlockfile(lf)
595}
596
597addtask checkpkgall after do_checkpkg
598do_checkpkgall[recrdeptask] = "do_checkpkg"
599do_checkpkgall[nostamp] = "1"
600do_checkpkgall() {
601 :
602}
603
604#addhandler check_eventhandler
605python check_eventhandler() {
606 from bb.event import Handled, NotHandled
607 # if bb.event.getName(e) == "TaskStarted":
608
609 if bb.event.getName(e) == "BuildStarted":
610 import oe.distro_check as dc
611 tmpdir = bb.data.getVar('TMPDIR', e.data, 1)
612 distro_check_dir = os.path.join(tmpdir, "distro_check")
613 datetime = bb.data.getVar('DATETIME', e.data, 1)
614 """initialize log files."""
615 logpath = bb.data.getVar('LOG_DIR', e.data, 1)
616 bb.utils.mkdirhier(logpath)
617 logfile = os.path.join(logpath, "distrocheck.%s.csv" % bb.data.getVar('DATETIME', e.data, 1))
618 if not os.path.exists(logfile):
619 slogfile = os.path.join(logpath, "distrocheck.csv")
620 if os.path.exists(slogfile):
621 os.remove(slogfile)
622 os.system("touch %s" % logfile)
623 os.symlink(logfile, slogfile)
624 bb.data.setVar('LOG_FILE', logfile, e.data)
625
626 return NotHandled
627}
628
629addtask distro_check
630do_distro_check[nostamp] = "1"
631python do_distro_check() {
632 """checks if the package is present in other public Linux distros"""
633 import oe.distro_check as dc
634 localdata = bb.data.createCopy(d)
635 bb.data.update_data(localdata)
636 tmpdir = bb.data.getVar('TMPDIR', d, 1)
637 distro_check_dir = os.path.join(tmpdir, "distro_check")
638 datetime = bb.data.getVar('DATETIME', localdata, 1)
639 dc.update_distro_data(distro_check_dir, datetime)
640
641 # do the comparison
642 result = dc.compare_in_distro_packages_list(distro_check_dir, d)
643
644 # save the results
645 dc.save_distro_check_result(result, datetime, d)
646}
647
648addtask distro_checkall after do_distro_check
649do_distro_checkall[recrdeptask] = "do_distro_check"
650do_distro_checkall[nostamp] = "1"
651do_distro_checkall() {
652 :
653}