author     Dongxiao Xu <dongxiao.xu@intel.com>       2010-11-22 14:02:37 +0800
committer  Saul Wold <sgw@linux.intel.com>           2010-11-22 10:38:15 -0800
commit     53aff7d6775eb1c2c8f419f325b91c062d85eed5 (patch)
tree       295959086735ab12391444999e8a5dd9ca68d44a /meta/classes
parent     836b290732b67ff3de27229f85290c953327f345 (diff)
download   poky-53aff7d6775eb1c2c8f419f325b91c062d85eed5.tar.gz
utility-tasks.bbclass: Move distro related tasks to distrodata.bbclass
Most of the d.keys() iterated during file parsing are variables from
distro_tracking_fields.inc, which are not used in a normal build.
Therefore, remove the inclusion of distro_tracking_fields.inc from
poky.conf and move the distro-related tasks into distrodata.bbclass,
which includes that tracking-fields file itself.

This change reduces file parsing time by about 25%.
Signed-off-by: Dongxiao Xu <dongxiao.xu@intel.com>
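
Since distrodata.bbclass now both defines these tasks and includes the
tracking-fields file, only builds that opt in pay the parsing cost. A minimal
sketch of that opt-in (an assumption about typical usage, not part of this
patch):

    # conf/local.conf -- enable the distro tracking tasks only on demand
    INHERIT += "distrodata"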
Diffstat (limited to 'meta/classes')
-rw-r--r--  meta/classes/distrodata.bbclass    | 440
-rw-r--r--  meta/classes/utility-tasks.bbclass | 442
2 files changed, 440 insertions(+), 442 deletions(-)
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index f6642f0f13..221dfae9f3 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -211,3 +211,443 @@ do_distrodataall() {
     :
 }

+addtask checkpkg
+do_checkpkg[nostamp] = "1"
+python do_checkpkg() {
+    import sys
+    import re
+    import tempfile
+
+    """
+    sanity check to ensure same name and type. Match as many patterns as possible
+    such as:
+        gnome-common-2.20.0.tar.gz (most common format)
+        gtk+-2.90.1.tar.gz
+        xf86-intput-synaptics-12.6.9.tar.gz
+        dri2proto-2.3.tar.gz
+        blktool_4.orig.tar.gz
+        libid3tag-0.15.1b.tar.gz
+        unzip552.tar.gz
+        icu4c-3_6-src.tgz
+        genext2fs_1.3.orig.tar.gz
+        gst-fluendo-mp3
+    """
+    prefix1 = "[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*[\-_]"    # match most patterns which uses "-" as separator to version digits
+    prefix2 = "[a-zA-Z]+"                                        # a loose pattern such as for unzip552.tar.gz
+    prefix = "(%s|%s)" % (prefix1, prefix2)
+    suffix = "(tar\.gz|tgz|tar\.bz2|zip)"
+    suffixtuple = ("tar.gz", "tgz", "zip", "tar.bz2")
+
+    sinterstr = "(?P<name>%s?)(?P<ver>.*)" % prefix
+    sdirstr = "(?P<name>%s)(?P<ver>.*)\.(?P<type>%s$)" % (prefix, suffix)
+
+    def parse_inter(s):
+        m = re.search(sinterstr, s)
+        if not m:
+            return None
+        else:
+            return (m.group('name'), m.group('ver'), "")
+
+    def parse_dir(s):
+        m = re.search(sdirstr, s)
+        if not m:
+            return None
+        else:
+            return (m.group('name'), m.group('ver'), m.group('type'))
+
+    """
+    Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
+    purpose. PE is cleared in comparison as it's not for build, and PV is cleared too
+    for simplicity as it's somehow difficult to get from various upstream format
+    """
+    def __vercmp(old, new):
+        (on, ov, ot) = old
+        (en, ev, et) = new
+        if on != en or (et and et not in suffixtuple):
+            return 0
+
+        ov = re.search("\d+[^a-zA-Z]+", ov).group()
+        ev = re.search("\d+[^a-zA-Z]+", ev).group()
+        return bb.utils.vercmp(("0", ov, ""), ("0", ev, ""))
+
+    """
+    wrapper for fetch upstream directory info
+        'url'  - upstream link customized by regular expression
+        'd'    - database
+        'tmpf' - tmpfile for fetcher output
+    We don't want to exit whole build due to one recipe error. So handle all exceptions
+    gracefully w/o leaking to outer.
+    """
+    def internal_fetch_wget(url, d, tmpf):
+        status = "ErrFetchUnknown"
+        try:
+            """
+            Clear internal url cache as it's a temporary check. Not doing so will have
+            bitbake check url multiple times when looping through a single url
+            """
+            fn = bb.data.getVar('FILE', d, 1)
+            bb.fetch.urldata_cache[fn] = {}
+            bb.fetch.init([url], d)
+        except bb.fetch.NoMethodError:
+            status = "ErrFetchNoMethod"
+        except:
+            status = "ErrInitUrlUnknown"
+        else:
+            """
+            To avoid impacting bitbake build engine, this trick is required for reusing bitbake
+            interfaces. bb.fetch.go() is not appliable as it checks downloaded content in ${DL_DIR}
+            while we don't want to pollute that place. So bb.fetch.checkstatus() is borrowed here
+            which is designed for check purpose but we override check command for our own purpose
+            """
+            ld = bb.data.createCopy(d)
+            bb.data.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s '${URI}'" \
+                           % tmpf.name, d)
+            bb.data.update_data(ld)
+
+            try:
+                bb.fetch.checkstatus(ld)
+            except bb.fetch.MissingParameterError:
+                status = "ErrMissParam"
+            except bb.fetch.FetchError:
+                status = "ErrFetch"
+            except bb.fetch.MD5SumError:
+                status = "ErrMD5Sum"
+            except:
+                status = "ErrFetchUnknown"
+            else:
+                status = "SUCC"
+        return status
+
+    """
+    Check on middle version directory such as "2.4/" in "http://xxx/2.4/pkg-2.4.1.tar.gz",
+        'url'    - upstream link customized by regular expression
+        'd'      - database
+        'curver' - current version
+    Return new version if success, or else error in "Errxxxx" style
+    """
+    def check_new_dir(url, curver, d):
+        pn = bb.data.getVar('PN', d, 1)
+        f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
+        status = internal_fetch_wget(url, d, f)
+        fhtml = f.read()
+
+        if status == "SUCC" and len(fhtml):
+            newver = parse_inter(curver)
+
+            """
+            match "*4.1/">*4.1/ where '*' matches chars
+            N.B. add package name, only match for digits
+            """
+            m = re.search("^%s" % prefix, curver)
+            if m:
+                s = "%s[^\d\"]*?(\d+[\.\-_])+\d+/?" % m.group()
+            else:
+                s = "(\d+[\.\-_])+\d+/?"
+
+            searchstr = "[hH][rR][eE][fF]=\"%s\">" % s
+            reg = re.compile(searchstr)
+
+            valid = 0
+            for line in fhtml.split("\n"):
+                if line.find(curver) >= 0:
+                    valid = 1
+
+                m = reg.search(line)
+                if m:
+                    ver = m.group().split("\"")[1]
+                    ver = ver.strip("/")
+                    ver = parse_inter(ver)
+                    if ver and __vercmp(newver, ver) < 0:
+                        newver = ver
+
+            """Expect a match for curver in directory list, or else it indicates unknown format"""
+            if not valid:
+                status = "ErrParseInterDir"
+            else:
+                """rejoin the path name"""
+                status = newver[0] + newver[1]
+        elif not len(fhtml):
+            status = "ErrHostNoDir"
+
+        f.close()
+        if status != "ErrHostNoDir" and re.match("Err", status):
+            logpath = bb.data.getVar('LOG_DIR', d, 1)
+            os.system("cp %s %s/" % (f.name, logpath))
+        os.unlink(f.name)
+        return status
+
+    """
+    Check on the last directory to search '2.4.1' in "http://xxx/2.4/pkg-2.4.1.tar.gz",
+        'url'     - upstream link customized by regular expression
+        'd'       - database
+        'curname' - current package name
+    Return new version if success, or else error in "Errxxxx" style
+    """
+    def check_new_version(url, curname, d):
+        """possible to have no version in pkg name, such as spectrum-fw"""
+        if not re.search("\d+", curname):
+            return pcurver
+        pn = bb.data.getVar('PN', d, 1)
+        f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
+        status = internal_fetch_wget(url, d, f)
+        fhtml = f.read()
+
+        if status == "SUCC" and len(fhtml):
+            newver = parse_dir(curname)
+
+            """match "{PN}-5.21.1.tar.gz">{PN}-5.21.1.tar.gz """
+            pn1 = re.search("^%s" % prefix, curname).group()
+            s = "[^\"]*%s[^\d\"]*?(\d+[\.\-_])+[^\"]*" % pn1
+            searchstr = "[hH][rR][eE][fF]=\"%s\">" % s
+            reg = re.compile(searchstr)
+
+            valid = 0
+            for line in fhtml.split("\n"):
+                m = reg.search(line)
+                if m:
+                    valid = 1
+                    ver = m.group().split("\"")[1].split("/")[-1]
+                    ver = parse_dir(ver)
+                    if ver and __vercmp(newver, ver) < 0:
+                        newver = ver
+
+            """Expect a match for curver in directory list, or else it indicates unknown format"""
+            if not valid:
+                status = "ErrParseDir"
+            else:
+                """newver still contains a full package name string"""
+                status = re.search("(\d+[.\-_])*\d+", newver[1]).group()
+        elif not len(fhtml):
+            status = "ErrHostNoDir"
+
+        f.close()
+        """if host hasn't directory information, no need to save tmp file"""
+        if status != "ErrHostNoDir" and re.match("Err", status):
+            logpath = bb.data.getVar('LOG_DIR', d, 1)
+            os.system("cp %s %s/" % (f.name, logpath))
+        os.unlink(f.name)
+        return status
+
+    """first check whether a uri is provided"""
+    src_uri = bb.data.getVar('SRC_URI', d, 1)
+    if not src_uri:
+        return
+
+    """initialize log files."""
+    logpath = bb.data.getVar('LOG_DIR', d, 1)
+    bb.utils.mkdirhier(logpath)
+    logfile = os.path.join(logpath, "poky_pkg_info.log.%s" % bb.data.getVar('DATETIME', d, 1))
+    if not os.path.exists(logfile):
+        slogfile = os.path.join(logpath, "poky_pkg_info.log")
+        if os.path.exists(slogfile):
+            os.remove(slogfile)
+        os.system("touch %s" % logfile)
+        os.symlink(logfile, slogfile)
+
+    """generate package information from .bb file"""
+    pname = bb.data.getVar('PN', d, 1)
+    pdesc = bb.data.getVar('DESCRIPTION', d, 1)
+    pgrp = bb.data.getVar('SECTION', d, 1)
+
+    found = 0
+    for uri in src_uri.split():
+        m = re.compile('(?P<type>[^:]*)').match(uri)
+        if not m:
+            raise MalformedUrl(uri)
+        elif m.group('type') in ('http', 'https', 'ftp', 'cvs', 'svn', 'git'):
+            found = 1
+            pproto = m.group('type')
+            break
+    if not found:
+        pproto = "file"
+    pupver = "N/A"
+    pstatus = "ErrUnknown"
+
+    (type, host, path, user, pswd, parm) = bb.decodeurl(uri)
+    if type in ['http', 'https', 'ftp']:
+        pcurver = bb.data.getVar('PV', d, 1)
+    else:
+        pcurver = bb.data.getVar("SRCREV", d, 1)
+
+    if type in ['http', 'https', 'ftp']:
+        newver = pcurver
+        altpath = path
+        dirver = "-"
+        curname = "-"
+
+        """
+        match version number amid the path, such as "5.7" in:
+            http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
+        N.B. how about sth. like "../5.7/5.8/..."? Not find such example so far :-P
+        """
+        m = re.search(r"[^/]*(\d+\.)+\d+([\-_]r\d+)*/", path)
+        if m:
+            altpath = path.split(m.group())[0]
+            dirver = m.group().strip("/")
+
+            """use new path and remove param. for wget only param is md5sum"""
+            alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
+
+            newver = check_new_dir(alturi, dirver, d)
+            altpath = path
+            if not re.match("Err", newver) and dirver != newver:
+                altpath = altpath.replace(dirver, newver, 1)
+
+        """Now try to acquire all remote files in current directory"""
+        if not re.match("Err", newver):
+            curname = altpath.split("/")[-1]
+
+            """get remote name by skipping pacakge name"""
+            m = re.search(r"/.*/", altpath)
+            if not m:
+                altpath = "/"
+            else:
+                altpath = m.group()
+
+            alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
+            newver = check_new_version(alturi, curname, d)
+            if not re.match("Err", newver):
+                pupver = newver
+                if pupver != pcurver:
+                    pstatus = "UPDATE"
+                else:
+                    pstatus = "MATCH"
+
+        if re.match("Err", newver):
+            pstatus = newver + ":" + altpath + ":" + dirver + ":" + curname
+    elif type == 'git':
+        if user:
+            gituser = user + '@'
+        else:
+            gituser = ""
+
+        if 'protocol' in parm:
+            gitproto = parm['protocol']
+        else:
+            gitproto = "rsync"
+
+        gitcmd = "git ls-remote %s://%s%s%s HEAD 2>&1" % (gitproto, gituser, host, path)
+        print gitcmd
+        ver = os.popen(gitcmd).read()
+        if ver and re.search("HEAD", ver):
+            pupver = ver.split("\t")[0]
+            if pcurver == pupver:
+                pstatus = "MATCH"
+            else:
+                pstatus = "UPDATE"
+        else:
+            pstatus = "ErrGitAccess"
+    elif type == 'svn':
+        options = []
+        if user:
+            options.append("--username %s" % user)
+        if pswd:
+            options.append("--password %s" % pswd)
+        svnproto = 'svn'
+        if 'proto' in parm:
+            svnproto = parm['proto']
+        if 'rev' in parm:
+            pcurver = parm['rev']
+
+        svncmd = "svn info %s %s://%s%s/%s/ 2>&1" % (" ".join(options), svnproto, host, path, parm["module"])
+        print svncmd
+        svninfo = os.popen(svncmd).read()
+        for line in svninfo.split("\n"):
+            if re.search("^Last Changed Rev:", line):
+                pupver = line.split(" ")[-1]
+                if pcurver == pupver:
+                    pstatus = "MATCH"
+                else:
+                    pstatus = "UPDATE"
+
+        if re.match("Err", pstatus):
+            pstatus = "ErrSvnAccess"
+    elif type == 'cvs':
+        pupver = "HEAD"
+        pstatus = "UPDATE"
+    elif type == 'file':
+        """local file is always up-to-date"""
+        pupver = pcurver
+        pstatus = "MATCH"
+    else:
+        pstatus = "ErrUnsupportedProto"
+
+    if re.match("Err", pstatus):
+        pstatus += ":%s%s" % (host, path)
+
+    """Read from manual distro tracking fields as alternative"""
+    pmver = bb.data.getVar("RECIPE_LATEST_VERSION", d, 1)
+    if not pmver:
+        pmver = "N/A"
+        pmstatus = "ErrNoRecipeData"
+    else:
+        if pmver == pcurver:
+            pmstatus = "MATCH"
+        else:
+            pmstatus = "UPDATE"
+
+    lf = bb.utils.lockfile(logfile + ".lock")
+    f = open(logfile, "a")
+    f.write("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % \
+            (pname, pgrp, pproto, pcurver, pmver, pupver, pmstatus, pstatus, pdesc))
+    f.close()
+    bb.utils.unlockfile(lf)
+}
+
+addtask checkpkgall after do_checkpkg
+do_checkpkgall[recrdeptask] = "do_checkpkg"
+do_checkpkgall[nostamp] = "1"
+do_checkpkgall() {
+    :
+}
+
+#addhandler check_eventhandler
+python check_eventhandler() {
+    from bb.event import Handled, NotHandled
+    # if bb.event.getName(e) == "TaskStarted":
+
+    if bb.event.getName(e) == "BuildStarted":
+        import oe.distro_check as dc
+        tmpdir = bb.data.getVar('TMPDIR', e.data, 1)
+        distro_check_dir = os.path.join(tmpdir, "distro_check")
+        datetime = bb.data.getVar('DATETIME', e.data, 1)
+        """initialize log files."""
+        logpath = bb.data.getVar('LOG_DIR', e.data, 1)
+        bb.utils.mkdirhier(logpath)
+        logfile = os.path.join(logpath, "distrocheck.%s.csv" % bb.data.getVar('DATETIME', e.data, 1))
+        if not os.path.exists(logfile):
+            slogfile = os.path.join(logpath, "distrocheck.csv")
+            if os.path.exists(slogfile):
+                os.remove(slogfile)
+            os.system("touch %s" % logfile)
+            os.symlink(logfile, slogfile)
+        bb.data.setVar('LOG_FILE', logfile, e.data)
+
+    return NotHandled
+}
+
+addtask distro_check
+do_distro_check[nostamp] = "1"
+python do_distro_check() {
+    """checks if the package is present in other public Linux distros"""
+    import oe.distro_check as dc
+    localdata = bb.data.createCopy(d)
+    bb.data.update_data(localdata)
+    tmpdir = bb.data.getVar('TMPDIR', d, 1)
+    distro_check_dir = os.path.join(tmpdir, "distro_check")
+    datetime = bb.data.getVar('DATETIME', localdata, 1)
+    dc.update_distro_data(distro_check_dir, datetime)
+
+    # do the comparison
+    result = dc.compare_in_distro_packages_list(distro_check_dir, d)
+
+    # save the results
+    dc.save_distro_check_result(result, datetime, d)
+}
+
+addtask distro_checkall after do_distro_check
+do_distro_checkall[recrdeptask] = "do_distro_check"
+do_distro_checkall[nostamp] = "1"
+do_distro_checkall() {
+    :
+}
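
The heart of the new do_checkpkg task is the tarball-name parser added above.
A standalone sketch of the same idea (illustrative only: it reuses the patch's
patterns but runs outside BitBake, so no bb.* calls are needed):

    import re

    # Patterns copied from the patch: package-name prefix, version, archive suffix.
    prefix1 = r"[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*[\-_]"
    prefix2 = r"[a-zA-Z]+"
    prefix = "(%s|%s)" % (prefix1, prefix2)
    suffix = r"(tar\.gz|tgz|tar\.bz2|zip)"
    sdirstr = r"(?P<name>%s)(?P<ver>.*)\.(?P<type>%s$)" % (prefix, suffix)

    def parse_dir(s):
        # Split e.g. "gnome-common-2.20.0.tar.gz" into (name, version, archive type);
        # the name group keeps its trailing separator, as in the patch.
        m = re.search(sdirstr, s)
        return (m.group('name'), m.group('ver'), m.group('type')) if m else None

    print(parse_dir("gnome-common-2.20.0.tar.gz"))
    # -> ('gnome-common-', '2.20.0', 'tar.gz')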
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes/utility-tasks.bbclass
index 205a206baf..3ab37fa6b9 100644
--- a/meta/classes/utility-tasks.bbclass
+++ b/meta/classes/utility-tasks.bbclass
@@ -47,396 +47,6 @@ python do_rebuild() {
 #    bb.build.exec_func('do_clean', d)
 #}

-addtask checkpkg
-do_checkpkg[nostamp] = "1"
-python do_checkpkg() {
-    import sys
-    import re
-    import tempfile
-
-    """
-    sanity check to ensure same name and type. Match as many patterns as possible
-    such as:
-        gnome-common-2.20.0.tar.gz (most common format)
-        gtk+-2.90.1.tar.gz
-        xf86-intput-synaptics-12.6.9.tar.gz
-        dri2proto-2.3.tar.gz
-        blktool_4.orig.tar.gz
-        libid3tag-0.15.1b.tar.gz
-        unzip552.tar.gz
-        icu4c-3_6-src.tgz
-        genext2fs_1.3.orig.tar.gz
-        gst-fluendo-mp3
-    """
-    prefix1 = "[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*[\-_]"    # match most patterns which uses "-" as separator to version digits
-    prefix2 = "[a-zA-Z]+"                                        # a loose pattern such as for unzip552.tar.gz
-    prefix = "(%s|%s)" % (prefix1, prefix2)
-    suffix = "(tar\.gz|tgz|tar\.bz2|zip)"
-    suffixtuple = ("tar.gz", "tgz", "zip", "tar.bz2")
-
-    sinterstr = "(?P<name>%s?)(?P<ver>.*)" % prefix
-    sdirstr = "(?P<name>%s)(?P<ver>.*)\.(?P<type>%s$)" % (prefix, suffix)
-
-    def parse_inter(s):
-        m = re.search(sinterstr, s)
-        if not m:
-            return None
-        else:
-            return (m.group('name'), m.group('ver'), "")
-
-    def parse_dir(s):
-        m = re.search(sdirstr, s)
-        if not m:
-            return None
-        else:
-            return (m.group('name'), m.group('ver'), m.group('type'))
-
-    """
-    Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
-    purpose. PE is cleared in comparison as it's not for build, and PV is cleared too
-    for simplicity as it's somehow difficult to get from various upstream format
-    """
-    def __vercmp(old, new):
-        (on, ov, ot) = old
-        (en, ev, et) = new
-        if on != en or (et and et not in suffixtuple):
-            return 0
-
-        ov = re.search("\d+[^a-zA-Z]+", ov).group()
-        ev = re.search("\d+[^a-zA-Z]+", ev).group()
-        return bb.utils.vercmp(("0", ov, ""), ("0", ev, ""))
-
-    """
-    wrapper for fetch upstream directory info
-        'url'  - upstream link customized by regular expression
-        'd'    - database
-        'tmpf' - tmpfile for fetcher output
-    We don't want to exit whole build due to one recipe error. So handle all exceptions
-    gracefully w/o leaking to outer.
-    """
-    def internal_fetch_wget(url, d, tmpf):
-        status = "ErrFetchUnknown"
-        try:
-            """
-            Clear internal url cache as it's a temporary check. Not doing so will have
-            bitbake check url multiple times when looping through a single url
-            """
-            fn = bb.data.getVar('FILE', d, 1)
-            bb.fetch.urldata_cache[fn] = {}
-            bb.fetch.init([url], d)
-        except bb.fetch.NoMethodError:
-            status = "ErrFetchNoMethod"
-        except:
-            status = "ErrInitUrlUnknown"
-        else:
-            """
-            To avoid impacting bitbake build engine, this trick is required for reusing bitbake
-            interfaces. bb.fetch.go() is not appliable as it checks downloaded content in ${DL_DIR}
-            while we don't want to pollute that place. So bb.fetch.checkstatus() is borrowed here
-            which is designed for check purpose but we override check command for our own purpose
-            """
-            ld = bb.data.createCopy(d)
-            bb.data.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s '${URI}'" \
-                           % tmpf.name, d)
-            bb.data.update_data(ld)
-
-            try:
-                bb.fetch.checkstatus(ld)
-            except bb.fetch.MissingParameterError:
-                status = "ErrMissParam"
-            except bb.fetch.FetchError:
-                status = "ErrFetch"
-            except bb.fetch.MD5SumError:
-                status = "ErrMD5Sum"
-            except:
-                status = "ErrFetchUnknown"
-            else:
-                status = "SUCC"
-        return status
-
-    """
-    Check on middle version directory such as "2.4/" in "http://xxx/2.4/pkg-2.4.1.tar.gz",
-        'url'    - upstream link customized by regular expression
-        'd'      - database
-        'curver' - current version
-    Return new version if success, or else error in "Errxxxx" style
-    """
-    def check_new_dir(url, curver, d):
-        pn = bb.data.getVar('PN', d, 1)
-        f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
-        status = internal_fetch_wget(url, d, f)
-        fhtml = f.read()
-
-        if status == "SUCC" and len(fhtml):
-            newver = parse_inter(curver)
-
-            """
-            match "*4.1/">*4.1/ where '*' matches chars
-            N.B. add package name, only match for digits
-            """
-            m = re.search("^%s" % prefix, curver)
-            if m:
-                s = "%s[^\d\"]*?(\d+[\.\-_])+\d+/?" % m.group()
-            else:
-                s = "(\d+[\.\-_])+\d+/?"
-
-            searchstr = "[hH][rR][eE][fF]=\"%s\">" % s
-            reg = re.compile(searchstr)
-
-            valid = 0
-            for line in fhtml.split("\n"):
-                if line.find(curver) >= 0:
-                    valid = 1
-
-                m = reg.search(line)
-                if m:
-                    ver = m.group().split("\"")[1]
-                    ver = ver.strip("/")
-                    ver = parse_inter(ver)
-                    if ver and __vercmp(newver, ver) < 0:
-                        newver = ver
-
-            """Expect a match for curver in directory list, or else it indicates unknown format"""
-            if not valid:
-                status = "ErrParseInterDir"
-            else:
-                """rejoin the path name"""
-                status = newver[0] + newver[1]
-        elif not len(fhtml):
-            status = "ErrHostNoDir"
-
-        f.close()
-        if status != "ErrHostNoDir" and re.match("Err", status):
-            logpath = bb.data.getVar('LOG_DIR', d, 1)
-            os.system("cp %s %s/" % (f.name, logpath))
-        os.unlink(f.name)
-        return status
-
-    """
-    Check on the last directory to search '2.4.1' in "http://xxx/2.4/pkg-2.4.1.tar.gz",
-        'url'     - upstream link customized by regular expression
-        'd'       - database
-        'curname' - current package name
-    Return new version if success, or else error in "Errxxxx" style
-    """
-    def check_new_version(url, curname, d):
-        """possible to have no version in pkg name, such as spectrum-fw"""
-        if not re.search("\d+", curname):
-            return pcurver
-        pn = bb.data.getVar('PN', d, 1)
-        f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
-        status = internal_fetch_wget(url, d, f)
-        fhtml = f.read()
-
-        if status == "SUCC" and len(fhtml):
-            newver = parse_dir(curname)
-
-            """match "{PN}-5.21.1.tar.gz">{PN}-5.21.1.tar.gz """
-            pn1 = re.search("^%s" % prefix, curname).group()
-            s = "[^\"]*%s[^\d\"]*?(\d+[\.\-_])+[^\"]*" % pn1
-            searchstr = "[hH][rR][eE][fF]=\"%s\">" % s
-            reg = re.compile(searchstr)
-
-            valid = 0
-            for line in fhtml.split("\n"):
-                m = reg.search(line)
-                if m:
-                    valid = 1
-                    ver = m.group().split("\"")[1].split("/")[-1]
-                    ver = parse_dir(ver)
-                    if ver and __vercmp(newver, ver) < 0:
-                        newver = ver
-
-            """Expect a match for curver in directory list, or else it indicates unknown format"""
-            if not valid:
-                status = "ErrParseDir"
-            else:
-                """newver still contains a full package name string"""
-                status = re.search("(\d+[.\-_])*\d+", newver[1]).group()
-        elif not len(fhtml):
-            status = "ErrHostNoDir"
-
-        f.close()
-        """if host hasn't directory information, no need to save tmp file"""
-        if status != "ErrHostNoDir" and re.match("Err", status):
-            logpath = bb.data.getVar('LOG_DIR', d, 1)
-            os.system("cp %s %s/" % (f.name, logpath))
-        os.unlink(f.name)
-        return status
-
-    """first check whether a uri is provided"""
-    src_uri = bb.data.getVar('SRC_URI', d, 1)
-    if not src_uri:
-        return
-
-    """initialize log files."""
-    logpath = bb.data.getVar('LOG_DIR', d, 1)
-    bb.utils.mkdirhier(logpath)
-    logfile = os.path.join(logpath, "poky_pkg_info.log.%s" % bb.data.getVar('DATETIME', d, 1))
-    if not os.path.exists(logfile):
-        slogfile = os.path.join(logpath, "poky_pkg_info.log")
-        if os.path.exists(slogfile):
-            os.remove(slogfile)
-        os.system("touch %s" % logfile)
-        os.symlink(logfile, slogfile)
-
-    """generate package information from .bb file"""
-    pname = bb.data.getVar('PN', d, 1)
-    pdesc = bb.data.getVar('DESCRIPTION', d, 1)
-    pgrp = bb.data.getVar('SECTION', d, 1)
-
-    found = 0
-    for uri in src_uri.split():
-        m = re.compile('(?P<type>[^:]*)').match(uri)
-        if not m:
-            raise MalformedUrl(uri)
-        elif m.group('type') in ('http', 'https', 'ftp', 'cvs', 'svn', 'git'):
-            found = 1
-            pproto = m.group('type')
-            break
-    if not found:
-        pproto = "file"
-    pupver = "N/A"
-    pstatus = "ErrUnknown"
-
-    (type, host, path, user, pswd, parm) = bb.decodeurl(uri)
-    if type in ['http', 'https', 'ftp']:
-        pcurver = bb.data.getVar('PV', d, 1)
-    else:
-        pcurver = bb.data.getVar("SRCREV", d, 1)
-
-    if type in ['http', 'https', 'ftp']:
-        newver = pcurver
-        altpath = path
-        dirver = "-"
-        curname = "-"
-
-        """
-        match version number amid the path, such as "5.7" in:
-            http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
-        N.B. how about sth. like "../5.7/5.8/..."? Not find such example so far :-P
-        """
-        m = re.search(r"[^/]*(\d+\.)+\d+([\-_]r\d+)*/", path)
-        if m:
-            altpath = path.split(m.group())[0]
-            dirver = m.group().strip("/")
-
-            """use new path and remove param. for wget only param is md5sum"""
-            alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
-
-            newver = check_new_dir(alturi, dirver, d)
-            altpath = path
-            if not re.match("Err", newver) and dirver != newver:
-                altpath = altpath.replace(dirver, newver, 1)
-
-        """Now try to acquire all remote files in current directory"""
-        if not re.match("Err", newver):
-            curname = altpath.split("/")[-1]
-
-            """get remote name by skipping pacakge name"""
-            m = re.search(r"/.*/", altpath)
-            if not m:
-                altpath = "/"
-            else:
-                altpath = m.group()
-
-            alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
-            newver = check_new_version(alturi, curname, d)
-            if not re.match("Err", newver):
-                pupver = newver
-                if pupver != pcurver:
-                    pstatus = "UPDATE"
-                else:
-                    pstatus = "MATCH"
-
-        if re.match("Err", newver):
-            pstatus = newver + ":" + altpath + ":" + dirver + ":" + curname
-    elif type == 'git':
-        if user:
-            gituser = user + '@'
-        else:
-            gituser = ""
-
-        if 'protocol' in parm:
-            gitproto = parm['protocol']
-        else:
-            gitproto = "rsync"
-
-        gitcmd = "git ls-remote %s://%s%s%s HEAD 2>&1" % (gitproto, gituser, host, path)
-        print gitcmd
-        ver = os.popen(gitcmd).read()
-        if ver and re.search("HEAD", ver):
-            pupver = ver.split("\t")[0]
-            if pcurver == pupver:
-                pstatus = "MATCH"
-            else:
-                pstatus = "UPDATE"
-        else:
-            pstatus = "ErrGitAccess"
-    elif type == 'svn':
-        options = []
-        if user:
-            options.append("--username %s" % user)
-        if pswd:
-            options.append("--password %s" % pswd)
-        svnproto = 'svn'
-        if 'proto' in parm:
-            svnproto = parm['proto']
-        if 'rev' in parm:
-            pcurver = parm['rev']
-
-        svncmd = "svn info %s %s://%s%s/%s/ 2>&1" % (" ".join(options), svnproto, host, path, parm["module"])
-        print svncmd
-        svninfo = os.popen(svncmd).read()
-        for line in svninfo.split("\n"):
-            if re.search("^Last Changed Rev:", line):
-                pupver = line.split(" ")[-1]
-                if pcurver == pupver:
-                    pstatus = "MATCH"
-                else:
-                    pstatus = "UPDATE"
-
-        if re.match("Err", pstatus):
-            pstatus = "ErrSvnAccess"
-    elif type == 'cvs':
-        pupver = "HEAD"
-        pstatus = "UPDATE"
-    elif type == 'file':
-        """local file is always up-to-date"""
-        pupver = pcurver
-        pstatus = "MATCH"
-    else:
-        pstatus = "ErrUnsupportedProto"
-
-    if re.match("Err", pstatus):
-        pstatus += ":%s%s" % (host, path)
-
-    """Read from manual distro tracking fields as alternative"""
-    pmver = bb.data.getVar("RECIPE_LATEST_VERSION", d, 1)
-    if not pmver:
-        pmver = "N/A"
-        pmstatus = "ErrNoRecipeData"
-    else:
-        if pmver == pcurver:
-            pmstatus = "MATCH"
-        else:
-            pmstatus = "UPDATE"
-
-    lf = bb.utils.lockfile(logfile + ".lock")
-    f = open(logfile, "a")
-    f.write("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % \
-            (pname, pgrp, pproto, pcurver, pmver, pupver, pmstatus, pstatus, pdesc))
-    f.close()
-    bb.utils.unlockfile(lf)
-}
-
-addtask checkpkgall after do_checkpkg
-do_checkpkgall[recrdeptask] = "do_checkpkg"
-do_checkpkgall[nostamp] = "1"
-do_checkpkgall() {
-    :
-}
-
 addtask checkuri
 do_checkuri[nostamp] = "1"
 python do_checkuri() {
@@ -487,55 +97,3 @@ do_buildall[recrdeptask] = "do_build"
 do_buildall() {
     :
 }
-
-#addhandler check_eventhandler
-python check_eventhandler() {
-    from bb.event import Handled, NotHandled
-    # if bb.event.getName(e) == "TaskStarted":
-
-    if bb.event.getName(e) == "BuildStarted":
-        import oe.distro_check as dc
-        tmpdir = bb.data.getVar('TMPDIR', e.data, 1)
-        distro_check_dir = os.path.join(tmpdir, "distro_check")
-        datetime = bb.data.getVar('DATETIME', e.data, 1)
-        """initialize log files."""
-        logpath = bb.data.getVar('LOG_DIR', e.data, 1)
-        bb.utils.mkdirhier(logpath)
-        logfile = os.path.join(logpath, "distrocheck.%s.csv" % bb.data.getVar('DATETIME', e.data, 1))
-        if not os.path.exists(logfile):
-            slogfile = os.path.join(logpath, "distrocheck.csv")
-            if os.path.exists(slogfile):
-                os.remove(slogfile)
-            os.system("touch %s" % logfile)
-            os.symlink(logfile, slogfile)
-        bb.data.setVar('LOG_FILE', logfile, e.data)
-
-    return NotHandled
-}
-
-addtask distro_check
-do_distro_check[nostamp] = "1"
-python do_distro_check() {
-    """checks if the package is present in other public Linux distros"""
-    import oe.distro_check as dc
-    localdata = bb.data.createCopy(d)
-    bb.data.update_data(localdata)
-    tmpdir = bb.data.getVar('TMPDIR', d, 1)
-    distro_check_dir = os.path.join(tmpdir, "distro_check")
-    datetime = bb.data.getVar('DATETIME', localdata, 1)
-    dc.update_distro_data(distro_check_dir, datetime)
-
-    # do the comparison
-    result = dc.compare_in_distro_packages_list(distro_check_dir, d)
-
-    # save the results
-    dc.save_distro_check_result(result, datetime, d)
-}
-
-addtask distro_checkall after do_distro_check
-do_distro_checkall[recrdeptask] = "do_distro_check"
-do_distro_checkall[nostamp] = "1"
-do_distro_checkall() {
-    :
-}
-
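
A brief invocation sketch (standard BitBake task syntax; the recipe and image
names are placeholders), showing how the moved tasks are run once distrodata
is inherited:

    bitbake some-recipe -c checkpkg        # check one recipe against upstream
    bitbake some-image -c checkpkgall      # recurse via do_checkpkgall[recrdeptask]
    bitbake some-recipe -c distro_check    # compare against other distros' package lists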