author     Aníbal Limón <anibal.limon@linux.intel.com>    2014-11-27 19:09:43 -0600
committer  Richard Purdie <richard.purdie@linuxfoundation.org>    2014-11-28 14:02:57 +0000
commit     21924451c10e058473c50c697c23d7149297856a (patch)
tree       87b5714c99356f578f0a5754e6daeadb18dbd3a0 /meta/classes/distrodata.bbclass
parent     fd9fc495f12c78a4f62bda5919b2ef73c9584e0b (diff)
download   poky-21924451c10e058473c50c697c23d7149297856a.tar.gz
distrodata_class: checkpkg: make use of the latest_versionstring methods in the bitbake fetcher
Because methods for getting the latest upstream version of a package are now available in the bitbake fetcher, remove the duplicated code from this class and use those methods instead.

Compatibility testing was done by running the distrodata class; the resulting files can be found at:

https://bugzilla.yoctoproject.org/show_bug.cgi?id=1813

[YOCTO #1813]

(From OE-Core rev: 68ddb28a68ceb59cd1ed322c16143827ce1ac712)

Signed-off-by: Aníbal Limón <anibal.limon@linux.intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
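The rewritten do_checkpkg leans on the bitbake fetcher's own version lookup instead of the regex scraping removed below. A minimal sketch of that call path, assuming a datastore d and the first remote entry of SRC_URI (the helper name upstream_version is illustrative, not part of the patch):

    import bb.fetch2

    def upstream_version(uri, d):
        # FetchData resolves the URI to a fetcher implementation (wget, git, svn, ...)
        ud = bb.fetch2.FetchData(uri, d)
        # latest_versionstring() asks that fetcher for the newest version it can see
        # upstream; in this bitbake version it returns "" when nothing is found
        return ud.method.latest_versionstring(ud, d)

    # e.g.: pupver = upstream_version(d.getVar('SRC_URI', True).split()[0], d)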
Diffstat (limited to 'meta/classes/distrodata.bbclass')
-rw-r--r--  meta/classes/distrodata.bbclass | 492
1 file changed, 43 insertions(+), 449 deletions(-)
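For git URIs the new code still reports a PV-style string: if the fetcher finds no newer tag it falls back to the version part of PV, then appends "+gitAUTOINC+" (or "+gitrAUTOINC+") plus the first ten characters of the latest upstream revision. A rough sketch of that assembly, simplified from the hunk below (the prefix test here is plainer than the patch's re.search() call):

    def git_upstream_pv(ud, d, pversion):
        # newest tag-derived version, or the version part of PV when none is found
        pupver = ud.method.latest_versionstring(ud, d) or pversion.rsplit("+")[0]
        prefix = "+gitrAUTOINC+" if "gitr" in pversion else "+gitAUTOINC+"
        # latest_revision() returns the upstream revision the AUTOINC suffix tracks
        rev = ud.method.latest_revision(ud, d, ud.names[0])
        return pupver + prefix + rev[:10]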
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index a890de7911..83aa381fe7 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -268,240 +268,6 @@ python do_checkpkg() {
268 268 import tempfile
269 269 import subprocess
270 270
271 """
272 sanity check to ensure same name and type. Match as many patterns as possible
273 such as:
274 gnome-common-2.20.0.tar.gz (most common format)
275 gtk+-2.90.1.tar.gz
276 xf86-input-synaptics-12.6.9.tar.gz
277 dri2proto-2.3.tar.gz
278 blktool_4.orig.tar.gz
279 libid3tag-0.15.1b.tar.gz
280 unzip552.tar.gz
281 icu4c-3_6-src.tgz
282 genext2fs_1.3.orig.tar.gz
283 gst-fluendo-mp3
284 """
285 prefix1 = "[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*\+?[\-_]" # match most patterns which uses "-" as separator to version digits
286 prefix2 = "[a-zA-Z]+" # a loose pattern such as for unzip552.tar.gz
287 prefix3 = "[0-9]+[\-]?[a-zA-Z]+" # a loose pattern such as for 80325-quicky-0.4.tar.gz
288 prefix = "(%s|%s|%s)" % (prefix1, prefix2, prefix3)
289 ver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"#"((\d+[\.\-_[a-z]])+)"
290 # src.rpm extension was added only for rpm package. Can be removed if the rpm
291 # packaged will always be considered as having to be manually upgraded
292 suffix = "(tar\.gz|tgz|tar\.bz2|tar\.lz4|zip|xz|rpm|bz2|lz4|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
293
294 suffixtuple = ("tar.gz", "tgz", "zip", "tar.bz2", "tar.xz", "tar.lz4", "bz2", "lz4", "orig.tar.gz", "src.tar.gz", "src.rpm", "src.tgz", "svnr\d+.tar.bz2", "stable.tar.gz", "src.rpm")
295 sinterstr = "(?P<name>%s?)v?(?P<ver>%s)(\-source)?" % (prefix, ver_regex)
296 sdirstr = "(?P<name>%s)\.?v?(?P<ver>%s)(\-source)?[\.\-](?P<type>%s$)" % (prefix, ver_regex, suffix)
297
298 def parse_inter(s):
299 m = re.search(sinterstr, s)
300 if not m:
301 return None
302 else:
303 return (m.group('name'), m.group('ver'), "")
304
305 def parse_dir(s):
306 m = re.search(sdirstr, s)
307 if not m:
308 return None
309 else:
310 return (m.group('name'), m.group('ver'), m.group('type'))
311
312 def modelate_version(version):
313 if version[0] in ['.', '-']:
314 if version[1].isdigit():
315 version = version[1] + version[0] + version[2:len(version)]
316 else:
317 version = version[1:len(version)]
318
319 version = re.sub('\-', '.', version)
320 version = re.sub('_', '.', version)
321 version = re.sub('(rc)+', '.-1.', version)
322 version = re.sub('(alpha)+', '.-3.', version)
323 version = re.sub('(beta)+', '.-2.', version)
324 if version[0] == 'v':
325 version = version[1:len(version)]
326 return version
327
328 """
329 Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
330 purpose. PE is cleared in comparison as it's not for build, and PV is cleared too
331 for simplicity as it's somehow difficult to get from various upstream format
332 """
333 def __vercmp(old, new):
334 (on, ov, ot) = old
335 (en, ev, et) = new
336 if on != en or (et and et not in suffixtuple):
337 return False
338 ov = modelate_version(ov)
339 ev = modelate_version(ev)
340
341 result = bb.utils.vercmp(("0", ov, ""), ("0", ev, ""))
342 if result < 0:
343 return True
344 else:
345 return False
346
347 """
348 wrapper for fetch upstream directory info
349 'url' - upstream link customized by regular expression
350 'd' - database
351 'tmpf' - tmpfile for fetcher output
352 We don't want to exit whole build due to one recipe error. So handle all exceptions
353 gracefully w/o leaking to outer.
354 """
355 def internal_fetch_wget(url, ud, d, tmpf):
356 status = "ErrFetchUnknown"
357
358 agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
359 fetchcmd = "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"%s\" '%s'" % (tmpf.name, agent, url)
360 try:
361 fetcher = bb.fetch2.wget.Wget(d)
362 fetcher._runwget(ud, d, fetchcmd, True)
363 status = "SUCC"
364 except bb.fetch2.BBFetchException, e:
365 status = "ErrFetch"
366
367 return status
368
369 """
370 Check on middle version directory such as "2.4/" in "http://xxx/2.4/pkg-2.4.1.tar.gz",
371 'url' - upstream link customized by regular expression
372 'd' - database
373 'curver' - current version
374 Return new version if success, or else error in "Errxxxx" style
375 """
376 def check_new_dir(url, curver, ud, d):
377 pn = d.getVar('PN', True)
378 f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
379 status = internal_fetch_wget(url, ud, d, f)
380 fhtml = f.read()
381 if status == "SUCC" and len(fhtml):
382 newver = parse_inter(curver)
383
384 """
385 match "*4.1/">*4.1/ where '*' matches chars
386 N.B. add package name, only match for digits
387 """
388 regex = d.getVar('REGEX', True)
389 if regex == '':
390 regex = "^%s" %prefix
391 m = re.search("^%s" % regex, curver)
392 if m:
393 s = "%s[^\d\"]*?(\d+[\.\-_])+\d+/?" % m.group()
394 else:
395 s = "(\d+[\.\-_])+\d+/?"
396
397 searchstr = "[hH][rR][eE][fF]=\"%s\">" % s
398
399 reg = re.compile(searchstr)
400 valid = 0
401 for line in fhtml.split("\n"):
402 if line.find(curver) >= 0:
403 valid = 1
404 m = reg.search(line)
405 if m:
406 ver = m.group().split("\"")[1]
407 ver = ver.strip("/")
408 ver = parse_inter(ver)
409 if ver and __vercmp(newver, ver) == True:
410 newver = ver
411
412 """Expect a match for curver in directory list, or else it indicates unknown format"""
413 if not valid:
414 status = "ErrParseInterDir"
415 else:
416 """rejoin the path name"""
417 status = newver[0] + newver[1]
418 elif not len(fhtml):
419 status = "ErrHostNoDir"
420
421 f.close()
422 if status != "ErrHostNoDir" and re.match("Err", status):
423 logpath = d.getVar('LOG_DIR', True)
424 subprocess.call("cp %s %s/" % (f.name, logpath), shell=True)
425 os.unlink(f.name)
426 return status
427
428 """
429 Check on the last directory to search '2.4.1' in "http://xxx/2.4/pkg-2.4.1.tar.gz",
430 'url' - upstream link customized by regular expression
431 'd' - database
432 'curname' - current package name
433 Return new version if success, or else error in "Errxxxx" style
434 """
435 def check_new_version(url, curname, ud, d):
436 """possible to have no version in pkg name, such as spectrum-fw"""
437 if not re.search("\d+", curname):
438 return pcurver
439 pn = d.getVar('PN', True)
440 newver_regex = d.getVar('REGEX', True)
441 f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
442 status = internal_fetch_wget(url, ud, d, f)
443 fhtml = f.read()
444
445 if status == "SUCC" and len(fhtml):
446 newver = parse_dir(curname)
447
448 if not newver_regex:
449 """this is the default matching pattern, if recipe does not """
450 """provide a regex expression """
451 """match "{PN}-5.21.1.tar.gz">{PN}-5.21.1.tar.gz """
452 pn1 = re.search("^%s" % prefix, curname).group()
453 s = "[^\"]*%s[^\d\"]*?(\d+[\.\-_])+[^\"]*" % pn1
454 searchstr = "[hH][rR][eE][fF]=\"%s\".*[>\"]" % s
455 reg = searchstr
456 else:
457 reg = newver_regex
458 valid = 0
459 count = 0
460 for line in fhtml.split("\n"):
461 if pn == 'kconfig-frontends':
462 m = re.findall(reg, line)
463 if m:
464 valid = 1
465 for match in m:
466 (on, ov, oe) = newver
467 ver = (on, match[0], oe)
468 if ver and __vercmp(newver, ver) == True:
469 newver = ver
470 continue
471 count += 1
472 m = re.search(reg, line)
473 if m:
474 valid = 1
475 if not newver_regex:
476 ver = m.group().split("\"")[1].split("/")[-1]
477 if ver == "download":
478 ver = m.group().split("\"")[1].split("/")[-2]
479 ver = parse_dir(ver)
480 else:
481 """ we cheat a little here, but we assume that the
482 regular expression in the recipe will extract exacly
483 the version """
484 (on, ov, oe) = newver
485 ver = (on, m.group('pver'), oe)
486 if ver and __vercmp(newver, ver) == True:
487 newver = ver
488 """Expect a match for curver in directory list, or else it indicates unknown format"""
489 if not valid:
490 status = "ErrParseDir"
491 else:
492 """newver still contains a full package name string"""
493 status = re.sub('_', '.', newver[1])
494 elif not len(fhtml):
495 status = "ErrHostNoDir"
496
497 f.close()
498 """if host hasn't directory information, no need to save tmp file"""
499 if status != "ErrHostNoDir" and re.match("Err", status):
500 logpath = d.getVar('LOG_DIR', True)
501 subprocess.call("cp %s %s/" % (f.name, logpath), shell=True)
502 os.unlink(f.name)
503 return status
504
505 """first check whether a uri is provided""" 271 """first check whether a uri is provided"""
506 src_uri = d.getVar('SRC_URI', True) 272 src_uri = d.getVar('SRC_URI', True)
507 if not src_uri: 273 if not src_uri:
@@ -543,9 +309,6 @@ python do_checkpkg() {
543 309 localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
544 310 bb.data.update_data(localdata)
545 311
546 chk_uri = d.getVar('REGEX_URI', True)
547 if not chk_uri:
548 chk_uri = src_uri
549 312 pdesc = localdata.getVar('DESCRIPTION', True)
550 313 pgrp = localdata.getVar('SECTION', True)
551 314 if localdata.getVar('PRSPV', True):
@@ -562,232 +325,63 @@ python do_checkpkg() {
562 325 psrcuri = localdata.getVar('SRC_URI', True)
563 326 maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
564 327
328 """ Get upstream version version """
329 pupver = None
330 pstatus = "ErrUnknown"
565 331 found = 0
332
566 333 for uri in src_uri.split():
567 334 m = re.compile('(?P<type>[^:]*)').match(uri)
568 335 if not m:
569 336 raise MalformedUrl(uri)
570 337 elif m.group('type') in ('http', 'https', 'ftp', 'cvs', 'svn', 'git'):
571 338 found = 1
572 pproto = m.group('type')
573 break
339 psrcuri = uri
340 pproto = m.group('type')
341 break
574 342 if not found:
575 343 pproto = "file"
576 pupver = "N/A"
577 pstatus = "ErrUnknown"
578
579 (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(uri)
580 if type in ['http', 'https', 'ftp']:
581 if d.getVar('PRSPV', True):
582 pcurver = d.getVar('PRSPV', True)
583 else:
584 pcurver = d.getVar('PV', True)
585 else:
586 if d.getVar('PRSPV', True):
587 pcurver = d.getVar('PRSPV', True)
588 else:
589 pcurver = d.getVar("SRCREV", True)
590
591
592 if type in ['http', 'https', 'ftp']:
593 ud = bb.fetch2.FetchData(uri, d)
594 newver = pcurver
595 altpath = path
596 dirver = "-"
597 curname = "-"
598
599 """
600 match version number amid the path, such as "5.7" in:
601 http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
602 N.B. how about sth. like "../5.7/5.8/..."? Not find such example so far :-P
603 """
604 m = re.search(r"[^/]*(\d+\.)+\d+([\-_]r\d+)*/", path)
605 if m:
606 altpath = path.split(m.group())[0]
607 dirver = m.group().strip("/")
608
609 """use new path and remove param. for wget only param is md5sum"""
610 alturi = bb.fetch.encodeurl([type, host, altpath, user, pswd, {}])
611 my_uri = d.getVar('REGEX_URI', True)
612 if my_uri:
613 if d.getVar('PRSPV', True):
614 newver = d.getVar('PRSPV', True)
615 else:
616 newver = d.getVar('PV', True)
617 else:
618 newver = check_new_dir(alturi, dirver, ud, d)
619 altpath = path
620 if not re.match("Err", newver) and dirver != newver:
621 altpath = altpath.replace(dirver, newver, True)
622 # For folder in folder cases - try to enter the folder again and then try parsing
623 """Now try to acquire all remote files in current directory"""
624 if not re.match("Err", newver):
625 curname = altpath.split("/")[-1]
626
627 """get remote name by skipping pacakge name"""
628 m = re.search(r"/.*/", altpath)
629 if not m:
630 altpath = "/"
631 else:
632 altpath = m.group()
633
634 chk_uri = d.getVar('REGEX_URI', True)
635 if not chk_uri:
636 alturi = bb.fetch.encodeurl([type, host, altpath, user, pswd, {}])
637 else:
638 alturi = chk_uri
639 newver = check_new_version(alturi, curname, ud, d)
640 while(newver == "ErrHostNoDir"):
641 if alturi == "/download":
642 break
643 else:
644 alturi = "/".join(alturi.split("/")[0:-2]) + "/download"
645 newver = check_new_version(alturi, curname, ud, d)
646 if not re.match("Err", newver):
647 pupver = newver
648 if pupver != pcurver:
649 pstatus = "UPDATE"
650 else:
651 pstatus = "MATCH"
652
653 if re.match("Err", newver):
654 pstatus = newver + ":" + altpath + ":" + dirver + ":" + curname
655 elif type == 'git':
656 if user:
657 gituser = user + '@'
658 else:
659 gituser = ""
660
661 if 'protocol' in parm:
662 gitproto = parm['protocol']
663 else:
664 gitproto = "git"
665 344
666 # Get all tags and HEAD
667 if d.getVar('GIT_REGEX', True):
668 gitcmd = "git ls-remote %s://%s%s%s %s 2>&1" % (gitproto, gituser, host, path, d.getVar('GIT_REGEX', True))
669 else:
670 gitcmd = "git ls-remote %s://%s%s%s *tag* 2>&1" % (gitproto, gituser, host, path)
671 gitcmd2 = "git ls-remote %s://%s%s%s HEAD 2>&1" % (gitproto, gituser, host, path)
345 if pproto in ['http', 'https', 'ftp', 'git']:
346 try:
347 ud = bb.fetch2.FetchData(psrcuri, d)
348 pupver = ud.method.latest_versionstring(ud, d)
349 if pproto == 'git':
350 if pupver == "":
672
673 tmp = os.popen(gitcmd).read()
674 if 'unable to connect' in tmp:
675 tmp = None
676 tmp2 = os.popen(gitcmd2).read()
677 if 'unable to connect' in tmp2:
678 tmp2 = None
679 #This is for those repos have tag like: refs/tags/1.2.2
680 phash = pversion.rsplit("+")[-1]
681 if tmp:
682 tmpline = tmp.split("\n")
683 verflag = 0
684 pupver = pversion
685 for line in tmpline:
686 if len(line)==0:
687 break;
688 puptag = line.split("/")[-1]
689 upstr_regex = d.getVar('REGEX', True)
690 if upstr_regex:
691 puptag = re.search(upstr_regex, puptag)
692 else:
693 puptag = re.search("(?P<pver>([0-9][\.|_]?)+)", puptag)
694 if puptag == None:
695 continue
696 puptag = puptag.group('pver')
697 puptag = re.sub("_",".",puptag)
698 plocaltag = pupver.split("+git")[0]
699 if "git" in plocaltag:
700 plocaltag = plocaltag.split("-")[0]
701 result = bb.utils.vercmp(("0", puptag, ""), ("0", plocaltag, ""))
702
703 if result > 0:
704 verflag = 1
705 pupver = puptag
706 elif verflag == 0 :
707 pupver = plocaltag
708 #This is for those no tag repo
709 elif tmp2:
710 pupver = pversion.rsplit("+")[0]
711 phash = pupver
712 else:
713 pstatus = "ErrGitAccess"
714 if not ('ErrGitAccess' in pstatus):
715
716 latest_head = tmp2.rsplit("\t")[0][:7]
717 tmp3 = re.search('(?P<git_ver>(\d+[\.-]?)+)(?P<git_prefix>(\+git[r|\-|]?)AUTOINC\+)(?P<head_md5>([\w|_]+))', pversion)
718 tmp4 = re.search('(?P<git_ver>(\d+[\.-]?)+)(?P<git_prefix>(\+git[r|\-|]?)AUTOINC\+)(?P<head_md5>([\w|_]+))', pupver)
719 if not tmp4:
720 tmp4 = re.search('(?P<git_ver>(\d+[\.-]?)+)', pupver)
721
722 if tmp3:
351 pupver = pversion.rsplit("+")[0]
352 if re.search(pversion, "gitrAUTOINC"):
353 pupver += "+gitrAUTOINC+"
354 else:
355 pupver += "+gitAUTOINC+"
356 latest_revision = ud.method.latest_revision(ud, d, ud.names[0])
357 pupver += latest_revision[:10]
358 except Exception as inst:
359 bb.warn("%s: unexpected error: %s" % (pname, repr(inst)))
360 pstatus = "ErrAccess"
361 elif pproto == "file":
362 """Local files are always updated"""
363 pupver = pversion
723 # Get status of the package - MATCH/UPDATE
724 result = bb.utils.vercmp(("0", tmp3.group('git_ver'), ""), ("0",tmp3.group('git_ver') , ""))
725 # Get the latest tag
726 pstatus = 'MATCH'
727 if result < 0:
728 latest_pv = tmp3.group('git_ver')
729 else:
730 latest_pv = pupver
731 if not(tmp3.group('head_md5')[:7] in latest_head) or not(latest_head in tmp3.group('head_md5')[:7]):
732 pstatus = 'UPDATE'
733
734 git_prefix = tmp3.group('git_prefix')
735 pupver = latest_pv + tmp3.group('git_prefix') + latest_head
736 else:
737 if not tmp3:
738 bb.plain("#DEBUG# Package %s: current version (%s) doesn't match the usual pattern" %(pname, pversion))
739 elif type == 'svn':
740 ud = bb.fetch2.FetchData(uri, d)
741
742 svnFetcher = bb.fetch2.svn.Svn(d)
743 svnFetcher.urldata_init(ud, d)
744 try:
745 pupver = svnFetcher.latest_revision(ud, d, ud.names[0])
746 except bb.fetch2.FetchError:
747 pstatus = "ErrSvnAccess"
748
749 if pupver:
750 if pupver in pversion:
751 pstatus = "MATCH"
752 else:
753 pstatus = "UPDATE"
754 else:
755 pstatus = "ErrSvnAccess"
756
757 if 'rev' in ud.parm:
758 pcurver = ud.parm['rev']
759
760 if pstatus != "ErrSvnAccess":
761 tag = pversion.rsplit("+svn")[0]
762 svn_prefix = re.search('(\+svn[r|\-]?)', pversion)
763 if tag and svn_prefix:
764 pupver = tag + svn_prefix.group() + pupver
765
766 elif type == 'cvs':
767 pupver = "HEAD"
768 pstatus = "UPDATE"
769 elif type == 'file':
770 """local file is always up-to-date"""
771 pupver = pcurver
772 pstatus = "MATCH"
773 else:
774 pstatus = "ErrUnsupportedProto"
364 else:
365 pstatus = "ErrUnsupportedProto"
366 bb.note("do_checkpkg, protocol %s isn't implemented" % pproto)
775 367
776 if re.match("Err", pstatus):
777 pstatus += ":%s%s" % (host, path)
368 if not pupver:
369 pupver = "N/A"
370 elif pupver == pversion:
371 pstatus = "MATCH"
372 else:
373 pstatus = "UPDATE"
778 374
779 """Read from manual distro tracking fields as alternative""" 375 """Read from manual distro tracking fields as alternative"""
780 pmver = d.getVar("RECIPE_UPSTREAM_VERSION", True) 376 pmver = d.getVar("RECIPE_UPSTREAM_VERSION", True)
781 if not pmver: 377 if not pmver:
782 pmver = "N/A" 378 pmver = "N/A"
783 pmstatus = "ErrNoRecipeData" 379 pmstatus = "ErrNoRecipeData"
380 elif pmver == pupver:
381 pmstatus = "MATCH"
784 382 else:
785 if pmver == pcurver:
786 pmstatus = "MATCH"
787 else:
788 pmstatus = "UPDATE"
383 pmstatus = "UPDATE"
789 384
790 psrcuri = psrcuri.split()[0]
791 385 pdepends = "".join(pdepends.split("\t"))
792 386 pdesc = "".join(pdesc.split("\t"))
793 387 no_upgr_reason = d.getVar('RECIPE_NO_UPDATE_REASON', True)