summaryrefslogtreecommitdiffstats
path: root/meta/classes/distrodata.bbclass
diff options
context:
space:
mode:
Diffstat (limited to 'meta/classes/distrodata.bbclass')
-rw-r--r--meta/classes/distrodata.bbclass88
1 files changed, 63 insertions, 25 deletions
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index 69bcc35d44..ffc1b829e7 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -1,4 +1,4 @@
1 1include conf/distro/include/upstream_tracking.inc
2addhandler distro_eventhandler 2addhandler distro_eventhandler
3python distro_eventhandler() { 3python distro_eventhandler() {
4 4
@@ -237,11 +237,12 @@ python do_checkpkg() {
237 prefix2 = "[a-zA-Z]+" # a loose pattern such as for unzip552.tar.gz 237 prefix2 = "[a-zA-Z]+" # a loose pattern such as for unzip552.tar.gz
238 prefix3 = "[0-9a-zA-Z]+" # a loose pattern such as for 80325-quicky-0.4.tar.gz 238 prefix3 = "[0-9a-zA-Z]+" # a loose pattern such as for 80325-quicky-0.4.tar.gz
239 prefix = "(%s|%s|%s)" % (prefix1, prefix2, prefix3) 239 prefix = "(%s|%s|%s)" % (prefix1, prefix2, prefix3)
240 suffix = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm)" 240 #ver_regex = "((\d+[\.\-_]*[a-z]*)+)"#"((\d+[\.\-_[a-z]])+)"
241 suffixtuple = ("tar.gz", "tgz", "zip", "tar.bz2", "tar.xz", "src.rpm") 241 suffix = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz)"
242 242
243 suffixtuple = ("tar.gz", "tgz", "zip", "tar.bz2", "tar.xz", "src.rpm", "bz2", "orig.tar.gz")
243 sinterstr = "(?P<name>%s?)(?P<ver>.*)" % prefix 244 sinterstr = "(?P<name>%s?)(?P<ver>.*)" % prefix
244 sdirstr = "(?P<name>%s)(?P<ver>.*)\.(?P<type>%s$)" % (prefix, suffix) 245 sdirstr = "(?P<name>%s)\.?(?P<ver>.*)\.(?P<type>%s$)" % (prefix, suffix)
245 246
246 def parse_inter(s): 247 def parse_inter(s):
247 m = re.search(sinterstr, s) 248 m = re.search(sinterstr, s)
@@ -271,6 +272,13 @@ python do_checkpkg() {
271 ev = re.search("[\d|\.]+[^a-zA-Z]+", ev).group() 272 ev = re.search("[\d|\.]+[^a-zA-Z]+", ev).group()
272 return bb.utils.vercmp(("0", ov, ""), ("0", ev, "")) 273 return bb.utils.vercmp(("0", ov, ""), ("0", ev, ""))
273 274
275 def __vercmp2(old,new):
276 (on,ov,ot) = old
277 (en,ev,et) = new
278 #bb.plain("old n = %s" %(str(ov)))
279 if on != en or (et and et not in suffixtuple):
280 return False
281 return ov < ev
274 """ 282 """
275 wrapper for fetch upstream directory info 283 wrapper for fetch upstream directory info
276 'url' - upstream link customized by regular expression 284 'url' - upstream link customized by regular expression
@@ -376,6 +384,7 @@ python do_checkpkg() {
376 if not re.search("\d+", curname): 384 if not re.search("\d+", curname):
377 return pcurver 385 return pcurver
378 pn = d.getVar('PN', True) 386 pn = d.getVar('PN', True)
387 newver_regex = d.getVar('RECIPE_NEWVER_REGEX',True)
379 f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn) 388 f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
380 status = internal_fetch_wget(url, d, f) 389 status = internal_fetch_wget(url, d, f)
381 fhtml = f.read() 390 fhtml = f.read()
@@ -383,25 +392,45 @@ python do_checkpkg() {
383 if status == "SUCC" and len(fhtml): 392 if status == "SUCC" and len(fhtml):
384 newver = parse_dir(curname) 393 newver = parse_dir(curname)
385 394
386 """match "{PN}-5.21.1.tar.gz">{PN}-5.21.1.tar.gz """ 395 if not newver_regex:
387 pn1 = re.search("^%s" % prefix, curname).group() 396 """this is the default matching pattern, if recipe does not """
388 397 """provide a regex expression """
389 s = "[^\"]*%s[^\d\"]*?(\d+[\.\-_])+[^\"]*" % pn1 398 """match "{PN}-5.21.1.tar.gz">{PN}-5.21.1.tar.gz """
390 searchstr = "[hH][rR][eE][fF]=\"%s\".*[>\"]" % s 399 pn1 = re.search("^%s" % prefix, curname).group()
391 reg = re.compile(searchstr) 400 s = "[^\"]*%s[^\d\"]*?(\d+[\.\-_])+[^\"]*" % pn1
392 401 searchstr = "[hH][rR][eE][fF]=\"%s\".*[>\"]" % s
402 reg = searchstr
403 else:
404 reg = newver_regex
393 valid = 0 405 valid = 0
406 count = 0
394 for line in fhtml.split("\n"): 407 for line in fhtml.split("\n"):
395 m = reg.search(line) 408 count += 1
409 m = re.search(reg, line)
396 if m: 410 if m:
397 valid = 1 411 valid = 1
398 ver = m.group().split("\"")[1].split("/")[-1] 412 if not newver_regex:
399 if ver == "download": 413 ver = m.group().split("\"")[1].split("/")[-1]
400 ver = m.group().split("\"")[1].split("/")[-2] 414 if ver == "download":
401 ver = parse_dir(ver) 415 ver = m.group().split("\"")[1].split("/")[-2]
402 if ver and __vercmp(newver, ver) < 0: 416 ver = parse_dir(ver)
403 newver = ver 417 else:
404 418 """ we cheat a little here, but we assume that the
419 regular expression in the recipe will extract exactly
420 the version """
421 (on, ov, oe) = newver
422 #HARDCODED MESS
423 if pn == 'remake':
424 ver = (on, m.group(1)+m.group(3), oe)
425 else:
426 ver = (on, m.group(1), oe)
427 pkg_problem = ['jpeg','dhcp','remake','blktool','apmd','nativesdk-openssl','valgrind','net-tools']
428 if pn in pkg_problem:
429 if ver and __vercmp2(newver,ver) == True:
430 newver = ver
431 else:
432 if ver and __vercmp(newver, ver) < 0:
433 newver = ver
405 """Expect a match for curver in directory list, or else it indicates unknown format""" 434 """Expect a match for curver in directory list, or else it indicates unknown format"""
406 if not valid: 435 if not valid:
407 status = "ErrParseDir" 436 status = "ErrParseDir"
@@ -454,6 +483,9 @@ python do_checkpkg() {
454 localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) 483 localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
455 bb.data.update_data(localdata) 484 bb.data.update_data(localdata)
456 485
486 chk_uri = d.getVar('RECIPE_NEWVER_URI',True)
487 if not chk_uri:
488 chk_uri = src_uri
457 pdesc = localdata.getVar('DESCRIPTION', True) 489 pdesc = localdata.getVar('DESCRIPTION', True)
458 pgrp = localdata.getVar('SECTION', True) 490 pgrp = localdata.getVar('SECTION', True)
459 pversion = localdata.getVar('PV', True) 491 pversion = localdata.getVar('PV', True)
@@ -505,12 +537,15 @@ python do_checkpkg() {
505 537
506 """use new path and remove param. for wget only param is md5sum""" 538 """use new path and remove param. for wget only param is md5sum"""
507 alturi = bb.encodeurl([type, host, altpath, user, pswd, {}]) 539 alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
508 540 my_uri = d.getVar('RECIPE_NEWVER_URI',True)
509 newver = check_new_dir(alturi, dirver, d) 541 if my_uri:
542 newver = d.getVar('PV', True)
543 else:
544 newver = check_new_dir(alturi, dirver, d)
510 altpath = path 545 altpath = path
511 if not re.match("Err", newver) and dirver != newver: 546 if not re.match("Err", newver) and dirver != newver:
512 altpath = altpath.replace(dirver, newver, True) 547 altpath = altpath.replace(dirver, newver, True)
513 548 # For folder in folder cases - try to enter the folder again and then try parsing
514 """Now try to acquire all remote files in current directory""" 549 """Now try to acquire all remote files in current directory"""
515 if not re.match("Err", newver): 550 if not re.match("Err", newver):
516 curname = altpath.split("/")[-1] 551 curname = altpath.split("/")[-1]
@@ -521,8 +556,11 @@ python do_checkpkg() {
521 altpath = "/" 556 altpath = "/"
522 else: 557 else:
523 altpath = m.group() 558 altpath = m.group()
524 559 chk_uri = d.getVar('RECIPE_NEWVER_URI',True)
525 alturi = bb.encodeurl([type, host, altpath, user, pswd, {}]) 560 if not chk_uri:
561 alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
562 else:
563 alturi = chk_uri
526 newver = check_new_version(alturi, curname, d) 564 newver = check_new_version(alturi, curname, d)
527 while(newver == "ErrHostNoDir"): 565 while(newver == "ErrHostNoDir"):
528 if alturi == "/download": 566 if alturi == "/download":