-rw-r--r--  meta/classes/distrodata.bbclass | 179
1 file changed, 127 insertions, 52 deletions
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index ffc1b829e7..e25eeab367 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -1,4 +1,4 @@
-include conf/distro/include/upstream_tracking.inc
+include conf/distro/include/package_regex.inc
 addhandler distro_eventhandler
 python distro_eventhandler() {
 
@@ -198,6 +198,48 @@ do_distrodataall() {
 
 addhandler checkpkg_eventhandler
 python checkpkg_eventhandler() {
+    def parse_csv_file(filename):
+        package_dict = {}
+        fd = open(filename, "r")
+        lines = fd.read().rsplit("\n")
+        fd.close()
+
+        first_line = ''
+        index = 0
+        for line in lines:
+            #Skip the first line
+            if index == 0:
+                first_line = line
+                index += 1
+                continue
+            elif line == '':
+                continue
+            index += 1
+            package_name = line.rsplit("\t")[0]
+            if '-native' in package_name or 'nativesdk-' in package_name:
+                original_name = package_name.rsplit('-native')[0]
+                if original_name == '':
+                    original_name = package_name.rsplit('nativesdk-')[0]
+                if original_name in package_dict:
+                    continue
+                else:
+                    package_dict[package_name] = line
+            else:
+                new_name = package_name + "-native"
+                if not(new_name in package_dict):
+                    new_name = 'nativesdk-' + package_name
+                if new_name in package_dict:
+                    del package_dict[new_name]
+                package_dict[package_name] = line
+
+        fd = open(filename, "w")
+        fd.write("%s\n"%first_line)
+        for el in package_dict:
+            fd.write(package_dict[el] + "\n")
+        fd.close()
+
+        del package_dict
+
     if bb.event.getName(e) == "BuildStarted":
         import oe.distro_check as dc
         logfile = dc.create_log_file(e.data, "checkpkg.csv")
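
The new parse_csv_file() helper collapses checkpkg.csv so that a recipe and its -native/nativesdk- variant do not both keep a row, preferring the plain target recipe. The snippet below is a rough standalone illustration of that de-duplication rule, not part of the patch; it is simplified in that it keys rows on the stripped base name, whereas the real handler rewrites tmp/log/checkpkg.csv in place and keys on the first tab-separated field.

    def dedupe(rows):
        keep = {}
        for row in rows:
            pkg = row.split("\t")[0]
            base = pkg.replace("-native", "").replace("nativesdk-", "")
            # prefer the plain target recipe over its -native/nativesdk- variant
            if pkg == base or base not in keep:
                keep[base] = row
        return list(keep.values())

    rows = ["openssl\t1.0.1e", "nativesdk-openssl\t1.0.1e", "zlib-native\t1.2.7"]
    print(dedupe(rows))
    # ['openssl\t1.0.1e', 'zlib-native\t1.2.7']
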
@@ -207,6 +249,13 @@ python checkpkg_eventhandler() {
         f.write("Package\tVersion\tUpver\tLicense\tSection\tHome\tRelease\tDepends\tBugTracker\tPE\tDescription\tStatus\tTracking\tURI\tMAINTAINER\n")
         f.close()
         bb.utils.unlockfile(lf)
+    elif bb.event.getName(e) == "BuildCompleted":
+        import os
+        filename = "tmp/log/checkpkg.csv"
+        if os.path.isfile(filename):
+            lf = bb.utils.lockfile("%s.lock"%filename)
+            parse_csv_file(filename)
+            bb.utils.unlockfile(lf)
     return
 }
 
@@ -214,7 +263,6 @@ addtask checkpkg
 do_checkpkg[nostamp] = "1"
 python do_checkpkg() {
     localdata = bb.data.createCopy(d)
-    import sys
     import re
     import tempfile
     import subprocess
@@ -233,16 +281,18 @@ python do_checkpkg() {
     genext2fs_1.3.orig.tar.gz
     gst-fluendo-mp3
     """
-    prefix1 = "[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*[\-_]" # match most patterns which uses "-" as separator to version digits
+    prefix1 = "[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*\+?[\-_]" # match most patterns which uses "-" as separator to version digits
     prefix2 = "[a-zA-Z]+" # a loose pattern such as for unzip552.tar.gz
-    prefix3 = "[0-9a-zA-Z]+" # a loose pattern such as for 80325-quicky-0.4.tar.gz
+    prefix3 = "[0-9]+[\-]?[a-zA-Z]+" # a loose pattern such as for 80325-quicky-0.4.tar.gz
     prefix = "(%s|%s|%s)" % (prefix1, prefix2, prefix3)
-    #ver_regex = "((\d+[\.\-_]*[a-z]*)+)"#"((\d+[\.\-_[a-z]])+)"
-    suffix = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz)"
+    ver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"#"((\d+[\.\-_[a-z]])+)"
+    # src.rpm extension was added only for rpm package. Can be removed if the rpm
+    # packaged will always be considered as having to be manually upgraded
+    suffix = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
 
-    suffixtuple = ("tar.gz", "tgz", "zip", "tar.bz2", "tar.xz", "src.rpm", "bz2", "orig.tar.gz")
-    sinterstr = "(?P<name>%s?)(?P<ver>.*)" % prefix
-    sdirstr = "(?P<name>%s)\.?(?P<ver>.*)\.(?P<type>%s$)" % (prefix, suffix)
+    suffixtuple = ("tar.gz", "tgz", "zip", "tar.bz2", "tar.xz", "bz2", "orig.tar.gz", "src.tar.gz", "src.rpm", "src.tgz", "svnr\d+.tar.bz2", "stable.tar.gz", "src.rpm")
+    sinterstr = "(?P<name>%s?)v?(?P<ver>%s)(source)?" % (prefix, ver_regex)
+    sdirstr = "(?P<name>%s)\.?v?(?P<ver>%s)(source)?[\.\-](?P<type>%s$)" % (prefix, ver_regex, suffix)
 
     def parse_inter(s):
         m = re.search(sinterstr, s)
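
A quick way to see what the reworked patterns are meant to do is to run them outside BitBake. The sketch below rebuilds sdirstr from the values added above (written here as raw strings) and splits two of the tarball names already cited in the surrounding comments into name, version and suffix; it is an illustration only, not part of the patch.

    import re

    prefix1 = r"[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*\+?[\-_]"
    prefix2 = r"[a-zA-Z]+"
    prefix3 = r"[0-9]+[\-]?[a-zA-Z]+"
    prefix = "(%s|%s|%s)" % (prefix1, prefix2, prefix3)
    ver_regex = r"(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
    suffix = r"(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
    sdirstr = r"(?P<name>%s)\.?v?(?P<ver>%s)(source)?[\.\-](?P<type>%s$)" % (prefix, ver_regex, suffix)

    for tarball in ("genext2fs_1.3.orig.tar.gz", "unzip552.tar.gz"):
        m = re.search(sdirstr, tarball)
        # prints: genext2fs_1.3.orig.tar.gz -> genext2fs_ / 1.3 / orig.tar.gz
        #         unzip552.tar.gz -> unzip / 552 / tar.gz
        print("%s -> %s / %s / %s" % (tarball, m.group('name'), m.group('ver'), m.group('type')))
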
@@ -258,6 +308,22 @@ python do_checkpkg() {
         else:
             return (m.group('name'), m.group('ver'), m.group('type'))
 
+    def modelate_version(version):
+        if version[0] in ['.', '-']:
+            if version[1].isdigit():
+                version = version[1] + version[0] + version[2:len(version)]
+            else:
+                version = version[1:len(version)]
+
+        version = re.sub('\-', '.', version)
+        version = re.sub('_', '.', version)
+        version = re.sub('(rc)+', '.-1.', version)
+        version = re.sub('(alpha)+', '.-3.', version)
+        version = re.sub('(beta)+', '.-2.', version)
+        if version[0] == 'v':
+            version = version[1:len(version)]
+        return version
+
     """
     Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
     purpose. PE is cleared in comparison as it's not for build, and PV is cleared too
@@ -267,18 +333,16 @@ python do_checkpkg() {
         (on, ov, ot) = old
         (en, ev, et) = new
         if on != en or (et and et not in suffixtuple):
-            return 0
-        ov = re.search("[\d|\.]+[^a-zA-Z]+", ov).group()
-        ev = re.search("[\d|\.]+[^a-zA-Z]+", ev).group()
-        return bb.utils.vercmp(("0", ov, ""), ("0", ev, ""))
-
-    def __vercmp2(old,new):
-        (on,ov,ot) = old
-        (en,ev,et) = new
-        #bb.plain("old n = %s" %(str(ov)))
-        if on != en or (et and et not in suffixtuple):
             return False
-        return ov < ev
+        ov = modelate_version(ov)
+        ev = modelate_version(ev)
+
+        result = bb.utils.vercmp(("0", ov, ""), ("0", ev, ""))
+        if result < 0:
+            return True
+        else:
+            return False
+
     """
     wrapper for fetch upstream directory info
     'url' - upstream link customized by regular expression
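
To make the intent of the new comparison concrete: modelate_version() rewrites pre-release markers (rc/alpha/beta) into negative numeric fields, so that once the two normalized strings are compared field by field a pre-release ranks below the corresponding final release, and __vercmp() now simply answers "is the candidate newer?". The sketch below is illustrative only; bb.utils.vercmp() exists only inside BitBake, so a crude field-wise comparison stands in for it, and the helper is copied from the hunk above (with raw regex strings).

    import re

    def modelate_version(version):
        # same rewrite as the patch, shown with raw regex strings
        if version[0] in ['.', '-']:
            if version[1].isdigit():
                version = version[1] + version[0] + version[2:len(version)]
            else:
                version = version[1:len(version)]
        version = re.sub(r'\-', '.', version)
        version = re.sub(r'_', '.', version)
        version = re.sub(r'(rc)+', '.-1.', version)
        version = re.sub(r'(alpha)+', '.-3.', version)
        version = re.sub(r'(beta)+', '.-2.', version)
        if version[0] == 'v':
            version = version[1:len(version)]
        return version

    def toy_vercmp(a, b):
        # crude stand-in for bb.utils.vercmp(): compare dot-separated numeric fields
        fa = [int(p) for p in a.split('.') if p.lstrip('-').isdigit()]
        fb = [int(p) for p in b.split('.') if p.lstrip('-').isdigit()]
        fa += [0] * (len(fb) - len(fa))
        fb += [0] * (len(fa) - len(fb))
        return (fa > fb) - (fa < fb)

    for old, new in [("2.0rc1", "2.0"), ("1.3_beta2", "1.3"), ("v1.0", "1.0.1")]:
        ov, ev = modelate_version(old), modelate_version(new)
        # all three report newer=True: pre-releases sort below the final release
        print("%s -> %s | %s -> %s | newer=%s" % (old, ov, new, ev, toy_vercmp(ov, ev) < 0))
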
@@ -335,15 +399,18 @@ python do_checkpkg() {
         match "*4.1/">*4.1/ where '*' matches chars
         N.B. add package name, only match for digits
         """
-        m = re.search("^%s" % prefix, curver)
+        regex = d.getVar('REGEX', True)
+        if regex == '':
+            regex = "^%s" %prefix
+        m = re.search("^%s" % regex, curver)
         if m:
             s = "%s[^\d\"]*?(\d+[\.\-_])+\d+/?" % m.group()
         else:
             s = "(\d+[\.\-_])+\d+/?"
 
         searchstr = "[hH][rR][eE][fF]=\"%s\">" % s
-        reg = re.compile(searchstr)
 
+        reg = re.compile(searchstr)
         valid = 0
         for line in fhtml.split("\n"):
             if line.find(curver) >= 0:
@@ -353,7 +420,7 @@ python do_checkpkg() {
                     ver = m.group().split("\"")[1]
                     ver = ver.strip("/")
                     ver = parse_inter(ver)
-                    if ver and __vercmp(newver, ver) < 0:
+                    if ver and __vercmp(newver, ver) == True:
                         newver = ver
 
         """Expect a match for curver in directory list, or else it indicates unknown format"""
@@ -384,7 +451,7 @@ python do_checkpkg() {
         if not re.search("\d+", curname):
             return pcurver
         pn = d.getVar('PN', True)
-        newver_regex = d.getVar('RECIPE_NEWVER_REGEX',True)
+        newver_regex = d.getVar('REGEX', True)
         f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
         status = internal_fetch_wget(url, d, f)
         fhtml = f.read()
@@ -401,10 +468,20 @@ python do_checkpkg() {
                 searchstr = "[hH][rR][eE][fF]=\"%s\".*[>\"]" % s
                 reg = searchstr
             else:
                 reg = newver_regex
             valid = 0
             count = 0
             for line in fhtml.split("\n"):
+                if pn == 'kconfig-frontends':
+                    m = re.findall(reg, line)
+                    if m:
+                        valid = 1
+                        for match in m:
+                            (on, ov, oe) = newver
+                            ver = (on, match[0], oe)
+                            if ver and __vercmp(newver, ver) == True:
+                                newver = ver
+                    continue
                 count += 1
                 m = re.search(reg, line)
                 if m:
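
The kconfig-frontends branch above leans on a detail of Python's re module: when a pattern contains more than one capture group, re.findall() returns one tuple per hit, so match[0] is the first group, i.e. the version string handed to __vercmp(). A small illustration follows; the line and the pattern are made up for the example (the real REGEX value for kconfig-frontends lives in package_regex.inc and is not part of this diff).

    import re

    line = '<a href="kconfig-frontends-3.12.0.0.tar.bz2">'      # sample directory-listing line
    regex = r'kconfig-frontends-((\d+\.)+\d+)\.tar\.bz2'         # hypothetical REGEX value
    for match in re.findall(regex, line):
        print(match)      # ('3.12.0.0', '0.') -- a tuple, one element per group
        print(match[0])   # 3.12.0.0 -- the piece the new code feeds to __vercmp()
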
@@ -419,28 +496,15 @@ python do_checkpkg() {
                     regular expression in the recipe will extract exacly
                     the version """
                     (on, ov, oe) = newver
-                    #HARDCODED MESS
-                    if pn == 'remake':
-                        ver = (on, m.group(1)+m.group(3), oe)
-                    else:
-                        ver = (on, m.group(1), oe)
-                    pkg_problem = ['jpeg','dhcp','remake','blktool','apmd','nativesdk-openssl','valgrind','net-tools']
-                    if pn in pkg_problem:
-                        if ver and __vercmp2(newver,ver) == True:
-                            newver = ver
-                    else:
-                        if ver and __vercmp(newver, ver) < 0:
-                            newver = ver
+                    ver = (on, m.group('pver'), oe)
+                    if ver and __vercmp(newver, ver) == True:
+                        newver = ver
             """Expect a match for curver in directory list, or else it indicates unknown format"""
             if not valid:
                 status = "ErrParseDir"
             else:
                 """newver still contains a full package name string"""
-                status = re.search("(\d+[\.\-_])*(\d+[0-9a-zA-Z]*)", newver[1]).group()
-                if "_" in status:
-                    status = re.sub("_",".",status)
-                elif "-" in status:
-                    status = re.sub("-",".",status)
+                status = newver[1]
 
         elif not len(fhtml):
             status = "ErrHostNoDir"
@@ -466,11 +530,21 @@ python do_checkpkg() {
     pname = d.getVar('PN', True)
 
     if pname.find("-native") != -1:
+        if d.getVar('BBCLASSEXTEND', True):
+            return
         pnstripped = pname.split("-native")
         bb.note("Native Split: %s" % pnstripped)
         localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
         bb.data.update_data(localdata)
 
+    if pname.startswith("nativesdk-"):
+        if d.getVar('BBCLASSEXTEND', True):
+            return
+        pnstripped = pname.replace("nativesdk-", "")
+        bb.note("NativeSDK Split: %s" % pnstripped)
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True))
+        bb.data.update_data(localdata)
+
     if pname.find("-cross") != -1:
         pnstripped = pname.split("-cross")
         bb.note("cross Split: %s" % pnstripped)
@@ -483,7 +557,7 @@ python do_checkpkg() {
         localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
         bb.data.update_data(localdata)
 
-    chk_uri = d.getVar('RECIPE_NEWVER_URI',True)
+    chk_uri = d.getVar('REGEX_URI', True)
     if not chk_uri:
         chk_uri = src_uri
     pdesc = localdata.getVar('DESCRIPTION', True)
@@ -537,14 +611,14 @@ python do_checkpkg() {
 
         """use new path and remove param. for wget only param is md5sum"""
         alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
-        my_uri = d.getVar('RECIPE_NEWVER_URI',True)
+        my_uri = d.getVar('REGEX_URI', True)
         if my_uri:
             newver = d.getVar('PV', True)
         else:
             newver = check_new_dir(alturi, dirver, d)
         altpath = path
         if not re.match("Err", newver) and dirver != newver:
             altpath = altpath.replace(dirver, newver, True)
         # For folder in folder cases - try to enter the folder again and then try parsing
         """Now try to acquire all remote files in current directory"""
         if not re.match("Err", newver):
@@ -556,7 +630,8 @@ python do_checkpkg() {
                 altpath = "/"
             else:
                 altpath = m.group()
-            chk_uri = d.getVar('RECIPE_NEWVER_URI',True)
+
+            chk_uri = d.getVar('REGEX_URI', True)
             if not chk_uri:
                 alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
             else:
@@ -568,12 +643,12 @@ python do_checkpkg() {
             else:
                 alturi = "/".join(alturi.split("/")[0:-2]) + "/download"
             newver = check_new_version(alturi, curname, d)
             if not re.match("Err", newver):
                 pupver = newver
                 if pupver != pcurver:
                     pstatus = "UPDATE"
                 else:
                     pstatus = "MATCH"
 
             if re.match("Err", newver):
                 pstatus = newver + ":" + altpath + ":" + dirver + ":" + curname
