author      Irina Patru <irina.patru@intel.com>                    2014-04-28 14:08:46 +0300
committer   Richard Purdie <richard.purdie@linuxfoundation.org>    2014-04-29 17:20:12 +0100
commit      59738ffd47c65e9c661016acdc0430f375f80087
tree        45d691f1cdd948ef91550f6e49f1fb43b7fcb02e /meta/classes/distrodata.bbclass
parent      c0df4f67a84c03b538fd3e8f46922a9af642d404
download    poky-59738ffd47c65e9c661016acdc0430f375f80087.tar.gz
distrodata.bbclass: Fix checkpkg functionality
checkpkg stopped working because the *COMMAND variables were removed
from the fetcher.
Now checkpkg builds the command internally and passes it as a parameter
to the _runwget() function of the wget fetcher.
(From OE-Core rev: b9a51fc1901c378375cca041da27ddbd450c0412)
Signed-off-by: Irina Patru <irina.patru@intel.com>
Signed-off-by: Saul Wold <sgw@linux.intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
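
For reference, the reworked fetch path boils down to the sketch below. This is not part of the patch itself: probe_uri is a hypothetical wrapper name, and the sketch assumes it runs in a BitBake task context where the bb modules are importable, d is the task's datastore, and uri is the URL being probed.

# Minimal sketch of the new check, mirroring the patched internal_fetch_wget().
# 'probe_uri' is an illustrative name, not a function from the commit.
import tempfile

import bb.fetch2
import bb.fetch2.wget

def probe_uri(uri, d):
    # One FetchData object per URI replaces the old per-recipe url cache trick.
    ud = bb.fetch2.FetchData(uri, d)
    tmpf = tempfile.NamedTemporaryFile(delete=False)
    agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 " \
            "Ubuntu/9.10 (karmic) Firefox/3.6.12"
    # The wget command is assembled locally instead of through the removed
    # CHECKCOMMAND_wget variable, then handed to the wget fetcher to execute.
    fetchcmd = "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"%s\" '%s'" \
               % (tmpf.name, agent, uri)
    try:
        fetcher = bb.fetch2.wget.Wget(d)
        fetcher._runwget(ud, d, fetchcmd, True)   # quiet run; raises on failure
        return "SUCC"
    except bb.fetch2.BBFetchException:
        return "ErrFetch"

Failures still surface as bb.fetch2.BBFetchException, which do_checkpkg() maps to an "ErrFetch" status instead of aborting the build.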
Diffstat (limited to 'meta/classes/distrodata.bbclass')
-rw-r--r--   meta/classes/distrodata.bbclass | 40
1 file changed, 13 insertions(+), 27 deletions(-)
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index b47358b059..a890de7911 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -352,29 +352,14 @@ python do_checkpkg() {
     We don't want to exit whole build due to one recipe error. So handle all exceptions
     gracefully w/o leaking to outer.
     """
-    def internal_fetch_wget(url, d, tmpf):
+    def internal_fetch_wget(url, ud, d, tmpf):
         status = "ErrFetchUnknown"
-        """
-        Clear internal url cache as it's a temporary check. Not doing so will have
-        bitbake check url multiple times when looping through a single url
-        """
-        fn = d.getVar('FILE', True)
-        bb.fetch2.urldata_cache[fn] = {}
-
-        """
-        To avoid impacting bitbake build engine, this trick is required for reusing bitbake
-        interfaces. bb.fetch.go() is not appliable as it checks downloaded content in ${DL_DIR}
-        while we don't want to pollute that place. So bb.fetch2.checkstatus() is borrowed here
-        which is designed for check purpose but we override check command for our own purpose
-        """
-        ld = bb.data.createCopy(d)
-        d.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12\" '${URI}'" \
-                % tmpf.name)
-        bb.data.update_data(ld)

+        agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
+        fetchcmd = "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"%s\" '%s'" % (tmpf.name, agent, url)
         try:
-            fetcher = bb.fetch2.Fetch([url], ld)
-            fetcher.checkstatus()
+            fetcher = bb.fetch2.wget.Wget(d)
+            fetcher._runwget(ud, d, fetchcmd, True)
             status = "SUCC"
         except bb.fetch2.BBFetchException, e:
             status = "ErrFetch"
@@ -388,10 +373,10 @@ python do_checkpkg() {
     'curver' - current version
     Return new version if success, or else error in "Errxxxx" style
     """
-    def check_new_dir(url, curver, d):
+    def check_new_dir(url, curver, ud, d):
         pn = d.getVar('PN', True)
         f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
-        status = internal_fetch_wget(url, d, f)
+        status = internal_fetch_wget(url, ud, d, f)
         fhtml = f.read()
         if status == "SUCC" and len(fhtml):
             newver = parse_inter(curver)
@@ -447,14 +432,14 @@ python do_checkpkg() {
     'curname' - current package name
     Return new version if success, or else error in "Errxxxx" style
     """
-    def check_new_version(url, curname, d):
+    def check_new_version(url, curname, ud, d):
         """possible to have no version in pkg name, such as spectrum-fw"""
         if not re.search("\d+", curname):
             return pcurver
         pn = d.getVar('PN', True)
         newver_regex = d.getVar('REGEX', True)
         f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
-        status = internal_fetch_wget(url, d, f)
+        status = internal_fetch_wget(url, ud, d, f)
         fhtml = f.read()

         if status == "SUCC" and len(fhtml):
@@ -605,6 +590,7 @@ python do_checkpkg() {


         if type in ['http', 'https', 'ftp']:
+            ud = bb.fetch2.FetchData(uri, d)
             newver = pcurver
             altpath = path
             dirver = "-"
@@ -629,7 +615,7 @@ python do_checkpkg() {
                     else:
                         newver = d.getVar('PV', True)
                 else:
-                    newver = check_new_dir(alturi, dirver, d)
+                    newver = check_new_dir(alturi, dirver, ud, d)
                 altpath = path
                 if not re.match("Err", newver) and dirver != newver:
                     altpath = altpath.replace(dirver, newver, True)
@@ -650,13 +636,13 @@ python do_checkpkg() {
                     alturi = bb.fetch.encodeurl([type, host, altpath, user, pswd, {}])
                 else:
                     alturi = chk_uri
-                newver = check_new_version(alturi, curname, d)
+                newver = check_new_version(alturi, curname, ud, d)
                 while(newver == "ErrHostNoDir"):
                     if alturi == "/download":
                         break
                     else:
                         alturi = "/".join(alturi.split("/")[0:-2]) + "/download"
-                        newver = check_new_version(alturi, curname, d)
+                        newver = check_new_version(alturi, curname, ud, d)
                 if not re.match("Err", newver):
                     pupver = newver
                 if pupver != pcurver: