author      Mei Lei <lei.mei@intel.com>                           2011-05-16 19:06:21 +0800
committer   Richard Purdie <richard.purdie@linuxfoundation.org>   2011-05-17 15:14:44 +0100
commit      60ab6fc60cba2ec2456125b618d23f98966468bd (patch)
tree        0bf154f18a3d70cee186299734d88f4a7faadab9 /meta
parent      afe43ed09086ea1497c88e07c90bff9fecb59ce8 (diff)
download    poky-60ab6fc60cba2ec2456125b618d23f98966468bd.tar.gz
Add a new task checklicense and fix some bugs in distro_check.py
distro_check.py: Create a new function, create_log_file(), to remove a large amount of duplicated code from distrodata.bbclass.
There is no need to create the log file in save_distro_check_result(), because it has already been generated in check_eventhandler.
Another bug is that /tmp/Meego-1.0 may be accessed before it has been created.
Add a check so that the file is created first when it does not yet exist.
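
For orientation, the call pattern that the BuildStarted event handlers share after this change looks roughly as follows (a condensed sketch of the code in the diff below; the CSV name and the header line differ per handler):

    # inside a BuildStarted event handler, e.g. distro_eventhandler
    import oe.distro_check as dc
    # create_log_file() makes LOG_DIR/<name>.<DATETIME>.csv, points a stable
    # <name>.csv symlink at it, records it in LOG_FILE and returns its path
    logfile = dc.create_log_file(e.data, "distrodata.csv")
    lf = bb.utils.lockfile("%s.lock" % logfile)
    f = open(logfile, "a")
    f.write("Package,Description,...\n")   # per-log column header
    f.close()
    bb.utils.unlockfile(lf)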
distrodata.bbclass: Add a new task, checklicense, to collect information about missing license text.
This helps the package-report system determine how many recipes are missing license text.
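
Assuming the class is enabled in the usual way (for example with INHERIT += "distrodata" in local.conf), the new task can then be invoked per recipe, or over a whole dependency tree via the *all variant; the recipe and image names below are only examples:

    $ bitbake -c checklicense busybox
    $ bitbake -c checklicenseall core-image-minimal

Results are appended to ${LOG_DIR}/missinglicense.csv as tab-separated Package/License/MissingLicense entries, matching the header written by checklicense_eventhandler.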
(From OE-Core rev: b41148cda9f0cc292b662a8473f26bc1ee0148f3)
Signed-off-by: Mei Lei <lei.mei@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta')
-rw-r--r--   meta/classes/distrodata.bbclass | 177
-rw-r--r--   meta/lib/oe/distro_check.py     |  27
2 files changed, 119 insertions, 85 deletions
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index 905dad7b9c..f24cff8dc7 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -4,19 +4,9 @@ addhandler distro_eventhandler
 python distro_eventhandler() {
 
     if bb.event.getName(e) == "BuildStarted":
-        """initialize log files."""
-        logpath = bb.data.getVar('LOG_DIR', e.data, 1)
-        bb.utils.mkdirhier(logpath)
-        logfile = os.path.join(logpath, "distrodata.%s.csv" % bb.data.getVar('DATETIME', e.data, 1))
-        if not os.path.exists(logfile):
-            slogfile = os.path.join(logpath, "distrodata.csv")
-            if os.path.exists(slogfile):
-                os.remove(slogfile)
-            os.system("touch %s" % logfile)
-            os.symlink(logfile, slogfile)
-            bb.data.setVar('LOG_FILE', logfile, e.data)
-
-        lf = bb.utils.lockfile(logfile + ".lock")
+        import oe.distro_check as dc
+        logfile = dc.create_log_file(e.data, "distrodata.csv")
+        lf = bb.utils.lockfile("%s.lock" % logfile)
         f = open(logfile, "a")
         f.write("Package,Description,Owner,License,ChkSum,Status,VerMatch,Version,Upsteam,Non-Update,Reason,Recipe Status\n")
         f.close()
@@ -33,9 +23,9 @@ python do_distrodata_np() {
     bb.note("Package Name: %s" % pn)
 
     import oe.distro_check as dist_check
-    tmpdir = bb.data.getVar('TMPDIR', d, 1)
+    tmpdir = bb.data.getVar('TMPDIR', d, True)
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    datetime = bb.data.getVar('DATETIME', localdata, 1)
+    datetime = bb.data.getVar('DATETIME', localdata, True)
     dist_check.update_distro_data(distro_check_dir, datetime)
 
     if pn.find("-native") != -1:
@@ -111,15 +101,15 @@ python do_distrodata_np() {
 addtask distrodata
 do_distrodata[nostamp] = "1"
 python do_distrodata() {
-    logpath = bb.data.getVar('LOG_DIR', d, 1)
+    logpath = bb.data.getVar('LOG_DIR', d, True)
     bb.utils.mkdirhier(logpath)
     logfile = os.path.join(logpath, "distrodata.csv")
 
     import oe.distro_check as dist_check
     localdata = bb.data.createCopy(d)
-    tmpdir = bb.data.getVar('TMPDIR', d, 1)
+    tmpdir = bb.data.getVar('TMPDIR', d, True)
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    datetime = bb.data.getVar('DATETIME', localdata, 1)
+    datetime = bb.data.getVar('DATETIME', localdata, True)
     dist_check.update_distro_data(distro_check_dir, datetime)
 
     pn = bb.data.getVar("PN", d, True)
@@ -189,7 +179,7 @@ python do_distrodata() {
     # do the comparison
     result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)
 
-    lf = bb.utils.lockfile(logfile + ".lock")
+    lf = bb.utils.lockfile("%s.lock" % logfile)
     f = open(logfile, "a")
     f.write("%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s," % \
         (pname, pdesc, maintainer, plicense, pchksum, hasrstatus, vermatch, pcurver, pupver, noupdate, noupdate_reason, rstatus))
@@ -211,19 +201,10 @@ do_distrodataall() {
 addhandler checkpkg_eventhandler
 python checkpkg_eventhandler() {
     if bb.event.getName(e) == "BuildStarted":
-        """initialize log files."""
-        logpath = bb.data.getVar('LOG_DIR', e.data, 1)
-        bb.utils.mkdirhier(logpath)
-        logfile = os.path.join(logpath, "checkpkg.%s.csv" % bb.data.getVar('DATETIME', e.data, 1))
-        if not os.path.exists(logfile):
-            slogfile = os.path.join(logpath, "checkpkg.csv")
-            if os.path.exists(slogfile):
-                os.remove(slogfile)
-            os.system("touch %s" % logfile)
-            os.symlink(logfile, slogfile)
-            bb.data.setVar('LOG_FILE', logfile, e.data)
-
-        lf = bb.utils.lockfile(logfile + ".lock")
+        import oe.distro_check as dc
+        logfile = dc.create_log_file(e.data, "checkpkg.csv")
+
+        lf = bb.utils.lockfile("%s.lock" % logfile)
         f = open(logfile, "a")
         f.write("Package\tVersion\tUpver\tLicense\tSection\tHome\tRelease\tPriority\tDepends\tBugTracker\tPE\tDescription\tStatus\tTracking\tURI\tMAINTAINER\n")
         f.close()
@@ -304,7 +285,7 @@ python do_checkpkg() {
     Clear internal url cache as it's a temporary check. Not doing so will have
     bitbake check url multiple times when looping through a single url
     """
-    fn = bb.data.getVar('FILE', d, 1)
+    fn = bb.data.getVar('FILE', d, True)
     bb.fetch2.urldata_cache[fn] = {}
 
     """
@@ -335,7 +316,7 @@ python do_checkpkg() {
     Return new version if success, or else error in "Errxxxx" style
     """
     def check_new_dir(url, curver, d):
-        pn = bb.data.getVar('PN', d, 1)
+        pn = bb.data.getVar('PN', d, True)
         f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
         status = internal_fetch_wget(url, d, f)
         fhtml = f.read()
@@ -394,7 +375,7 @@ python do_checkpkg() {
         """possible to have no version in pkg name, such as spectrum-fw"""
         if not re.search("\d+", curname):
             return pcurver
-        pn = bb.data.getVar('PN', d, 1)
+        pn = bb.data.getVar('PN', d, True)
         f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
         status = internal_fetch_wget(url, d, f)
         fhtml = f.read()
@@ -437,35 +418,35 @@ python do_checkpkg() {
         f.close()
         """if host hasn't directory information, no need to save tmp file"""
         if status != "ErrHostNoDir" and re.match("Err", status):
-            logpath = bb.data.getVar('LOG_DIR', d, 1)
+            logpath = bb.data.getVar('LOG_DIR', d, True)
             os.system("cp %s %s/" % (f.name, logpath))
         os.unlink(f.name)
         return status
 
     """first check whether a uri is provided"""
-    src_uri = bb.data.getVar('SRC_URI', d, 1)
+    src_uri = bb.data.getVar('SRC_URI', d, True)
     if not src_uri:
         return
 
     """initialize log files."""
-    logpath = bb.data.getVar('LOG_DIR', d, 1)
+    logpath = bb.data.getVar('LOG_DIR', d, True)
     bb.utils.mkdirhier(logpath)
     logfile = os.path.join(logpath, "checkpkg.csv")
 
     """generate package information from .bb file"""
-    pname = bb.data.getVar('PN', d, 1)
-    pdesc = bb.data.getVar('DESCRIPTION', d, 1)
-    pgrp = bb.data.getVar('SECTION', d, 1)
-    pversion = bb.data.getVar('PV', d, 1)
-    plicense = bb.data.getVar('LICENSE',d,1)
-    psection = bb.data.getVar('SECTION',d,1)
-    phome = bb.data.getVar('HOMEPAGE', d, 1)
-    prelease = bb.data.getVar('PR',d,1)
-    ppriority = bb.data.getVar('PRIORITY',d,1)
-    pdepends = bb.data.getVar('DEPENDS',d,1)
-    pbugtracker = bb.data.getVar('BUGTRACKER',d,1)
-    ppe = bb.data.getVar('PE',d,1)
-    psrcuri = bb.data.getVar('SRC_URI',d,1)
+    pname = bb.data.getVar('PN', d, True)
+    pdesc = bb.data.getVar('DESCRIPTION', d, True)
+    pgrp = bb.data.getVar('SECTION', d, True)
+    pversion = bb.data.getVar('PV', d, True)
+    plicense = bb.data.getVar('LICENSE', d, True)
+    psection = bb.data.getVar('SECTION', d, True)
+    phome = bb.data.getVar('HOMEPAGE', d, True)
+    prelease = bb.data.getVar('PR', d, True)
+    ppriority = bb.data.getVar('PRIORITY', d, True)
+    pdepends = bb.data.getVar('DEPENDS', d, True)
+    pbugtracker = bb.data.getVar('BUGTRACKER', d, True)
+    ppe = bb.data.getVar('PE', d, True)
+    psrcuri = bb.data.getVar('SRC_URI', d, True)
 
     found = 0
     for uri in src_uri.split():
@@ -483,9 +464,9 @@ python do_checkpkg() {
 
         (type, host, path, user, pswd, parm) = bb.decodeurl(uri)
         if type in ['http', 'https', 'ftp']:
-            pcurver = bb.data.getVar('PV', d, 1)
+            pcurver = bb.data.getVar('PV', d, True)
         else:
-            pcurver = bb.data.getVar("SRCREV", d, 1)
+            pcurver = bb.data.getVar("SRCREV", d, True)
 
         if type in ['http', 'https', 'ftp']:
             newver = pcurver
@@ -509,7 +490,7 @@ python do_checkpkg() {
             newver = check_new_dir(alturi, dirver, d)
             altpath = path
             if not re.match("Err", newver) and dirver != newver:
-                altpath = altpath.replace(dirver, newver, 1)
+                altpath = altpath.replace(dirver, newver, True)
 
             """Now try to acquire all remote files in current directory"""
             if not re.match("Err", newver):
@@ -625,7 +606,7 @@ python do_checkpkg() {
             pstatus += ":%s%s" % (host, path)
 
     """Read from manual distro tracking fields as alternative"""
-    pmver = bb.data.getVar("RECIPE_LATEST_VERSION", d, 1)
+    pmver = bb.data.getVar("RECIPE_LATEST_VERSION", d, True)
     if not pmver:
         pmver = "N/A"
         pmstatus = "ErrNoRecipeData"
@@ -639,7 +620,7 @@ python do_checkpkg() {
     psrcuri = psrcuri.split()[0]
     pdepends = "".join(pdepends.split("\t"))
     pdesc = "".join(pdesc.split("\t"))
-    lf = bb.utils.lockfile(logfile + ".lock")
+    lf = bb.utils.lockfile("%s.lock" % logfile)
     f = open(logfile, "a")
     f.write("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % \
         (pname,pversion,pupver,plicense,psection, phome,prelease, ppriority,pdepends,pbugtracker,ppe,pdesc,pstatus,pmver,psrcuri,maintainer))
@@ -654,25 +635,12 @@ do_checkpkgall() {
     :
 }
 
-#addhandler check_eventhandler
-python check_eventhandler() {
+addhandler distro_check_eventhandler
+python distro_check_eventhandler() {
     if bb.event.getName(e) == "BuildStarted":
-        import oe.distro_check as dc
-        tmpdir = bb.data.getVar('TMPDIR', e.data, 1)
-        distro_check_dir = os.path.join(tmpdir, "distro_check")
-        datetime = bb.data.getVar('DATETIME', e.data, 1)
         """initialize log files."""
-        logpath = bb.data.getVar('LOG_DIR', e.data, 1)
-        bb.utils.mkdirhier(logpath)
-        logfile = os.path.join(logpath, "distrocheck.%s.csv" % bb.data.getVar('DATETIME', e.data, 1))
-        if not os.path.exists(logfile):
-            slogfile = os.path.join(logpath, "distrocheck.csv")
-            if os.path.exists(slogfile):
-                os.remove(slogfile)
-            os.system("touch %s" % logfile)
-            os.symlink(logfile, slogfile)
-            bb.data.setVar('LOG_FILE', logfile, e.data)
-
+        import oe.distro_check as dc
+        result_file = dc.create_log_file(e.data, "distrocheck.csv")
     return
 }
 
@@ -681,18 +649,23 @@ do_distro_check[nostamp] = "1"
 python do_distro_check() {
     """checks if the package is present in other public Linux distros"""
     import oe.distro_check as dc
+    import bb
+    import shutil
     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
-    tmpdir = bb.data.getVar('TMPDIR', d, 1)
+    tmpdir = bb.data.getVar('TMPDIR', d, True)
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    datetime = bb.data.getVar('DATETIME', localdata, 1)
+    logpath = bb.data.getVar('LOG_DIR', d, True)
+    bb.utils.mkdirhier(logpath)
+    result_file = os.path.join(logpath, "distrocheck.csv")
+    datetime = bb.data.getVar('DATETIME', localdata, True)
     dc.update_distro_data(distro_check_dir, datetime)
 
     # do the comparison
     result = dc.compare_in_distro_packages_list(distro_check_dir, d)
 
     # save the results
-    dc.save_distro_check_result(result, datetime, d)
+    dc.save_distro_check_result(result, datetime, result_file, d)
 }
 
 addtask distro_checkall after do_distro_check
@@ -701,3 +674,55 @@ do_distro_checkall[nostamp] = "1"
 do_distro_checkall() {
     :
 }
+#
+#Check Missing License Text.
+#Use this task to generate the missing license text data for pkg-report system,
+#then we can search those recipes which license text isn't exsit in common-licenses directory
+#
+addhandler checklicense_eventhandler
+python checklicense_eventhandler() {
+    if bb.event.getName(e) == "BuildStarted":
+        """initialize log files."""
+        import oe.distro_check as dc
+        logfile = dc.create_log_file(e.data, "missinglicense.csv")
+        lf = bb.utils.lockfile("%s.lock" % logfile)
+        f = open(logfile, "a")
+        f.write("Package\tLicense\tMissingLicense\n")
+        f.close()
+        bb.utils.unlockfile(lf)
+    return
+}
+
+addtask checklicense
+do_checklicense[nostamp] = "1"
+python do_checklicense() {
+    import os
+    import bb
+    import shutil
+    logpath = bb.data.getVar('LOG_DIR', d, True)
+    bb.utils.mkdirhier(logpath)
+    pn = bb.data.getVar('PN', d, True)
+    logfile = os.path.join(logpath, "missinglicense.csv")
+    generic_directory = bb.data.getVar('COMMON_LICENSE_DIR', d, True)
+    license_types = bb.data.getVar('LICENSE', d, True)
+    for license_type in ((license_types.replace('+', '').replace('|', '&')
+                          .replace('(', '').replace(')', '').replace(';', '')
+                          .replace(',', '').replace(" ", "").split("&"))):
+        if not os.path.isfile(os.path.join(generic_directory, license_type)):
+            lf = bb.utils.lockfile("%s.lock" % logfile)
+            f = open(logfile, "a")
+            f.write("%s\t%s\t%s\n" % \
+                (pn,license_types,license_type))
+            f.close()
+            bb.utils.unlockfile(lf)
+    return
+}
+
+addtask checklicenseall after do_checklicense
+do_checklicenseall[recrdeptask] = "do_checklicense"
+do_checklicenseall[nostamp] = "1"
+do_checklicenseall() {
+    :
+}
+
+
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py
index c85d4fb28b..55cdcad461 100644
--- a/meta/lib/oe/distro_check.py
+++ b/meta/lib/oe/distro_check.py
@@ -73,7 +73,8 @@ def clean_package_list(package_list):
 def get_latest_released_meego_source_package_list():
     "Returns list of all the name os packages in the latest meego distro"
 
-
+    if not os.path.isfile("/tmp/Meego-1.0"):
+        os.mknod("/tmp/Meego-1.0")
     f = open("/tmp/Meego-1.0", "r")
     package_names = []
     for line in f:
@@ -341,7 +342,22 @@ def compare_in_distro_packages_list(distro_check_dir, d):
     bb.note("Matching: %s" % matching_distros)
     return matching_distros
 
-def save_distro_check_result(result, datetime, d):
+def create_log_file(d, logname):
+    logpath = bb.data.getVar('LOG_DIR', d, True)
+    bb.utils.mkdirhier(logpath)
+    logfn, logsuffix = os.path.splitext(logname)
+    logfile = os.path.join(logpath, "%s.%s%s" % (logfn, bb.data.getVar('DATETIME', d, True), logsuffix))
+    if not os.path.exists(logfile):
+        slogfile = os.path.join(logpath, logname)
+        if os.path.exists(slogfile):
+            os.remove(slogfile)
+        os.system("touch %s" % logfile)
+        os.symlink(logfile, slogfile)
+        bb.data.setVar('LOG_FILE', logfile, d)
+    return logfile
+
+
+def save_distro_check_result(result, datetime, result_file, d):
     pn = bb.data.getVar('PN', d, True)
     logdir = bb.data.getVar('LOG_DIR', d, True)
     if not logdir:
@@ -349,16 +365,9 @@ def save_distro_check_result(result, datetime, d):
         return
     if not os.path.isdir(logdir):
         os.makedirs(logdir)
-    result_file = os.path.join(logdir, "distrocheck.%s.csv" % datetime)
     line = pn
     for i in result:
         line = line + "," + i
-    if not os.path.exists(result_file):
-        sresult_file = os.path.join(logdir, "distrocheck.csv")
-        if os.path.exists(sresult_file):
-            os.remove(sresult_file)
-        os.system("touch %s" % result_file)
-        os.symlink(result_file, sresult_file)
     f = open(result_file, "a")
     import fcntl
     fcntl.lockf(f, fcntl.LOCK_EX)