Diffstat (limited to 'meta/classes/distrodata.bbclass')
 meta/classes/distrodata.bbclass | 177
 1 file changed, 101 insertions(+), 76 deletions(-)
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index 905dad7b9c..f24cff8dc7 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -4,19 +4,9 @@ addhandler distro_eventhandler
 python distro_eventhandler() {
 
     if bb.event.getName(e) == "BuildStarted":
-        """initialize log files."""
-        logpath = bb.data.getVar('LOG_DIR', e.data, 1)
-        bb.utils.mkdirhier(logpath)
-        logfile = os.path.join(logpath, "distrodata.%s.csv" % bb.data.getVar('DATETIME', e.data, 1))
-        if not os.path.exists(logfile):
-            slogfile = os.path.join(logpath, "distrodata.csv")
-            if os.path.exists(slogfile):
-                os.remove(slogfile)
-            os.system("touch %s" % logfile)
-            os.symlink(logfile, slogfile)
-            bb.data.setVar('LOG_FILE', logfile, e.data)
-
-        lf = bb.utils.lockfile(logfile + ".lock")
+        import oe.distro_check as dc
+        logfile = dc.create_log_file(e.data, "distrodata.csv")
+        lf = bb.utils.lockfile("%s.lock" % logfile)
         f = open(logfile, "a")
         f.write("Package,Description,Owner,License,ChkSum,Status,VerMatch,Version,Upsteam,Non-Update,Reason,Recipe Status\n")
         f.close()
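
The handler above (like the checkpkg and distro_check handlers later in this patch) now delegates log-file setup to oe.distro_check.create_log_file(), which lives in meta/lib/oe/distro_check.py and is not shown in this diff. Based on the boilerplate it replaces, a minimal sketch of such a helper might look like the following; the names and details are assumptions, not the actual implementation:

    # Hypothetical sketch of create_log_file(), inferred from the removed lines above.
    def create_log_file(d, logname):
        import os
        import bb
        logpath = bb.data.getVar('LOG_DIR', d, True)
        bb.utils.mkdirhier(logpath)
        # keep a dated file plus a stable symlink, as the removed code did
        base, ext = os.path.splitext(logname)
        logfile = os.path.join(logpath, "%s.%s%s" % (base, bb.data.getVar('DATETIME', d, True), ext))
        if not os.path.exists(logfile):
            slogfile = os.path.join(logpath, logname)
            if os.path.exists(slogfile):
                os.remove(slogfile)
            open(logfile, 'w').close()        # create the empty log without shelling out to "touch"
            os.symlink(logfile, slogfile)
            bb.data.setVar('LOG_FILE', logfile, d)
        return logfile
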
@@ -33,9 +23,9 @@ python do_distrodata_np() {
     bb.note("Package Name: %s" % pn)
 
     import oe.distro_check as dist_check
-    tmpdir = bb.data.getVar('TMPDIR', d, 1)
+    tmpdir = bb.data.getVar('TMPDIR', d, True)
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    datetime = bb.data.getVar('DATETIME', localdata, 1)
+    datetime = bb.data.getVar('DATETIME', localdata, True)
     dist_check.update_distro_data(distro_check_dir, datetime)
 
     if pn.find("-native") != -1:
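
The recurring change from bb.data.getVar(..., d, 1) to bb.data.getVar(..., d, True) throughout this patch only spells out the third argument, the expand flag, as a boolean; behaviour is unchanged since True == 1. (One hunk also applies the same substitution to str.replace()'s count argument, where True likewise still behaves as 1.) For illustration, with made-up variable values:

    # Assuming TMPDIR = "${TOPDIR}/tmp" and TOPDIR = "/home/user/build" in the datastore d:
    raw      = bb.data.getVar('TMPDIR', d, False)  # "${TOPDIR}/tmp"        (unexpanded)
    expanded = bb.data.getVar('TMPDIR', d, True)   # "/home/user/build/tmp" (references expanded)
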
@@ -111,15 +101,15 @@ python do_distrodata_np() {
 addtask distrodata
 do_distrodata[nostamp] = "1"
 python do_distrodata() {
-    logpath = bb.data.getVar('LOG_DIR', d, 1)
+    logpath = bb.data.getVar('LOG_DIR', d, True)
     bb.utils.mkdirhier(logpath)
     logfile = os.path.join(logpath, "distrodata.csv")
 
     import oe.distro_check as dist_check
     localdata = bb.data.createCopy(d)
-    tmpdir = bb.data.getVar('TMPDIR', d, 1)
+    tmpdir = bb.data.getVar('TMPDIR', d, True)
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    datetime = bb.data.getVar('DATETIME', localdata, 1)
+    datetime = bb.data.getVar('DATETIME', localdata, True)
     dist_check.update_distro_data(distro_check_dir, datetime)
 
     pn = bb.data.getVar("PN", d, True)
@@ -189,7 +179,7 @@ python do_distrodata() {
     # do the comparison
     result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)
 
-    lf = bb.utils.lockfile(logfile + ".lock")
+    lf = bb.utils.lockfile("%s.lock" % logfile)
     f = open(logfile, "a")
     f.write("%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s," % \
         (pname, pdesc, maintainer, plicense, pchksum, hasrstatus, vermatch, pcurver, pupver, noupdate, noupdate_reason, rstatus))
@@ -211,19 +201,10 @@ do_distrodataall() {
 addhandler checkpkg_eventhandler
 python checkpkg_eventhandler() {
     if bb.event.getName(e) == "BuildStarted":
-        """initialize log files."""
-        logpath = bb.data.getVar('LOG_DIR', e.data, 1)
-        bb.utils.mkdirhier(logpath)
-        logfile = os.path.join(logpath, "checkpkg.%s.csv" % bb.data.getVar('DATETIME', e.data, 1))
-        if not os.path.exists(logfile):
-            slogfile = os.path.join(logpath, "checkpkg.csv")
-            if os.path.exists(slogfile):
-                os.remove(slogfile)
-            os.system("touch %s" % logfile)
-            os.symlink(logfile, slogfile)
-            bb.data.setVar('LOG_FILE', logfile, e.data)
-
-        lf = bb.utils.lockfile(logfile + ".lock")
+        import oe.distro_check as dc
+        logfile = dc.create_log_file(e.data, "checkpkg.csv")
+
+        lf = bb.utils.lockfile("%s.lock" % logfile)
         f = open(logfile, "a")
         f.write("Package\tVersion\tUpver\tLicense\tSection\tHome\tRelease\tPriority\tDepends\tBugTracker\tPE\tDescription\tStatus\tTracking\tURI\tMAINTAINER\n")
         f.close()
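
The lock-path change from logfile + ".lock" to "%s.lock" % logfile is purely cosmetic; both build the same file name. The surrounding pattern, used for every CSV write in this class, is to take the lock, append one row, close the file, and release the lock so concurrent tasks cannot interleave output. A condensed sketch of that pattern (row content is illustrative):

    lf = bb.utils.lockfile("%s.lock" % logfile)   # blocks until the lock is acquired
    f = open(logfile, "a")
    f.write("example-package\t1.0\t1.1\tMIT\n")   # one tab-separated row per recipe
    f.close()
    bb.utils.unlockfile(lf)                       # release so other tasks can append
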
@@ -304,7 +285,7 @@ python do_checkpkg() {
     Clear internal url cache as it's a temporary check. Not doing so will have
     bitbake check url multiple times when looping through a single url
     """
-    fn = bb.data.getVar('FILE', d, 1)
+    fn = bb.data.getVar('FILE', d, True)
     bb.fetch2.urldata_cache[fn] = {}
 
     """
@@ -335,7 +316,7 @@ python do_checkpkg() {
     Return new version if success, or else error in "Errxxxx" style
     """
     def check_new_dir(url, curver, d):
-        pn = bb.data.getVar('PN', d, 1)
+        pn = bb.data.getVar('PN', d, True)
         f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
         status = internal_fetch_wget(url, d, f)
         fhtml = f.read()
@@ -394,7 +375,7 @@ python do_checkpkg() {
         """possible to have no version in pkg name, such as spectrum-fw"""
         if not re.search("\d+", curname):
             return pcurver
-        pn = bb.data.getVar('PN', d, 1)
+        pn = bb.data.getVar('PN', d, True)
         f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
         status = internal_fetch_wget(url, d, f)
         fhtml = f.read()
@@ -437,35 +418,35 @@ python do_checkpkg() {
         f.close()
         """if host hasn't directory information, no need to save tmp file"""
         if status != "ErrHostNoDir" and re.match("Err", status):
-            logpath = bb.data.getVar('LOG_DIR', d, 1)
+            logpath = bb.data.getVar('LOG_DIR', d, True)
             os.system("cp %s %s/" % (f.name, logpath))
         os.unlink(f.name)
         return status
 
     """first check whether a uri is provided"""
-    src_uri = bb.data.getVar('SRC_URI', d, 1)
+    src_uri = bb.data.getVar('SRC_URI', d, True)
     if not src_uri:
         return
 
     """initialize log files."""
-    logpath = bb.data.getVar('LOG_DIR', d, 1)
+    logpath = bb.data.getVar('LOG_DIR', d, True)
     bb.utils.mkdirhier(logpath)
     logfile = os.path.join(logpath, "checkpkg.csv")
 
     """generate package information from .bb file"""
-    pname = bb.data.getVar('PN', d, 1)
-    pdesc = bb.data.getVar('DESCRIPTION', d, 1)
-    pgrp = bb.data.getVar('SECTION', d, 1)
-    pversion = bb.data.getVar('PV', d, 1)
-    plicense = bb.data.getVar('LICENSE',d,1)
-    psection = bb.data.getVar('SECTION',d,1)
-    phome = bb.data.getVar('HOMEPAGE', d, 1)
-    prelease = bb.data.getVar('PR',d,1)
-    ppriority = bb.data.getVar('PRIORITY',d,1)
-    pdepends = bb.data.getVar('DEPENDS',d,1)
-    pbugtracker = bb.data.getVar('BUGTRACKER',d,1)
-    ppe = bb.data.getVar('PE',d,1)
-    psrcuri = bb.data.getVar('SRC_URI',d,1)
+    pname = bb.data.getVar('PN', d, True)
+    pdesc = bb.data.getVar('DESCRIPTION', d, True)
+    pgrp = bb.data.getVar('SECTION', d, True)
+    pversion = bb.data.getVar('PV', d, True)
+    plicense = bb.data.getVar('LICENSE', d, True)
+    psection = bb.data.getVar('SECTION', d, True)
+    phome = bb.data.getVar('HOMEPAGE', d, True)
+    prelease = bb.data.getVar('PR', d, True)
+    ppriority = bb.data.getVar('PRIORITY', d, True)
+    pdepends = bb.data.getVar('DEPENDS', d, True)
+    pbugtracker = bb.data.getVar('BUGTRACKER', d, True)
+    ppe = bb.data.getVar('PE', d, True)
+    psrcuri = bb.data.getVar('SRC_URI', d, True)
 
     found = 0
     for uri in src_uri.split():
@@ -483,9 +464,9 @@ python do_checkpkg() {
 
         (type, host, path, user, pswd, parm) = bb.decodeurl(uri)
         if type in ['http', 'https', 'ftp']:
-            pcurver = bb.data.getVar('PV', d, 1)
+            pcurver = bb.data.getVar('PV', d, True)
         else:
-            pcurver = bb.data.getVar("SRCREV", d, 1)
+            pcurver = bb.data.getVar("SRCREV", d, True)
 
         if type in ['http', 'https', 'ftp']:
             newver = pcurver
@@ -509,7 +490,7 @@ python do_checkpkg() {
             newver = check_new_dir(alturi, dirver, d)
             altpath = path
             if not re.match("Err", newver) and dirver != newver:
-                altpath = altpath.replace(dirver, newver, 1)
+                altpath = altpath.replace(dirver, newver, True)
 
             """Now try to acquire all remote files in current directory"""
             if not re.match("Err", newver):
@@ -625,7 +606,7 @@ python do_checkpkg() {
             pstatus += ":%s%s" % (host, path)
 
         """Read from manual distro tracking fields as alternative"""
-        pmver = bb.data.getVar("RECIPE_LATEST_VERSION", d, 1)
+        pmver = bb.data.getVar("RECIPE_LATEST_VERSION", d, True)
         if not pmver:
             pmver = "N/A"
             pmstatus = "ErrNoRecipeData"
@@ -639,7 +620,7 @@ python do_checkpkg() {
         psrcuri = psrcuri.split()[0]
         pdepends = "".join(pdepends.split("\t"))
         pdesc = "".join(pdesc.split("\t"))
-        lf = bb.utils.lockfile(logfile + ".lock")
+        lf = bb.utils.lockfile("%s.lock" % logfile)
         f = open(logfile, "a")
         f.write("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % \
             (pname,pversion,pupver,plicense,psection, phome,prelease, ppriority,pdepends,pbugtracker,ppe,pdesc,pstatus,pmver,psrcuri,maintainer))
@@ -654,25 +635,12 @@ do_checkpkgall() {
     :
 }
 
-#addhandler check_eventhandler
-python check_eventhandler() {
+addhandler distro_check_eventhandler
+python distro_check_eventhandler() {
     if bb.event.getName(e) == "BuildStarted":
-        import oe.distro_check as dc
-        tmpdir = bb.data.getVar('TMPDIR', e.data, 1)
-        distro_check_dir = os.path.join(tmpdir, "distro_check")
-        datetime = bb.data.getVar('DATETIME', e.data, 1)
         """initialize log files."""
-        logpath = bb.data.getVar('LOG_DIR', e.data, 1)
-        bb.utils.mkdirhier(logpath)
-        logfile = os.path.join(logpath, "distrocheck.%s.csv" % bb.data.getVar('DATETIME', e.data, 1))
-        if not os.path.exists(logfile):
-            slogfile = os.path.join(logpath, "distrocheck.csv")
-            if os.path.exists(slogfile):
-                os.remove(slogfile)
-            os.system("touch %s" % logfile)
-            os.symlink(logfile, slogfile)
-            bb.data.setVar('LOG_FILE', logfile, e.data)
-
+        import oe.distro_check as dc
+        result_file = dc.create_log_file(e.data, "distrocheck.csv")
     return
 }
 
@@ -681,18 +649,23 @@ do_distro_check[nostamp] = "1"
 python do_distro_check() {
     """checks if the package is present in other public Linux distros"""
     import oe.distro_check as dc
+    import bb
+    import shutil
     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
-    tmpdir = bb.data.getVar('TMPDIR', d, 1)
+    tmpdir = bb.data.getVar('TMPDIR', d, True)
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    datetime = bb.data.getVar('DATETIME', localdata, 1)
+    logpath = bb.data.getVar('LOG_DIR', d, True)
+    bb.utils.mkdirhier(logpath)
+    result_file = os.path.join(logpath, "distrocheck.csv")
+    datetime = bb.data.getVar('DATETIME', localdata, True)
     dc.update_distro_data(distro_check_dir, datetime)
 
     # do the comparison
     result = dc.compare_in_distro_packages_list(distro_check_dir, d)
 
     # save the results
-    dc.save_distro_check_result(result, datetime, d)
+    dc.save_distro_check_result(result, datetime, result_file, d)
 }
 
 addtask distro_checkall after do_distro_check
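
do_distro_check now creates distrocheck.csv itself and passes its path to dc.save_distro_check_result(), whose new four-argument signature is visible in the hunk above. The helper's body sits in meta/lib/oe/distro_check.py and is outside this diff; presumably it appends the comparison result for the current recipe to result_file under the same lockfile pattern used elsewhere, roughly like:

    # Rough, assumed sketch only; the actual implementation is not part of this patch.
    def save_distro_check_result(result, datetime, result_file, d):
        pn = bb.data.getVar('PN', d, True)
        lf = bb.utils.lockfile("%s.lock" % result_file)
        f = open(result_file, "a")
        f.write("%s,%s,%s\n" % (pn, datetime, result))   # one line per recipe checked
        f.close()
        bb.utils.unlockfile(lf)
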
@@ -701,3 +674,55 @@ do_distro_checkall[nostamp] = "1"
 do_distro_checkall() {
     :
 }
+#
+#Check Missing License Text.
+#Use this task to generate the missing license text data for pkg-report system,
+#then we can search those recipes which license text isn't exsit in common-licenses directory
+#
+addhandler checklicense_eventhandler
+python checklicense_eventhandler() {
+    if bb.event.getName(e) == "BuildStarted":
+        """initialize log files."""
+        import oe.distro_check as dc
+        logfile = dc.create_log_file(e.data, "missinglicense.csv")
+        lf = bb.utils.lockfile("%s.lock" % logfile)
+        f = open(logfile, "a")
+        f.write("Package\tLicense\tMissingLicense\n")
+        f.close()
+        bb.utils.unlockfile(lf)
+    return
+}
+
+addtask checklicense
+do_checklicense[nostamp] = "1"
+python do_checklicense() {
+    import os
+    import bb
+    import shutil
+    logpath = bb.data.getVar('LOG_DIR', d, True)
+    bb.utils.mkdirhier(logpath)
+    pn = bb.data.getVar('PN', d, True)
+    logfile = os.path.join(logpath, "missinglicense.csv")
+    generic_directory = bb.data.getVar('COMMON_LICENSE_DIR', d, True)
+    license_types = bb.data.getVar('LICENSE', d, True)
+    for license_type in ((license_types.replace('+', '').replace('|', '&')
+                          .replace('(', '').replace(')', '').replace(';', '')
+                          .replace(',', '').replace(" ", "").split("&"))):
+        if not os.path.isfile(os.path.join(generic_directory, license_type)):
+            lf = bb.utils.lockfile("%s.lock" % logfile)
+            f = open(logfile, "a")
+            f.write("%s\t%s\t%s\n" % \
+                (pn,license_types,license_type))
+            f.close()
+            bb.utils.unlockfile(lf)
+    return
+}
+
+addtask checklicenseall after do_checklicense
+do_checklicenseall[recrdeptask] = "do_checklicense"
+do_checklicenseall[nostamp] = "1"
+do_checklicenseall() {
+    :
+}
+
+
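
For reference, the string mangling in the new do_checklicense task simply flattens BitBake's LICENSE expressions into individual license names before checking each one for a matching file under COMMON_LICENSE_DIR. A worked example with a made-up LICENSE value:

    # Made-up input; mirrors the replace/split chain in do_checklicense above.
    license_types = "GPLv2+ | (LGPLv2.1 & MIT)"
    flat = (license_types.replace('+', '').replace('|', '&')
            .replace('(', '').replace(')', '').replace(';', '')
            .replace(',', '').replace(" ", "").split("&"))
    print(flat)   # ['GPLv2', 'LGPLv2.1', 'MIT'] -- any name missing as a file under
                  # COMMON_LICENSE_DIR gets a row appended to missinglicense.csv
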