Diffstat (limited to 'meta/classes/distrodata.bbclass')
 -rw-r--r--  meta/classes/distrodata.bbclass | 162
 1 file changed, 81 insertions, 81 deletions
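The patch below is a mechanical conversion of this class from the module-level `bb.data.getVar(VAR, d, expand)` helper to the datastore's own `d.getVar(VAR, expand)` accessor; the `bb.data.setVar()` and `bb.data.update_data()` calls are left untouched. A minimal sketch of the pattern, assuming it runs inside a BitBake python task where the datastore `d` is in scope (the variable name below is illustrative, not taken from the patch):

    # Old style: pass the datastore to the module-level helper.
    pv = bb.data.getVar('PV', d, True)   # True = expand the value

    # New style used throughout this patch: ask the datastore directly.
    pv = d.getVar('PV', True)            # same expand flag, same result

Both forms return the expanded value of the variable (or None if it is unset), so the substitution is behaviour-preserving.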
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index ce7b931b13..687247a649 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -19,87 +19,87 @@ addtask distrodata_np
 do_distrodata_np[nostamp] = "1"
 python do_distrodata_np() {
     localdata = bb.data.createCopy(d)
-    pn = bb.data.getVar("PN", d, True)
+    pn = d.getVar("PN", True)
     bb.note("Package Name: %s" % pn)

     import oe.distro_check as dist_check
-    tmpdir = bb.data.getVar('TMPDIR', d, True)
+    tmpdir = d.getVar('TMPDIR', True)
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    datetime = bb.data.getVar('DATETIME', localdata, True)
+    datetime = localdata.getVar('DATETIME', True)
     dist_check.update_distro_data(distro_check_dir, datetime)

     if pn.find("-native") != -1:
         pnstripped = pn.split("-native")
         bb.note("Native Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)

     if pn.find("-nativesdk") != -1:
         pnstripped = pn.split("-nativesdk")
         bb.note("Native Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)

     if pn.find("-cross") != -1:
         pnstripped = pn.split("-cross")
         bb.note("cross Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)

     if pn.find("-crosssdk") != -1:
         pnstripped = pn.split("-crosssdk")
         bb.note("cross Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)

     if pn.find("-initial") != -1:
         pnstripped = pn.split("-initial")
         bb.note("initial Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)

     """generate package information from .bb file"""
-    pname = bb.data.getVar('PN', localdata, True)
-    pcurver = bb.data.getVar('PV', localdata, True)
-    pdesc = bb.data.getVar('DESCRIPTION', localdata, True)
+    pname = localdata.getVar('PN', True)
+    pcurver = localdata.getVar('PV', True)
+    pdesc = localdata.getVar('DESCRIPTION', True)
     if pdesc is not None:
         pdesc = pdesc.replace(',','')
         pdesc = pdesc.replace('\n','')

-    pgrp = bb.data.getVar('SECTION', localdata, True)
-    plicense = bb.data.getVar('LICENSE', localdata, True).replace(',','_')
-    if bb.data.getVar('LIC_FILES_CHKSUM', localdata, True):
+    pgrp = localdata.getVar('SECTION', True)
+    plicense = localdata.getVar('LICENSE', True).replace(',','_')
+    if localdata.getVar('LIC_FILES_CHKSUM', True):
         pchksum="1"
     else:
         pchksum="0"

-    if bb.data.getVar('RECIPE_STATUS', localdata, True):
+    if localdata.getVar('RECIPE_STATUS', True):
         hasrstatus="1"
     else:
         hasrstatus="0"

-    rstatus = bb.data.getVar('RECIPE_STATUS', localdata, True)
+    rstatus = localdata.getVar('RECIPE_STATUS', True)
     if rstatus is not None:
         rstatus = rstatus.replace(',','')

-    pupver = bb.data.getVar('RECIPE_LATEST_VERSION', localdata, True)
+    pupver = localdata.getVar('RECIPE_LATEST_VERSION', True)
     if pcurver == pupver:
         vermatch="1"
     else:
         vermatch="0"
-    noupdate_reason = bb.data.getVar('RECIPE_NO_UPDATE_REASON', localdata, True)
+    noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
     if noupdate_reason is None:
         noupdate="0"
     else:
         noupdate="1"
         noupdate_reason = noupdate_reason.replace(',','')

-    ris = bb.data.getVar('RECIPE_INTEL_SECTION', localdata, True)
-    maintainer = bb.data.getVar('RECIPE_MAINTAINER', localdata, True)
-    rttr = bb.data.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', localdata, True)
-    rlrd = bb.data.getVar('RECIPE_LATEST_RELEASE_DATE', localdata, True)
-    dc = bb.data.getVar('DEPENDENCY_CHECK', localdata, True)
-    rc = bb.data.getVar('RECIPE_COMMENTS', localdata, True)
+    ris = localdata.getVar('RECIPE_INTEL_SECTION', True)
+    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
+    rttr = localdata.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', True)
+    rlrd = localdata.getVar('RECIPE_LATEST_RELEASE_DATE', True)
+    dc = localdata.getVar('DEPENDENCY_CHECK', True)
+    rc = localdata.getVar('RECIPE_COMMENTS', True)
     result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)

     bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s, %s, %s, %s\n" % \
@@ -113,81 +113,81 @@ python do_distrodata_np() {
 addtask distrodata
 do_distrodata[nostamp] = "1"
 python do_distrodata() {
-    logpath = bb.data.getVar('LOG_DIR', d, True)
+    logpath = d.getVar('LOG_DIR', True)
     bb.utils.mkdirhier(logpath)
     logfile = os.path.join(logpath, "distrodata.csv")

     import oe.distro_check as dist_check
     localdata = bb.data.createCopy(d)
-    tmpdir = bb.data.getVar('TMPDIR', d, True)
+    tmpdir = d.getVar('TMPDIR', True)
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    datetime = bb.data.getVar('DATETIME', localdata, True)
+    datetime = localdata.getVar('DATETIME', True)
     dist_check.update_distro_data(distro_check_dir, datetime)

-    pn = bb.data.getVar("PN", d, True)
+    pn = d.getVar("PN", True)
     bb.note("Package Name: %s" % pn)

     if pn.find("-native") != -1:
         pnstripped = pn.split("-native")
         bb.note("Native Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)

     if pn.find("-cross") != -1:
         pnstripped = pn.split("-cross")
         bb.note("cross Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)

     if pn.find("-initial") != -1:
         pnstripped = pn.split("-initial")
         bb.note("initial Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)

     """generate package information from .bb file"""
-    pname = bb.data.getVar('PN', localdata, True)
-    pcurver = bb.data.getVar('PV', localdata, True)
-    pdesc = bb.data.getVar('DESCRIPTION', localdata, True)
+    pname = localdata.getVar('PN', True)
+    pcurver = localdata.getVar('PV', True)
+    pdesc = localdata.getVar('DESCRIPTION', True)
     if pdesc is not None:
         pdesc = pdesc.replace(',','')
         pdesc = pdesc.replace('\n','')

-    pgrp = bb.data.getVar('SECTION', localdata, True)
-    plicense = bb.data.getVar('LICENSE', localdata, True).replace(',','_')
-    if bb.data.getVar('LIC_FILES_CHKSUM', localdata, True):
+    pgrp = localdata.getVar('SECTION', True)
+    plicense = localdata.getVar('LICENSE', True).replace(',','_')
+    if localdata.getVar('LIC_FILES_CHKSUM', True):
         pchksum="1"
     else:
         pchksum="0"

-    if bb.data.getVar('RECIPE_STATUS', localdata, True):
+    if localdata.getVar('RECIPE_STATUS', True):
         hasrstatus="1"
     else:
         hasrstatus="0"

-    rstatus = bb.data.getVar('RECIPE_STATUS', localdata, True)
+    rstatus = localdata.getVar('RECIPE_STATUS', True)
     if rstatus is not None:
         rstatus = rstatus.replace(',','')

-    pupver = bb.data.getVar('RECIPE_LATEST_VERSION', localdata, True)
+    pupver = localdata.getVar('RECIPE_LATEST_VERSION', True)
     if pcurver == pupver:
         vermatch="1"
     else:
         vermatch="0"

-    noupdate_reason = bb.data.getVar('RECIPE_NO_UPDATE_REASON', localdata, True)
+    noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
     if noupdate_reason is None:
         noupdate="0"
     else:
         noupdate="1"
         noupdate_reason = noupdate_reason.replace(',','')

-    ris = bb.data.getVar('RECIPE_INTEL_SECTION', localdata, True)
-    maintainer = bb.data.getVar('RECIPE_MAINTAINER', localdata, True)
-    rttr = bb.data.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', localdata, True)
-    rlrd = bb.data.getVar('RECIPE_LATEST_RELEASE_DATE', localdata, True)
-    dc = bb.data.getVar('DEPENDENCY_CHECK', localdata, True)
-    rc = bb.data.getVar('RECIPE_COMMENTS', localdata, True)
+    ris = localdata.getVar('RECIPE_INTEL_SECTION', True)
+    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
+    rttr = localdata.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', True)
+    rlrd = localdata.getVar('RECIPE_LATEST_RELEASE_DATE', True)
+    dc = localdata.getVar('DEPENDENCY_CHECK', True)
+    rc = localdata.getVar('RECIPE_COMMENTS', True)
     # do the comparison
     result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)

@@ -298,7 +298,7 @@ python do_checkpkg() {
     Clear internal url cache as it's a temporary check. Not doing so will have
     bitbake check url multiple times when looping through a single url
     """
-    fn = bb.data.getVar('FILE', d, True)
+    fn = d.getVar('FILE', True)
     bb.fetch2.urldata_cache[fn] = {}

     """
@@ -329,7 +329,7 @@ python do_checkpkg() {
     Return new version if success, or else error in "Errxxxx" style
     """
     def check_new_dir(url, curver, d):
-        pn = bb.data.getVar('PN', d, True)
+        pn = d.getVar('PN', True)
         f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
         status = internal_fetch_wget(url, d, f)
         fhtml = f.read()
@@ -372,7 +372,7 @@ python do_checkpkg() {

         f.close()
         if status != "ErrHostNoDir" and re.match("Err", status):
-            logpath = bb.data.getVar('LOG_DIR', d, 1)
+            logpath = d.getVar('LOG_DIR', 1)
             os.system("cp %s %s/" % (f.name, logpath))
         os.unlink(f.name)
         return status
@@ -388,7 +388,7 @@ python do_checkpkg() {
         """possible to have no version in pkg name, such as spectrum-fw"""
         if not re.search("\d+", curname):
             return pcurver
-        pn = bb.data.getVar('PN', d, True)
+        pn = d.getVar('PN', True)
         f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
         status = internal_fetch_wget(url, d, f)
         fhtml = f.read()
@@ -431,55 +431,55 @@ python do_checkpkg() {
         f.close()
         """if host hasn't directory information, no need to save tmp file"""
         if status != "ErrHostNoDir" and re.match("Err", status):
-            logpath = bb.data.getVar('LOG_DIR', d, True)
+            logpath = d.getVar('LOG_DIR', True)
             os.system("cp %s %s/" % (f.name, logpath))
         os.unlink(f.name)
         return status

     """first check whether a uri is provided"""
-    src_uri = bb.data.getVar('SRC_URI', d, True)
+    src_uri = d.getVar('SRC_URI', True)
     if not src_uri:
         return

     """initialize log files."""
-    logpath = bb.data.getVar('LOG_DIR', d, True)
+    logpath = d.getVar('LOG_DIR', True)
     bb.utils.mkdirhier(logpath)
     logfile = os.path.join(logpath, "checkpkg.csv")

     """generate package information from .bb file"""
-    pname = bb.data.getVar('PN', d, True)
+    pname = d.getVar('PN', True)

     if pname.find("-native") != -1:
         pnstripped = pname.split("-native")
         bb.note("Native Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)

     if pname.find("-cross") != -1:
         pnstripped = pname.split("-cross")
         bb.note("cross Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)

     if pname.find("-initial") != -1:
         pnstripped = pname.split("-initial")
         bb.note("initial Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)

-    pdesc = bb.data.getVar('DESCRIPTION', localdata, True)
-    pgrp = bb.data.getVar('SECTION', localdata, True)
-    pversion = bb.data.getVar('PV', localdata, True)
-    plicense = bb.data.getVar('LICENSE', localdata, True)
-    psection = bb.data.getVar('SECTION', localdata, True)
-    phome = bb.data.getVar('HOMEPAGE', localdata, True)
-    prelease = bb.data.getVar('PR', localdata, True)
-    ppriority = bb.data.getVar('PRIORITY', localdata, True)
-    pdepends = bb.data.getVar('DEPENDS', localdata, True)
-    pbugtracker = bb.data.getVar('BUGTRACKER', localdata, True)
-    ppe = bb.data.getVar('PE', localdata, True)
-    psrcuri = bb.data.getVar('SRC_URI', localdata, True)
-    maintainer = bb.data.getVar('RECIPE_MAINTAINER', localdata, True)
+    pdesc = localdata.getVar('DESCRIPTION', True)
+    pgrp = localdata.getVar('SECTION', True)
+    pversion = localdata.getVar('PV', True)
+    plicense = localdata.getVar('LICENSE', True)
+    psection = localdata.getVar('SECTION', True)
+    phome = localdata.getVar('HOMEPAGE', True)
+    prelease = localdata.getVar('PR', True)
+    ppriority = localdata.getVar('PRIORITY', True)
+    pdepends = localdata.getVar('DEPENDS', True)
+    pbugtracker = localdata.getVar('BUGTRACKER', True)
+    ppe = localdata.getVar('PE', True)
+    psrcuri = localdata.getVar('SRC_URI', True)
+    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)

     found = 0
     for uri in src_uri.split():
@@ -497,9 +497,9 @@ python do_checkpkg() {

         (type, host, path, user, pswd, parm) = bb.decodeurl(uri)
         if type in ['http', 'https', 'ftp']:
-            pcurver = bb.data.getVar('PV', d, True)
+            pcurver = d.getVar('PV', True)
         else:
-            pcurver = bb.data.getVar("SRCREV", d, True)
+            pcurver = d.getVar("SRCREV", True)

         if type in ['http', 'https', 'ftp']:
             newver = pcurver
@@ -639,7 +639,7 @@ python do_checkpkg() {
                 pstatus += ":%s%s" % (host, path)

         """Read from manual distro tracking fields as alternative"""
-        pmver = bb.data.getVar("RECIPE_LATEST_VERSION", d, True)
+        pmver = d.getVar("RECIPE_LATEST_VERSION", True)
         if not pmver:
             pmver = "N/A"
             pmstatus = "ErrNoRecipeData"
@@ -688,12 +688,12 @@ python do_distro_check() {

     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
-    tmpdir = bb.data.getVar('TMPDIR', d, True)
+    tmpdir = d.getVar('TMPDIR', True)
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    logpath = bb.data.getVar('LOG_DIR', d, True)
+    logpath = d.getVar('LOG_DIR', True)
     bb.utils.mkdirhier(logpath)
     result_file = os.path.join(logpath, "distrocheck.csv")
-    datetime = bb.data.getVar('DATETIME', localdata, True)
+    datetime = localdata.getVar('DATETIME', True)
     dc.update_distro_data(distro_check_dir, datetime)

     # do the comparison
@@ -734,12 +734,12 @@ python do_checklicense() {
     import os
     import bb
     import shutil
-    logpath = bb.data.getVar('LOG_DIR', d, True)
+    logpath = d.getVar('LOG_DIR', True)
     bb.utils.mkdirhier(logpath)
-    pn = bb.data.getVar('PN', d, True)
+    pn = d.getVar('PN', True)
     logfile = os.path.join(logpath, "missinglicense.csv")
-    generic_directory = bb.data.getVar('COMMON_LICENSE_DIR', d, True)
-    license_types = bb.data.getVar('LICENSE', d, True)
+    generic_directory = d.getVar('COMMON_LICENSE_DIR', True)
+    license_types = d.getVar('LICENSE', True)
     for license_type in ((license_types.replace('+', '').replace('|', '&')
                           .replace('(', '').replace(')', '').replace(';', '')
                           .replace(',', '').replace(" ", "").split("&"))):
