path: root/meta/classes/distrodata.bbclass
Diffstat (limited to 'meta/classes/distrodata.bbclass')
-rw-r--r--    meta/classes/distrodata.bbclass    1090
1 file changed, 545 insertions(+), 545 deletions(-)
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index 0da10a1dd1..945ff5344c 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -3,11 +3,11 @@ addhandler distro_eventhandler
python distro_eventhandler() {

    if bb.event.getName(e) == "BuildStarted":
        import oe.distro_check as dc
        logfile = dc.create_log_file(e.data, "distrodata.csv")
        lf = bb.utils.lockfile("%s.lock" % logfile)
        f = open(logfile, "a")
        f.write("Package,Description,Owner,License,VerMatch,Version,Upstream,Reason,Recipe Status,Distro 1,Distro 2,Distro 3\n")
        f.close()
        bb.utils.unlockfile(lf)

@@ -17,7 +17,7 @@ python distro_eventhandler() {
addtask distrodata_np
do_distrodata_np[nostamp] = "1"
python do_distrodata_np() {
    localdata = bb.data.createCopy(d)
    pn = d.getVar("PN", True)
    bb.note("Package Name: %s" % pn)

@@ -27,69 +27,69 @@ python do_distrodata_np() {
    datetime = localdata.getVar('DATETIME', True)
    dist_check.update_distro_data(distro_check_dir, datetime)

    if pn.find("-native") != -1:
        pnstripped = pn.split("-native")
        bb.note("Native Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pn.find("nativesdk-") != -1:
        pnstripped = pn.replace("nativesdk-", "")
        bb.note("Native Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pn.find("-cross") != -1:
        pnstripped = pn.split("-cross")
        bb.note("cross Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pn.find("-crosssdk") != -1:
        pnstripped = pn.split("-crosssdk")
        bb.note("cross Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pn.find("-initial") != -1:
        pnstripped = pn.split("-initial")
        bb.note("initial Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

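    # Illustrative note, not part of the recipe: the blocks above strip the
    # -native/-cross/-crosssdk/-initial suffix (or nativesdk- prefix) and
    # prepend a "pn-<base>" override, so per-recipe tracking fields apply to
    # the copied datastore.  For a hypothetical PN of "zlib-native":
    #     "zlib-native".split("-native")[0]   ->  'zlib'
    #     OVERRIDES                           ->  'pn-zlib:<previous OVERRIDES>'
    # after which bb.data.update_data(localdata) re-expands values such as
    # RECIPE_MAINTAINER_pn-zlib into the copy.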
60 """generate package information from .bb file""" 60 """generate package information from .bb file"""
61 pname = localdata.getVar('PN', True) 61 pname = localdata.getVar('PN', True)
62 pcurver = localdata.getVar('PV', True) 62 pcurver = localdata.getVar('PV', True)
63 pdesc = localdata.getVar('DESCRIPTION', True) 63 pdesc = localdata.getVar('DESCRIPTION', True)
64 if pdesc is not None: 64 if pdesc is not None:
65 pdesc = pdesc.replace(',','') 65 pdesc = pdesc.replace(',','')
66 pdesc = pdesc.replace('\n','') 66 pdesc = pdesc.replace('\n','')
67 67
68 pgrp = localdata.getVar('SECTION', True) 68 pgrp = localdata.getVar('SECTION', True)
69 plicense = localdata.getVar('LICENSE', True).replace(',','_') 69 plicense = localdata.getVar('LICENSE', True).replace(',','_')
70 70
71 rstatus = localdata.getVar('RECIPE_COLOR', True) 71 rstatus = localdata.getVar('RECIPE_COLOR', True)
72 if rstatus is not None: 72 if rstatus is not None:
73 rstatus = rstatus.replace(',','') 73 rstatus = rstatus.replace(',','')
74 74
75 pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True) 75 pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True)
76 if pcurver == pupver: 76 if pcurver == pupver:
77 vermatch="1" 77 vermatch="1"
78 else: 78 else:
79 vermatch="0" 79 vermatch="0"
80 noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True) 80 noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
81 if noupdate_reason is None: 81 if noupdate_reason is None:
82 noupdate="0" 82 noupdate="0"
83 else: 83 else:
84 noupdate="1" 84 noupdate="1"
85 noupdate_reason = noupdate_reason.replace(',','') 85 noupdate_reason = noupdate_reason.replace(',','')
86 86
87 maintainer = localdata.getVar('RECIPE_MAINTAINER', True) 87 maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
88 rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE', True) 88 rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE', True)
89 result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata) 89 result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)
90 90
91 bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s\n" % \ 91 bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s\n" % \
92 (pname, pdesc, maintainer, plicense, vermatch, pcurver, pupver, noupdate_reason, rstatus)) 92 (pname, pdesc, maintainer, plicense, vermatch, pcurver, pupver, noupdate_reason, rstatus))
93 line = pn 93 line = pn
94 for i in result: 94 for i in result:
95 line = line + "," + i 95 line = line + "," + i
@@ -99,12 +99,12 @@ python do_distrodata_np() {
addtask distrodata
do_distrodata[nostamp] = "1"
python do_distrodata() {
    logpath = d.getVar('LOG_DIR', True)
    bb.utils.mkdirhier(logpath)
    logfile = os.path.join(logpath, "distrodata.csv")

    import oe.distro_check as dist_check
    localdata = bb.data.createCopy(d)
    tmpdir = d.getVar('TMPDIR', True)
    distro_check_dir = os.path.join(tmpdir, "distro_check")
    datetime = localdata.getVar('DATETIME', True)
@@ -113,61 +113,61 @@ python do_distrodata() {
    pn = d.getVar("PN", True)
    bb.note("Package Name: %s" % pn)

    if pn.find("-native") != -1:
        pnstripped = pn.split("-native")
        bb.note("Native Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pn.find("-cross") != -1:
        pnstripped = pn.split("-cross")
        bb.note("cross Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pn.find("-initial") != -1:
        pnstripped = pn.split("-initial")
        bb.note("initial Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    """generate package information from .bb file"""
    pname = localdata.getVar('PN', True)
    pcurver = localdata.getVar('PV', True)
    pdesc = localdata.getVar('DESCRIPTION', True)
    if pdesc is not None:
        pdesc = pdesc.replace(',','')
        pdesc = pdesc.replace('\n','')

    pgrp = localdata.getVar('SECTION', True)
    plicense = localdata.getVar('LICENSE', True).replace(',','_')

    rstatus = localdata.getVar('RECIPE_COLOR', True)
    if rstatus is not None:
        rstatus = rstatus.replace(',','')

    pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True)
    if pcurver == pupver:
        vermatch="1"
    else:
        vermatch="0"

    noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
    if noupdate_reason is None:
        noupdate="0"
    else:
        noupdate="1"
        noupdate_reason = noupdate_reason.replace(',','')

    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
    rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE', True)
    # do the comparison
    result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)

    lf = bb.utils.lockfile("%s.lock" % logfile)
    f = open(logfile, "a")
    f.write("%s,%s,%s,%s,%s,%s,%s,%s,%s" % \
            (pname, pdesc, maintainer, plicense, vermatch, pcurver, pupver, noupdate_reason, rstatus))
    line = ""
    for i in result:
        line = line + "," + i
@@ -180,18 +180,18 @@ addtask distrodataall after do_distrodata
do_distrodataall[recrdeptask] = "do_distrodataall do_distrodata"
do_distrodataall[nostamp] = "1"
do_distrodataall() {
    :
}

addhandler checkpkg_eventhandler
python checkpkg_eventhandler() {
    if bb.event.getName(e) == "BuildStarted":
        import oe.distro_check as dc
        logfile = dc.create_log_file(e.data, "checkpkg.csv")

        lf = bb.utils.lockfile("%s.lock" % logfile)
        f = open(logfile, "a")
        f.write("Package\tVersion\tUpver\tLicense\tSection\tHome\tRelease\tDepends\tBugTracker\tPE\tDescription\tStatus\tTracking\tURI\tMAINTAINER\n")
        f.close()
        bb.utils.unlockfile(lf)
    return
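
Both the event handlers and the per-recipe tasks in this class append rows to a shared log file, so every write is bracketed by bb.utils.lockfile()/bb.utils.unlockfile() to keep concurrent BitBake tasks from interleaving lines. A minimal sketch of that pattern, with a hypothetical logfile path standing in for the LOG_DIR-derived one:

    import bb.utils

    logfile = "/tmp/log/checkpkg.csv"             # hypothetical path
    lf = bb.utils.lockfile("%s.lock" % logfile)   # blocks until the lock is acquired
    f = open(logfile, "a")                        # append a single row while holding the lock
    f.write("example-recipe\t1.0\t1.1\n")
    f.close()
    bb.utils.unlockfile(lf)                       # release so the next task can write
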
@@ -200,444 +200,444 @@ python checkpkg_eventhandler() {
addtask checkpkg
do_checkpkg[nostamp] = "1"
python do_checkpkg() {
    localdata = bb.data.createCopy(d)
    import sys
    import re
    import tempfile
    import subprocess

    """
    Sanity check to ensure the same name and type. Match as many upstream
    naming patterns as possible, such as:
        gnome-common-2.20.0.tar.gz (most common format)
        gtk+-2.90.1.tar.gz
        xf86-input-synaptics-12.6.9.tar.gz
        dri2proto-2.3.tar.gz
        blktool_4.orig.tar.gz
        libid3tag-0.15.1b.tar.gz
        unzip552.tar.gz
        icu4c-3_6-src.tgz
        genext2fs_1.3.orig.tar.gz
        gst-fluendo-mp3
    """
    prefix1 = "[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*[\-_]"    # match most patterns that use "-" as the separator to the version digits
    prefix2 = "[a-zA-Z]+"           # a loose pattern such as for unzip552.tar.gz
    prefix3 = "[0-9a-zA-Z]+"        # a loose pattern such as for 80325-quicky-0.4.tar.gz
    prefix = "(%s|%s|%s)" % (prefix1, prefix2, prefix3)
    suffix = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm)"
    suffixtuple = ("tar.gz", "tgz", "zip", "tar.bz2", "tar.xz", "src.rpm")

    sinterstr = "(?P<name>%s?)(?P<ver>.*)" % prefix
    sdirstr = "(?P<name>%s)(?P<ver>.*)\.(?P<type>%s$)" % (prefix, suffix)

    def parse_inter(s):
        m = re.search(sinterstr, s)
        if not m:
            return None
        else:
            return (m.group('name'), m.group('ver'), "")

    def parse_dir(s):
        m = re.search(sdirstr, s)
        if not m:
            return None
        else:
            return (m.group('name'), m.group('ver'), m.group('type'))

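    # Illustrative examples only (not executed here), using the sample names
    # listed in the docstring above:
    #     parse_dir("gnome-common-2.20.0.tar.gz")  ->  ('gnome-common-', '2.20.0', 'tar.gz')
    #     parse_dir("icu4c-3_6-src.tgz")           ->  ('icu4c-', '3_6-src', 'tgz')
    #     parse_inter("2.4")                       ->  ('2', '.4', '')
    # parse_inter() splits a bare directory version so its pieces can be
    # rejoined later; parse_dir() additionally captures the archive suffix.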
247 """ 247 """
248 Check whether 'new' is newer than 'old' version. We use existing vercmp() for the 248 Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
249 purpose. PE is cleared in comparison as it's not for build, and PV is cleared too 249 purpose. PE is cleared in comparison as it's not for build, and PV is cleared too
250 for simplicity as it's somehow difficult to get from various upstream format 250 for simplicity as it's somehow difficult to get from various upstream format
251 """ 251 """
252 def __vercmp(old, new): 252 def __vercmp(old, new):
253 (on, ov, ot) = old 253 (on, ov, ot) = old
254 (en, ev, et) = new 254 (en, ev, et) = new
255 if on != en or (et and et not in suffixtuple): 255 if on != en or (et and et not in suffixtuple):
256 return 0 256 return 0
257 ov = re.search("[\d|\.]+[^a-zA-Z]+", ov).group() 257 ov = re.search("[\d|\.]+[^a-zA-Z]+", ov).group()
258 ev = re.search("[\d|\.]+[^a-zA-Z]+", ev).group() 258 ev = re.search("[\d|\.]+[^a-zA-Z]+", ev).group()
259 return bb.utils.vercmp(("0", ov, ""), ("0", ev, "")) 259 return bb.utils.vercmp(("0", ov, ""), ("0", ev, ""))
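
    # Illustrative only: __vercmp() returns a negative value when 'new' is a
    # later release of the same package, e.g.
    #     __vercmp(('foo-', '1.2.0', 'tar.gz'), ('foo-', '1.2.1', 'tar.gz')) < 0
    # and returns 0 when the names differ or the suffix is not in suffixtuple,
    # so that candidate is ignored by the callers below.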

    """
    Wrapper for fetching the upstream directory listing.
        'url'  - upstream link customized by regular expression
        'd'    - datastore
        'tmpf' - tmpfile for fetcher output
    We don't want to abort the whole build because of one recipe error, so all
    exceptions are handled gracefully without leaking to the caller.
    """
    def internal_fetch_wget(url, d, tmpf):
        status = "ErrFetchUnknown"
        """
        Clear the internal url cache as this is a temporary check. Not doing so would
        have bitbake check the url multiple times when looping through a single url.
        """
        fn = d.getVar('FILE', True)
        bb.fetch2.urldata_cache[fn] = {}

        """
        To avoid impacting the bitbake build engine, this trick is required to reuse the
        bitbake interfaces. bb.fetch.go() is not applicable as it checks downloaded content
        in ${DL_DIR}, which we don't want to pollute. So bb.fetch2.checkstatus() is borrowed
        here: it is designed for checking, and we override its check command for our own purpose.
        """
        ld = bb.data.createCopy(d)
        d.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12\" '${URI}'" \
                % tmpf.name)
        bb.data.update_data(ld)

        try:
            fetcher = bb.fetch2.Fetch([url], ld)
            fetcher.checkstatus()
            status = "SUCC"
        except bb.fetch2.BBFetchException, e:
            status = "ErrFetch"

        return status

    """
    Check the intermediate version directory, such as "2.4/" in "http://xxx/2.4/pkg-2.4.1.tar.gz".
        'url'    - upstream link customized by regular expression
        'd'      - datastore
        'curver' - current version
    Returns the new version on success, or an error in "Errxxxx" style otherwise.
    """
    def check_new_dir(url, curver, d):
        pn = d.getVar('PN', True)
        f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
        status = internal_fetch_wget(url, d, f)
        fhtml = f.read()
        if status == "SUCC" and len(fhtml):
            newver = parse_inter(curver)

            """
            match "*4.1/">*4.1/ where '*' matches any characters
            N.B. prepend the package name and only match digits
            """
            m = re.search("^%s" % prefix, curver)
            if m:
                s = "%s[^\d\"]*?(\d+[\.\-_])+\d+/?" % m.group()
            else:
                s = "(\d+[\.\-_])+\d+/?"

            searchstr = "[hH][rR][eE][fF]=\"%s\">" % s
            reg = re.compile(searchstr)

            valid = 0
            for line in fhtml.split("\n"):
                if line.find(curver) >= 0:
                    valid = 1
                m = reg.search(line)
                if m:
                    ver = m.group().split("\"")[1]
                    ver = ver.strip("/")
                    ver = parse_inter(ver)
                    if ver and __vercmp(newver, ver) < 0:
                        newver = ver

            """Expect a match for curver in the directory list, or else it indicates an unknown format"""
            if not valid:
                status = "ErrParseInterDir"
            else:
                """rejoin the path name"""
                status = newver[0] + newver[1]
        elif not len(fhtml):
            status = "ErrHostNoDir"

        f.close()
        if status != "ErrHostNoDir" and re.match("Err", status):
            logpath = d.getVar('LOG_DIR', True)
            subprocess.call("cp %s %s/" % (f.name, logpath), shell=True)
        os.unlink(f.name)
        return status

    """
    Check the last directory component to find e.g. '2.4.1' in "http://xxx/2.4/pkg-2.4.1.tar.gz".
        'url'     - upstream link customized by regular expression
        'd'       - datastore
        'curname' - current package name
    Returns the new version on success, or an error in "Errxxxx" style otherwise.
    """
    def check_new_version(url, curname, d):
        """it's possible to have no version in the pkg name, such as spectrum-fw"""
        if not re.search("\d+", curname):
            return pcurver
        pn = d.getVar('PN', True)
        f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
        status = internal_fetch_wget(url, d, f)
        fhtml = f.read()

        if status == "SUCC" and len(fhtml):
            newver = parse_dir(curname)

            """match "{PN}-5.21.1.tar.gz">{PN}-5.21.1.tar.gz """
            pn1 = re.search("^%s" % prefix, curname).group()

            s = "[^\"]*%s[^\d\"]*?(\d+[\.\-_])+[^\"]*" % pn1
            searchstr = "[hH][rR][eE][fF]=\"%s\".*[>\"]" % s
            reg = re.compile(searchstr)

            valid = 0
            for line in fhtml.split("\n"):
                m = reg.search(line)
                if m:
                    valid = 1
                    ver = m.group().split("\"")[1].split("/")[-1]
                    if ver == "download":
                        ver = m.group().split("\"")[1].split("/")[-2]
                    ver = parse_dir(ver)
                    if ver and __vercmp(newver, ver) < 0:
                        newver = ver

            """Expect a match for curver in the directory list, or else it indicates an unknown format"""
            if not valid:
                status = "ErrParseDir"
            else:
                """newver still contains a full package name string"""
                status = re.search("(\d+[\.\-_])*(\d+[0-9a-zA-Z]*)", newver[1]).group()
                if "_" in status:
                    status = re.sub("_",".",status)
                elif "-" in status:
                    status = re.sub("-",".",status)
        elif not len(fhtml):
            status = "ErrHostNoDir"

        f.close()
        """if the host has no directory listing, there is no need to save the tmp file"""
        if status != "ErrHostNoDir" and re.match("Err", status):
            logpath = d.getVar('LOG_DIR', True)
            subprocess.call("cp %s %s/" % (f.name, logpath), shell=True)
        os.unlink(f.name)
        return status
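
    # Illustrative only: the normalisation at the end of check_new_version()
    # maps a matched entry back to a dotted version string.  For the sample
    # icu4c-3_6-src.tgz the 'ver' group is "3_6-src", the search keeps "3_6",
    # and the substitution yields "3.6".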

    """first check whether a uri is provided"""
    src_uri = d.getVar('SRC_URI', True)
    if not src_uri:
        return

    """initialize log files."""
    logpath = d.getVar('LOG_DIR', True)
    bb.utils.mkdirhier(logpath)
    logfile = os.path.join(logpath, "checkpkg.csv")

    """generate package information from .bb file"""
    pname = d.getVar('PN', True)

    if pname.find("-native") != -1:
        pnstripped = pname.split("-native")
        bb.note("Native Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pname.find("-cross") != -1:
        pnstripped = pname.split("-cross")
        bb.note("cross Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pname.find("-initial") != -1:
        pnstripped = pname.split("-initial")
        bb.note("initial Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    pdesc = localdata.getVar('DESCRIPTION', True)
    pgrp = localdata.getVar('SECTION', True)
    pversion = localdata.getVar('PV', True)
    plicense = localdata.getVar('LICENSE', True)
    psection = localdata.getVar('SECTION', True)
    phome = localdata.getVar('HOMEPAGE', True)
    prelease = localdata.getVar('PR', True)
    pdepends = localdata.getVar('DEPENDS', True)
    pbugtracker = localdata.getVar('BUGTRACKER', True)
    ppe = localdata.getVar('PE', True)
    psrcuri = localdata.getVar('SRC_URI', True)
    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)

    found = 0
    for uri in src_uri.split():
        m = re.compile('(?P<type>[^:]*)').match(uri)
        if not m:
            raise MalformedUrl(uri)
        elif m.group('type') in ('http', 'https', 'ftp', 'cvs', 'svn', 'git'):
            found = 1
            pproto = m.group('type')
            break
    if not found:
        pproto = "file"
    pupver = "N/A"
    pstatus = "ErrUnknown"

    (type, host, path, user, pswd, parm) = bb.decodeurl(uri)
    if type in ['http', 'https', 'ftp']:
        pcurver = d.getVar('PV', True)
    else:
        pcurver = d.getVar("SRCREV", True)

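    # Illustrative only: bb.decodeurl() splits a SRC_URI entry into roughly
    # (scheme, host, path, user, password, params).  A hypothetical entry
    #     "http://ftp.example.org/pub/foo/1.2/foo-1.2.3.tar.gz;name=foo"
    # decodes to ('http', 'ftp.example.org', '/pub/foo/1.2/foo-1.2.3.tar.gz',
    # '', '', {'name': 'foo'}), which is what the protocol branches below use.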
    if type in ['http', 'https', 'ftp']:
        newver = pcurver
        altpath = path
        dirver = "-"
        curname = "-"

        """
        match a version number amid the path, such as "5.7" in:
            http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
        N.B. what about something like "../5.7/5.8/..."? No such example found so far :-P
        """
        m = re.search(r"[^/]*(\d+\.)+\d+([\-_]r\d+)*/", path)
        if m:
            altpath = path.split(m.group())[0]
            dirver = m.group().strip("/")

        """use the new path and drop the params; for wget the only param is md5sum"""
        alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])

        newver = check_new_dir(alturi, dirver, d)
        altpath = path
        if not re.match("Err", newver) and dirver != newver:
            altpath = altpath.replace(dirver, newver, True)

        """Now try to acquire all remote files in the current directory"""
        if not re.match("Err", newver):
            curname = altpath.split("/")[-1]

            """get the remote name by skipping the package name"""
            m = re.search(r"/.*/", altpath)
            if not m:
                altpath = "/"
            else:
                altpath = m.group()

            alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
            newver = check_new_version(alturi, curname, d)
            while(newver == "ErrHostNoDir"):
                if alturi == "/download":
                    break
                else:
                    alturi = "/".join(alturi.split("/")[0:-2]) + "/download"
                    newver = check_new_version(alturi, curname, d)
            if not re.match("Err", newver):
                pupver = newver
                if pupver != pcurver:
                    pstatus = "UPDATE"
                else:
                    pstatus = "MATCH"

        if re.match("Err", newver):
            pstatus = newver + ":" + altpath + ":" + dirver + ":" + curname
    elif type == 'git':
        if user:
            gituser = user + '@'
        else:
            gituser = ""

        if 'protocol' in parm:
            gitproto = parm['protocol']
        else:
            gitproto = "git"
        gitcmd = "git ls-remote %s://%s%s%s *tag* 2>&1" % (gitproto, gituser, host, path)
        gitcmd2 = "git ls-remote %s://%s%s%s HEAD 2>&1" % (gitproto, gituser, host, path)
        tmp = os.popen(gitcmd).read()
        tmp2 = os.popen(gitcmd2).read()
        # This is for repos that have tags like: refs/tags/1.2.2
        if tmp:
            tmpline = tmp.split("\n")
            verflag = 0
            for line in tmpline:
                if len(line) == 0:
                    break
                puptag = line.split("/")[-1]
                puptag = re.search("[0-9][0-9|\.|_]+[0-9]", puptag)
                if puptag == None:
                    continue
                puptag = puptag.group()
                puptag = re.sub("_",".",puptag)
                plocaltag = pversion.split("+")[0]
                if "git" in plocaltag:
                    plocaltag = plocaltag.split("-")[0]
                result = bb.utils.vercmp(("0", puptag, ""), ("0", plocaltag, ""))
                if result > 0:
                    verflag = 1
                    pstatus = "UPDATE"
                    pupver = puptag
                elif verflag == 0:
                    pupver = plocaltag
                    pstatus = "MATCH"
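        # Illustrative only: a ls-remote line such as
        #     "<sha>\trefs/tags/v1_2_3"
        # is reduced to "v1_2_3" by the split above, the regex keeps "1_2_3",
        # and the substitution turns it into "1.2.3" before it is compared
        # against the local tag derived from PV.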
        # This is for repos with no tags
        elif tmp2:
            pupver = tmp2.split("\t")[0]
            if pupver in pversion:
                pstatus = "MATCH"
            else:
                pstatus = "UPDATE"
        else:
            pstatus = "ErrGitAccess"
    elif type == 'svn':
        options = []
        if user:
            options.append("--username %s" % user)
        if pswd:
            options.append("--password %s" % pswd)
        svnproto = 'svn'
        if 'proto' in parm:
            svnproto = parm['proto']
        if 'rev' in parm:
            pcurver = parm['rev']

        svncmd = "svn info %s %s://%s%s/%s/ 2>&1" % (" ".join(options), svnproto, host, path, parm["module"])
        print svncmd
        svninfo = os.popen(svncmd).read()
        for line in svninfo.split("\n"):
            if re.search("^Last Changed Rev:", line):
                pupver = line.split(" ")[-1]
                if pupver in pversion:
                    pstatus = "MATCH"
                else:
                    pstatus = "UPDATE"

        if re.match("Err", pstatus):
            pstatus = "ErrSvnAccess"
    elif type == 'cvs':
        pupver = "HEAD"
        pstatus = "UPDATE"
    elif type == 'file':
        """local file is always up-to-date"""
        pupver = pcurver
        pstatus = "MATCH"
    else:
        pstatus = "ErrUnsupportedProto"

    if re.match("Err", pstatus):
        pstatus += ":%s%s" % (host, path)

614 """Read from manual distro tracking fields as alternative""" 614 """Read from manual distro tracking fields as alternative"""
615 pmver = d.getVar("RECIPE_UPSTREAM_VERSION", True) 615 pmver = d.getVar("RECIPE_UPSTREAM_VERSION", True)
616 if not pmver: 616 if not pmver:
617 pmver = "N/A" 617 pmver = "N/A"
618 pmstatus = "ErrNoRecipeData" 618 pmstatus = "ErrNoRecipeData"
619 else: 619 else:
620 if pmver == pcurver: 620 if pmver == pcurver:
621 pmstatus = "MATCH" 621 pmstatus = "MATCH"
622 else: 622 else:
623 pmstatus = "UPDATE" 623 pmstatus = "UPDATE"
624 624
625 psrcuri = psrcuri.split()[0] 625 psrcuri = psrcuri.split()[0]
626 pdepends = "".join(pdepends.split("\t")) 626 pdepends = "".join(pdepends.split("\t"))
627 pdesc = "".join(pdesc.split("\t")) 627 pdesc = "".join(pdesc.split("\t"))
628 lf = bb.utils.lockfile("%s.lock" % logfile) 628 lf = bb.utils.lockfile("%s.lock" % logfile)
629 f = open(logfile, "a") 629 f = open(logfile, "a")
630 f.write("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % \ 630 f.write("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % \
631 (pname,pversion,pupver,plicense,psection, phome,prelease, pdepends,pbugtracker,ppe,pdesc,pstatus,pmver,psrcuri,maintainer)) 631 (pname,pversion,pupver,plicense,psection, phome,prelease, pdepends,pbugtracker,ppe,pdesc,pstatus,pmver,psrcuri,maintainer))
632 f.close() 632 f.close()
633 bb.utils.unlockfile(lf) 633 bb.utils.unlockfile(lf)
634} 634}

addtask checkpkgall after do_checkpkg
do_checkpkgall[recrdeptask] = "do_checkpkgall do_checkpkg"
do_checkpkgall[nostamp] = "1"
do_checkpkgall() {
    :
}

addhandler distro_check_eventhandler
@@ -679,7 +679,7 @@ addtask distro_checkall after do_distro_check
do_distro_checkall[recrdeptask] = "do_distro_checkall do_distro_check"
do_distro_checkall[nostamp] = "1"
do_distro_checkall() {
    :
}
#
#Check Missing License Text.
@@ -727,7 +727,7 @@ addtask checklicenseall after do_checklicense
do_checklicenseall[recrdeptask] = "do_checklicenseall do_checklicense"
do_checklicenseall[nostamp] = "1"
do_checklicenseall() {
    :
}
