Diffstat (limited to 'meta/classes/distrodata.bbclass')
-rw-r--r--    meta/classes/distrodata.bbclass    925
1 file changed, 925 insertions, 0 deletions
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
new file mode 100644
index 0000000000..085575a041
--- /dev/null
+++ b/meta/classes/distrodata.bbclass
@@ -0,0 +1,925 @@
include conf/distro/include/package_regex.inc
addhandler distro_eventhandler
distro_eventhandler[eventmask] = "bb.event.BuildStarted"
python distro_eventhandler() {
    import oe.distro_check as dc
    logfile = dc.create_log_file(e.data, "distrodata.csv")
    lf = bb.utils.lockfile("%s.lock" % logfile)
    f = open(logfile, "a")
    f.write("Package,Description,Owner,License,VerMatch,Version,Upstream,Reason,Recipe Status,Distro 1,Distro 2,Distro 3\n")
    f.close()
    bb.utils.unlockfile(lf)

    return
}

addtask distrodata_np
do_distrodata_np[nostamp] = "1"
python do_distrodata_np() {
    localdata = bb.data.createCopy(d)
    pn = d.getVar("PN", True)
    bb.note("Package Name: %s" % pn)

    import oe.distro_check as dist_check
    tmpdir = d.getVar('TMPDIR', True)
    distro_check_dir = os.path.join(tmpdir, "distro_check")
    datetime = localdata.getVar('DATETIME', True)
    dist_check.update_distro_data(distro_check_dir, datetime)

    if pn.find("-native") != -1:
        pnstripped = pn.split("-native")
        bb.note("Native Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pn.find("-cross") != -1:
        pnstripped = pn.split("-cross")
        bb.note("cross Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pn.find("-crosssdk") != -1:
        pnstripped = pn.split("-crosssdk")
        bb.note("crosssdk Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pn.startswith("nativesdk-"):
        pnstripped = pn.replace("nativesdk-", "")
        bb.note("NativeSDK Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)


    if pn.find("-initial") != -1:
        pnstripped = pn.split("-initial")
        bb.note("initial Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    """generate package information from .bb file"""
    pname = localdata.getVar('PN', True)
    pcurver = localdata.getVar('PV', True)
    pdesc = localdata.getVar('DESCRIPTION', True)
    if pdesc is not None:
        pdesc = pdesc.replace(',', '')
        pdesc = pdesc.replace('\n', '')

    pgrp = localdata.getVar('SECTION', True)
    plicense = localdata.getVar('LICENSE', True).replace(',', '_')

    rstatus = localdata.getVar('RECIPE_COLOR', True)
    if rstatus is not None:
        rstatus = rstatus.replace(',', '')

    pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True)
    if pcurver == pupver:
        vermatch = "1"
    else:
        vermatch = "0"
    noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
    if noupdate_reason is None:
        noupdate = "0"
    else:
        noupdate = "1"
        noupdate_reason = noupdate_reason.replace(',', '')

    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
    rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE', True)
    result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)

    bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s\n" % \
            (pname, pdesc, maintainer, plicense, vermatch, pcurver, pupver, noupdate_reason, rstatus))
    line = pn
    for i in result:
        line = line + "," + i
    bb.note("%s\n" % line)
}

addtask distrodata
do_distrodata[nostamp] = "1"
python do_distrodata() {
    logpath = d.getVar('LOG_DIR', True)
    bb.utils.mkdirhier(logpath)
    logfile = os.path.join(logpath, "distrodata.csv")

    import oe.distro_check as dist_check
    localdata = bb.data.createCopy(d)
    tmpdir = d.getVar('TMPDIR', True)
    distro_check_dir = os.path.join(tmpdir, "distro_check")
    datetime = localdata.getVar('DATETIME', True)
    dist_check.update_distro_data(distro_check_dir, datetime)

    pn = d.getVar("PN", True)
    bb.note("Package Name: %s" % pn)

    if pn.find("-native") != -1:
        pnstripped = pn.split("-native")
        bb.note("Native Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pn.startswith("nativesdk-"):
        pnstripped = pn.replace("nativesdk-", "")
        bb.note("NativeSDK Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pn.find("-cross") != -1:
        pnstripped = pn.split("-cross")
        bb.note("cross Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pn.find("-crosssdk") != -1:
        pnstripped = pn.split("-crosssdk")
        bb.note("crosssdk Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pn.find("-initial") != -1:
        pnstripped = pn.split("-initial")
        bb.note("initial Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    """generate package information from .bb file"""
    pname = localdata.getVar('PN', True)
    pcurver = localdata.getVar('PV', True)
    pdesc = localdata.getVar('DESCRIPTION', True)
    if pdesc is not None:
        pdesc = pdesc.replace(',', '')
        pdesc = pdesc.replace('\n', '')

    pgrp = localdata.getVar('SECTION', True)
    plicense = localdata.getVar('LICENSE', True).replace(',', '_')

    rstatus = localdata.getVar('RECIPE_COLOR', True)
    if rstatus is not None:
        rstatus = rstatus.replace(',', '')

    pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True)
    if pcurver == pupver:
        vermatch = "1"
    else:
        vermatch = "0"

    noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
    if noupdate_reason is None:
        noupdate = "0"
    else:
        noupdate = "1"
        noupdate_reason = noupdate_reason.replace(',', '')

    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
    rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE', True)
    # do the comparison
    result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)

    lf = bb.utils.lockfile("%s.lock" % logfile)
    f = open(logfile, "a")
    f.write("%s,%s,%s,%s,%s,%s,%s,%s,%s" % \
        (pname, pdesc, maintainer, plicense, vermatch, pcurver, pupver, noupdate_reason, rstatus))
    line = ""
    for i in result:
        line = line + "," + i
    f.write(line + "\n")
    f.close()
    bb.utils.unlockfile(lf)
}

addtask distrodataall after do_distrodata
do_distrodataall[recrdeptask] = "do_distrodataall do_distrodata"
do_distrodataall[recideptask] = "do_${BB_DEFAULT_TASK}"
do_distrodataall[nostamp] = "1"
do_distrodataall() {
        :
}
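
# A minimal usage sketch (not part of the class itself): assuming the class is
# inherited globally, e.g. INHERIT += "distrodata" in local.conf, the tracking
# data for everything a target pulls in can be collected with something like:
#
#   bitbake -c distrodataall <target>
#
# The rows are appended to ${LOG_DIR}/distrodata.csv.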

addhandler checkpkg_eventhandler
checkpkg_eventhandler[eventmask] = "bb.event.BuildStarted bb.event.BuildCompleted"
python checkpkg_eventhandler() {
    def parse_csv_file(filename):
        package_dict = {}
        fd = open(filename, "r")
        lines = fd.read().rsplit("\n")
        fd.close()

        first_line = ''
        index = 0
        for line in lines:
            #Skip the first line
            if index == 0:
                first_line = line
                index += 1
                continue
            elif line == '':
                continue
            index += 1
            package_name = line.rsplit("\t")[0]
            if '-native' in package_name or 'nativesdk-' in package_name:
                original_name = package_name.rsplit('-native')[0]
                if original_name == '':
                    original_name = package_name.rsplit('nativesdk-')[0]
                if original_name in package_dict:
                    continue
                else:
                    package_dict[package_name] = line
            else:
                new_name = package_name + "-native"
                if not(new_name in package_dict):
                    new_name = 'nativesdk-' + package_name
                if new_name in package_dict:
                    del package_dict[new_name]
                package_dict[package_name] = line

        fd = open(filename, "w")
        fd.write("%s\n" % first_line)
        for el in package_dict:
            fd.write(package_dict[el] + "\n")
        fd.close()

        del package_dict

    if bb.event.getName(e) == "BuildStarted":
        import oe.distro_check as dc
        logfile = dc.create_log_file(e.data, "checkpkg.csv")

        lf = bb.utils.lockfile("%s.lock" % logfile)
        f = open(logfile, "a")
        f.write("Package\tVersion\tUpver\tLicense\tSection\tHome\tRelease\tDepends\tBugTracker\tPE\tDescription\tStatus\tTracking\tURI\tMAINTAINER\tNoUpReason\n")
        f.close()
        bb.utils.unlockfile(lf)
    elif bb.event.getName(e) == "BuildCompleted":
        import os
        filename = "tmp/log/checkpkg.csv"
        if os.path.isfile(filename):
            lf = bb.utils.lockfile("%s.lock" % filename)
            parse_csv_file(filename)
            bb.utils.unlockfile(lf)
    return
}

addtask checkpkg
do_checkpkg[nostamp] = "1"
python do_checkpkg() {
    localdata = bb.data.createCopy(d)
    import re
    import tempfile
    import subprocess

    """
    sanity check to ensure same name and type. Match as many patterns as possible
    such as:
        gnome-common-2.20.0.tar.gz (most common format)
        gtk+-2.90.1.tar.gz
        xf86-input-synaptics-12.6.9.tar.gz
        dri2proto-2.3.tar.gz
        blktool_4.orig.tar.gz
        libid3tag-0.15.1b.tar.gz
        unzip552.tar.gz
        icu4c-3_6-src.tgz
        genext2fs_1.3.orig.tar.gz
        gst-fluendo-mp3
    """
    prefix1 = "[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*\+?[\-_]"    # match most patterns which use "-" as separator to version digits
    prefix2 = "[a-zA-Z]+"                                           # a loose pattern such as for unzip552.tar.gz
    prefix3 = "[0-9]+[\-]?[a-zA-Z]+"                                # a loose pattern such as for 80325-quicky-0.4.tar.gz
    prefix = "(%s|%s|%s)" % (prefix1, prefix2, prefix3)
    ver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"#"((\d+[\.\-_[a-z]])+)"
    # src.rpm extension was added only for rpm packages. Can be removed if rpm
    # packages will always be considered as having to be manually upgraded
    suffix = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"

    suffixtuple = ("tar.gz", "tgz", "zip", "tar.bz2", "tar.xz", "bz2", "orig.tar.gz", "src.tar.gz", "src.rpm", "src.tgz", "svnr\d+.tar.bz2", "stable.tar.gz", "src.rpm")
    sinterstr = "(?P<name>%s?)v?(?P<ver>%s)(\-source)?" % (prefix, ver_regex)
    sdirstr = "(?P<name>%s)\.?v?(?P<ver>%s)(\-source)?[\.\-](?P<type>%s$)" % (prefix, ver_regex, suffix)

    def parse_inter(s):
        m = re.search(sinterstr, s)
        if not m:
            return None
        else:
            return (m.group('name'), m.group('ver'), "")

    def parse_dir(s):
        m = re.search(sdirstr, s)
        if not m:
            return None
        else:
            return (m.group('name'), m.group('ver'), m.group('type'))
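
    # Illustrative examples (assuming the patterns above behave as described):
    # parse_dir("gnome-common-2.20.0.tar.gz") would be expected to return
    # ("gnome-common-", "2.20.0", "tar.gz"), while parse_inter("2.4") would
    # return ("", "2.4", "") since the name prefix is optional there.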

    def modelate_version(version):
        if version[0] in ['.', '-']:
            if version[1].isdigit():
                version = version[1] + version[0] + version[2:len(version)]
            else:
                version = version[1:len(version)]

        version = re.sub('\-', '.', version)
        version = re.sub('_', '.', version)
        version = re.sub('(rc)+', '.-1.', version)
        version = re.sub('(alpha)+', '.-3.', version)
        version = re.sub('(beta)+', '.-2.', version)
        if version[0] == 'v':
            version = version[1:len(version)]
        return version
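
    # For instance, modelate_version("1_2-3") would normalize to "1.2.3" and
    # modelate_version("v2.0") to "2.0"; "rc", "alpha" and "beta" markers are
    # rewritten so that they compare as older than the corresponding release.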

    """
    Check whether 'new' is newer than 'old' version. We use the existing vercmp() for the
    purpose. PE is cleared in the comparison as it's not for build, and PV is cleared too
    for simplicity as it's somewhat difficult to get from the various upstream formats
    """
    def __vercmp(old, new):
        (on, ov, ot) = old
        (en, ev, et) = new
        if on != en or (et and et not in suffixtuple):
            return False
        ov = modelate_version(ov)
        ev = modelate_version(ev)

        result = bb.utils.vercmp(("0", ov, ""), ("0", ev, ""))
        if result < 0:
            return True
        else:
            return False
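
    # e.g. __vercmp(("foo-", "1.0", "tar.gz"), ("foo-", "1.2", "tar.gz")) would be
    # expected to return True (an upgrade is available), whereas a mismatched name
    # or an unrecognized suffix rejects the candidate outright.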

    """
    wrapper for fetching upstream directory info
        'url'  - upstream link customized by regular expression
        'd'    - database
        'tmpf' - tmpfile for fetcher output
    We don't want to fail the whole build due to one recipe error, so handle all
    exceptions gracefully without letting them leak out.
    """
    def internal_fetch_wget(url, d, tmpf):
        status = "ErrFetchUnknown"
        """
        Clear the internal url cache as it's a temporary check. Not doing so will have
        bitbake check the url multiple times when looping through a single url
        """
        fn = d.getVar('FILE', True)
        bb.fetch2.urldata_cache[fn] = {}

        """
        To avoid impacting the bitbake build engine, this trick is required for reusing bitbake
        interfaces. bb.fetch.go() is not applicable as it checks downloaded content in ${DL_DIR}
        while we don't want to pollute that place. So bb.fetch2.checkstatus() is borrowed here;
        it is designed for check purposes but we override the check command for our own purpose
        """
        ld = bb.data.createCopy(d)
        d.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12\" '${URI}'" \
                % tmpf.name)
        bb.data.update_data(ld)

        try:
            fetcher = bb.fetch2.Fetch([url], ld)
            fetcher.checkstatus()
            status = "SUCC"
        except bb.fetch2.BBFetchException, e:
            status = "ErrFetch"

        return status

    """
    Check on the middle version directory such as "2.4/" in "http://xxx/2.4/pkg-2.4.1.tar.gz",
        'url'    - upstream link customized by regular expression
        'd'      - database
        'curver' - current version
    Return new version if successful, or else an error in "Errxxxx" style
    """
    def check_new_dir(url, curver, d):
        pn = d.getVar('PN', True)
        f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
        status = internal_fetch_wget(url, d, f)
        fhtml = f.read()
        if status == "SUCC" and len(fhtml):
            newver = parse_inter(curver)

            """
            match "*4.1/">*4.1/ where '*' matches chars
            N.B. add package name, only match for digits
            """
            regex = d.getVar('REGEX', True)
            if regex == '':
                regex = "^%s" % prefix
            m = re.search("^%s" % regex, curver)
            if m:
                s = "%s[^\d\"]*?(\d+[\.\-_])+\d+/?" % m.group()
            else:
                s = "(\d+[\.\-_])+\d+/?"

            searchstr = "[hH][rR][eE][fF]=\"%s\">" % s

            reg = re.compile(searchstr)
            valid = 0
            for line in fhtml.split("\n"):
                if line.find(curver) >= 0:
                    valid = 1
                m = reg.search(line)
                if m:
                    ver = m.group().split("\"")[1]
                    ver = ver.strip("/")
                    ver = parse_inter(ver)
                    if ver and __vercmp(newver, ver) == True:
                        newver = ver

            """Expect a match for curver in the directory list, or else it indicates an unknown format"""
            if not valid:
                status = "ErrParseInterDir"
            else:
                """rejoin the path name"""
                status = newver[0] + newver[1]
        elif not len(fhtml):
            status = "ErrHostNoDir"

        f.close()
        if status != "ErrHostNoDir" and re.match("Err", status):
            logpath = d.getVar('LOG_DIR', True)
            subprocess.call("cp %s %s/" % (f.name, logpath), shell=True)
        os.unlink(f.name)
        return status

    """
    Check the last directory level to search for '2.4.1' in "http://xxx/2.4/pkg-2.4.1.tar.gz",
        'url'     - upstream link customized by regular expression
        'd'       - database
        'curname' - current package name
    Return new version if successful, or else an error in "Errxxxx" style
    """
    def check_new_version(url, curname, d):
        """possible to have no version in pkg name, such as spectrum-fw"""
        if not re.search("\d+", curname):
            return pcurver
        pn = d.getVar('PN', True)
        newver_regex = d.getVar('REGEX', True)
        f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
        status = internal_fetch_wget(url, d, f)
        fhtml = f.read()

        if status == "SUCC" and len(fhtml):
            newver = parse_dir(curname)

            if not newver_regex:
                """this is the default matching pattern, if the recipe does not """
                """provide a regex expression """
                """match "{PN}-5.21.1.tar.gz">{PN}-5.21.1.tar.gz """
                pn1 = re.search("^%s" % prefix, curname).group()
                s = "[^\"]*%s[^\d\"]*?(\d+[\.\-_])+[^\"]*" % pn1
                searchstr = "[hH][rR][eE][fF]=\"%s\".*[>\"]" % s
                reg = searchstr
            else:
                reg = newver_regex
            valid = 0
            count = 0
            for line in fhtml.split("\n"):
                if pn == 'kconfig-frontends':
                    m = re.findall(reg, line)
                    if m:
                        valid = 1
                        for match in m:
                            (on, ov, oe) = newver
                            ver = (on, match[0], oe)
                            if ver and __vercmp(newver, ver) == True:
                                newver = ver
                        continue
                count += 1
                m = re.search(reg, line)
                if m:
                    valid = 1
                    if not newver_regex:
                        ver = m.group().split("\"")[1].split("/")[-1]
                        if ver == "download":
                            ver = m.group().split("\"")[1].split("/")[-2]
                        ver = parse_dir(ver)
                    else:
                        """ we cheat a little here, but we assume that the
                            regular expression in the recipe will extract exactly
                            the version """
                        (on, ov, oe) = newver
                        ver = (on, m.group('pver'), oe)
                    if ver and __vercmp(newver, ver) == True:
                        newver = ver
            """Expect a match for curver in the directory list, or else it indicates an unknown format"""
            if not valid:
                status = "ErrParseDir"
            else:
                """newver still contains a full package name string"""
                status = re.sub('_', '.', newver[1])
        elif not len(fhtml):
            status = "ErrHostNoDir"

        f.close()
        """if the host has no directory information, there is no need to save the tmp file"""
        if status != "ErrHostNoDir" and re.match("Err", status):
            logpath = d.getVar('LOG_DIR', True)
            subprocess.call("cp %s %s/" % (f.name, logpath), shell=True)
        os.unlink(f.name)
        return status
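
    # Illustrative example: with curname "dri2proto-2.3.tar.gz" and a directory
    # listing that also advertises "dri2proto-2.6.tar.gz", check_new_version()
    # would be expected to return "2.6"; failures are reported as "Err..." strings
    # rather than a version.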

    """first check whether a uri is provided"""
    src_uri = d.getVar('SRC_URI', True)
    if not src_uri:
        return

    """initialize log files."""
    logpath = d.getVar('LOG_DIR', True)
    bb.utils.mkdirhier(logpath)
    logfile = os.path.join(logpath, "checkpkg.csv")

    """generate package information from .bb file"""
    pname = d.getVar('PN', True)

    if pname.find("-native") != -1:
        if d.getVar('BBCLASSEXTEND', True):
            return
        pnstripped = pname.split("-native")
        bb.note("Native Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pname.startswith("nativesdk-"):
        if d.getVar('BBCLASSEXTEND', True):
            return
        pnstripped = pname.replace("nativesdk-", "")
        bb.note("NativeSDK Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pname.find("-cross") != -1:
        pnstripped = pname.split("-cross")
        bb.note("cross Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    if pname.find("-initial") != -1:
        pnstripped = pname.split("-initial")
        bb.note("initial Split: %s" % pnstripped)
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)

    chk_uri = d.getVar('REGEX_URI', True)
    if not chk_uri:
        chk_uri = src_uri
    pdesc = localdata.getVar('DESCRIPTION', True)
    pgrp = localdata.getVar('SECTION', True)
    if localdata.getVar('PRSPV', True):
        pversion = localdata.getVar('PRSPV', True)
    else:
        pversion = localdata.getVar('PV', True)
    plicense = localdata.getVar('LICENSE', True)
    psection = localdata.getVar('SECTION', True)
    phome = localdata.getVar('HOMEPAGE', True)
    prelease = localdata.getVar('PR', True)
    pdepends = localdata.getVar('DEPENDS', True)
    pbugtracker = localdata.getVar('BUGTRACKER', True)
    ppe = localdata.getVar('PE', True)
    psrcuri = localdata.getVar('SRC_URI', True)
    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)

    found = 0
    for uri in src_uri.split():
        m = re.compile('(?P<type>[^:]*)').match(uri)
        if not m:
            raise MalformedUrl(uri)
        elif m.group('type') in ('http', 'https', 'ftp', 'cvs', 'svn', 'git'):
            found = 1
            pproto = m.group('type')
            break
    if not found:
        pproto = "file"
    pupver = "N/A"
    pstatus = "ErrUnknown"

    (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(uri)
    if type in ['http', 'https', 'ftp']:
        if d.getVar('PRSPV', True):
            pcurver = d.getVar('PRSPV', True)
        else:
            pcurver = d.getVar('PV', True)
    else:
        if d.getVar('PRSPV', True):
            pcurver = d.getVar('PRSPV', True)
        else:
            pcurver = d.getVar("SRCREV", True)


    if type in ['http', 'https', 'ftp']:
        newver = pcurver
        altpath = path
        dirver = "-"
        curname = "-"

        """
        match version number amid the path, such as "5.7" in:
            http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
        N.B. how about something like "../5.7/5.8/..."? No such example found so far :-P
        """
        m = re.search(r"[^/]*(\d+\.)+\d+([\-_]r\d+)*/", path)
        if m:
            altpath = path.split(m.group())[0]
            dirver = m.group().strip("/")

        """use the new path and remove params. for wget the only param is md5sum"""
        alturi = bb.fetch.encodeurl([type, host, altpath, user, pswd, {}])
        my_uri = d.getVar('REGEX_URI', True)
        if my_uri:
            if d.getVar('PRSPV', True):
                newver = d.getVar('PRSPV', True)
            else:
                newver = d.getVar('PV', True)
        else:
            newver = check_new_dir(alturi, dirver, d)
        altpath = path
        if not re.match("Err", newver) and dirver != newver:
            altpath = altpath.replace(dirver, newver, True)
        # For folder in folder cases - try to enter the folder again and then try parsing
        """Now try to acquire all remote files in the current directory"""
        if not re.match("Err", newver):
            curname = altpath.split("/")[-1]

            """get remote name by skipping the package name"""
            m = re.search(r"/.*/", altpath)
            if not m:
                altpath = "/"
            else:
                altpath = m.group()

            chk_uri = d.getVar('REGEX_URI', True)
            if not chk_uri:
                alturi = bb.fetch.encodeurl([type, host, altpath, user, pswd, {}])
            else:
                alturi = chk_uri
            newver = check_new_version(alturi, curname, d)
            while(newver == "ErrHostNoDir"):
                if alturi == "/download":
                    break
                else:
                    alturi = "/".join(alturi.split("/")[0:-2]) + "/download"
                    newver = check_new_version(alturi, curname, d)
            if not re.match("Err", newver):
                pupver = newver
                if pupver != pcurver:
                    pstatus = "UPDATE"
                else:
                    pstatus = "MATCH"

        if re.match("Err", newver):
            pstatus = newver + ":" + altpath + ":" + dirver + ":" + curname
    elif type == 'git':
        if user:
            gituser = user + '@'
        else:
            gituser = ""

        if 'protocol' in parm:
            gitproto = parm['protocol']
        else:
            gitproto = "git"

        # Get all tags and HEAD
        if d.getVar('GIT_REGEX', True):
            gitcmd = "git ls-remote %s://%s%s%s %s 2>&1" % (gitproto, gituser, host, path, d.getVar('GIT_REGEX', True))
        else:
            gitcmd = "git ls-remote %s://%s%s%s *tag* 2>&1" % (gitproto, gituser, host, path)
        gitcmd2 = "git ls-remote %s://%s%s%s HEAD 2>&1" % (gitproto, gituser, host, path)

        tmp = os.popen(gitcmd).read()
        if 'unable to connect' in tmp:
            tmp = None
        tmp2 = os.popen(gitcmd2).read()
        if 'unable to connect' in tmp2:
            tmp2 = None
        # This is for those repos that have tags like: refs/tags/1.2.2
        phash = pversion.rsplit("+")[-1]
        if tmp:
            tmpline = tmp.split("\n")
            verflag = 0
            pupver = pversion
            for line in tmpline:
                if len(line) == 0:
                    break
                puptag = line.split("/")[-1]
                upstr_regex = d.getVar('REGEX', True)
                if upstr_regex:
                    puptag = re.search(upstr_regex, puptag)
                else:
                    puptag = re.search("(?P<pver>([0-9][\.|_]?)+)", puptag)
                if puptag == None:
                    continue
                puptag = puptag.group('pver')
                puptag = re.sub("_", ".", puptag)
                plocaltag = pupver.split("+git")[0]
                if "git" in plocaltag:
                    plocaltag = plocaltag.split("-")[0]
                result = bb.utils.vercmp(("0", puptag, ""), ("0", plocaltag, ""))

                if result > 0:
                    verflag = 1
                    pupver = puptag
                elif verflag == 0:
                    pupver = plocaltag
        # This is for those repos with no tags
        elif tmp2:
            pupver = pversion.rsplit("+")[0]
            phash = pupver
        else:
            pstatus = "ErrGitAccess"
        if not ('ErrGitAccess' in pstatus):

            latest_head = tmp2.rsplit("\t")[0][:7]
            tmp3 = re.search('(?P<git_ver>(\d+[\.-]?)+)(?P<git_prefix>(\+git[r|\-|]?)AUTOINC\+)(?P<head_md5>([\w|_]+))', pversion)
            tmp4 = re.search('(?P<git_ver>(\d+[\.-]?)+)(?P<git_prefix>(\+git[r|\-|]?)AUTOINC\+)(?P<head_md5>([\w|_]+))', pupver)
            if not tmp4:
                tmp4 = re.search('(?P<git_ver>(\d+[\.-]?)+)', pupver)

            if tmp3:
                # Get status of the package - MATCH/UPDATE
                result = bb.utils.vercmp(("0", tmp3.group('git_ver'), ""), ("0", tmp3.group('git_ver'), ""))
                # Get the latest tag
                pstatus = 'MATCH'
                if result < 0:
                    latest_pv = tmp3.group('git_ver')
                else:
                    latest_pv = pupver
                if not(tmp3.group('head_md5')[:7] in latest_head) or not(latest_head in tmp3.group('head_md5')[:7]):
                    pstatus = 'UPDATE'

                git_prefix = tmp3.group('git_prefix')
                pupver = latest_pv + tmp3.group('git_prefix') + latest_head
            else:
                if not tmp3:
                    bb.plain("#DEBUG# Package %s: current version (%s) doesn't match the usual pattern" % (pname, pversion))
    elif type == 'svn':
        options = []
        if user:
            options.append("--username %s" % user)
        if pswd:
            options.append("--password %s" % pswd)
        svnproto = 'svn'
        if 'proto' in parm:
            svnproto = parm['proto']
        if 'rev' in parm:
            pcurver = parm['rev']

        svncmd = "svn info %s %s://%s%s/%s/ 2>&1" % (" ".join(options), svnproto, host, path, parm["module"])
        print svncmd
        svninfo = os.popen(svncmd).read()
        if "Can't connect to host " in svninfo or "Connection timed out" in svninfo:
            svncmd = "svn info %s %s://%s%s/%s/ 2>&1" % (" ".join(options), "http",
                host, path, parm["module"])
            svninfo = os.popen(svncmd).read()
        for line in svninfo.split("\n"):
            if re.search("^Last Changed Rev:", line):
                pupver = line.split(" ")[-1]
                if pupver in pversion:
                    pstatus = "MATCH"
                else:
                    pstatus = "UPDATE"

        if re.match("Err", pstatus):
            pstatus = "ErrSvnAccess"

        if pstatus != "ErrSvnAccess":
            tag = pversion.rsplit("+svn")[0]
            svn_prefix = re.search('(\+svn[r|\-]?)', pversion)
            if tag and svn_prefix:
                pupver = tag + svn_prefix.group() + pupver

    elif type == 'cvs':
        pupver = "HEAD"
        pstatus = "UPDATE"
    elif type == 'file':
        """local file is always up-to-date"""
        pupver = pcurver
        pstatus = "MATCH"
    else:
        pstatus = "ErrUnsupportedProto"

    if re.match("Err", pstatus):
        pstatus += ":%s%s" % (host, path)

    """Read from the manual distro tracking fields as an alternative"""
    pmver = d.getVar("RECIPE_UPSTREAM_VERSION", True)
    if not pmver:
        pmver = "N/A"
        pmstatus = "ErrNoRecipeData"
    else:
        if pmver == pcurver:
            pmstatus = "MATCH"
        else:
            pmstatus = "UPDATE"

    psrcuri = psrcuri.split()[0]
    pdepends = "".join(pdepends.split("\t"))
    pdesc = "".join(pdesc.split("\t"))
    no_upgr_reason = d.getVar('RECIPE_NO_UPDATE_REASON', True)
    lf = bb.utils.lockfile("%s.lock" % logfile)
    f = open(logfile, "a")
    f.write("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % \
        (pname, pversion, pupver, plicense, psection, phome, prelease, pdepends, pbugtracker, ppe, pdesc, pstatus, pmver, psrcuri, maintainer, no_upgr_reason))
    f.close()
    bb.utils.unlockfile(lf)
}

addtask checkpkgall after do_checkpkg
do_checkpkgall[recrdeptask] = "do_checkpkgall do_checkpkg"
do_checkpkgall[recideptask] = "do_${BB_DEFAULT_TASK}"
do_checkpkgall[nostamp] = "1"
do_checkpkgall() {
        :
}
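
# A minimal usage sketch (assumptions: the class is inherited via
# INHERIT += "distrodata" in local.conf and the build host has network access):
#
#   bitbake -c checkpkgall <target>
#
# appends one tab-separated row per recipe to ${LOG_DIR}/checkpkg.csv.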

addhandler distro_check_eventhandler
distro_check_eventhandler[eventmask] = "bb.event.BuildStarted"
python distro_check_eventhandler() {
    """initialize log files."""
    import oe.distro_check as dc
    result_file = dc.create_log_file(e.data, "distrocheck.csv")
    return
}

addtask distro_check
do_distro_check[nostamp] = "1"
python do_distro_check() {
    """checks if the package is present in other public Linux distros"""
    import oe.distro_check as dc
    import shutil
    if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('sdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d):
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)
    tmpdir = d.getVar('TMPDIR', True)
    distro_check_dir = os.path.join(tmpdir, "distro_check")
    logpath = d.getVar('LOG_DIR', True)
    bb.utils.mkdirhier(logpath)
    result_file = os.path.join(logpath, "distrocheck.csv")
    datetime = localdata.getVar('DATETIME', True)
    dc.update_distro_data(distro_check_dir, datetime)

    # do the comparison
    result = dc.compare_in_distro_packages_list(distro_check_dir, d)

    # save the results
    dc.save_distro_check_result(result, datetime, result_file, d)
}

addtask distro_checkall after do_distro_check
do_distro_checkall[recrdeptask] = "do_distro_checkall do_distro_check"
do_distro_checkall[recideptask] = "do_${BB_DEFAULT_TASK}"
do_distro_checkall[nostamp] = "1"
do_distro_checkall() {
        :
}
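
# Likewise, a sketch of how the distro comparison is typically driven (same
# assumption that the class is inherited globally):
#
#   bitbake -c distro_checkall <target>
#
# which records whether each recipe is also packaged by other distros in
# ${LOG_DIR}/distrocheck.csv.
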
#
# Check Missing License Text.
# Use this task to generate the missing license text data for the pkg-report system,
# then we can search for those recipes whose license text doesn't exist in the
# common-licenses directory
#
addhandler checklicense_eventhandler
checklicense_eventhandler[eventmask] = "bb.event.BuildStarted"
python checklicense_eventhandler() {
    """initialize log files."""
    import oe.distro_check as dc
    logfile = dc.create_log_file(e.data, "missinglicense.csv")
    lf = bb.utils.lockfile("%s.lock" % logfile)
    f = open(logfile, "a")
    f.write("Package\tLicense\tMissingLicense\n")
    f.close()
    bb.utils.unlockfile(lf)
    return
}

addtask checklicense
do_checklicense[nostamp] = "1"
python do_checklicense() {
    import shutil
    logpath = d.getVar('LOG_DIR', True)
    bb.utils.mkdirhier(logpath)
    pn = d.getVar('PN', True)
    logfile = os.path.join(logpath, "missinglicense.csv")
    generic_directory = d.getVar('COMMON_LICENSE_DIR', True)
    license_types = d.getVar('LICENSE', True)
    for license_type in ((license_types.replace('+', '').replace('|', '&')
                          .replace('(', '').replace(')', '').replace(';', '')
                          .replace(',', '').replace(" ", "").split("&"))):
        if not os.path.isfile(os.path.join(generic_directory, license_type)):
            lf = bb.utils.lockfile("%s.lock" % logfile)
            f = open(logfile, "a")
            f.write("%s\t%s\t%s\n" % \
                (pn, license_types, license_type))
            f.close()
            bb.utils.unlockfile(lf)
    return
}

addtask checklicenseall after do_checklicense
do_checklicenseall[recrdeptask] = "do_checklicenseall do_checklicense"
do_checklicenseall[recideptask] = "do_${BB_DEFAULT_TASK}"
do_checklicenseall[nostamp] = "1"
do_checklicenseall() {
        :
}
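
# A sketch of the corresponding license-text check (same assumption as above):
#
#   bitbake -c checklicenseall <target>
#
# recipes whose LICENSE entries have no matching file under COMMON_LICENSE_DIR
# end up listed in ${LOG_DIR}/missinglicense.csv.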