diff options
Diffstat (limited to 'meta/classes/buildhistory.bbclass')
-rw-r--r-- | meta/classes/buildhistory.bbclass | 696 |
1 files changed, 696 insertions, 0 deletions
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass new file mode 100644 index 0000000000..8b5d5c214c --- /dev/null +++ b/meta/classes/buildhistory.bbclass | |||
@@ -0,0 +1,696 @@ | |||
1 | # | ||
2 | # Records history of build output in order to detect regressions | ||
3 | # | ||
4 | # Based in part on testlab.bbclass and packagehistory.bbclass | ||
5 | # | ||
6 | # Copyright (C) 2011-2014 Intel Corporation | ||
7 | # Copyright (C) 2007-2011 Koen Kooi <koen@openembedded.org> | ||
8 | # | ||
9 | |||
10 | BUILDHISTORY_FEATURES ?= "image package sdk" | ||
11 | BUILDHISTORY_DIR ?= "${TOPDIR}/buildhistory" | ||
12 | BUILDHISTORY_DIR_IMAGE = "${BUILDHISTORY_DIR}/images/${MACHINE_ARCH}/${TCLIBC}/${IMAGE_BASENAME}" | ||
13 | BUILDHISTORY_DIR_PACKAGE = "${BUILDHISTORY_DIR}/packages/${MULTIMACH_TARGET_SYS}/${PN}" | ||
14 | BUILDHISTORY_DIR_SDK = "${BUILDHISTORY_DIR}/sdk/${SDK_NAME}/${IMAGE_BASENAME}" | ||
15 | BUILDHISTORY_IMAGE_FILES ?= "/etc/passwd /etc/group" | ||
16 | BUILDHISTORY_COMMIT ?= "0" | ||
17 | BUILDHISTORY_COMMIT_AUTHOR ?= "buildhistory <buildhistory@${DISTRO}>" | ||
18 | BUILDHISTORY_PUSH_REPO ?= "" | ||
19 | |||
20 | SSTATEPOSTINSTFUNCS_append = " buildhistory_emit_pkghistory" | ||
# We want to avoid influencing the signatures of sstate tasks - first the function itself:
22 | sstate_install[vardepsexclude] += "buildhistory_emit_pkghistory" | ||
23 | # then the value added to SSTATEPOSTINSTFUNCS: | ||
24 | SSTATEPOSTINSTFUNCS[vardepvalueexclude] .= "| buildhistory_emit_pkghistory" | ||
25 | |||
26 | # | ||
# Write out metadata about this package for comparison when writing future packages
28 | # | ||
29 | python buildhistory_emit_pkghistory() { | ||
30 | if not d.getVar('BB_CURRENTTASK', True) in ['packagedata', 'packagedata_setscene']: | ||
31 | return 0 | ||
32 | |||
33 | if not "package" in (d.getVar('BUILDHISTORY_FEATURES', True) or "").split(): | ||
34 | return 0 | ||
35 | |||
36 | import re | ||
37 | import json | ||
38 | import errno | ||
39 | |||
40 | pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) | ||
41 | |||
42 | class RecipeInfo: | ||
43 | def __init__(self, name): | ||
44 | self.name = name | ||
45 | self.pe = "0" | ||
46 | self.pv = "0" | ||
47 | self.pr = "r0" | ||
48 | self.depends = "" | ||
49 | self.packages = "" | ||
50 | self.srcrev = "" | ||
51 | |||
52 | |||
53 | class PackageInfo: | ||
54 | def __init__(self, name): | ||
55 | self.name = name | ||
56 | self.pe = "0" | ||
57 | self.pv = "0" | ||
58 | self.pr = "r0" | ||
59 | # pkg/pkge/pkgv/pkgr should be empty because we want to be able to default them | ||
60 | self.pkg = "" | ||
61 | self.pkge = "" | ||
62 | self.pkgv = "" | ||
63 | self.pkgr = "" | ||
64 | self.size = 0 | ||
65 | self.depends = "" | ||
66 | self.rprovides = "" | ||
67 | self.rdepends = "" | ||
68 | self.rrecommends = "" | ||
69 | self.rsuggests = "" | ||
70 | self.rreplaces = "" | ||
71 | self.rconflicts = "" | ||
72 | self.files = "" | ||
73 | self.filelist = "" | ||
74 | # Variables that need to be written to their own separate file | ||
75 | self.filevars = dict.fromkeys(['pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm']) | ||
76 | |||
77 | # Should check PACKAGES here to see if anything removed | ||
78 | |||
79 | def readPackageInfo(pkg, histfile): | ||
80 | pkginfo = PackageInfo(pkg) | ||
81 | with open(histfile, "r") as f: | ||
82 | for line in f: | ||
83 | lns = line.split('=') | ||
84 | name = lns[0].strip() | ||
85 | value = lns[1].strip(" \t\r\n").strip('"') | ||
86 | if name == "PE": | ||
87 | pkginfo.pe = value | ||
88 | elif name == "PV": | ||
89 | pkginfo.pv = value | ||
90 | elif name == "PR": | ||
91 | pkginfo.pr = value | ||
92 | elif name == "PKG": | ||
93 | pkginfo.pkg = value | ||
94 | elif name == "PKGE": | ||
95 | pkginfo.pkge = value | ||
96 | elif name == "PKGV": | ||
97 | pkginfo.pkgv = value | ||
98 | elif name == "PKGR": | ||
99 | pkginfo.pkgr = value | ||
100 | elif name == "RPROVIDES": | ||
101 | pkginfo.rprovides = value | ||
102 | elif name == "RDEPENDS": | ||
103 | pkginfo.rdepends = value | ||
104 | elif name == "RRECOMMENDS": | ||
105 | pkginfo.rrecommends = value | ||
106 | elif name == "RSUGGESTS": | ||
107 | pkginfo.rsuggests = value | ||
108 | elif name == "RREPLACES": | ||
109 | pkginfo.rreplaces = value | ||
110 | elif name == "RCONFLICTS": | ||
111 | pkginfo.rconflicts = value | ||
112 | elif name == "PKGSIZE": | ||
113 | pkginfo.size = long(value) | ||
114 | elif name == "FILES": | ||
115 | pkginfo.files = value | ||
116 | elif name == "FILELIST": | ||
117 | pkginfo.filelist = value | ||
118 | # Apply defaults | ||
119 | if not pkginfo.pkg: | ||
120 | pkginfo.pkg = pkginfo.name | ||
121 | if not pkginfo.pkge: | ||
122 | pkginfo.pkge = pkginfo.pe | ||
123 | if not pkginfo.pkgv: | ||
124 | pkginfo.pkgv = pkginfo.pv | ||
125 | if not pkginfo.pkgr: | ||
126 | pkginfo.pkgr = pkginfo.pr | ||
127 | return pkginfo | ||
128 | |||
129 | def getlastpkgversion(pkg): | ||
130 | try: | ||
131 | histfile = os.path.join(pkghistdir, pkg, "latest") | ||
132 | return readPackageInfo(pkg, histfile) | ||
133 | except EnvironmentError: | ||
134 | return None | ||
135 | |||
136 | def sortpkglist(string): | ||
137 | pkgiter = re.finditer(r'[a-zA-Z0-9.+-]+( \([><=]+ [^ )]+\))?', string, 0) | ||
138 | pkglist = [p.group(0) for p in pkgiter] | ||
139 | pkglist.sort() | ||
140 | return ' '.join(pkglist) | ||
141 | |||
142 | def sortlist(string): | ||
143 | items = string.split(' ') | ||
144 | items.sort() | ||
145 | return ' '.join(items) | ||
146 | |||
147 | pn = d.getVar('PN', True) | ||
148 | pe = d.getVar('PE', True) or "0" | ||
149 | pv = d.getVar('PV', True) | ||
150 | pr = d.getVar('PR', True) | ||
151 | |||
152 | pkgdata_dir = d.getVar('PKGDATA_DIR', True) | ||
153 | packages = "" | ||
154 | try: | ||
155 | with open(os.path.join(pkgdata_dir, pn)) as f: | ||
156 | for line in f.readlines(): | ||
157 | if line.startswith('PACKAGES: '): | ||
158 | packages = squashspaces(line.split(': ', 1)[1]) | ||
159 | break | ||
160 | except IOError as e: | ||
161 | if e.errno == errno.ENOENT: | ||
162 | # Probably a -cross recipe, just ignore | ||
163 | return 0 | ||
164 | else: | ||
165 | raise | ||
166 | |||
167 | packagelist = packages.split() | ||
168 | if not os.path.exists(pkghistdir): | ||
169 | bb.utils.mkdirhier(pkghistdir) | ||
170 | else: | ||
171 | # Remove files for packages that no longer exist | ||
172 | for item in os.listdir(pkghistdir): | ||
173 | if item != "latest" and item != "latest_srcrev": | ||
174 | if item not in packagelist: | ||
175 | subdir = os.path.join(pkghistdir, item) | ||
176 | for subfile in os.listdir(subdir): | ||
177 | os.unlink(os.path.join(subdir, subfile)) | ||
178 | os.rmdir(subdir) | ||
179 | |||
180 | rcpinfo = RecipeInfo(pn) | ||
181 | rcpinfo.pe = pe | ||
182 | rcpinfo.pv = pv | ||
183 | rcpinfo.pr = pr | ||
184 | rcpinfo.depends = sortlist(squashspaces(d.getVar('DEPENDS', True) or "")) | ||
185 | rcpinfo.packages = packages | ||
186 | write_recipehistory(rcpinfo, d) | ||
187 | |||
188 | pkgdest = d.getVar('PKGDEST', True) | ||
189 | for pkg in packagelist: | ||
190 | pkgdata = {} | ||
191 | with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f: | ||
192 | for line in f.readlines(): | ||
193 | item = line.rstrip('\n').split(': ', 1) | ||
194 | key = item[0] | ||
195 | if key.endswith('_' + pkg): | ||
196 | key = key[:-len(pkg)-1] | ||
197 | pkgdata[key] = item[1].decode('utf-8').decode('string_escape') | ||
198 | |||
199 | pkge = pkgdata.get('PKGE', '0') | ||
200 | pkgv = pkgdata['PKGV'] | ||
201 | pkgr = pkgdata['PKGR'] | ||
202 | # | ||
203 | # Find out what the last version was | ||
204 | # Make sure the version did not decrease | ||
205 | # | ||
206 | lastversion = getlastpkgversion(pkg) | ||
207 | if lastversion: | ||
208 | last_pkge = lastversion.pkge | ||
209 | last_pkgv = lastversion.pkgv | ||
210 | last_pkgr = lastversion.pkgr | ||
211 | r = bb.utils.vercmp((pkge, pkgv, pkgr), (last_pkge, last_pkgv, last_pkgr)) | ||
212 | if r < 0: | ||
213 | msg = "Package version for package %s went backwards which would break package feeds from (%s:%s-%s to %s:%s-%s)" % (pkg, last_pkge, last_pkgv, last_pkgr, pkge, pkgv, pkgr) | ||
214 | package_qa_handle_error("version-going-backwards", msg, d) | ||
215 | |||
216 | pkginfo = PackageInfo(pkg) | ||
217 | # Apparently the version can be different on a per-package basis (see Python) | ||
218 | pkginfo.pe = pkgdata.get('PE', '0') | ||
219 | pkginfo.pv = pkgdata['PV'] | ||
220 | pkginfo.pr = pkgdata['PR'] | ||
221 | pkginfo.pkg = pkgdata['PKG'] | ||
222 | pkginfo.pkge = pkge | ||
223 | pkginfo.pkgv = pkgv | ||
224 | pkginfo.pkgr = pkgr | ||
225 | pkginfo.rprovides = sortpkglist(squashspaces(pkgdata.get('RPROVIDES', ""))) | ||
226 | pkginfo.rdepends = sortpkglist(squashspaces(pkgdata.get('RDEPENDS', ""))) | ||
227 | pkginfo.rrecommends = sortpkglist(squashspaces(pkgdata.get('RRECOMMENDS', ""))) | ||
228 | pkginfo.rsuggests = sortpkglist(squashspaces(pkgdata.get('RSUGGESTS', ""))) | ||
229 | pkginfo.rreplaces = sortpkglist(squashspaces(pkgdata.get('RREPLACES', ""))) | ||
230 | pkginfo.rconflicts = sortpkglist(squashspaces(pkgdata.get('RCONFLICTS', ""))) | ||
231 | pkginfo.files = squashspaces(pkgdata.get('FILES', "")) | ||
232 | for filevar in pkginfo.filevars: | ||
233 | pkginfo.filevars[filevar] = pkgdata.get(filevar, "") | ||
234 | |||
235 | # Gather information about packaged files | ||
236 | val = pkgdata.get('FILES_INFO', '') | ||
237 | dictval = json.loads(val) | ||
238 | filelist = dictval.keys() | ||
239 | filelist.sort() | ||
240 | pkginfo.filelist = " ".join(filelist) | ||
241 | |||
242 | pkginfo.size = int(pkgdata['PKGSIZE']) | ||
243 | |||
244 | write_pkghistory(pkginfo, d) | ||
245 | } | ||
246 | |||
247 | |||
def write_recipehistory(rcpinfo, d):
    """Write recipe-level buildhistory data to the recipe's 'latest' file."""
    import codecs

    bb.debug(2, "Writing recipe history")

    histdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
    latest = os.path.join(histdir, "latest")
    with codecs.open(latest, "w", encoding='utf8') as outfile:
        # PE is only recorded when it was explicitly set (i.e. not the "0" default)
        if rcpinfo.pe != "0":
            outfile.write(u"PE = %s\n" % rcpinfo.pe)
        for label, value in [(u"PV", rcpinfo.pv),
                             (u"PR", rcpinfo.pr),
                             (u"DEPENDS", rcpinfo.depends),
                             (u"PACKAGES", rcpinfo.packages)]:
            outfile.write(u"%s = %s\n" % (label, value))
263 | |||
264 | |||
def write_pkghistory(pkginfo, d):
    """Write per-package buildhistory data for a single package.

    Creates <BUILDHISTORY_DIR_PACKAGE>/<pkg>/latest holding the package's
    version, dependency and file information, plus one latest.<var> file for
    each non-empty packaging script variable (pkg_preinst etc.). Script files
    whose variable is now empty are removed.
    """
    import codecs

    bb.debug(2, "Writing package history for package %s" % pkginfo.name)

    histdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
    pkgpath = os.path.join(histdir, pkginfo.name)
    if not os.path.exists(pkgpath):
        bb.utils.mkdirhier(pkgpath)

    infofile = os.path.join(pkgpath, "latest")
    with codecs.open(infofile, "w", encoding='utf8') as f:
        # PE is only recorded when it was explicitly set
        if pkginfo.pe != "0":
            f.write(u"PE = %s\n" % pkginfo.pe)
        f.write(u"PV = %s\n" % pkginfo.pv)
        f.write(u"PR = %s\n" % pkginfo.pr)

        # PKG* values are only recorded when they differ from their
        # recipe-level defaults (and are non-empty)
        for varname, value, default in [('PKG', pkginfo.pkg, pkginfo.name),
                                        ('PKGE', pkginfo.pkge, pkginfo.pe),
                                        ('PKGV', pkginfo.pkgv, pkginfo.pv),
                                        ('PKGR', pkginfo.pkgr, pkginfo.pr)]:
            if value and value != default:
                f.write(u"%s = %s\n" % (varname, value))

        f.write(u"RPROVIDES = %s\n" % pkginfo.rprovides)
        f.write(u"RDEPENDS = %s\n" % pkginfo.rdepends)
        f.write(u"RRECOMMENDS = %s\n" % pkginfo.rrecommends)
        # The less common runtime relationships are only written when set
        if pkginfo.rsuggests:
            f.write(u"RSUGGESTS = %s\n" % pkginfo.rsuggests)
        if pkginfo.rreplaces:
            f.write(u"RREPLACES = %s\n" % pkginfo.rreplaces)
        if pkginfo.rconflicts:
            f.write(u"RCONFLICTS = %s\n" % pkginfo.rconflicts)
        f.write(u"PKGSIZE = %d\n" % pkginfo.size)
        f.write(u"FILES = %s\n" % pkginfo.files)
        f.write(u"FILELIST = %s\n" % pkginfo.filelist)

    for scriptvar in pkginfo.filevars:
        scriptfile = os.path.join(pkgpath, "latest.%s" % scriptvar)
        content = pkginfo.filevars[scriptvar]
        if content:
            with codecs.open(scriptfile, "w", encoding='utf8') as sf:
                sf.write(content)
        else:
            # Variable no longer set - drop any stale file from a previous build
            if os.path.exists(scriptfile):
                os.unlink(scriptfile)
315 | |||
316 | # | ||
317 | # rootfs_type can be: image, sdk_target, sdk_host | ||
318 | # | ||
def buildhistory_list_installed(d, rootfs_type="image"):
    """Dump the installed package list and dependency data to WORKDIR files.

    rootfs_type can be: image, sdk_target, sdk_host. Writes
    bh_installed_pkgs.txt (package list) and bh_installed_pkgs_deps.txt
    (dependency data) for later processing by buildhistory_get_installed.
    """
    from oe.rootfs import image_list_installed_packages
    from oe.sdk import sdk_list_installed_packages

    workdir = d.getVar('WORKDIR', True)
    for list_type, list_file in (('file', 'bh_installed_pkgs.txt'),
                                 ('deps', 'bh_installed_pkgs_deps.txt')):
        destination = os.path.join(workdir, list_file)
        with open(destination, 'w') as out:
            if rootfs_type == "image":
                out.write(image_list_installed_packages(d, list_type))
            else:
                out.write(sdk_list_installed_packages(d, rootfs_type == "sdk_target", list_type))
334 | |||
python buildhistory_list_installed_image() {
    # Task-callable wrapper: list packages installed into the image rootfs
    buildhistory_list_installed(d)
}
338 | |||
python buildhistory_list_installed_sdk_target() {
    # Task-callable wrapper: list packages installed into the SDK target sysroot
    buildhistory_list_installed(d, "sdk_target")
}
342 | |||
python buildhistory_list_installed_sdk_host() {
    # Task-callable wrapper: list packages installed into the SDK host part
    buildhistory_list_installed(d, "sdk_host")
}
346 | |||
buildhistory_get_installed() {
	# Process the package lists written by buildhistory_list_installed into
	# buildhistory output under directory $1: package name list, package file
	# list, per-package sizes and a dot-format dependency graph.
	# $2 is "sdk" when called for an SDK, which skips the cut-down graphs.
	mkdir -p $1

	# Get list of installed packages
	pkgcache="$1/installed-packages.tmp"
	cat ${WORKDIR}/bh_installed_pkgs.txt | sort > $pkgcache && rm ${WORKDIR}/bh_installed_pkgs.txt

	# Cache columns (from the list file): 1=package name, 2=package file path, 3=arch
	cat $pkgcache | awk '{ print $1 }' > $1/installed-package-names.txt
	if [ -s $pkgcache ] ; then
		cat $pkgcache | awk '{ print $2 }' | xargs -n1 basename > $1/installed-packages.txt
	else
		# xargs would fail on empty input, so create the empty file directly
		printf "" > $1/installed-packages.txt
	fi

	# Produce dependency graph
	# First, quote each name to handle characters that cause issues for dot
	sed 's:\([^| ]*\):"\1":g' ${WORKDIR}/bh_installed_pkgs_deps.txt > $1/depends.tmp && \
		rm ${WORKDIR}/bh_installed_pkgs_deps.txt
	# Change delimiter from pipe to -> and set style for recommend lines
	sed -i -e 's:|: -> :' -e 's:"\[REC\]":[style=dotted]:' -e 's:$:;:' $1/depends.tmp
	# Add header, sorted and de-duped contents and footer and then delete the temp file
	printf "digraph depends {\n    node [shape=plaintext]\n" > $1/depends.dot
	cat $1/depends.tmp | sort | uniq >> $1/depends.dot
	echo "}" >>  $1/depends.dot
	rm $1/depends.tmp

	# Produce installed package sizes list
	printf "" > $1/installed-package-sizes.tmp
	cat $pkgcache | while read pkg pkgfile pkgarch
	do
		# Look up PKGSIZE from pkgdata; packages without a size are skipped
		size=`oe-pkgdata-util read-value ${PKGDATA_DIR} "PKGSIZE" ${pkg}_${pkgarch}`
		if [ "$size" != "" ] ; then
			echo "$size $pkg" >> $1/installed-package-sizes.tmp
		fi
	done
	cat $1/installed-package-sizes.tmp | sort -n -r | awk '{print $1 "\tKiB " $2}' > $1/installed-package-sizes.txt
	rm $1/installed-package-sizes.tmp

	# We're now done with the cache, delete it
	rm $pkgcache

	if [ "$2" != "sdk" ] ; then
		# Produce some cut-down graphs (for readability)
		grep -v kernel_image $1/depends.dot | grep -v kernel-2 | grep -v kernel-3 > $1/depends-nokernel.dot
		grep -v libc6 $1/depends-nokernel.dot | grep -v libgcc > $1/depends-nokernel-nolibc.dot
		grep -v update- $1/depends-nokernel-nolibc.dot > $1/depends-nokernel-nolibc-noupdate.dot
		grep -v kernel-module $1/depends-nokernel-nolibc-noupdate.dot > $1/depends-nokernel-nolibc-noupdate-nomodules.dot
	fi

	# add complementary package information
	if [ -e ${WORKDIR}/complementary_pkgs.txt ]; then
		cp ${WORKDIR}/complementary_pkgs.txt $1
	fi
}
401 | |||
buildhistory_get_image_installed() {
	# Anything requiring the use of the packaging system should be done in here
	# in case the packaging files are going to be removed for this image

	# Skip entirely unless "image" is enabled in BUILDHISTORY_FEATURES
	if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'image', '1', '0', d)}" = "0" ] ; then
		return
	fi

	buildhistory_get_installed ${BUILDHISTORY_DIR_IMAGE}
}
412 | |||
buildhistory_get_sdk_installed() {
	# Anything requiring the use of the packaging system should be done in here
	# in case the packaging files are going to be removed for this SDK
	# $1 is "host" or "target" and selects the SDK sub-directory to record

	# Skip entirely unless "sdk" is enabled in BUILDHISTORY_FEATURES
	if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'sdk', '1', '0', d)}" = "0" ] ; then
		return
	fi

	buildhistory_get_installed ${BUILDHISTORY_DIR_SDK}/$1 sdk
}
423 | |||
buildhistory_get_sdk_installed_host() {
	# Convenience wrapper for the SDK host part
	buildhistory_get_sdk_installed host
}
427 | |||
buildhistory_get_sdk_installed_target() {
	# Convenience wrapper for the SDK target part
	buildhistory_get_sdk_installed target
}
431 | |||
buildhistory_list_files() {
	# List the files in directory $1 into file $2, excluding date/time etc.
	# so the output is stable between builds.
	# The trailing sed strips the " -> " suffix that find's %l prints for
	# non-symlinks (empty link target); this also copes with device files
	# under pseudo where the size column may be blank.
	( cd $1 && find . -printf "%M %-10u %-10g %10s %p -> %l\n" | sort -k5 | sed 's/ * -> $//' > $2 )
}
437 | |||
438 | |||
buildhistory_get_imageinfo() {
	# Record the contents and key configuration values of the finished image
	# under BUILDHISTORY_DIR_IMAGE (runs as an IMAGE_POSTPROCESS_COMMAND)
	if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'image', '1', '0', d)}" = "0" ] ; then
		return
	fi

	buildhistory_list_files ${IMAGE_ROOTFS} ${BUILDHISTORY_DIR_IMAGE}/files-in-image.txt

	# Collect files requested in BUILDHISTORY_IMAGE_FILES
	rm -rf ${BUILDHISTORY_DIR_IMAGE}/image-files
	for f in ${BUILDHISTORY_IMAGE_FILES}; do
		if [ -f ${IMAGE_ROOTFS}/$f ] ; then
			mkdir -p ${BUILDHISTORY_DIR_IMAGE}/image-files/`dirname $f`
			cp ${IMAGE_ROOTFS}/$f ${BUILDHISTORY_DIR_IMAGE}/image-files/$f
		fi
	done

	# Record some machine-readable meta-information about the image
	printf "" > ${BUILDHISTORY_DIR_IMAGE}/image-info.txt
	cat >> ${BUILDHISTORY_DIR_IMAGE}/image-info.txt <<END
${@buildhistory_get_imagevars(d)}
END
	imagesize=`du -ks ${IMAGE_ROOTFS} | awk '{ print $1 }'`
	echo "IMAGESIZE = $imagesize" >> ${BUILDHISTORY_DIR_IMAGE}/image-info.txt

	# Add some configuration information
	echo "${MACHINE}: ${IMAGE_BASENAME} configured for ${DISTRO} ${DISTRO_VERSION}" > ${BUILDHISTORY_DIR_IMAGE}/build-id.txt

	cat >> ${BUILDHISTORY_DIR_IMAGE}/build-id.txt <<END
${@buildhistory_get_build_id(d)}
END
}
470 | |||
buildhistory_get_sdkinfo() {
	# Record the contents and key configuration values of the generated SDK
	# under BUILDHISTORY_DIR_SDK (runs as an SDK_POSTPROCESS_COMMAND)
	if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'sdk', '1', '0', d)}" = "0" ] ; then
		return
	fi

	buildhistory_list_files ${SDK_OUTPUT} ${BUILDHISTORY_DIR_SDK}/files-in-sdk.txt

	# Record some machine-readable meta-information about the SDK
	printf "" > ${BUILDHISTORY_DIR_SDK}/sdk-info.txt
	cat >> ${BUILDHISTORY_DIR_SDK}/sdk-info.txt <<END
${@buildhistory_get_sdkvars(d)}
END
	sdksize=`du -ks ${SDK_OUTPUT} | awk '{ print $1 }'`
	echo "SDKSIZE = $sdksize" >> ${BUILDHISTORY_DIR_SDK}/sdk-info.txt
}
486 | |||
487 | # By prepending we get in before the removal of packaging files | ||
488 | ROOTFS_POSTPROCESS_COMMAND =+ " buildhistory_list_installed_image ;\ | ||
489 | buildhistory_get_image_installed ; " | ||
490 | |||
491 | IMAGE_POSTPROCESS_COMMAND += " buildhistory_get_imageinfo ; " | ||
492 | |||
493 | # We want these to be the last run so that we get called after complementary package installation | ||
494 | POPULATE_SDK_POST_TARGET_COMMAND_append = " buildhistory_list_installed_sdk_target ;\ | ||
495 | buildhistory_get_sdk_installed_target ; " | ||
496 | POPULATE_SDK_POST_HOST_COMMAND_append = " buildhistory_list_installed_sdk_host ;\ | ||
497 | buildhistory_get_sdk_installed_host ; " | ||
498 | |||
499 | SDK_POSTPROCESS_COMMAND += "buildhistory_get_sdkinfo ; " | ||
500 | |||
def buildhistory_get_build_id(d):
    """Return a build configuration summary: BUILDCFG_HEADER plus the lines
    produced by each function listed in BUILDCFG_FUNCS.

    Returns "" outside of worker context, where variables are not usable.
    """
    if d.getVar('BB_WORKERCONTEXT', True) != '1':
        return ""
    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)
    statuslines = []
    for funcname in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
        func = globals().get(funcname)
        if func is None:
            bb.warn("Build configuration function '%s' does not exist" % funcname)
            continue
        extra = func(localdata)
        if extra:
            statuslines.extend(extra)
    header = d.getVar('BUILDCFG_HEADER', True)
    return '\n%s\n%s\n' % (header, '\n'.join(statuslines))
518 | |||
def buildhistory_get_metadata_revs(d):
    """Return one "layername = branch:revision" line per configured layer."""
    # We want an easily machine-readable format here, so get_layers_branch_rev
    # isn't quite what we want
    revlines = []
    for layerdir in (d.getVar("BBLAYERS", True) or "").split():
        branch = base_get_metadata_git_branch(layerdir, None).strip()
        rev = base_get_metadata_git_revision(layerdir, None)
        revlines.append("%-17s = %s:%s" % (os.path.basename(layerdir), branch, rev))
    return '\n'.join(revlines)
527 | |||
528 | |||
def squashspaces(string):
    """Collapse every run of whitespace in *string* to a single space and
    strip leading/trailing whitespace."""
    import re
    # Raw string avoids the invalid-escape-sequence warning that a plain
    # "\s+" literal produces on modern Python
    return re.sub(r"\s+", " ", string).strip()
532 | |||
def outputvars(vars, listvars, d):
    """Return "VAR = value" lines for every variable named in *vars*.

    *vars* and *listvars* are space-separated variable-name strings; values
    for names also present in *listvars* get their whitespace squashed so
    formatting differences don't show up as history changes.
    """
    squashnames = listvars.split()
    lines = []
    for varname in vars.split():
        value = d.getVar(varname, True) or ""
        if varname in squashnames:
            # Squash out spaces
            value = squashspaces(value)
        lines.append("%s = %s" % (varname, value))
    return '\n'.join(lines)
544 | |||
def buildhistory_get_imagevars(d):
    """Return machine-readable image configuration values for image-info.txt."""
    # Variables are only reliably expanded within a task (worker) context
    if d.getVar('BB_WORKERCONTEXT', True) != '1':
        return ""
    recorded = "DISTRO DISTRO_VERSION USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE ROOTFS_POSTPROCESS_COMMAND IMAGE_POSTPROCESS_COMMAND"
    squashed = "USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS PACKAGE_EXCLUDE"
    return outputvars(recorded, squashed, d)
551 | |||
def buildhistory_get_sdkvars(d):
    """Return machine-readable SDK configuration values for sdk-info.txt."""
    # Variables are only reliably expanded within a task (worker) context
    if d.getVar('BB_WORKERCONTEXT', True) != '1':
        return ""
    recorded = "DISTRO DISTRO_VERSION SDK_NAME SDK_VERSION SDKMACHINE SDKIMAGE_FEATURES BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE"
    squashed = "SDKIMAGE_FEATURES BAD_RECOMMENDATIONS PACKAGE_EXCLUDE"
    return outputvars(recorded, squashed, d)
558 | |||
559 | |||
def buildhistory_get_cmdline(d):
    """Reconstruct the command line used to invoke this build."""
    argv0 = sys.argv[0]
    # Shorten the full interpreter path when it is the standard bitbake entry point
    bincmd = 'bitbake' if argv0.endswith('bin/bitbake') else argv0
    return '%s %s' % (bincmd, ' '.join(sys.argv[1:]))
566 | |||
567 | |||
buildhistory_commit() {
	# Commit the buildhistory output into a git repository inside
	# BUILDHISTORY_DIR, one commit per top-level directory that changed,
	# optionally pushing to BUILDHISTORY_PUSH_REPO.
	if [ ! -d ${BUILDHISTORY_DIR} ] ; then
		# Code above that creates this dir never executed, so there can't be anything to commit
		return
	fi

	# Create a machine-readable list of metadata revisions for each layer
	cat > ${BUILDHISTORY_DIR}/metadata-revs <<END
${@buildhistory_get_metadata_revs(d)}
END

	( cd ${BUILDHISTORY_DIR}/
		# Initialise the repo if necessary
		if [ ! -d .git ] ; then
			git init -q
		else
			# Maintain rolling build-minus-1/2/3 tags pointing at recent builds
			git tag -f build-minus-3 build-minus-2 > /dev/null 2>&1 || true
			git tag -f build-minus-2 build-minus-1 > /dev/null 2>&1 || true
			git tag -f build-minus-1 > /dev/null 2>&1 || true
		fi
		# Check if there are new/changed files to commit (other than metadata-revs)
		repostatus=`git status --porcelain | grep -v " metadata-revs$"`
		HOSTNAME=`hostname 2>/dev/null || echo unknown`
		CMDLINE="${@buildhistory_get_cmdline(d)}"
		if [ "$repostatus" != "" ] ; then
			git add -A .
			# porcelain output looks like "?? packages/foo/bar"
			# Ensure we commit metadata-revs with the first commit
			for entry in `echo "$repostatus" | awk '{print $2}' | awk -F/ '{print $1}' | sort | uniq` ; do
				git commit $entry metadata-revs -m "$entry: Build ${BUILDNAME} of ${DISTRO} ${DISTRO_VERSION} for machine ${MACHINE} on $HOSTNAME" -m "cmd: $CMDLINE" --author "${BUILDHISTORY_COMMIT_AUTHOR}" > /dev/null
			done
			git gc --auto --quiet
			if [ "${BUILDHISTORY_PUSH_REPO}" != "" ] ; then
				git push -q ${BUILDHISTORY_PUSH_REPO}
			fi
		else
			# Nothing changed: still record that the build happened
			git commit ${BUILDHISTORY_DIR}/ --allow-empty -m "No changes: Build ${BUILDNAME} of ${DISTRO} ${DISTRO_VERSION} for machine ${MACHINE} on $HOSTNAME" -m "cmd: $CMDLINE" --author "${BUILDHISTORY_COMMIT_AUTHOR}" > /dev/null
		fi) || true
}
607 | |||
python buildhistory_eventhandler() {
    # Runs at the end of the build (eventmask is bb.event.BuildCompleted);
    # commits the buildhistory output to git when BUILDHISTORY_COMMIT is "1"
    if e.data.getVar('BUILDHISTORY_FEATURES', True).strip():
        if e.data.getVar("BUILDHISTORY_COMMIT", True) == "1":
            bb.note("Writing buildhistory")
            bb.build.exec_func("buildhistory_commit", e.data)
}
614 | |||
615 | addhandler buildhistory_eventhandler | ||
616 | buildhistory_eventhandler[eventmask] = "bb.event.BuildCompleted" | ||
617 | |||
618 | |||
619 | # FIXME this ought to be moved into the fetcher | ||
def _get_srcrev_values(d):
    """
    Return the version strings for the current recipe

    Returns a tuple (dict_srcrevs, dict_tag_srcrevs): the first maps each
    SCM name in SRC_URI to its sortable revision, the second maps
    "<name>_<tag>" keys to revisions for URLs that specify a tag= parameter.
    """

    # Only URLs whose fetch method supports source revisions are of interest
    scms = []
    fetcher = bb.fetch.Fetch(d.getVar('SRC_URI', True).split(), d)
    urldata = fetcher.ud
    for u in urldata:
        if urldata[u].method.supports_srcrev():
            scms.append(u)

    autoinc_templ = 'AUTOINC+'
    dict_srcrevs = {}
    dict_tag_srcrevs = {}
    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            try:
                rev = ud.method.sortable_revision(ud, d, name)
            except TypeError:
                # support old bitbake versions
                rev = ud.method.sortable_revision(scm, ud, d, name)
            # Clean this up when we next bump bitbake version
            # (newer bitbake returns an (autoinc, rev) tuple; older returns a
            # string possibly prefixed with AUTOINC+)
            if type(rev) != str:
                autoinc, rev = rev
            elif rev.startswith(autoinc_templ):
                rev = rev[len(autoinc_templ):]
            dict_srcrevs[name] = rev
            if 'tag' in ud.parm:
                tag = ud.parm['tag'];
                key = name+'_'+tag
                dict_tag_srcrevs[key] = rev
    return (dict_srcrevs, dict_tag_srcrevs)
654 | |||
655 | do_fetch[postfuncs] += "write_srcrev" | ||
656 | do_fetch[vardepsexclude] += "write_srcrev" | ||
657 | python write_srcrev() { | ||
658 | pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) | ||
659 | srcrevfile = os.path.join(pkghistdir, 'latest_srcrev') | ||
660 | |||
661 | srcrevs, tag_srcrevs = _get_srcrev_values(d) | ||
662 | if srcrevs: | ||
663 | if not os.path.exists(pkghistdir): | ||
664 | bb.utils.mkdirhier(pkghistdir) | ||
665 | old_tag_srcrevs = {} | ||
666 | if os.path.exists(srcrevfile): | ||
667 | with open(srcrevfile) as f: | ||
668 | for line in f: | ||
669 | if line.startswith('# tag_'): | ||
670 | key, value = line.split("=", 1) | ||
671 | key = key.replace('# tag_', '').strip() | ||
672 | value = value.replace('"', '').strip() | ||
673 | old_tag_srcrevs[key] = value | ||
674 | with open(srcrevfile, 'w') as f: | ||
675 | orig_srcrev = d.getVar('SRCREV', False) or 'INVALID' | ||
676 | if orig_srcrev != 'INVALID': | ||
677 | f.write('# SRCREV = "%s"\n' % orig_srcrev) | ||
678 | if len(srcrevs) > 1: | ||
679 | for name, srcrev in srcrevs.items(): | ||
680 | orig_srcrev = d.getVar('SRCREV_%s' % name, False) | ||
681 | if orig_srcrev: | ||
682 | f.write('# SRCREV_%s = "%s"\n' % (name, orig_srcrev)) | ||
683 | f.write('SRCREV_%s = "%s"\n' % (name, srcrev)) | ||
684 | else: | ||
685 | f.write('SRCREV = "%s"\n' % srcrevs.itervalues().next()) | ||
686 | if len(tag_srcrevs) > 0: | ||
687 | for name, srcrev in tag_srcrevs.items(): | ||
688 | f.write('# tag_%s = "%s"\n' % (name, srcrev)) | ||
689 | if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev: | ||
690 | pkg = d.getVar('PN', True) | ||
691 | bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev)) | ||
692 | |||
693 | else: | ||
694 | if os.path.exists(srcrevfile): | ||
695 | os.remove(srcrevfile) | ||
696 | } | ||