summaryrefslogtreecommitdiffstats
path: root/meta/classes/archiver.bbclass
diff options
context:
space:
mode:
authorAdrian Dudau <adrian.dudau@enea.com>2013-12-12 13:38:32 +0100
committerAdrian Dudau <adrian.dudau@enea.com>2013-12-12 13:50:20 +0100
commite2e6f6fe07049f33cb6348780fa975162752e421 (patch)
treeb1813295411235d1297a0ed642b1346b24fdfb12 /meta/classes/archiver.bbclass
downloadpoky-e2e6f6fe07049f33cb6348780fa975162752e421.tar.gz
initial commit of Enea Linux 3.1
Migrated from the internal git server on the dora-enea branch Signed-off-by: Adrian Dudau <adrian.dudau@enea.com>
Diffstat (limited to 'meta/classes/archiver.bbclass')
-rw-r--r--meta/classes/archiver.bbclass569
1 files changed, 569 insertions, 0 deletions
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
new file mode 100644
index 0000000000..66efe7d54b
--- /dev/null
+++ b/meta/classes/archiver.bbclass
@@ -0,0 +1,569 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This file is used for archiving sources, patches, and logs to a
# tarball. It also outputs the build environment to xxx.dump.data and
# creates xxx.diff.gz to record all content in ${S} in a diff file.
#

# Directory names excluded when creating the .diff.gz (see create_diff_gz)
ARCHIVE_EXCLUDE_FROM ?= ".pc autom4te.cache"
# The set of valid values for SOURCE_ARCHIVE_PACKAGE_TYPE
ARCHIVE_TYPE ?= "tar srpm"
PATCHES_ARCHIVE_WITH_SERIES = 'yes'
SOURCE_ARCHIVE_LOG_WITH_SCRIPTS ?= '${@d.getVarFlag('ARCHIVER_MODE', 'log_type') \
    if d.getVarFlag('ARCHIVER_MODE', 'log_type') != 'none' else 'logs_with_scripts'}'
# Fixed: the condition previously tested the 'log_type' flag (copy-paste
# from the line above) while fetching the 'type' flag; both must use 'type'.
SOURCE_ARCHIVE_PACKAGE_TYPE ?= '${@d.getVarFlag('ARCHIVER_MODE', 'type') \
    if d.getVarFlag('ARCHIVER_MODE', 'type') != 'none' else 'tar'}'
FILTER ?= '${@d.getVarFlag('ARCHIVER_MODE', 'filter') \
    if d.getVarFlag('ARCHIVER_MODE', 'filter') != 'none' else 'no'}'
COPYLEFT_LICENSE_INCLUDE ?= 'GPL* LGPL*'
COPYLEFT_LICENSE_INCLUDE[type] = 'list'
COPYLEFT_LICENSE_INCLUDE[doc] = 'Space separated list of globs which include licenses'

COPYLEFT_LICENSE_EXCLUDE ?= 'CLOSED Proprietary'
COPYLEFT_LICENSE_EXCLUDE[type] = 'list'
# Fixed: this doc flag was set on COPYLEFT_LICENSE_INCLUDE (copy-paste),
# overwriting the include doc and leaving exclude undocumented.
COPYLEFT_LICENSE_EXCLUDE[doc] = 'Space separated list of globs which exclude licenses'

COPYLEFT_RECIPE_TYPE ?= '${@copyleft_recipe_type(d)}'
COPYLEFT_RECIPE_TYPE[doc] = 'The "type" of the current recipe (e.g. target, native, cross)'

COPYLEFT_RECIPE_TYPES ?= 'target'
COPYLEFT_RECIPE_TYPES[type] = 'list'
COPYLEFT_RECIPE_TYPES[doc] = 'Space separated list of recipe types to include'

COPYLEFT_AVAILABLE_RECIPE_TYPES = 'target native nativesdk cross crosssdk cross-canadian'
COPYLEFT_AVAILABLE_RECIPE_TYPES[type] = 'list'
COPYLEFT_AVAILABLE_RECIPE_TYPES[doc] = 'Space separated list of available recipe types'
def copyleft_recipe_type(d):
    """Return the first entry of COPYLEFT_AVAILABLE_RECIPE_TYPES that this
    recipe inherits, falling back to 'target'."""
    available = oe.data.typed_value('COPYLEFT_AVAILABLE_RECIPE_TYPES', d)
    return next((t for t in available if oe.utils.inherits(d, t)), 'target')
44
def copyleft_should_include(d):
    """
    Determine if this recipe's sources should be deployed for compliance.

    Returns a (bool, reason) tuple: True with the matching license list,
    or False with an explanation of why the recipe is excluded.
    """
    # removed unused 'import ast' and unused 'fnmatchcase' import
    import oe.license

    recipe_type = d.getVar('COPYLEFT_RECIPE_TYPE', True)
    if recipe_type not in oe.data.typed_value('COPYLEFT_RECIPE_TYPES', d):
        return False, 'recipe type "%s" is excluded' % recipe_type

    include = oe.data.typed_value('COPYLEFT_LICENSE_INCLUDE', d)
    exclude = oe.data.typed_value('COPYLEFT_LICENSE_EXCLUDE', d)

    try:
        is_included, reason = oe.license.is_included(d.getVar('LICENSE', True), include, exclude)
    except oe.license.LicenseError as exc:
        bb.fatal('%s: %s' % (d.getVar('PF', True), exc))
    else:
        if is_included:
            if reason:
                return True, 'recipe has included licenses: %s' % ', '.join(reason)
            else:
                # included but no specific license matched the include globs
                return False, 'recipe does not include a copyleft license'
        else:
            return False, 'recipe has excluded licenses: %s' % ', '.join(reason)
72
def tar_filter(d):
    """
    Return True when this package should be filtered out of the archive.
    Only packages matching COPYLEFT_LICENSE_INCLUDE are kept and those in
    COPYLEFT_LICENSE_EXCLUDE are dropped; when "FILTER" is "no" nothing
    is filtered.
    """
    if d.getVar('FILTER', True) != "yes":
        return False
    included, _ = copyleft_should_include(d)
    return not included
84
def get_bb_inc(d):
    """
    Create a "script-logs" directory tree in ${WORKDIR} containing a copy
    of the recipe's .bb file and every .inc file it (recursively)
    requires.  Returns the script-logs directory path.
    """
    import re
    import shutil

    bbinc = []
    # raw string: matches "require <path>"; group(1) is the required file
    pat = re.compile(r'require\s*([^\s]*\.*)(.*)')
    work_dir = d.getVar('WORKDIR', True)
    bbfile = d.getVar('FILE', True)
    bbdir = os.path.dirname(bbfile)
    target_sys = d.getVar('TARGET_SYS', True)
    pf = d.getVar('PF', True)
    licenses = get_licenses(d)
    script_logs = os.path.join(work_dir, 'script-logs/'+ target_sys + '/' + licenses + '/' + pf + '/script-logs')
    bb_inc = os.path.join(script_logs, 'bb_inc')
    bb.utils.mkdirhier(bb_inc)

    def find_file(dirname, filename):
        # return absolute path of filename under dirname, or None
        # (renamed from dir/file to avoid shadowing builtins)
        for root, dirs, files in os.walk(dirname):
            if filename in files:
                return os.path.join(root, filename)

    def get_inc(fname):
        # collect fname plus, recursively, everything it requires
        # (with-statement fixes the original's leaked file handle)
        with open(fname, 'r') as f:
            for line in f.readlines():
                if 'require' not in line:
                    bbinc.append(fname)
                else:
                    try:
                        incfile = pat.match(line).group(1)
                        incfile = bb.data.expand(os.path.basename(incfile), d)
                        abs_incfile = find_file(bbdir, incfile)
                        if abs_incfile:
                            bbinc.append(abs_incfile)
                            get_inc(abs_incfile)
                    except AttributeError:
                        # line contained 'require' but did not match the pattern
                        pass
    get_inc(bbfile)
    for bbincfile in set(bbinc):
        shutil.copy(bbincfile, bb_inc)

    return script_logs
130
def get_logs(d):
    """
    Create a "script-logs" directory tree in ${WORKDIR} and copy the
    recipe's "temp" log directory into it.  Returns the directory path.
    """
    work_dir = d.getVar('WORKDIR', True)
    target_sys = d.getVar('TARGET_SYS', True)
    pf = d.getVar('PF', True)
    licenses = get_licenses(d)
    script_logs = os.path.join(
        work_dir,
        'script-logs/' + target_sys + '/' + licenses + '/' + pf + '/script-logs')

    try:
        bb.utils.mkdirhier(os.path.join(script_logs, 'temp'))
        oe.path.copytree(os.path.join(work_dir, 'temp'), os.path.join(script_logs, 'temp'))
    except (IOError, AttributeError):
        # best effort: a missing temp dir is not fatal
        pass
    return script_logs
147
def get_series(d):
    """
    Copy patches and the series file into ${WORKDIR}/${PF}-series so they
    can later be archived into a tarball.  Returns the destination dir.
    """
    import shutil

    src_patches = []
    pf = d.getVar('PF', True)
    work_dir = d.getVar('WORKDIR', True)
    s = d.getVar('S', True)
    dest = os.path.join(work_dir, pf + '-series')
    shutil.rmtree(dest, ignore_errors=True)
    bb.utils.mkdirhier(dest)

    src_uri = d.getVar('SRC_URI', True).split()
    fetch = bb.fetch2.Fetch(src_uri, d)
    # renamed from 'locals' (shadowed the builtin)
    for url in fetch.urls:
        src_patches.append(fetch.localpath(url))
    # py2-only cmp() replaced: 'not cmp(a, b)' on strings is 'a == b'.
    # When ${S} == ${WORKDIR} the first SRC_URI entry is not an unpacked
    # source tarball, so keep everything; otherwise skip the source entry.
    if work_dir == s:
        tmp_list = src_patches
    else:
        tmp_list = src_patches[1:]

    for patch in tmp_list:
        try:
            shutil.copy(patch, dest)
        except IOError:
            # a directory entry (e.g. a patch dir) cannot be shutil.copy'd
            if os.path.isdir(patch):
                bb.utils.mkdirhier(os.path.join(dest, patch))
                oe.path.copytree(patch, os.path.join(dest, patch))
    return dest
181
def get_applying_patches(d):
    """
    Copy only the patches that actually apply into a directory that will
    later be archived to a tarball.  Returns that directory.
    """
    import shutil

    dest = os.path.join(d.getVar('WORKDIR', True), d.getVar('PF', True) + '-patches')
    shutil.rmtree(dest, ignore_errors=True)
    bb.utils.mkdirhier(dest)

    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        if local:
            shutil.copy(local, dest)
    return dest
201
def not_tarball(d):
    """
    Return True for recipes that must not be archived: ${S} containing
    'work-shared', or ${WORKDIR} containing 'packagegroup-' or 'native'.
    """
    workdir = d.getVar('WORKDIR', True)
    s = d.getVar('S', True)
    return 'work-shared' in s or 'packagegroup-' in workdir or 'native' in workdir
212
def get_source_from_downloads(d, stage_name):
    """
    Return the path of the first SRC_URI entry's file in ${DL_DIR} when
    it exists and is a regular file, else ''.  Only meaningful before
    patching, so later stages return immediately.
    """
    # BUG FIX: the original tested "stage_name in 'patched' 'configured'",
    # an accidental substring match against 'patchedconfigured' which also
    # matched '' and other fragments.  Explicit membership keeps the same
    # behaviour for the actual callers ('prepatch', 'patched',
    # 'configured' and '' from the linux-yocto path).
    if stage_name in ('patched', 'configured', ''):
        return ''
    pf = d.getVar('PF', True)
    dl_dir = d.getVar('DL_DIR', True)
    try:
        source = os.path.join(dl_dir, os.path.basename(d.getVar('SRC_URI', True).split()[0]))
        if os.path.exists(source) and not os.path.isdir(source):
            return source
    except (IndexError, OSError):
        # empty SRC_URI or unreadable path: nothing to copy
        pass
    return ''
228
def do_tarball(workdir, srcdir, tarname):
    """
    Create gzipped tarball "tarname" (relative to "workdir") of the
    directory "srcdir" under "workdir".

    Returns tarname, or '' when srcdir is empty.
    """
    import tarfile

    sav_dir = os.getcwd()
    os.chdir(workdir)
    try:
        if len(os.listdir(srcdir)) != 0:
            tar = tarfile.open(tarname, "w:gz")
            try:
                tar.add(srcdir)
            finally:
                # close even if tar.add raises, so the archive isn't leaked
                tar.close()
        else:
            tarname = ''
    finally:
        # always restore the original working directory (the original
        # left the process chdir'd into workdir on any exception)
        os.chdir(sav_dir)
    return tarname
245
def archive_sources_from_directory(d, stage_name):
    """
    Archive the source tree to a tarball when no tarball of ${P} exists
    in ${DL_DIR}.  Returns the tarball name, or '' when there is nothing
    to archive.
    """
    s = d.getVar('S', True)
    work_dir = d.getVar('WORKDIR', True)
    pf = d.getVar('PF', True)
    tarname = pf + '-' + stage_name + ".tar.gz"

    if os.path.exists(s) and work_dir in s:
        try:
            source_dir = os.path.join(work_dir, [i for i in s.replace(work_dir, '').split('/') if i][0])
        except IndexError:
            # ${S} has no path component below ${WORKDIR} (S == WORKDIR):
            # nothing to archive.  The original only returned '' when
            # cmp(s, work_dir) == 0 and otherwise fell through to a
            # NameError on source_dir; return '' unconditionally instead.
            return ''
    else:
        return ''
    source = os.path.basename(source_dir)
    return do_tarball(work_dir, source, tarname)
267
def archive_sources(d, stage_name):
    """
    Copy the source tarball from ${DL_DIR} into ${WORKDIR} when one
    exists; otherwise archive the source tree under ${WORKDIR}.
    Returns the tarball name ('' when nothing was archived).
    """
    import shutil

    tarball = get_source_from_downloads(d, stage_name)
    if not tarball:
        return archive_sources_from_directory(d, stage_name)
    shutil.copy(tarball, d.getVar('WORKDIR', True))
    return os.path.basename(tarball)
283
def archive_patches(d, patchdir, series):
    """
    Archive patchdir to a tarball, including the ${S}/patches/series
    file when series is 'all'.  patchdir is removed afterwards.
    Returns the tarball name.
    """
    import shutil

    s = d.getVar('S', True)
    work_dir = d.getVar('WORKDIR', True)
    patch_dir = os.path.basename(patchdir)
    series_file = os.path.join(s, 'patches/series')
    if series == 'all' and os.path.exists(series_file):
        shutil.copy(series_file, patchdir)
    tarname = do_tarball(work_dir, patch_dir, patch_dir + ".tar.gz")
    shutil.rmtree(patchdir, ignore_errors=True)
    return tarname
299
def select_archive_patches(d, option):
    """
    Archive patches according to "option": "all" includes non-applying
    patches and the series file; "applying" includes only applied
    patches.  Returns the patch tarball name, or None when there are no
    patches (or the option is unrecognised).
    """
    if option == "all":
        patchdir = get_series(d)
    elif option == "applying":
        patchdir = get_applying_patches(d)
    else:
        # BUG FIX: an unknown option previously left patchdir unbound and
        # raised NameError below; treat it as "nothing to archive"
        return
    try:
        # rmdir succeeds only for an empty dir, i.e. no patches collected
        os.rmdir(patchdir)
    except OSError:
        return archive_patches(d, patchdir, option)
    return
315
def archive_logs(d, logdir, bbinc=False):
    """
    Archive the logs in "temp" under logdir to a tarball; when bbinc is
    True (logdir also holds .bb/.inc copies) remove logdir afterwards.
    Returns the tarball name.
    """
    import shutil

    log_dir = os.path.basename(logdir)
    tarname = d.getVar('PF', True) + '-' + log_dir + ".tar.gz"
    tarname = do_tarball(os.path.join(logdir, '..'), log_dir, tarname)
    if bbinc:
        shutil.rmtree(logdir, ignore_errors=True)
    return tarname
331
def get_licenses(d):
    """Return the concatenated (SPDX-mapped) license names of the recipe."""
    import oe.license

    licenses_type = d.getVar('LICENSE', True) or ""
    # is_included() returns (bool, license-list); keep only the list
    lics = oe.license.is_included(licenses_type)[1:][0]
    lice = ''
    for lic in lics:
        # map through the SPDXLICENSEMAP varflags when a mapping exists
        licens = d.getVarFlag('SPDXLICENSEMAP', lic)
        if licens is not None:  # idiom fix: was '!= None'
            lice += licens
        else:
            lice += lic
    return lice
346
347
def move_tarball_deploy(d, tarball_list):
    """
    Move the tarballs named in tarball_list from ${WORKDIR} to
    ${DEPLOY_DIR}/sources/<target_sys>/<licenses>/<pf>, replacing any
    existing file of the same name.
    """
    import shutil

    # BUG FIX: 'tarball_list is []' compared identity against a freshly
    # created list and was therefore always False; test emptiness instead
    if not tarball_list:
        return
    target_sys = d.getVar('TARGET_SYS', True)
    pf = d.getVar('PF', True)
    licenses = get_licenses(d)
    work_dir = d.getVar('WORKDIR', True)
    tar_sources = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf
    if not os.path.exists(tar_sources):
        bb.utils.mkdirhier(tar_sources)
    for source in tarball_list:
        if source:
            if os.path.exists(os.path.join(tar_sources, source)):
                os.remove(os.path.join(tar_sources, source))
            shutil.move(os.path.join(work_dir, source), tar_sources)
366
def check_archiving_type(d):
    """Abort unless SOURCE_ARCHIVE_PACKAGE_TYPE is one of ARCHIVE_TYPE."""
    package_type = d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True)
    valid_types = d.getVar('ARCHIVE_TYPE', True).split()
    if package_type not in valid_types:
        bb.fatal("\"SOURCE_ARCHIVE_PACKAGE_TYPE\" is \'tar\' or \'srpm\', no other types")
371
def store_package(d, package_name):
    """
    Append a tarball name to the "tar-package" list file in ${WORKDIR}.
    """
    # context manager ensures the file is closed even if write() fails
    # (also fixes the docstring typo "tarbablls")
    with open(os.path.join(d.getVar('WORKDIR', True), 'tar-package'), 'a') as f:
        f.write(package_name + ' ')
379
def get_package(d):
    """
    Return the list of tarball names recorded in "tar-package", or an
    empty list when the file does not exist.
    """
    tarlist = os.path.join(d.getVar('WORKDIR', True), 'tar-package')
    if not os.path.exists(tarlist):
        return []
    with open(tarlist, 'r') as f:
        return f.readline().rstrip('\n').split()
392
393
def archive_sources_patches(d, stage_name):
    """
    Archive sources and patches to tarballs.  stage_name is embedded in
    the tarball name, e.g. zlib-1.4.6-prepatch(stage_name).tar.gz.
    """
    import shutil

    check_archiving_type(d)

    source_tar_name = archive_sources(d, stage_name)
    patch_tar_name = ''
    if stage_name == "prepatch":
        with_series = d.getVar('PATCHES_ARCHIVE_WITH_SERIES', True)
        if with_series == 'yes':
            patch_tar_name = select_archive_patches(d, "all")
        elif with_series == 'no':
            patch_tar_name = select_archive_patches(d, "applying")
        else:
            bb.fatal("Please define 'PATCHES_ARCHIVE_WITH_SERIES' to 'yes' or 'no' ")

    if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) != 'srpm':
        move_tarball_deploy(d, [source_tar_name, patch_tar_name])
    else:
        # srpm packaging: record the tarball names instead of deploying
        tarlist = os.path.join(d.getVar('WORKDIR', True), 'tar-package')
        if os.path.exists(tarlist):
            os.remove(tarlist)
        for package in (os.path.basename(source_tar_name), patch_tar_name):
            if package:
                store_package(d, str(package) + ' ')
424
def archive_scripts_logs(d):
    """
    Archive scripts (.bb/.inc files) and/or logs (the "temp" directory)
    according to SOURCE_ARCHIVE_LOG_WITH_SCRIPTS; for srpm packaging the
    resulting tarball name is recorded via store_package().
    """
    # removed dead locals work_dir/temp_dir: they were computed but never used
    mode = d.getVar('SOURCE_ARCHIVE_LOG_WITH_SCRIPTS', True)
    if mode == 'logs_with_scripts':
        get_logs(d)
        # get_bb_inc() returns the same script-logs dir, now also holding
        # the copied .bb/.inc files
        logdir = get_bb_inc(d)
    elif mode == 'logs':
        logdir = get_logs(d)
    else:
        return

    tarlog = archive_logs(d, logdir, True)

    if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) == 'srpm':
        store_package(d, tarlog)
447
def dumpdata(d):
    """
    Dump the task environment (all variables and functions) to
    "${P}-${PR}.showdata.dump" under diffgz-envdata in ${WORKDIR}.
    """
    # consistency fix: use the d.getVar API throughout instead of mixing
    # legacy bb.data.getVar(name, d, 1); also dropped unused distro/s locals
    workdir = d.getVar('WORKDIR', True)
    pf = d.getVar('PF', True)
    target_sys = d.getVar('TARGET_SYS', True)
    licenses = get_licenses(d)
    dumpdir = os.path.join(workdir, 'diffgz-envdata/' + target_sys + '/' + licenses + '/' + pf)
    if not os.path.exists(dumpdir):
        bb.utils.mkdirhier(dumpdir)

    dumpfile = os.path.join(dumpdir, bb.data.expand("${P}-${PR}.showdata.dump", d))

    bb.note("Dumping metadata into '%s'" % dumpfile)
    with open(dumpfile, "w") as f:
        # emit variables and shell functions
        bb.data.emit_env(f, d, True)
        # emit the metadata which isn't valid shell (python functions)
        for e in d.keys():
            if d.getVarFlag(e, 'python'):
                f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, True)))
475
def create_diff_gz(d):
    """
    Create ${P}-${PR}.diff.gz under diffgz-envdata in ${WORKDIR},
    recording all content in ${S} (including applied patches) as a diff
    against the pristine ${S}.org tree.
    """
    import shutil
    import subprocess

    work_dir = d.getVar('WORKDIR', True)
    exclude_from = d.getVar('ARCHIVE_EXCLUDE_FROM', True).split()
    pf = d.getVar('PF', True)  # NOTE(review): pf is never used below
    licenses = get_licenses(d)
    target_sys = d.getVar('TARGET_SYS', True)
    diff_dir = os.path.join(work_dir, 'diffgz-envdata/'+ target_sys + '/' + licenses + '/' + pf )
    diff_file = os.path.join(diff_dir, bb.data.expand("${P}-${PR}.diff.gz",d))

    # write one excluded name per line for diff's --exclude-from option
    f = open(os.path.join(work_dir,'temp/exclude-from-file'), 'a')
    for i in exclude_from:
        f.write(i)
        f.write("\n")
    f.close()

    s=d.getVar('S', True)
    distro = d.getVar('DISTRO',True) or ""
    dest = s + '/' + distro + '/files'
    if not os.path.exists(dest):
        bb.utils.mkdirhier(dest)
    # copy loose files from the current working directory into ${S};
    # fall back to fakeroot cp when plain copy fails (e.g. permissions)
    for i in os.listdir(os.getcwd()):
        if os.path.isfile(i):
            try:
                shutil.copy(i, dest)
            except IOError:
                subprocess.call('fakeroot cp -rf ' + i + " " + dest, shell=True)

    bb.note("Creating .diff.gz in ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.gz")
    # run the diff through a BitBake shell function so it executes in the
    # task's environment; assumes an ${S}.org pristine copy exists --
    # TODO confirm who creates ${S}.org (not visible in this file)
    cmd = "LC_ALL=C TZ=UTC0 diff --exclude-from=" + work_dir + "/temp/exclude-from-file -Naur " + s + '.org' + ' ' + s + " | gzip -c > " + diff_file
    d.setVar('DIFF', cmd + "\n")
    d.setVarFlag('DIFF', 'func', '1')
    bb.build.exec_func('DIFF', d)
    shutil.rmtree(s + '.org', ignore_errors=True)
516
# Task: archive the un-patched sources and all patches after do_unpack,
# producing the ${PF}-prepatch tarball.
python do_archive_original_sources_patches(){
    archive_sources_patches(d, 'prepatch')
}
522
# Task: archive the patched sources after do_patch, producing the
# ${PF}-patched tarball.
python do_archive_patched_sources(){
    archive_sources_patches(d, 'patched')
}
528
# Task: archive the configured sources after do_configure, producing the
# ${PF}-configured tarball.
python do_archive_configured_sources(){
    archive_sources_patches(d, 'configured')
}
534
# Task: archive the logs, or both logs and scripts (.bb and .inc files),
# depending on SOURCE_ARCHIVE_LOG_WITH_SCRIPTS.
python do_archive_scripts_logs(){
    archive_scripts_logs(d)
}
540
# Task: dump all variables and functions of the running task to the
# showdata.dump file, and create a .diff.gz covering everything the
# package ships relative to the pristine source.
python do_dumpdata_create_diff_gz(){
    dumpdata(d)
    create_diff_gz(d)
}
548
# Special handling for "linux-yocto": that recipe creates ${S} before
# do_patch (during do_kernel_checkout) instead of after do_unpack, so it
# is archived from a do_kernel_checkout postfunc.
python do_archive_linux_yocto(){
    s = d.getVar('S', True)
    if 'linux-yocto' in s:
        source_tar_name = archive_sources(d, '')
    # NOTE(review): source_tar_name is unbound when ${S} does not contain
    # 'linux-yocto'; this postfunc appears to rely on only firing for
    # linux-yocto kernels -- confirm before reusing elsewhere
    if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) != 'srpm':
        move_tarball_deploy(d, [source_tar_name, ''])
}
do_kernel_checkout[postfuncs] += "do_archive_linux_yocto "
560
# Task: remove the "tar-package" list file (recorded tarball names for
# sources, patches and logs) once the srpm has been written.
python do_delete_tarlist(){
    work_dir = d.getVar('WORKDIR', True)
    tarlist = os.path.join(work_dir, 'tar-package')
    if os.path.exists(tarlist):
        os.remove(tarlist)
}
do_delete_tarlist[deptask] = "do_archive_scripts_logs"
do_package_write_rpm[postfuncs] += "do_delete_tarlist "