| author | Tudor Florea <tudor.florea@enea.com> | 2014-10-16 03:05:19 +0200 |
| --- | --- | --- |
| committer | Tudor Florea <tudor.florea@enea.com> | 2014-10-16 03:05:19 +0200 |
| commit | c527fd1f14c27855a37f2e8ac5346ce8d940ced2 (patch) | |
| tree | bb002c1fdf011c41dbd2f0927bed23ecb5f83c97 /meta/classes/archiver.bbclass | |
| download | poky-c527fd1f14c27855a37f2e8ac5346ce8d940ced2.tar.gz | |
initial commit for Enea Linux 4.0-140929daisy-140929
Migrated from the internal git server on the daisy-enea-point-release branch
Signed-off-by: Tudor Florea <tudor.florea@enea.com>
Diffstat (limited to 'meta/classes/archiver.bbclass')
-rw-r--r-- | meta/classes/archiver.bbclass | 368
1 file changed, 368 insertions, 0 deletions
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
new file mode 100644
index 0000000000..8d8e7c42a8
--- /dev/null
+++ b/meta/classes/archiver.bbclass
@@ -0,0 +1,368 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This bbclass is used for creating archives of:
# 1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
# 2) patched source: ARCHIVER_MODE[src] = "patched" (default)
# 3) configured source: ARCHIVER_MODE[src] = "configured"
# 4) The patches between do_unpack and do_patch:
#    ARCHIVER_MODE[diff] = "1"
#    And you can set the paths that you'd like to exclude from the diff:
#    ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
# 5) The environment data, similar to 'bitbake -e recipe':
#    ARCHIVER_MODE[dumpdata] = "1"
# 6) The recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
# 7) Whether to output the .src.rpm package:
#    ARCHIVER_MODE[srpm] = "1"
# 8) Filtering by license: recipes whose license is in
#    COPYLEFT_LICENSE_INCLUDE will be included, and those in
#    COPYLEFT_LICENSE_EXCLUDE will be excluded, e.g.:
#    COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
#    COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
# 9) The recipe types that will be archived:
#    COPYLEFT_RECIPE_TYPES = 'target'
#
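# For example (illustrative only; nothing here is set by this class itself),
# the archiver is typically enabled from a configuration file such as
# local.conf along these lines:
#   INHERIT += "archiver"
#   ARCHIVER_MODE[src] = "original"
#   ARCHIVER_MODE[diff] = "1"
#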
25 | |||
26 | # Don't filter the license by default | ||
27 | COPYLEFT_LICENSE_INCLUDE ?= '' | ||
28 | COPYLEFT_LICENSE_EXCLUDE ?= '' | ||
29 | # Create archive for all the recipe types | ||
30 | COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian' | ||
31 | inherit copyleft_filter | ||
32 | |||
33 | ARCHIVER_MODE[srpm] ?= "0" | ||
34 | ARCHIVER_MODE[src] ?= "patched" | ||
35 | ARCHIVER_MODE[diff] ?= "0" | ||
36 | ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches" | ||
37 | ARCHIVER_MODE[dumpdata] ?= "0" | ||
38 | ARCHIVER_MODE[recipe] ?= "0" | ||
39 | |||
40 | DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources" | ||
41 | ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources" | ||
42 | ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/" | ||
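# Illustrative example (hypothetical recipe "foo" at version 1.0-r0 built for
# a typical x86-64 poky machine): ARCHIVER_OUTDIR would expand to roughly
# ${WORKDIR}/deploy-sources/x86_64-poky-linux/foo-1.0-r0/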
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"

do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"

# This is a convenience for the shell script to use


python () {
    pn = d.getVar('PN', True)

    if d.getVar('COPYLEFT_LICENSE_INCLUDE', True) or \
            d.getVar('COPYLEFT_LICENSE_EXCLUDE', True):
        included, reason = copyleft_should_include(d)
        if not included:
            bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
            return
        else:
            bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))

    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src', True)
    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata', True)
    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe', True)

    if ar_src == "original":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
    elif ar_src == "patched":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
    elif ar_src == "configured":
        # We can't use "addtask do_ar_configured after do_configure" since it
        # will cause the deptask of do_populate_sysroot to run no matter what
        # archives we need, so we add the depends here.
        d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)
    elif ar_src:
        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)

    if ar_dumpdata == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)

    if ar_recipe == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)

    # Output the srpm package
    ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm', True)
    if ar_srpm == "1":
        if d.getVar('PACKAGES', True) != '' and d.getVar('IMAGE_PKGTYPE', True) == 'rpm':
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
            if ar_dumpdata == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
            if ar_recipe == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
            if ar_src == "original":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
            elif ar_src == "patched":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
            elif ar_src == "configured":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)

    # The gcc stuff uses shared source
    flag = d.getVarFlag("do_unpack", "stamp-base", True)
    if flag:
        if ar_src in [ 'original', 'patched' ]:
            ar_outdir = os.path.join(d.getVar('ARCHIVER_TOPDIR', True), 'work-shared')
            d.setVar('ARCHIVER_OUTDIR', ar_outdir)
        d.setVarFlag('do_ar_original', 'stamp-base', flag)
        d.setVarFlag('do_ar_patched', 'stamp-base', flag)
        d.setVarFlag('do_unpack_and_patch', 'stamp-base', flag)
        d.setVarFlag('do_ar_original', 'vardepsexclude', 'PN PF ARCHIVER_OUTDIR WORKDIR')
        d.setVarFlag('do_unpack_and_patch', 'vardepsexclude', 'PN PF ARCHIVER_OUTDIR WORKDIR')
        d.setVarFlag('do_ar_patched', 'vardepsexclude', 'PN PF ARCHIVER_OUTDIR WORKDIR')
        d.setVarFlag('create_diff_gz', 'vardepsexclude', 'PF')
        d.setVarFlag('create_tarball', 'vardepsexclude', 'PF')

        flag_clean = d.getVarFlag('do_unpack', 'stamp-base-clean', True)
        if flag_clean:
            d.setVarFlag('do_ar_original', 'stamp-base-clean', flag_clean)
            d.setVarFlag('do_ar_patched', 'stamp-base-clean', flag_clean)
            d.setVarFlag('do_unpack_and_patch', 'stamp-base-clean', flag_clean)
}

# Takes all the sources for a recipe and puts them in WORKDIR/archiver-work/.
# Files in SRC_URI are copied directly; anything that's a directory
# (e.g. git repositories) is "unpacked" and then put into a tarball.
python do_ar_original() {

    import shutil, tarfile, tempfile

    if d.getVarFlag('ARCHIVER_MODE', 'src', True) != "original":
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
    bb.note('Archiving the original source...')
    fetch = bb.fetch2.Fetch([], d)
    for url in fetch.urls:
        local = fetch.localpath(url)
        if os.path.isfile(local):
            shutil.copy(local, ar_outdir)
        elif os.path.isdir(local):
            basename = os.path.basename(local)

            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR', True))
            fetch.unpack(tmpdir, (url,))

            os.chdir(tmpdir)
            tarname = os.path.join(ar_outdir, basename + '.tar.gz')
            tar = tarfile.open(tarname, 'w:gz')
            tar.add('.')
            tar.close()

    # Emit patch series files for 'original'
    bb.note('Writing patch series files...')
    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        patchdir = parm.get('patchdir')
        if patchdir:
            series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
        else:
            series = os.path.join(ar_outdir, 'series')

        with open(series, 'a') as s:
            s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
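            # e.g. a resulting series line might read "0001-fix-build.patch -p1"
            # (hypothetical patch name taken from SRC_URI)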
}

python do_ar_patched() {

    if d.getVarFlag('ARCHIVER_MODE', 'src', True) != 'patched':
        return

    # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
    bb.note('Archiving the patched source...')
    d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
    # The gcc stuff uses shared source
    flag = d.getVarFlag('do_unpack', 'stamp-base', True)
    if flag:
        create_tarball(d, d.getVar('S', True), 'patched', ar_outdir, 'gcc')
    else:
        create_tarball(d, d.getVar('S', True), 'patched', ar_outdir)
}

python do_ar_configured() {
    import shutil

    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
    if d.getVarFlag('ARCHIVER_MODE', 'src', True) == 'configured':
        bb.note('Archiving the configured source...')
        # libtool-native's do_configure will remove
        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run
        # do_configure; we archive the already configured ${S}
        # instead.
        if d.getVar('PN', True) != 'libtool-native':
            # Change the WORKDIR to make do_configure run in another dir.
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
            if bb.data.inherits_class('kernel-yocto', d):
                bb.build.exec_func('do_kernel_configme', d)
            if bb.data.inherits_class('cmake', d):
                bb.build.exec_func('do_generate_toolchain_file', d)
            prefuncs = d.getVarFlag('do_configure', 'prefuncs', True)
            for func in (prefuncs or '').split():
                if func != "sysroot_cleansstate":
                    bb.build.exec_func(func, d)
            bb.build.exec_func('do_configure', d)
            postfuncs = d.getVarFlag('do_configure', 'postfuncs', True)
            for func in (postfuncs or '').split():
                if func != "do_qa_configure":
                    bb.build.exec_func(func, d)
        srcdir = d.getVar('S', True)
        builddir = d.getVar('B', True)
        if srcdir != builddir:
            if os.path.exists(builddir):
                oe.path.copytree(builddir, os.path.join(srcdir, \
                    'build.%s.ar_configured' % d.getVar('PF', True)))
        create_tarball(d, srcdir, 'configured', ar_outdir)
}

def create_tarball(d, srcdir, suffix, ar_outdir, pf=None):
    """
    create the tarball from srcdir
    """
    import tarfile

    bb.utils.mkdirhier(ar_outdir)
    if pf:
        tarname = os.path.join(ar_outdir, '%s-%s.tar.gz' % (pf, suffix))
    else:
        tarname = os.path.join(ar_outdir, '%s-%s.tar.gz' % \
            (d.getVar('PF', True), suffix))
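    # For instance (hypothetical values): with PF "foo-1.0-r0" and suffix
    # "patched", the tarball is written as <ar_outdir>/foo-1.0-r0-patched.tar.gz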
232 | |||
233 | srcdir = srcdir.rstrip('/') | ||
234 | dirname = os.path.dirname(srcdir) | ||
235 | basename = os.path.basename(srcdir) | ||
236 | os.chdir(dirname) | ||
237 | bb.note('Creating %s' % tarname) | ||
238 | tar = tarfile.open(tarname, 'w:gz') | ||
239 | tar.add(basename) | ||
240 | tar.close() | ||
241 | |||
242 | # creating .diff.gz between source.orig and source | ||
243 | def create_diff_gz(d, src_orig, src, ar_outdir): | ||
244 | |||
245 | import subprocess | ||
246 | |||
247 | if not os.path.isdir(src) or not os.path.isdir(src_orig): | ||
248 | return | ||
249 | |||
250 | # The diff --exclude can't exclude the file with path, so we copy | ||
251 | # the patched source, and remove the files that we'd like to | ||
252 | # exclude. | ||
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude', True).split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    os.chdir(dirname)
    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF', True))
    diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
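    # e.g. (hypothetical names) this runs roughly:
    #   diff -Naur foo-1.0.orig foo-1.0.patched | gzip -c > .../foo-1.0-r0-diff.gz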
    subprocess.call(diff_cmd, shell=True)
    bb.utils.remove(src_patched, recurse=True)

# Run do_unpack and do_patch
python do_unpack_and_patch() {
    if d.getVarFlag('ARCHIVER_MODE', 'src', True) not in \
            [ 'patched', 'configured'] and \
            d.getVarFlag('ARCHIVER_MODE', 'diff', True) != '1':
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)

    # Change the WORKDIR to make do_unpack and do_patch run in another dir.
    d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))

    # The kernel source is ready after do_validate_branches
    if bb.data.inherits_class('kernel-yocto', d):
        bb.build.exec_func('do_unpack', d)
        bb.build.exec_func('do_kernel_checkout', d)
        bb.build.exec_func('do_validate_branches', d)
    else:
        bb.build.exec_func('do_unpack', d)

    # Save the original source for creating the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
        src = d.getVar('S', True).rstrip('/')
        src_orig = '%s.orig' % src
        oe.path.copytree(src, src_orig)
    bb.build.exec_func('do_patch', d)
    # Create the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
        bb.note('Creating diff gz...')
        create_diff_gz(d, src_orig, src, ar_outdir)
        bb.utils.remove(src_orig, recurse=True)
}

python do_ar_recipe () {
    """
    archive the recipe, including .bb and .inc.
    """
    import re
    import shutil

    require_re = re.compile( r"require\s+(.+)" )
    include_re = re.compile( r"include\s+(.+)" )
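    # These patterns match recipe lines such as "require foo.inc" or
    # "include bar.inc" (illustrative file names); the captured path is
    # resolved against BBPATH below so the included file can be copied too.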
    bbfile = d.getVar('FILE', True)
    outdir = os.path.join(d.getVar('WORKDIR', True), \
            '%s-recipe' % d.getVar('PF', True))
    bb.utils.mkdirhier(outdir)
    shutil.copy(bbfile, outdir)

    dirname = os.path.dirname(bbfile)
    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH', True))
    f = open(bbfile, 'r')
    for line in f.readlines():
        incfile = None
        if require_re.match(line):
            incfile = require_re.match(line).group(1)
        elif include_re.match(line):
            incfile = include_re.match(line).group(1)
        if incfile:
            incfile = bb.data.expand(incfile, d)
            incfile = bb.utils.which(bbpath, incfile)
            if incfile:
                shutil.copy(incfile, outdir)

    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR', True))
    bb.utils.remove(outdir, recurse=True)
}

python do_dumpdata () {
    """
    dump environment data to ${PF}-showdata.dump
    """

    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR', True), \
        '%s-showdata.dump' % d.getVar('PF', True))
    bb.note('Dumping metadata into %s' % dumpfile)
    f = open(dumpfile, 'w')
    # emit variables and shell functions
    bb.data.emit_env(f, d, True)
    # emit the metadata which isn't valid shell
    for e in d.keys():
        if bb.data.getVarFlag(e, 'python', d):
            f.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, True)))
    f.close()
}

SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
    echo "Deploying source archive files ..."
}
python do_deploy_archives_setscene () {
    sstate_setscene(d)
}
do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
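# Note: do_deploy_archives itself only prints a message; the sstate
# input/output directory mapping above is what actually publishes everything
# collected under ARCHIVER_TOPDIR into DEPLOY_DIR_SRC.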
361 | |||
362 | addtask do_ar_original after do_unpack | ||
363 | addtask do_unpack_and_patch after do_patch | ||
364 | addtask do_ar_patched after do_unpack_and_patch | ||
365 | addtask do_ar_configured after do_unpack_and_patch | ||
366 | addtask do_dumpdata | ||
367 | addtask do_ar_recipe | ||
368 | addtask do_deploy_archives before do_build | ||