diff options
author | Xiaofeng Yan <xiaofeng.yan@windriver.com> | 2012-03-26 18:49:26 +0800 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2012-03-26 23:13:20 +0100 |
commit | 6dd4ddf7eacc0b71f046a3086d949830918bc4c3 (patch) | |
tree | 72a46a254be5b67fc0157e266ac0137976c8fe13 /meta/classes/archiver.bbclass | |
parent | 6a99e0f8fbb5c7a411da2e4b724c43f28e6fa0c1 (diff) | |
download | poky-6dd4ddf7eacc0b71f046a3086d949830918bc4c3.tar.gz |
archiver.bbclass:
1 Archive sources in ${S} at the different stages
(do_unpack, do_patch, do_configure).
2 Archive patches including series
3 Archive logs including scripts (.bb and .inc files)
4 Dump the build environment (all variables and functions)
used when running a task to xxx.showdata.dump
5 Dump all content in ${S}, including patches, to the file xxx.diff.gz
All archiving packages will be deployed to ${DEPLOY_DIR}/sources/
[YOCTO #1977]
(From OE-Core rev: 2fdc271887db8c0ef0641472d00e850e8b3caa19)
Signed-off-by: Xiaofeng Yan <xiaofeng.yan@windriver.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta/classes/archiver.bbclass')
-rw-r--r-- | meta/classes/archiver.bbclass | 460 |
1 files changed, 460 insertions, 0 deletions
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass new file mode 100644 index 0000000000..471430e24f --- /dev/null +++ b/meta/classes/archiver.bbclass | |||
@@ -0,0 +1,460 @@ | |||
# This file is used for archiving sources, patches, and logs to tarballs.
# It also dumps the build environment to xxx.showdata.dump and creates xxx.diff.gz
# to record all content in ${S} to a diff file.
4 | |||
# Directories/files excluded from the ${S} diff created by create_diff_gz
EXCLUDE_FROM ?= ".pc autom4te.cache"
# Valid values for SOURCE_ARCHIVE_PACKAGE_TYPE (checked by verify_var)
ARCHIVE_TYPE ?= "TAR SRPM"
DISTRO ?= "poky"
# 'TRUE': archive every patch plus quilt's series file; 'FALSE': only applied patches
PATCHES_ARCHIVE_WITH_SERIES = 'TRUE'
9 | |||
def parse_var(d,var):
    '''Expand a datastore reference like ${PV} inside *var* to its real
    value, e.g. "xxx_${PV}.inc" -> "xxx_1.2.inc".  If *var* is a path,
    only its .inc component is returned.  Plain strings pass through.

    Fixes over the original: raw regex strings (no invalid-escape
    warnings), re.escape() so the substitution pattern cannot be
    misread as regex syntax, and no crash when *var* contains a bare
    "$" without "${...}" or when the referenced variable is unset.'''
    import re
    # group(1) = "{PV}", group(2) = "PV" for an input containing "${PV}"
    pat = re.compile(r'.*\$({(.*)}).*')
    if '$' not in var and '/' not in var:
        return var
    if '/' in var:
        # a path: keep only the .inc file component
        return [i for i in var.split('/') if i.endswith('.inc')][0]
    m = pat.match(var)
    if m is None:
        # "$" present but no "${...}" reference: nothing to expand
        return var
    patstr = re.escape('$' + m.group(1))
    value = d.getVar(m.group(2), True) or ''
    return re.sub(patstr, value, var)
26 | |||
def get_bb_inc(d):
    '''Collect the running recipe (.bb) and every .inc it requires into a
    "script-logs" directory under ${WORKDIR}, together with the task logs
    from ${WORKDIR}/temp.  Returns the path of the created directory.
    Side effect: leaves the cwd at ${WORKDIR}.'''
    import re
    import os
    import shutil

    bbinc = []
    # captures the file path following a "require" directive
    pat=re.compile('require\s*([^\s]*\.*)(.*)')
    file_dir = d.getVar('FILE', True)   # full path of the running .bb file
    bbdir = os.path.dirname(file_dir)
    work_dir = d.getVar('WORKDIR', True)
    os.chdir(work_dir)
    bb.mkdirhier("script-logs")
    os.chdir(bbdir)
    bbfile = os.path.basename(file_dir)
    bbinc.append(bbfile)

    def get_inc (file):
        # recursively record every file pulled in via "require"
        f = open(file,'r')
        for line in f.readlines():
            if 'require' not in line:
                bbinc.append(file)
            else:
                try:
                    incfile = pat.match(line).group(1)
                    incfile = parse_var(d,incfile)   # expand e.g. ${PV}
                    bbinc.append(incfile)
                    get_inc(incfile)
                except (IOError,AttributeError):
                    # unreadable include or no regex match: skip it
                    pass
    get_inc(bbfile)
    os.chdir(work_dir)
    # copy every recorded file found anywhere below the recipe directory
    for root, dirs, files in os.walk(bbdir):
        for file in bbinc:
            if file in files:
                shutil.copy(root + '/' + file,'script-logs')
    oe.path.copytree('temp', 'script-logs')   # include the task logs
    return work_dir + '/script-logs'
65 | |||
def get_all_patches(d):
    '''Stage every fetched local file (patches and other SRC_URI payloads)
    into ${WORKDIR}/${PF}-patches, recreating that directory from scratch.
    Returns the staging directory path.'''
    import shutil

    src_patches=[]
    pf = d.getVar('PF', True)
    work_dir = d.getVar('WORKDIR', True)
    s = d.getVar('S',True)
    dest = os.path.join(work_dir, pf + '-patches')
    shutil.rmtree(dest, ignore_errors=True)
    bb.mkdirhier(dest)

    src_uri = d.getVar('SRC_URI', 1).split()
    fetch = bb.fetch2.Fetch(src_uri, d)
    locals = (fetch.localpath(url) for url in fetch.urls)
    for local in locals:
        src_patches.append(local)
    # cmp() is falsy when equal (Python 2 builtin)
    if not cmp(work_dir,s):
        # ${S} == ${WORKDIR}: every fetched file counts as a patch
        tmp_list = src_patches
    else:
        # otherwise the first SRC_URI entry is the source tarball: skip it
        tmp_list = src_patches[1:]

    for patch in tmp_list:
        try:
            shutil.copy(patch,dest)
        except IOError:
            # a directory (e.g. an unpacked fetcher checkout): copy the tree
            if os.path.isdir(patch):
                oe.path.copytree(patch,dest)
    return dest
95 | |||
def get_applying_patches(d):
    """Stage only the patches that actually get applied (as reported by
    src_patches() from patch.bbclass) into ${WORKDIR}/${PF}-patches,
    recreating that directory from scratch.  Returns the staging
    directory path."""
    import os
    import shutil


    pf = d.getVar('PF', True)
    work_dir = d.getVar('WORKDIR', True)
    dest = os.path.join(work_dir, pf + '-patches')
    shutil.rmtree(dest, ignore_errors=True)
    bb.mkdirhier(dest)


    patches = src_patches(d)   # provided by patch.bbclass
    for patch in patches:
        # decode the patch URL; only locally-available files are copied
        _, _, local, _, _, parm = bb.decodeurl(patch)
        if local:
            shutil.copy(local,dest)
    return dest
115 | |||
def not_tarball(d):
    '''Return True for recipes that must not be archived: anything built
    from a shared source tree ('work-shared' in ${S}) or whose ${WORKDIR}
    marks it as a native or packagegroup-style ('task-') recipe.

    Fixes over the original: dropped the unused "import os" and the
    verbose if/else around an already-boolean expression.'''
    workdir = d.getVar('WORKDIR',True)
    s = d.getVar('S',True)
    return 'work-shared' in s or 'task-' in workdir or 'native' in workdir
126 | |||
def get_source_from_downloads(d,middle_name):
    '''Return the file name of this recipe's source tarball if one exists
    in ${DL_DIR}, otherwise None.  The "patched"/"configured" stages never
    come from a download, so nothing is looked up for them.
    Side effect: changes the cwd to ${DL_DIR} when a lookup is made.

    Fixes over the original: the stage test was
    "middle_name in 'patched' 'configured'" — a substring match against
    the concatenated string 'patchedconfigured', which also matched ''
    and accidental substrings; use tuple membership.  The unused
    ${PF} lookup was dropped.'''
    if middle_name in ('patched', 'configured'):
        return
    dl_dir = d.getVar('DL_DIR',True)
    try:
        source = os.path.basename(d.getVar('SRC_URI', 1).split()[0])
        os.chdir(dl_dir)
        if os.path.exists(source) and not os.path.isdir(source):
            return source
    except (IndexError, OSError):
        # empty SRC_URI or unreachable DL_DIR: no downloadable source
        pass
140 | |||
def archive_sources_from_directory(d,middle_name):
    '''Archive the source tree ${S} into ${PF}-<middle_name>.tar.gz when no
    ready-made tarball exists in ${DL_DIR}.  Returns the tarball name, or
    None when the source directory was empty.
    Side effect: changes the cwd to the directory the tarball is built in.

    Fixes over the original: string comparisons used the identity
    operators ("s is not workdir", "file is not 'temp'"), which compare
    object identity rather than content, and the Python-2-only cmp()
    builtin; all replaced with equality comparisons.'''
    import tarfile
    import shutil

    s = d.getVar('S',True)
    workdir=d.getVar('WORKDIR', True)
    PF = d.getVar('PF',True)
    tarname = PF + '-' + middle_name + ".tar.gz"

    if os.path.exists(s) and s != workdir:
        sourcedir = os.path.basename(s)
        tarbase = os.path.dirname(s)
        if not sourcedir or os.path.dirname(tarbase) == workdir:
            # ${S} ends in a slash or nests one level deeper: step up so
            # the tarball gets a sensible top-level directory
            sourcedir = os.path.basename(os.path.dirname(s))
            tarbase = os.path.dirname(os.path.dirname(s))
        os.chdir(tarbase)
    else:
        # ${S} == ${WORKDIR}: stage loose files into a subdirectory first,
        # skipping the logs dir and the staging dir itself
        sourcedir = os.path.basename(s)
        if not os.path.exists(sourcedir):
            os.mkdir(sourcedir)
        try:
            for file in os.listdir(s):
                if file != 'temp' and file != sourcedir:
                    shutil.copy(file,sourcedir)
        except (IOError,OSError):
            pass

    if (len(os.listdir(sourcedir))) != 0:
        tar = tarfile.open( tarname, "w:gz")
        tar.add(sourcedir)
        tar.close()
        # when the tarball was written outside ${WORKDIR}, move it there
        if workdir != os.path.dirname(s) and not os.path.exists(workdir + '/' + tarname):
            shutil.move(os.path.dirname(s) + '/' + tarname,workdir)
    else:
        return
    return tarname
178 | |||
def archive_sources(d,middle_name):
    '''Fetch the recipe's source tarball out of ${DL_DIR} into ${WORKDIR},
    or, when no such tarball exists, build one from the source tree.
    Returns the tarball file name (or None when nothing was archived).'''
    import shutil

    workdir = d.getVar('WORKDIR',True)
    tarball = get_source_from_downloads(d,middle_name)
    if not tarball:
        # no pre-fetched tarball: tar up the source tree instead
        return archive_sources_from_directory(d,middle_name)
    shutil.copy(tarball,workdir)
    return tarball
189 | |||
190 | |||
def archive_patches(d,patchdir,series):
    '''Tar up the staged patch directory (created under ${WORKDIR}) into
    <dirname>.tar.gz, adding quilt's series file when *series* is "all",
    then delete the staging directory.  Returns the tarball name.
    Side effect: changes the cwd to ${WORKDIR}.'''
    import tarfile
    import shutil

    src_tree = d.getVar('S',True)
    os.chdir(d.getVar('WORKDIR', True))
    staging = os.path.basename(patchdir)
    tarname = staging + ".tar.gz"
    series_file = src_tree + '/patches/series'
    if series == 'all' and os.path.exists(series_file):
        # record the order in which the patches apply
        shutil.copy(series_file,staging)
    archive = tarfile.open(tarname, "w:gz")
    archive.add(staging)
    archive.close()
    shutil.rmtree(staging, ignore_errors=True)
    return tarname
208 | |||
def select_archive_patches(d,option):
    '''Stage and tar the recipe's patches: *option* "all" includes every
    patch plus the series file, "applying" only the applied ones.  Returns
    the tarball name, or None when there were no patches.
    NOTE(review): any other *option* leaves patchdir unbound and raises
    NameError below — callers only ever pass "all"/"applying".'''
    if option == "all":
        patchdir = get_all_patches(d)
    elif option == "applying":
        patchdir = get_applying_patches(d)
    try:
        os.rmdir(patchdir)   # succeeds only when the staging dir is empty
    except OSError:
        # non-empty: there are patches to archive
        tarpatch = archive_patches(d,patchdir,option)
        return tarpatch
    return
221 | |||
def archive_logs(d,logdir,bbinc=False):
    '''Tar up *logdir* (resolved relative to the cwd) as
    ${PF}-<dirname>.tar.gz.  When *bbinc* is set the directory also holds
    copied scripts and is deleted afterwards.  Returns the tarball name.'''
    import tarfile
    import shutil

    dirname = os.path.basename(logdir)
    tarname = d.getVar('PF',True) + '-' + dirname + ".tar.gz"
    archive = tarfile.open(tarname, "w:gz")
    archive.add(dirname)
    archive.close()
    if bbinc:
        # the script-logs staging directory is a throw-away copy
        shutil.rmtree(dirname, ignore_errors=True)
    return tarname
236 | |||
def get_licenses(d):
    '''Flatten ${LICENSE} into a single token usable as a directory name:
    "&" is treated like "|", parentheses and "|" separators are dropped,
    and CLOSED licenses are ignored.'''
    raw = d.getVar('LICENSE', 1).replace('&', '|')
    raw = raw.replace('(', '').replace(')', '')
    # keep real license tokens only; lone "|" separators and CLOSED go away
    parts = [tok for tok in raw.split() if tok != 'CLOSED' and tok != "|"]
    flat = "".join(parts)
    # a "|" glued inside a token (no surrounding spaces) is stripped too
    return flat.replace('|','') if '|' in flat else flat
250 | |||
251 | |||
def move_tarball_deploy(d,tarball_list):
    '''Move the tarballs named in *tarball_list* (falsy entries skipped)
    into ${DEPLOY_DIR}/sources/<target-sys>/<licenses>/<pf>, replacing
    any identically-named file already there.

    Fixes over the original: the empty-list guard was
    "tarball_list is []", an identity comparison against a fresh list
    object that is never true; test emptiness instead.'''
    import shutil

    if not tarball_list:
        return
    target_sys = d.getVar('TARGET_SYS', True)
    pf = d.getVar('PF', True)
    licenses = get_licenses(d)
    tar_sources = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf
    if not os.path.exists(tar_sources):
        bb.mkdirhier(tar_sources)
    for source in tarball_list:
        if source:
            if os.path.exists(tar_sources + '/' + source):
                os.remove(tar_sources + '/' + source)
            shutil.move(source,tar_sources)
269 | |||
def verify_var(d):
    '''Abort the build unless ${SOURCE_ARCHIVE_PACKAGE_TYPE} names one of
    the types listed in ${ARCHIVE_TYPE} (i.e. "tar" or "srpm").'''
    try:
        requested = d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper()
        if requested not in d.getVar('ARCHIVE_TYPE', True).split():
            raise AttributeError
    except AttributeError:
        # also reached when the variable is unset (None has no .upper)
        bb.fatal("\"SOURCE_ARCHIVE_PACKAGE_TYPE\" is \'tar\' or \'srpm\', no other types")
277 | |||
def store_package(d,package_name):
    '''Append *package_name* to ${WORKDIR}/tar-package, the list of
    tarballs to fold into the source rpm later; I/O failures are
    silently ignored (best effort).'''
    try:
        record = open(d.getVar('WORKDIR',True )+ '/tar-package','a')
        try:
            record.write(package_name + ' ')
        finally:
            record.close()
    except IOError:
        pass
286 | |||
def get_package(d):
    '''Read ${WORKDIR}/tar-package and return its recorded tarball names
    as a de-duplicated list, or None when the file cannot be read.
    Side effect: changes the cwd to ${WORKDIR}.'''
    try:
        os.chdir(d.getVar('WORKDIR', True))
        record = open('tar-package','r')
        # names live on one space-separated line; set() drops duplicates
        names = list(set(record.readline().replace('\n','').split()))
        record.close()
        return names
    except IOError:
        pass
297 | |||
298 | |||
def archive_sources_patches(d,middle_name):
    '''Archive this recipe's sources (and, for the "prepatch" stage, its
    patches).  *middle_name* tags the stage in the tarball name, e.g.
    zlib-1.4.6-prepatch.tar.gz.  Tarballs are either deployed at once or
    recorded in "tar-package" for later folding into the source rpm.'''
    import shutil
    verify_var(d)
    if not_tarball(d):
        # work-shared / native / packagegroup recipes are never archived
        return

    source_tar_name = archive_sources(d,middle_name)
    if middle_name == "prepatch":
        if d.getVar('PATCHES_ARCHIVE_WITH_SERIES',True).upper() == 'TRUE':
            patch_tar_name = select_archive_patches(d,"all")
        elif d.getVar('PATCHES_ARCHIVE_WITH_SERIES',True).upper() == 'FALSE':
            patch_tar_name = select_archive_patches(d,"applying")
        else:
            bb.fatal("Please define 'PATCHES_ARCHIVE_WITH_SERIES' is strings 'True' or 'False' ")
    else:
        patch_tar_name = ''

    # NOTE(review): substring test — 'TAR' is not contained in 'SRPM' so
    # non-srpm types deploy immediately; confirm intent vs. "!= 'SRPM'"
    if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in 'SRPM':
        move_tarball_deploy(d,[source_tar_name,patch_tar_name])
    else:
        tarpackage = d.getVar('WORKDIR', True) + '/tar-package'
        if os.path.exists(tarpackage):
            os.remove(tarpackage)   # start a fresh list for this build
        for package in source_tar_name, patch_tar_name:
            if package:
                store_package(d,str(package) + ' ')
326 | |||
def archive_scripts_logs(d):
    '''Archive the task logs in ${WORKDIR}/temp — plus the recipe scripts
    (.bb/.inc files) when SOURCE_ARCHIVE_LOG_WITH_SCRIPTS is
    "logs_with_scripts" — and deploy or record the resulting tarball.
    Side effect: changes the cwd to ${WORKDIR}.'''

    work_dir = d.getVar('WORKDIR', True)
    os.chdir(work_dir)
    source_archive_log_with_scripts = d.getVar('SOURCE_ARCHIVE_LOG_WITH_SCRIPTS', True)
    if source_archive_log_with_scripts == 'logs_with_scripts':
        logdir = get_bb_inc(d)
        tarlog = archive_logs(d,logdir,True)
    elif source_archive_log_with_scripts == 'logs':
        # NOTE(review): if temp/ is missing, tarlog stays unbound and the
        # code below raises NameError — confirm temp/ always exists here
        if os.path.exists('temp'):
            tarlog = archive_logs(d,'temp',False)
    else:
        # any other setting: nothing to archive
        return

    if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in 'SRPM':
        move_tarball_deploy(d,[tarlog])

    else:
        store_package(d,tarlog)
347 | |||
def dumpdata(d):
    '''Dump the whole datastore — variables, shell functions and python
    functions — to ${P}-${PR}.showdata.dump under the per-recipe deploy
    directory ${DEPLOY_DIR}/sources/<target-sys>/<licenses>/<pf>.'''
    # NOTE(review): workdir/distro/s below are read but never used
    workdir = bb.data.getVar('WORKDIR', d, 1)
    distro = bb.data.getVar('DISTRO', d, 1)
    s = d.getVar('S', True)
    pf = d.getVar('PF', True)
    target_sys = d.getVar('TARGET_SYS', True)
    licenses = get_licenses(d)
    dumpdir = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf
    if not os.path.exists(dumpdir):
        bb.mkdirhier(dumpdir)

    dumpfile = os.path.join(dumpdir, bb.data.expand("${P}-${PR}.showdata.dump",d))

    bb.note("Dumping metadata into '%s'" % dumpfile)
    f = open(dumpfile, "w")
    # emit variables and shell functions
    bb.data.emit_env(f, d, True)
    # emit the metadata which isnt valid shell
    for e in d.keys():
        if bb.data.getVarFlag(e, 'python', d):
            f.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1)))
    f.close()
371 | |||
def create_diff_gz(d):
    '''Create ${P}-${PR}.diff.gz under the per-recipe deploy directory,
    recording the difference between the pristine tree (${S}.org) and
    the patched ${S}.  Entries in ${EXCLUDE_FROM} are left out of the
    diff.  Side effect: changes the cwd to ${WORKDIR} and removes
    ${S}.org afterwards.'''
    import shutil

    work_dir = d.getVar('WORKDIR', True)
    exclude_from = d.getVar('EXCLUDE_FROM', True).split()
    pf = d.getVar('PF', True)
    licenses = get_licenses(d)
    target_sys = d.getVar('TARGET_SYS', True)
    diff_dir = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf
    diff_file = os.path.join(diff_dir, bb.data.expand("${P}-${PR}.diff.gz",d))
    os.chdir(work_dir)
    # write the exclusion list that diff(1) reads via --exclude-from
    f = open('temp/exclude-from-file', 'a')
    for i in exclude_from:
        f.write(i)
        f.write("\n")
    f.close()

    s=d.getVar('S', True)
    distro = d.getVar('DISTRO',True)
    # copy loose files from ${WORKDIR} into ${S}/<distro>/files so they
    # show up in the diff as well
    dest = s + '/' + distro + '/files'
    if not os.path.exists(dest):
        bb.mkdirhier(dest)
    for i in os.listdir(os.getcwd()):
        if os.path.isfile(i):
            shutil.copy(i, dest)

    bb.note("Creating .diff.gz in ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.gz")
    # the diff is run through a transient BitBake shell function so it
    # executes via bb.build.exec_func like any other task snippet
    cmd = "LC_ALL=C TZ=UTC0 diff --exclude-from=" + work_dir + "/temp/exclude-from-file -Naur " + s + '.org' + ' ' + s + " | gzip -c > " + diff_file
    d.setVar('DIFF', cmd + "\n")
    d.setVarFlag('DIFF', 'func', '1')
    bb.build.exec_func('DIFF', d)
    shutil.rmtree(s + '.org', ignore_errors=True)
405 | |||
# Task: archive the unmodified sources and all patches right after do_unpack.
python do_archive_original_sources_patches(){
    archive_sources_patches(d,'prepatch')
}
410 | |||
# Task: archive the patched sources right after do_patch.
python do_archive_patched_sources(){
    archive_sources_patches(d,'patched')
}
415 | |||
# Task: archive the configured sources right after do_configure.
python do_archive_configured_sources(){
    archive_sources_patches(d,'configured')
}
420 | |||
# Task: archive the task logs, optionally together with the recipe
# scripts (.bb and .inc files).
python do_archive_scripts_logs(){
    archive_scripts_logs(d)
}
425 | |||
# Task: dump every variable and function visible to the running task and
# create a diff file covering all content the package's ${S} contains.
python do_dumpdata_create_diff_gz(){
    dumpdata(d)
    create_diff_gz(d)
}
432 | |||
# linux-yocto creates its ${S} during do_kernel_checkout (before do_patch)
# rather than in do_unpack, so its sources are archived from a postfunc of
# do_kernel_checkout instead of the generic hooks above.
python do_archive_linux_yocto(){
    s = d.getVar('S', True)
    if 'linux-yocto' in s:
        source_tar_name = archive_sources(d,'')
        if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in 'SRPM':
            move_tarball_deploy(d,[source_tar_name,''])
}
do_kernel_checkout[postfuncs] += "do_archive_linux_yocto "
443 | |||
# Remove the per-recipe tarballs for sources, patches and logs once the
# source rpm containing them has been created.
python do_remove_tarball(){
    if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM':
        work_dir = d.getVar('WORKDIR', True)
        os.chdir(work_dir)
        try:
            # delete every tarball recorded in tar-package, then the list
            for file in os.listdir(os.getcwd()):
                if file in get_package(d):
                    os.remove(file)
            os.remove('tar-package')
        except (TypeError,OSError):
            # get_package() returned None or a file vanished: best effort
            pass
}
# bug fix: the flag was set on the misspelled task name "do_remove_taball",
# so the deptask never applied to the real task.
do_remove_tarball[deptask] = "do_archive_scripts_logs"
do_package_write_rpm[postfuncs] += "do_remove_tarball "
# NOTE(review): "export" marks variables for the shell environment;
# exporting these python helpers looks ineffective — confirm intent.
export get_licenses
export get_package