author     Xiaofeng Yan <xiaofeng.yan@windriver.com>    2012-04-01 16:25:50 +0800
committer  Richard Purdie <richard.purdie@linuxfoundation.org>    2012-04-04 17:17:52 +0100
commit     0aaddbc32b7d125ea754aecfd503ce96fe6e1d0b (patch)
tree       13d60fbcf4263266dad47de31c2e55cd43fe1bb3
parent     6c73e458da03e79a6b029da758dce60bcad17b33 (diff)
download   poky-0aaddbc32b7d125ea754aecfd503ce96fe6e1d0b.tar.gz
archiver.bbclass: enhance code readability
The modifications are as follows:
- Rework code with poor readability and vague descriptions.
- Use existing functions instead of custom ones.

(From OE-Core rev: 514319c4a15156cd63a4ac3c6ee903f64c98884e)

Signed-off-by: Xiaofeng Yan <xiaofeng.yan@windriver.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-rw-r--r--   meta/classes/archiver.bbclass   227
1 file changed, 108 insertions(+), 119 deletions(-)
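A note for readers before the diff: the "use existing functions" point mostly means dropping the custom parse_var() helper in favour of BitBake's own variable expansion, bb.data.expand(), which the reworked get_bb_inc() calls on each include-file name. A minimal sketch of the idea, not part of the patch, assuming it runs inside a BitBake python block where the datastore d and the bb and os modules are available, and using a hypothetical include name:

    # Resolve '${PV}' in a 'require'd include name the way the new code does,
    # instead of hand-parsing the string with a regex as parse_var() did.
    # With PV set to "1.2.6", 'xxx_${PV}.inc' expands to 'xxx_1.2.6.inc'.
    incname = bb.data.expand(os.path.basename('recipes/xxx_${PV}.inc'), d)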
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index 471430e24f..4e4e964a24 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -2,28 +2,11 @@
 # It also output building environment to xxx.dump.data and create xxx.diff.gz to record
 # all content in ${S} to a diff file.
 
-EXCLUDE_FROM ?= ".pc autom4te.cache"
+ARCHIVE_EXCLUDE_FROM ?= ".pc autom4te.cache"
 ARCHIVE_TYPE ?= "TAR SRPM"
 DISTRO ?= "poky"
 PATCHES_ARCHIVE_WITH_SERIES = 'TRUE'
 
-def parse_var(d,var):
-    ''' parse variable like ${PV} in "require xxx_${PV}.inc" to a real value. for example, change "require xxx_${PV}.inc" to "require xxx_1.2.inc" '''
-    import re
-    pat = re.compile('.*\$({(.*)}).*')
-    if '$' not in var and '/' not in var:
-        return var
-    else:
-        if '/' in var:
-            return [i for i in var.split('/') if i.endswith('.inc')][0]
-        elif '$' in var:
-            m = pat.match(var)
-            patstr = '\$' + m.group(1)
-            var_str = m.group(2)
-            return re.sub(patstr,d.getVar(var_str,True),var)
-        else:
-            return var
-
 def get_bb_inc(d):
     '''create a directory "script-logs" including .bb and .inc file in ${WORKDIR}'''
     import re
@@ -32,14 +15,18 @@ def get_bb_inc(d):
 
     bbinc = []
     pat=re.compile('require\s*([^\s]*\.*)(.*)')
-    file_dir = d.getVar('FILE', True)
-    bbdir = os.path.dirname(file_dir)
     work_dir = d.getVar('WORKDIR', True)
-    os.chdir(work_dir)
-    bb.mkdirhier("script-logs")
-    os.chdir(bbdir)
-    bbfile = os.path.basename(file_dir)
-    bbinc.append(bbfile)
+    bbfile = d.getVar('FILE', True)
+    bbdir = os.path.dirname(bbfile)
+    script_logs = os.path.join(work_dir,'script-logs')
+    bb_inc = os.path.join(script_logs,'bb_inc')
+    bb.mkdirhier(script_logs)
+    bb.mkdirhier(bb_inc)
+
+    def find_file(dir,file):
+        for root, dirs, files in os.walk(dir):
+            if file in files:
+                return os.path.join(root,file)
 
     def get_inc (file):
         f = open(file,'r')
@@ -49,21 +36,26 @@ def get_bb_inc(d):
             else:
                 try:
                     incfile = pat.match(line).group(1)
-                    incfile = parse_var(d,incfile)
-                    bbinc.append(incfile)
-                    get_inc(incfile)
-                except (IOError,AttributeError):
+                    incfile = bb.data.expand(os.path.basename(incfile),d)
+                    abs_incfile = find_file(bbdir,incfile)
+                    if abs_incfile:
+                        bbinc.append(abs_incfile)
+                        get_inc(abs_incfile)
+                except AttributeError:
                     pass
     get_inc(bbfile)
-    os.chdir(work_dir)
-    for root, dirs, files in os.walk(bbdir):
-        for file in bbinc:
-            if file in files:
-                shutil.copy(root + '/' + file,'script-logs')
-    oe.path.copytree('temp', 'script-logs')
-    return work_dir + '/script-logs'
-
-def get_all_patches(d):
+    bbinc = list(set(bbinc))
+    for bbincfile in bbinc:
+        shutil.copy(bbincfile,bb_inc)
+
+    try:
+        bb.mkdirhier(os.path.join(script_logs,'temp'))
+        oe.path.copytree(os.path.join(work_dir,'temp'), os.path.join(script_logs,'temp'))
+    except (IOError,AttributeError):
+        pass
+    return script_logs
+
+def get_series(d):
     '''copy patches and series file to a pointed directory which will be archived to tarball in ${WORKDIR}'''
     import shutil
 
@@ -71,11 +63,11 @@ def get_all_patches(d):
     pf = d.getVar('PF', True)
     work_dir = d.getVar('WORKDIR', True)
     s = d.getVar('S',True)
-    dest = os.path.join(work_dir, pf + '-patches')
+    dest = os.path.join(work_dir, pf + '-series')
     shutil.rmtree(dest, ignore_errors=True)
     bb.mkdirhier(dest)
 
-    src_uri = d.getVar('SRC_URI', 1).split()
+    src_uri = d.getVar('SRC_URI', True).split()
     fetch = bb.fetch2.Fetch(src_uri, d)
     locals = (fetch.localpath(url) for url in fetch.urls)
     for local in locals:
@@ -90,7 +82,8 @@ def get_all_patches(d):
             shutil.copy(patch,dest)
         except IOError:
             if os.path.isdir(patch):
-                oe.path.copytree(patch,dest)
+                bb.mkdirhier(os.path.join(dest,patch))
+                oe.path.copytree(patch, os.path.join(dest,patch))
     return dest
 
 def get_applying_patches(d):
@@ -124,92 +117,85 @@ def not_tarball(d):
     else:
         return False
 
-def get_source_from_downloads(d,middle_name):
+def get_source_from_downloads(d,stage_name):
     '''copy tarball of $P to $WORKDIR when this tarball exists in $DL_DIR'''
-    if middle_name in 'patched' 'configured':
+    if stage_name in 'patched' 'configured':
         return
     pf = d.getVar('PF', True)
     dl_dir = d.getVar('DL_DIR',True)
     try:
-        source = os.path.basename(d.getVar('SRC_URI', 1).split()[0])
-        os.chdir(dl_dir)
+        source = os.path.join(dl_dir,os.path.basename(d.getVar('SRC_URI', True).split()[0]))
         if os.path.exists(source) and not os.path.isdir(source):
             return source
     except (IndexError, OSError):
         pass
+    return ''
+
+def do_tarball(workdir,srcdir,tarname):
+    '''tar "srcdir" under "workdir" to "tarname"'''
+    import tarfile
 
-def archive_sources_from_directory(d,middle_name):
+    sav_dir = os.getcwd()
+    os.chdir(workdir)
+    if (len(os.listdir(srcdir))) != 0:
+        tar = tarfile.open(tarname, "w:gz")
+        tar.add(srcdir)
+        tar.close()
+    else:
+        tarname = ''
+    os.chdir(sav_dir)
+    return tarname
+
+def archive_sources_from_directory(d,stage_name):
     '''archive sources codes tree to tarball when tarball of $P doesn't exist in $DL_DIR'''
-    import tarfile
     import shutil
 
     s = d.getVar('S',True)
-    workdir=d.getVar('WORKDIR', True)
+    work_dir=d.getVar('WORKDIR', True)
     PF = d.getVar('PF',True)
-    tarname = PF + '-' + middle_name + ".tar.gz"
+    tarname = PF + '-' + stage_name + ".tar.gz"
 
-    if os.path.exists(s) and s is not workdir:
-        sourcedir = os.path.basename(s)
-        tarbase = os.path.dirname(s)
-        if not sourcedir or os.path.dirname(tarbase) == workdir:
-            sourcedir = os.path.basename(os.path.dirname(s))
-            tarbase = os.path.dirname(os.path.dirname(s))
-        os.chdir(tarbase)
-    else:
-        sourcedir = os.path.basename(s)
-        if not os.path.exists(sourcedir):
-            os.mkdir(sourcedir)
+    if os.path.exists(s) and work_dir in s:
         try:
-            for file in os.listdir(s):
-                if file is not 'temp' and file is not sourcedir:
-                    shutil.copy(file,sourcedir)
-        except (IOError,OSError):
-            pass
-
-    if (len(os.listdir(sourcedir))) != 0:
-        tar = tarfile.open( tarname, "w:gz")
-        tar.add(sourcedir)
-        tar.close()
-        if cmp(workdir,os.path.dirname(s)) and not os.path.exists(workdir + '/' + tarname):
-            shutil.move(os.path.dirname(s) + '/' + tarname,workdir)
+            source_dir = os.path.join(work_dir,[ i for i in s.replace(work_dir,'').split('/') if i][0])
+        except IndexError:
+            if not cmp(s,work_dir):
+                return ''
     else:
-        return
-    return tarname
+        return ''
+    source = os.path.basename(source_dir)
+    return do_tarball(work_dir,source,tarname)
 
-def archive_sources(d,middle_name):
+def archive_sources(d,stage_name):
     '''copy tarball from $DL_DIR to $WORKDIR if have tarball, archive source codes tree in $WORKDIR if $P is directory instead of tarball'''
     import shutil
     work_dir = d.getVar('WORKDIR',True)
-    file = get_source_from_downloads(d,middle_name)
+    file = get_source_from_downloads(d,stage_name)
    if file:
         shutil.copy(file,work_dir)
     else:
-        file = archive_sources_from_directory(d,middle_name)
+        file = archive_sources_from_directory(d,stage_name)
     return file
 
 
 def archive_patches(d,patchdir,series):
     '''archive patches to tarball and also include series files if 'series' is True'''
-    import tarfile
     import shutil
 
     s = d.getVar('S',True)
     work_dir = d.getVar('WORKDIR', True)
-    os.chdir(work_dir)
     patch_dir = os.path.basename(patchdir)
     tarname = patch_dir + ".tar.gz"
-    if series == 'all' and os.path.exists(s + '/patches/series'):
-        shutil.copy(s + '/patches/series',patch_dir)
-    tar = tarfile.open(tarname, "w:gz")
-    tar.add(patch_dir)
-    tar.close()
-    shutil.rmtree(patch_dir, ignore_errors=True)
+    if series == 'all' and os.path.exists(os.path.join(s,'patches/series')):
+        shutil.copy(os.path.join(s,'patches/series'),patchdir)
+    tarname = do_tarball(work_dir,patch_dir,tarname)
+    shutil.rmtree(patchdir, ignore_errors=True)
     return tarname
 
 def select_archive_patches(d,option):
     '''select to archive all patches including non-applying and series or applying patches '''
     if option == "all":
-        patchdir = get_all_patches(d)
+        patchdir = get_series(d)
     elif option == "applying":
         patchdir = get_applying_patches(d)
     try:
@@ -221,17 +207,15 @@ def select_archive_patches(d,option):
 
 def archive_logs(d,logdir,bbinc=False):
     '''archive logs in temp to tarball and .bb and .inc files if bbinc is True '''
-    import tarfile
     import shutil
 
-    log_dir = os.path.basename(logdir)
     pf = d.getVar('PF',True)
+    work_dir = d.getVar('WORKDIR',True)
+    log_dir = os.path.basename(logdir)
     tarname = pf + '-' + log_dir + ".tar.gz"
-    tar = tarfile.open(tarname, "w:gz")
-    tar.add(log_dir)
-    tar.close()
+    tarname = do_tarball(work_dir,log_dir,tarname)
     if bbinc:
-        shutil.rmtree(log_dir, ignore_errors=True)
+        shutil.rmtree(logdir, ignore_errors=True)
     return tarname
 
 def get_licenses(d):
@@ -258,16 +242,17 @@ def move_tarball_deploy(d,tarball_list):
     target_sys = d.getVar('TARGET_SYS', True)
     pf = d.getVar('PF', True)
     licenses = get_licenses(d)
+    work_dir = d.getVar('WORKDIR',True)
     tar_sources = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf
     if not os.path.exists(tar_sources):
         bb.mkdirhier(tar_sources)
     for source in tarball_list:
         if source:
-            if os.path.exists(tar_sources + '/' + source):
-                os.remove(tar_sources + '/' + source)
-            shutil.move(source,tar_sources)
+            if os.path.exists(os.path.join(tar_sources, source)):
+                os.remove(os.path.join(tar_sources,source))
+            shutil.move(os.path.join(work_dir,source),tar_sources)
 
-def verify_var(d):
+def check_archiving_type(d):
     '''check the type for archiving package('tar' or 'srpm')'''
     try:
         if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in d.getVar('ARCHIVE_TYPE', True).split():
@@ -278,7 +263,7 @@ def verify_var(d):
 def store_package(d,package_name):
     '''store tarbablls name to file "tar-package"'''
     try:
-        f = open(d.getVar('WORKDIR',True )+ '/tar-package','a')
+        f = open(os.path.join(d.getVar('WORKDIR',True),'tar-package'),'a')
         f.write(package_name + ' ')
         f.close()
     except IOError:
@@ -286,25 +271,27 @@ def store_package(d,package_name):
 
 def get_package(d):
     '''get tarballs name from "tar-package"'''
+    work_dir = (d.getVar('WORKDIR', True))
+    tarpackage = os.path.join(work_dir,'tar-package')
     try:
-        os.chdir(d.getVar('WORKDIR', True))
-        f = open('tar-package','r')
+        f = open(tarpackage,'r')
         line = list(set(f.readline().replace('\n','').split()))
-        f.close()
-        return line
     except IOError:
         pass
+    f.close()
+    return line
 
 
-def archive_sources_patches(d,middle_name):
-    '''archive sources and patches to tarball. middle_name will append strings ${middle_name} to ${PR} as middle name. for example, zlib-1.4.6-prepatch(middle_name).tar.gz '''
+def archive_sources_patches(d,stage_name):
+    '''archive sources and patches to tarball. stage_name will append strings ${stage_name} to ${PR} as middle name. for example, zlib-1.4.6-prepatch(stage_name).tar.gz '''
     import shutil
-    verify_var(d)
+
+    check_archiving_type(d)
     if not_tarball(d):
         return
 
-    source_tar_name = archive_sources(d,middle_name)
-    if middle_name == "prepatch":
+    source_tar_name = archive_sources(d,stage_name)
+    if stage_name == "prepatch":
         if d.getVar('PATCHES_ARCHIVE_WITH_SERIES',True).upper() == 'TRUE':
             patch_tar_name = select_archive_patches(d,"all")
         elif d.getVar('PATCHES_ARCHIVE_WITH_SERIES',True).upper() == 'FALSE':
@@ -313,14 +300,14 @@ def archive_sources_patches(d,middle_name):
             bb.fatal("Please define 'PATCHES_ARCHIVE_WITH_SERIES' is strings 'True' or 'False' ")
     else:
         patch_tar_name = ''
 
     if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in 'SRPM':
         move_tarball_deploy(d,[source_tar_name,patch_tar_name])
     else:
-        tarpackage = d.getVar('WORKDIR', True) + '/tar-package'
+        tarpackage = os.path.join(d.getVar('WORKDIR', True),'tar-package')
         if os.path.exists(tarpackage):
             os.remove(tarpackage)
-        for package in source_tar_name, patch_tar_name:
+        for package in os.path.basename(source_tar_name), patch_tar_name:
             if package:
                 store_package(d,str(package) + ' ')
 
@@ -328,14 +315,14 @@ def archive_scripts_logs(d):
     '''archive scripts and logs. scripts include .bb and .inc files and logs include stuff in "temp".'''
 
     work_dir = d.getVar('WORKDIR', True)
-    os.chdir(work_dir)
+    temp_dir = os.path.join(work_dir,'temp')
     source_archive_log_with_scripts = d.getVar('SOURCE_ARCHIVE_LOG_WITH_SCRIPTS', True)
     if source_archive_log_with_scripts == 'logs_with_scripts':
         logdir = get_bb_inc(d)
         tarlog = archive_logs(d,logdir,True)
     elif source_archive_log_with_scripts == 'logs':
-        if os.path.exists('temp'):
-            tarlog = archive_logs(d,'temp',False)
+        if os.path.exists(temp_dir):
+            tarlog = archive_logs(d,temp_dir,False)
     else:
         return
 
@@ -374,14 +361,14 @@ def create_diff_gz(d):
     import shutil
 
     work_dir = d.getVar('WORKDIR', True)
-    exclude_from = d.getVar('EXCLUDE_FROM', True).split()
+    exclude_from = d.getVar('ARCHIVE_EXCLUDE_FROM', True).split()
     pf = d.getVar('PF', True)
     licenses = get_licenses(d)
     target_sys = d.getVar('TARGET_SYS', True)
     diff_dir = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf
     diff_file = os.path.join(diff_dir, bb.data.expand("${P}-${PR}.diff.gz",d))
-    os.chdir(work_dir)
-    f = open('temp/exclude-from-file', 'a')
+
+    f = open(os.path.join(work_dir,'temp/exclude-from-file'), 'a')
     for i in exclude_from:
         f.write(i)
         f.write("\n")
@@ -394,7 +381,10 @@ def create_diff_gz(d):
     bb.mkdirhier(dest)
     for i in os.listdir(os.getcwd()):
         if os.path.isfile(i):
-            shutil.copy(i, dest)
+            try:
+                shutil.copy(i, dest)
+            except IOError:
+                os.system('fakeroot cp -rf ' + i + " " + dest )
 
     bb.note("Creating .diff.gz in ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.gz")
     cmd = "LC_ALL=C TZ=UTC0 diff --exclude-from=" + work_dir + "/temp/exclude-from-file -Naur " + s + '.org' + ' ' + s + " | gzip -c > " + diff_file
@@ -445,12 +435,11 @@ do_kernel_checkout[postfuncs] += "do_archive_linux_yocto "
 python do_remove_tarball(){
     if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM':
         work_dir = d.getVar('WORKDIR', True)
-        os.chdir(work_dir)
         try:
             for file in os.listdir(os.getcwd()):
                 if file in get_package(d):
                     os.remove(file)
-            os.remove('tar-package')
+            os.remove(os.path.join(work_dir,'tar-package'))
         except (TypeError,OSError):
             pass
 }
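A closing note, not part of the commit itself: the other recurring change in the diff is that the three places which each opened their own tarfile (source tree, patches, logs) now share the new do_tarball() helper, and os.chdir() with relative paths gives way to paths built with os.path.join() on WORKDIR. A rough usage sketch, assuming a BitBake python context where d is the datastore and do_tarball() from this class is in scope:

    # Roughly what the reworked archive_logs() does for ${WORKDIR}/temp.
    work_dir = d.getVar('WORKDIR', True)
    log_dir = os.path.basename(os.path.join(work_dir, 'temp'))   # 'temp'
    tarname = d.getVar('PF', True) + '-' + log_dir + '.tar.gz'
    # do_tarball() tars the directory relative to WORKDIR and returns the
    # tarball name, or '' when the directory is empty.
    tarname = do_tarball(work_dir, log_dir, tarname)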