diff options
author | Robert Yang <liezhi.yang@windriver.com> | 2012-08-23 23:03:10 +0800 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2012-08-29 16:02:07 -0700 |
commit | c96f6ee659cd4c20fd826f633e634ba0ef2f340b (patch) | |
tree | 946d035b3c438333f8e9479b9cc498f367ecee11 /meta/classes | |
parent | a6588b8f79afa0a06c346dc00aa1eb3b1e691a7a (diff) | |
download | poky-c96f6ee659cd4c20fd826f633e634ba0ef2f340b.tar.gz |
archiver.bbclass: indent fixes
Several fixes:
* It uses mixed tab and whitespace as the indent in one function,
Fix them to use "4 spaces" as the indent.
* Remove the unwanted blank, for example, more than one blank lines
appeared together.
* Remove the tail whitespace.
[YOCTO #2619]
(From OE-Core rev: 5eacbcdf306d9e743164a1563559cd24eb5fffe0)
Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
Signed-off-by: Saul Wold <sgw@linux.intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta/classes')
-rw-r--r-- | meta/classes/archiver.bbclass | 757 | ||||
-rw-r--r-- | meta/classes/package_rpm.bbclass | 1 |
2 files changed, 379 insertions, 379 deletions
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass index 8e97e447c9..117ad02da4 100644 --- a/meta/classes/archiver.bbclass +++ b/meta/classes/archiver.bbclass | |||
@@ -1,6 +1,10 @@ | |||
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | # | ||
1 | # This file is used for archiving sources ,patches,and logs to tarball. | 4 | # This file is used for archiving sources ,patches,and logs to tarball. |
2 | # It also output building environment to xxx.dump.data and create xxx.diff.gz to record | 5 | # It also output building environment to xxx.dump.data and create xxx.diff.gz to record |
3 | # all content in ${S} to a diff file. | 6 | # all content in ${S} to a diff file. |
7 | # | ||
4 | 8 | ||
5 | ARCHIVE_EXCLUDE_FROM ?= ".pc autom4te.cache" | 9 | ARCHIVE_EXCLUDE_FROM ?= ".pc autom4te.cache" |
6 | ARCHIVE_TYPE ?= "TAR SRPM" | 10 | ARCHIVE_TYPE ?= "TAR SRPM" |
@@ -67,454 +71,451 @@ def tar_filter(d): | |||
67 | if d.getVar('FILTER', True).upper() == "YES": | 71 | if d.getVar('FILTER', True).upper() == "YES": |
68 | included, reason = copyleft_should_include(d) | 72 | included, reason = copyleft_should_include(d) |
69 | if not included: | 73 | if not included: |
70 | return False | 74 | return False |
71 | else: | 75 | else: |
72 | return True | 76 | return True |
73 | else: | 77 | else: |
74 | return False | 78 | return False |
75 | 79 | ||
76 | def get_bb_inc(d): | 80 | def get_bb_inc(d): |
77 | '''create a directory "script-logs" including .bb and .inc file in ${WORKDIR}''' | 81 | '''create a directory "script-logs" including .bb and .inc file in ${WORKDIR}''' |
78 | import re | 82 | import re |
79 | import shutil | 83 | import shutil |
80 | 84 | ||
81 | bbinc = [] | 85 | bbinc = [] |
82 | pat=re.compile('require\s*([^\s]*\.*)(.*)') | 86 | pat=re.compile('require\s*([^\s]*\.*)(.*)') |
83 | work_dir = d.getVar('WORKDIR', True) | 87 | work_dir = d.getVar('WORKDIR', True) |
84 | bbfile = d.getVar('FILE', True) | 88 | bbfile = d.getVar('FILE', True) |
85 | bbdir = os.path.dirname(bbfile) | 89 | bbdir = os.path.dirname(bbfile) |
86 | script_logs = os.path.join(work_dir,'script-logs') | 90 | script_logs = os.path.join(work_dir,'script-logs') |
87 | bb_inc = os.path.join(script_logs,'bb_inc') | 91 | bb_inc = os.path.join(script_logs,'bb_inc') |
88 | bb.mkdirhier(script_logs) | 92 | bb.mkdirhier(script_logs) |
89 | bb.mkdirhier(bb_inc) | 93 | bb.mkdirhier(bb_inc) |
90 | 94 | ||
91 | def find_file(dir,file): | 95 | def find_file(dir,file): |
92 | for root, dirs, files in os.walk(dir): | 96 | for root, dirs, files in os.walk(dir): |
93 | if file in files: | 97 | if file in files: |
94 | return os.path.join(root,file) | 98 | return os.path.join(root,file) |
95 | 99 | ||
96 | def get_inc (file): | 100 | def get_inc (file): |
97 | f = open(file,'r') | 101 | f = open(file,'r') |
98 | for line in f.readlines(): | 102 | for line in f.readlines(): |
99 | if 'require' not in line: | 103 | if 'require' not in line: |
100 | bbinc.append(file) | 104 | bbinc.append(file) |
101 | else: | 105 | else: |
102 | try: | 106 | try: |
103 | incfile = pat.match(line).group(1) | 107 | incfile = pat.match(line).group(1) |
104 | incfile = bb.data.expand(os.path.basename(incfile),d) | 108 | incfile = bb.data.expand(os.path.basename(incfile),d) |
105 | abs_incfile = find_file(bbdir,incfile) | 109 | abs_incfile = find_file(bbdir,incfile) |
106 | if abs_incfile: | 110 | if abs_incfile: |
107 | bbinc.append(abs_incfile) | 111 | bbinc.append(abs_incfile) |
108 | get_inc(abs_incfile) | 112 | get_inc(abs_incfile) |
109 | except AttributeError: | 113 | except AttributeError: |
110 | pass | 114 | pass |
111 | get_inc(bbfile) | 115 | get_inc(bbfile) |
112 | bbinc = list(set(bbinc)) | 116 | bbinc = list(set(bbinc)) |
113 | for bbincfile in bbinc: | 117 | for bbincfile in bbinc: |
114 | shutil.copy(bbincfile,bb_inc) | 118 | shutil.copy(bbincfile,bb_inc) |
115 | |||
116 | try: | ||
117 | bb.mkdirhier(os.path.join(script_logs,'temp')) | ||
118 | oe.path.copytree(os.path.join(work_dir,'temp'), os.path.join(script_logs,'temp')) | ||
119 | except (IOError,AttributeError): | ||
120 | pass | ||
121 | return script_logs | ||
122 | |||
123 | def get_series(d): | ||
124 | '''copy patches and series file to a pointed directory which will be archived to tarball in ${WORKDIR}''' | ||
125 | import shutil | ||
126 | |||
127 | src_patches=[] | ||
128 | pf = d.getVar('PF', True) | ||
129 | work_dir = d.getVar('WORKDIR', True) | ||
130 | s = d.getVar('S',True) | ||
131 | dest = os.path.join(work_dir, pf + '-series') | ||
132 | shutil.rmtree(dest, ignore_errors=True) | ||
133 | bb.mkdirhier(dest) | ||
134 | |||
135 | src_uri = d.getVar('SRC_URI', True).split() | ||
136 | fetch = bb.fetch2.Fetch(src_uri, d) | ||
137 | locals = (fetch.localpath(url) for url in fetch.urls) | ||
138 | for local in locals: | ||
139 | src_patches.append(local) | ||
140 | if not cmp(work_dir,s): | ||
141 | tmp_list = src_patches | ||
142 | else: | ||
143 | tmp_list = src_patches[1:] | ||
144 | |||
145 | for patch in tmp_list: | ||
146 | try: | ||
147 | shutil.copy(patch,dest) | ||
148 | except IOError: | ||
149 | if os.path.isdir(patch): | ||
150 | bb.mkdirhier(os.path.join(dest,patch)) | ||
151 | oe.path.copytree(patch, os.path.join(dest,patch)) | ||
152 | return dest | ||
153 | |||
154 | def get_applying_patches(d): | ||
155 | """only copy applying patches to a pointed directory which will be archived to tarball""" | ||
156 | import shutil | ||
157 | 119 | ||
120 | try: | ||
121 | bb.mkdirhier(os.path.join(script_logs,'temp')) | ||
122 | oe.path.copytree(os.path.join(work_dir,'temp'), os.path.join(script_logs,'temp')) | ||
123 | except (IOError,AttributeError): | ||
124 | pass | ||
125 | return script_logs | ||
158 | 126 | ||
159 | pf = d.getVar('PF', True) | 127 | def get_series(d): |
160 | work_dir = d.getVar('WORKDIR', True) | 128 | '''copy patches and series file to a pointed directory which will be archived to tarball in ${WORKDIR}''' |
161 | dest = os.path.join(work_dir, pf + '-patches') | 129 | import shutil |
162 | shutil.rmtree(dest, ignore_errors=True) | 130 | |
163 | bb.mkdirhier(dest) | 131 | src_patches=[] |
132 | pf = d.getVar('PF', True) | ||
133 | work_dir = d.getVar('WORKDIR', True) | ||
134 | s = d.getVar('S',True) | ||
135 | dest = os.path.join(work_dir, pf + '-series') | ||
136 | shutil.rmtree(dest, ignore_errors=True) | ||
137 | bb.mkdirhier(dest) | ||
138 | |||
139 | src_uri = d.getVar('SRC_URI', True).split() | ||
140 | fetch = bb.fetch2.Fetch(src_uri, d) | ||
141 | locals = (fetch.localpath(url) for url in fetch.urls) | ||
142 | for local in locals: | ||
143 | src_patches.append(local) | ||
144 | if not cmp(work_dir,s): | ||
145 | tmp_list = src_patches | ||
146 | else: | ||
147 | tmp_list = src_patches[1:] | ||
164 | 148 | ||
149 | for patch in tmp_list: | ||
150 | try: | ||
151 | shutil.copy(patch,dest) | ||
152 | except IOError: | ||
153 | if os.path.isdir(patch): | ||
154 | bb.mkdirhier(os.path.join(dest,patch)) | ||
155 | oe.path.copytree(patch, os.path.join(dest,patch)) | ||
156 | return dest | ||
165 | 157 | ||
166 | patches = src_patches(d) | 158 | def get_applying_patches(d): |
167 | for patch in patches: | 159 | """only copy applying patches to a pointed directory which will be archived to tarball""" |
168 | _, _, local, _, _, parm = bb.decodeurl(patch) | 160 | import shutil |
169 | if local: | 161 | |
170 | shutil.copy(local,dest) | 162 | pf = d.getVar('PF', True) |
171 | return dest | 163 | work_dir = d.getVar('WORKDIR', True) |
164 | dest = os.path.join(work_dir, pf + '-patches') | ||
165 | shutil.rmtree(dest, ignore_errors=True) | ||
166 | bb.mkdirhier(dest) | ||
167 | |||
168 | patches = src_patches(d) | ||
169 | for patch in patches: | ||
170 | _, _, local, _, _, parm = bb.decodeurl(patch) | ||
171 | if local: | ||
172 | shutil.copy(local,dest) | ||
173 | return dest | ||
172 | 174 | ||
173 | def not_tarball(d): | 175 | def not_tarball(d): |
174 | '''packages including key words 'work-shared','native', 'task-' will be passed''' | 176 | '''packages including key words 'work-shared','native', 'task-' will be passed''' |
175 | 177 | ||
176 | workdir = d.getVar('WORKDIR',True) | 178 | workdir = d.getVar('WORKDIR',True) |
177 | s = d.getVar('S',True) | 179 | s = d.getVar('S',True) |
178 | if 'work-shared' in s or 'task-' in workdir or 'native' in workdir: | 180 | if 'work-shared' in s or 'task-' in workdir or 'native' in workdir: |
179 | return True | 181 | return True |
180 | else: | 182 | else: |
181 | return False | 183 | return False |
182 | 184 | ||
183 | def get_source_from_downloads(d,stage_name): | 185 | def get_source_from_downloads(d,stage_name): |
184 | '''copy tarball of $P to $WORKDIR when this tarball exists in $DL_DIR''' | 186 | '''copy tarball of $P to $WORKDIR when this tarball exists in $DL_DIR''' |
185 | if stage_name in 'patched' 'configured': | 187 | if stage_name in 'patched' 'configured': |
186 | return | 188 | return |
187 | pf = d.getVar('PF', True) | 189 | pf = d.getVar('PF', True) |
188 | dl_dir = d.getVar('DL_DIR',True) | 190 | dl_dir = d.getVar('DL_DIR',True) |
189 | try: | 191 | try: |
190 | source = os.path.join(dl_dir,os.path.basename(d.getVar('SRC_URI', True).split()[0])) | 192 | source = os.path.join(dl_dir,os.path.basename(d.getVar('SRC_URI', True).split()[0])) |
191 | if os.path.exists(source) and not os.path.isdir(source): | 193 | if os.path.exists(source) and not os.path.isdir(source): |
192 | return source | 194 | return source |
193 | except (IndexError, OSError): | 195 | except (IndexError, OSError): |
194 | pass | 196 | pass |
195 | return '' | 197 | return '' |
196 | 198 | ||
197 | def do_tarball(workdir,srcdir,tarname): | 199 | def do_tarball(workdir,srcdir,tarname): |
198 | '''tar "srcdir" under "workdir" to "tarname"''' | 200 | '''tar "srcdir" under "workdir" to "tarname"''' |
199 | import tarfile | 201 | import tarfile |
200 | 202 | ||
201 | sav_dir = os.getcwd() | 203 | sav_dir = os.getcwd() |
202 | os.chdir(workdir) | 204 | os.chdir(workdir) |
203 | if (len(os.listdir(srcdir))) != 0: | 205 | if (len(os.listdir(srcdir))) != 0: |
204 | tar = tarfile.open(tarname, "w:gz") | 206 | tar = tarfile.open(tarname, "w:gz") |
205 | tar.add(srcdir) | 207 | tar.add(srcdir) |
206 | tar.close() | 208 | tar.close() |
207 | else: | 209 | else: |
208 | tarname = '' | 210 | tarname = '' |
209 | os.chdir(sav_dir) | 211 | os.chdir(sav_dir) |
210 | return tarname | 212 | return tarname |
211 | 213 | ||
212 | def archive_sources_from_directory(d,stage_name): | 214 | def archive_sources_from_directory(d,stage_name): |
213 | '''archive sources codes tree to tarball when tarball of $P doesn't exist in $DL_DIR''' | 215 | '''archive sources codes tree to tarball when tarball of $P doesn't exist in $DL_DIR''' |
214 | import shutil | 216 | import shutil |
215 | 217 | ||
216 | s = d.getVar('S',True) | 218 | s = d.getVar('S',True) |
217 | work_dir=d.getVar('WORKDIR', True) | 219 | work_dir=d.getVar('WORKDIR', True) |
218 | PF = d.getVar('PF',True) | 220 | PF = d.getVar('PF',True) |
219 | tarname = PF + '-' + stage_name + ".tar.gz" | 221 | tarname = PF + '-' + stage_name + ".tar.gz" |
220 | 222 | ||
221 | if os.path.exists(s) and work_dir in s: | 223 | if os.path.exists(s) and work_dir in s: |
222 | try: | 224 | try: |
223 | source_dir = os.path.join(work_dir,[ i for i in s.replace(work_dir,'').split('/') if i][0]) | 225 | source_dir = os.path.join(work_dir,[ i for i in s.replace(work_dir,'').split('/') if i][0]) |
224 | except IndexError: | 226 | except IndexError: |
225 | if not cmp(s,work_dir): | 227 | if not cmp(s,work_dir): |
226 | return '' | 228 | return '' |
227 | else: | 229 | else: |
228 | return '' | 230 | return '' |
229 | source = os.path.basename(source_dir) | 231 | source = os.path.basename(source_dir) |
230 | return do_tarball(work_dir,source,tarname) | 232 | return do_tarball(work_dir,source,tarname) |
231 | 233 | ||
232 | def archive_sources(d,stage_name): | 234 | def archive_sources(d,stage_name): |
233 | '''copy tarball from $DL_DIR to $WORKDIR if have tarball, archive source codes tree in $WORKDIR if $P is directory instead of tarball''' | 235 | '''copy tarball from $DL_DIR to $WORKDIR if have tarball, archive source codes tree in $WORKDIR if $P is directory instead of tarball''' |
234 | import shutil | 236 | import shutil |
235 | work_dir = d.getVar('WORKDIR',True) | 237 | work_dir = d.getVar('WORKDIR',True) |
236 | file = get_source_from_downloads(d,stage_name) | 238 | file = get_source_from_downloads(d,stage_name) |
237 | if file: | 239 | if file: |
238 | shutil.copy(file,work_dir) | 240 | shutil.copy(file,work_dir) |
239 | file = os.path.basename(file) | 241 | file = os.path.basename(file) |
240 | else: | 242 | else: |
241 | file = archive_sources_from_directory(d,stage_name) | 243 | file = archive_sources_from_directory(d,stage_name) |
242 | return file | 244 | return file |
243 | |||
244 | 245 | ||
245 | def archive_patches(d,patchdir,series): | 246 | def archive_patches(d,patchdir,series): |
246 | '''archive patches to tarball and also include series files if 'series' is True''' | 247 | '''archive patches to tarball and also include series files if 'series' is True''' |
247 | import shutil | 248 | import shutil |
248 | 249 | ||
249 | s = d.getVar('S',True) | 250 | s = d.getVar('S',True) |
250 | work_dir = d.getVar('WORKDIR', True) | 251 | work_dir = d.getVar('WORKDIR', True) |
251 | patch_dir = os.path.basename(patchdir) | 252 | patch_dir = os.path.basename(patchdir) |
252 | tarname = patch_dir + ".tar.gz" | 253 | tarname = patch_dir + ".tar.gz" |
253 | if series == 'all' and os.path.exists(os.path.join(s,'patches/series')): | 254 | if series == 'all' and os.path.exists(os.path.join(s,'patches/series')): |
254 | shutil.copy(os.path.join(s,'patches/series'),patchdir) | 255 | shutil.copy(os.path.join(s,'patches/series'),patchdir) |
255 | tarname = do_tarball(work_dir,patch_dir,tarname) | 256 | tarname = do_tarball(work_dir,patch_dir,tarname) |
256 | shutil.rmtree(patchdir, ignore_errors=True) | 257 | shutil.rmtree(patchdir, ignore_errors=True) |
257 | return tarname | 258 | return tarname |
258 | 259 | ||
259 | def select_archive_patches(d,option): | 260 | def select_archive_patches(d,option): |
260 | '''select to archive all patches including non-applying and series or applying patches ''' | 261 | '''select to archive all patches including non-applying and series or applying patches ''' |
261 | if option == "all": | 262 | if option == "all": |
262 | patchdir = get_series(d) | 263 | patchdir = get_series(d) |
263 | elif option == "applying": | 264 | elif option == "applying": |
264 | patchdir = get_applying_patches(d) | 265 | patchdir = get_applying_patches(d) |
265 | try: | 266 | try: |
266 | os.rmdir(patchdir) | 267 | os.rmdir(patchdir) |
267 | except OSError: | 268 | except OSError: |
268 | tarpatch = archive_patches(d,patchdir,option) | 269 | tarpatch = archive_patches(d,patchdir,option) |
269 | return tarpatch | 270 | return tarpatch |
270 | return | 271 | return |
271 | 272 | ||
272 | def archive_logs(d,logdir,bbinc=False): | 273 | def archive_logs(d,logdir,bbinc=False): |
273 | '''archive logs in temp to tarball and .bb and .inc files if bbinc is True ''' | 274 | '''archive logs in temp to tarball and .bb and .inc files if bbinc is True ''' |
274 | import shutil | 275 | import shutil |
275 | 276 | ||
276 | pf = d.getVar('PF',True) | 277 | pf = d.getVar('PF',True) |
277 | work_dir = d.getVar('WORKDIR',True) | 278 | work_dir = d.getVar('WORKDIR',True) |
278 | log_dir = os.path.basename(logdir) | 279 | log_dir = os.path.basename(logdir) |
279 | tarname = pf + '-' + log_dir + ".tar.gz" | 280 | tarname = pf + '-' + log_dir + ".tar.gz" |
280 | tarname = do_tarball(work_dir,log_dir,tarname) | 281 | tarname = do_tarball(work_dir,log_dir,tarname) |
281 | if bbinc: | 282 | if bbinc: |
282 | shutil.rmtree(logdir, ignore_errors=True) | 283 | shutil.rmtree(logdir, ignore_errors=True) |
283 | return tarname | 284 | return tarname |
284 | 285 | ||
285 | def get_licenses(d): | 286 | def get_licenses(d): |
286 | '''get licenses for running .bb file''' | 287 | '''get licenses for running .bb file''' |
287 | import oe.license | 288 | import oe.license |
288 | 289 | ||
289 | licenses_type = d.getVar('LICENSE', True) or "" | 290 | licenses_type = d.getVar('LICENSE', True) or "" |
290 | lics = oe.license.is_included(licenses_type)[1:][0] | 291 | lics = oe.license.is_included(licenses_type)[1:][0] |
291 | lice = '' | 292 | lice = '' |
292 | for lic in lics: | 293 | for lic in lics: |
293 | licens = d.getVarFlag('SPDXLICENSEMAP', lic) | 294 | licens = d.getVarFlag('SPDXLICENSEMAP', lic) |
294 | if licens != None: | 295 | if licens != None: |
295 | lice += licens | 296 | lice += licens |
296 | else: | 297 | else: |
297 | lice += lic | 298 | lice += lic |
298 | return lice | 299 | return lice |
299 | 300 | ||
300 | 301 | ||
301 | def move_tarball_deploy(d,tarball_list): | 302 | def move_tarball_deploy(d,tarball_list): |
302 | '''move tarball in location to ${DEPLOY_DIR}/sources''' | 303 | '''move tarball in location to ${DEPLOY_DIR}/sources''' |
303 | import shutil | 304 | import shutil |
304 | 305 | ||
305 | if tarball_list is []: | 306 | if tarball_list is []: |
306 | return | 307 | return |
307 | target_sys = d.getVar('TARGET_SYS', True) | 308 | target_sys = d.getVar('TARGET_SYS', True) |
308 | pf = d.getVar('PF', True) | 309 | pf = d.getVar('PF', True) |
309 | licenses = get_licenses(d) | 310 | licenses = get_licenses(d) |
310 | work_dir = d.getVar('WORKDIR',True) | 311 | work_dir = d.getVar('WORKDIR',True) |
311 | tar_sources = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf | 312 | tar_sources = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf |
312 | if not os.path.exists(tar_sources): | 313 | if not os.path.exists(tar_sources): |
313 | bb.mkdirhier(tar_sources) | 314 | bb.mkdirhier(tar_sources) |
314 | for source in tarball_list: | 315 | for source in tarball_list: |
315 | if source: | 316 | if source: |
316 | if os.path.exists(os.path.join(tar_sources, source)): | 317 | if os.path.exists(os.path.join(tar_sources, source)): |
317 | os.remove(os.path.join(tar_sources,source)) | 318 | os.remove(os.path.join(tar_sources,source)) |
318 | shutil.move(os.path.join(work_dir,source),tar_sources) | 319 | shutil.move(os.path.join(work_dir,source),tar_sources) |
319 | 320 | ||
320 | def check_archiving_type(d): | 321 | def check_archiving_type(d): |
321 | '''check the type for archiving package('tar' or 'srpm')''' | 322 | '''check the type for archiving package('tar' or 'srpm')''' |
322 | try: | 323 | try: |
323 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in d.getVar('ARCHIVE_TYPE', True).split(): | 324 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in d.getVar('ARCHIVE_TYPE', True).split(): |
324 | raise AttributeError | 325 | raise AttributeError |
325 | except AttributeError: | 326 | except AttributeError: |
326 | bb.fatal("\"SOURCE_ARCHIVE_PACKAGE_TYPE\" is \'tar\' or \'srpm\', no other types") | 327 | bb.fatal("\"SOURCE_ARCHIVE_PACKAGE_TYPE\" is \'tar\' or \'srpm\', no other types") |
327 | 328 | ||
328 | def store_package(d,package_name): | 329 | def store_package(d,package_name): |
329 | '''store tarbablls name to file "tar-package"''' | 330 | '''store tarbablls name to file "tar-package"''' |
330 | try: | 331 | try: |
331 | f = open(os.path.join(d.getVar('WORKDIR',True),'tar-package'),'a') | 332 | f = open(os.path.join(d.getVar('WORKDIR',True),'tar-package'),'a') |
332 | f.write(package_name + ' ') | 333 | f.write(package_name + ' ') |
333 | f.close() | 334 | f.close() |
334 | except IOError: | 335 | except IOError: |
335 | pass | 336 | pass |
336 | 337 | ||
337 | def get_package(d): | 338 | def get_package(d): |
338 | '''get tarballs name from "tar-package"''' | 339 | '''get tarballs name from "tar-package"''' |
339 | work_dir = (d.getVar('WORKDIR', True)) | 340 | work_dir = (d.getVar('WORKDIR', True)) |
340 | tarpackage = os.path.join(work_dir,'tar-package') | 341 | tarpackage = os.path.join(work_dir,'tar-package') |
341 | try: | 342 | try: |
342 | f = open(tarpackage,'r') | 343 | f = open(tarpackage,'r') |
343 | line = list(set(f.readline().replace('\n','').split())) | 344 | line = list(set(f.readline().replace('\n','').split())) |
344 | except UnboundLocalError,IOError: | 345 | except UnboundLocalError,IOError: |
345 | pass | 346 | pass |
346 | f.close() | 347 | f.close() |
347 | return line | 348 | return line |
348 | 349 | ||
349 | 350 | ||
350 | def archive_sources_patches(d,stage_name): | 351 | def archive_sources_patches(d,stage_name): |
351 | '''archive sources and patches to tarball. stage_name will append strings ${stage_name} to ${PR} as middle name. for example, zlib-1.4.6-prepatch(stage_name).tar.gz ''' | 352 | '''archive sources and patches to tarball. stage_name will append strings ${stage_name} to ${PR} as middle name. for example, zlib-1.4.6-prepatch(stage_name).tar.gz ''' |
352 | import shutil | 353 | import shutil |
353 | 354 | ||
354 | check_archiving_type(d) | 355 | check_archiving_type(d) |
355 | if not_tarball(d) or tar_filter(d): | 356 | if not_tarball(d) or tar_filter(d): |
356 | return | 357 | return |
357 | 358 | ||
358 | source_tar_name = archive_sources(d,stage_name) | 359 | source_tar_name = archive_sources(d,stage_name) |
359 | if stage_name == "prepatch": | 360 | if stage_name == "prepatch": |
360 | if d.getVar('PATCHES_ARCHIVE_WITH_SERIES',True).upper() == 'TRUE': | 361 | if d.getVar('PATCHES_ARCHIVE_WITH_SERIES',True).upper() == 'TRUE': |
361 | patch_tar_name = select_archive_patches(d,"all") | 362 | patch_tar_name = select_archive_patches(d,"all") |
362 | elif d.getVar('PATCHES_ARCHIVE_WITH_SERIES',True).upper() == 'FALSE': | 363 | elif d.getVar('PATCHES_ARCHIVE_WITH_SERIES',True).upper() == 'FALSE': |
363 | patch_tar_name = select_archive_patches(d,"applying") | 364 | patch_tar_name = select_archive_patches(d,"applying") |
364 | else: | 365 | else: |
365 | bb.fatal("Please define 'PATCHES_ARCHIVE_WITH_SERIES' is strings 'True' or 'False' ") | 366 | bb.fatal("Please define 'PATCHES_ARCHIVE_WITH_SERIES' is strings 'True' or 'False' ") |
366 | else: | 367 | else: |
367 | patch_tar_name = '' | 368 | patch_tar_name = '' |
368 | 369 | ||
369 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in 'SRPM': | 370 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in 'SRPM': |
370 | move_tarball_deploy(d,[source_tar_name,patch_tar_name]) | 371 | move_tarball_deploy(d,[source_tar_name,patch_tar_name]) |
371 | else: | 372 | else: |
372 | tarpackage = os.path.join(d.getVar('WORKDIR', True),'tar-package') | 373 | tarpackage = os.path.join(d.getVar('WORKDIR', True),'tar-package') |
373 | if os.path.exists(tarpackage): | 374 | if os.path.exists(tarpackage): |
374 | os.remove(tarpackage) | 375 | os.remove(tarpackage) |
375 | for package in os.path.basename(source_tar_name), patch_tar_name: | 376 | for package in os.path.basename(source_tar_name), patch_tar_name: |
376 | if package: | 377 | if package: |
377 | store_package(d,str(package) + ' ') | 378 | store_package(d,str(package) + ' ') |
378 | 379 | ||
379 | def archive_scripts_logs(d): | 380 | def archive_scripts_logs(d): |
380 | '''archive scripts and logs. scripts include .bb and .inc files and logs include stuff in "temp".''' | 381 | '''archive scripts and logs. scripts include .bb and .inc files and logs include stuff in "temp".''' |
381 | 382 | ||
382 | if tar_filter(d): | 383 | if tar_filter(d): |
383 | return | 384 | return |
384 | work_dir = d.getVar('WORKDIR', True) | 385 | work_dir = d.getVar('WORKDIR', True) |
385 | temp_dir = os.path.join(work_dir,'temp') | 386 | temp_dir = os.path.join(work_dir,'temp') |
386 | source_archive_log_with_scripts = d.getVar('SOURCE_ARCHIVE_LOG_WITH_SCRIPTS', True) | 387 | source_archive_log_with_scripts = d.getVar('SOURCE_ARCHIVE_LOG_WITH_SCRIPTS', True) |
387 | if source_archive_log_with_scripts == 'logs_with_scripts': | 388 | if source_archive_log_with_scripts == 'logs_with_scripts': |
388 | logdir = get_bb_inc(d) | 389 | logdir = get_bb_inc(d) |
389 | tarlog = archive_logs(d,logdir,True) | 390 | tarlog = archive_logs(d,logdir,True) |
390 | elif source_archive_log_with_scripts == 'logs': | 391 | elif source_archive_log_with_scripts == 'logs': |
391 | if os.path.exists(temp_dir): | 392 | if os.path.exists(temp_dir): |
392 | tarlog = archive_logs(d,temp_dir,False) | 393 | tarlog = archive_logs(d,temp_dir,False) |
393 | else: | 394 | else: |
394 | return | 395 | return |
395 | 396 | ||
396 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in 'SRPM': | 397 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in 'SRPM': |
397 | move_tarball_deploy(d,[tarlog]) | 398 | move_tarball_deploy(d,[tarlog]) |
398 | 399 | ||
399 | else: | 400 | else: |
400 | store_package(d,tarlog) | 401 | store_package(d,tarlog) |
401 | 402 | ||
402 | def dumpdata(d): | 403 | def dumpdata(d): |
403 | '''dump environment to "${P}-${PR}.showdata.dump" including all kinds of variables and functions when running a task''' | 404 | '''dump environment to "${P}-${PR}.showdata.dump" including all kinds of variables and functions when running a task''' |
404 | 405 | ||
405 | if tar_filter(d): | 406 | if tar_filter(d): |
406 | return | 407 | return |
407 | workdir = bb.data.getVar('WORKDIR', d, 1) | 408 | workdir = bb.data.getVar('WORKDIR', d, 1) |
408 | distro = bb.data.getVar('DISTRO', d, 1) | 409 | distro = bb.data.getVar('DISTRO', d, 1) |
409 | s = d.getVar('S', True) | 410 | s = d.getVar('S', True) |
410 | pf = d.getVar('PF', True) | 411 | pf = d.getVar('PF', True) |
411 | target_sys = d.getVar('TARGET_SYS', True) | 412 | target_sys = d.getVar('TARGET_SYS', True) |
412 | licenses = get_licenses(d) | 413 | licenses = get_licenses(d) |
413 | dumpdir = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf | 414 | dumpdir = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf |
414 | if not os.path.exists(dumpdir): | 415 | if not os.path.exists(dumpdir): |
415 | bb.mkdirhier(dumpdir) | 416 | bb.mkdirhier(dumpdir) |
416 | 417 | ||
417 | dumpfile = os.path.join(dumpdir, bb.data.expand("${P}-${PR}.showdata.dump",d)) | 418 | dumpfile = os.path.join(dumpdir, bb.data.expand("${P}-${PR}.showdata.dump",d)) |
418 | 419 | ||
419 | bb.note("Dumping metadata into '%s'" % dumpfile) | 420 | bb.note("Dumping metadata into '%s'" % dumpfile) |
420 | f = open(dumpfile, "w") | 421 | f = open(dumpfile, "w") |
421 | # emit variables and shell functions | 422 | # emit variables and shell functions |
422 | bb.data.emit_env(f, d, True) | 423 | bb.data.emit_env(f, d, True) |
423 | # emit the metadata which isnt valid shell | 424 | # emit the metadata which isnt valid shell |
424 | for e in d.keys(): | 425 | for e in d.keys(): |
425 | if bb.data.getVarFlag(e, 'python', d): | 426 | if bb.data.getVarFlag(e, 'python', d): |
426 | f.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1))) | 427 | f.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1))) |
427 | f.close() | 428 | f.close() |
428 | 429 | ||
429 | def create_diff_gz(d): | 430 | def create_diff_gz(d): |
430 | '''creating .diff.gz in ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.g gz for mapping all content in 's' including patches to xxx.diff.gz''' | 431 | '''creating .diff.gz in ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.g gz for mapping all content in 's' including patches to xxx.diff.gz''' |
431 | import shutil | 432 | import shutil |
432 | import subprocess | 433 | import subprocess |
433 | 434 | ||
434 | if tar_filter(d): | 435 | if tar_filter(d): |
435 | return | 436 | return |
436 | work_dir = d.getVar('WORKDIR', True) | 437 | work_dir = d.getVar('WORKDIR', True) |
437 | exclude_from = d.getVar('ARCHIVE_EXCLUDE_FROM', True).split() | 438 | exclude_from = d.getVar('ARCHIVE_EXCLUDE_FROM', True).split() |
438 | pf = d.getVar('PF', True) | 439 | pf = d.getVar('PF', True) |
439 | licenses = get_licenses(d) | 440 | licenses = get_licenses(d) |
440 | target_sys = d.getVar('TARGET_SYS', True) | 441 | target_sys = d.getVar('TARGET_SYS', True) |
441 | diff_dir = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf | 442 | diff_dir = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf |
442 | diff_file = os.path.join(diff_dir, bb.data.expand("${P}-${PR}.diff.gz",d)) | 443 | diff_file = os.path.join(diff_dir, bb.data.expand("${P}-${PR}.diff.gz",d)) |
443 | 444 | ||
444 | f = open(os.path.join(work_dir,'temp/exclude-from-file'), 'a') | 445 | f = open(os.path.join(work_dir,'temp/exclude-from-file'), 'a') |
445 | for i in exclude_from: | 446 | for i in exclude_from: |
446 | f.write(i) | 447 | f.write(i) |
447 | f.write("\n") | 448 | f.write("\n") |
448 | f.close() | 449 | f.close() |
449 | 450 | ||
450 | s=d.getVar('S', True) | 451 | s=d.getVar('S', True) |
451 | distro = d.getVar('DISTRO',True) | 452 | distro = d.getVar('DISTRO',True) |
452 | dest = s + '/' + distro + '/files' | 453 | dest = s + '/' + distro + '/files' |
453 | if not os.path.exists(dest): | 454 | if not os.path.exists(dest): |
454 | bb.mkdirhier(dest) | 455 | bb.mkdirhier(dest) |
455 | for i in os.listdir(os.getcwd()): | 456 | for i in os.listdir(os.getcwd()): |
456 | if os.path.isfile(i): | 457 | if os.path.isfile(i): |
457 | try: | 458 | try: |
458 | shutil.copy(i, dest) | 459 | shutil.copy(i, dest) |
459 | except IOError: | 460 | except IOError: |
460 | subprocess.call('fakeroot cp -rf ' + i + " " + dest, shell=True) | 461 | subprocess.call('fakeroot cp -rf ' + i + " " + dest, shell=True) |
461 | 462 | ||
462 | bb.note("Creating .diff.gz in ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.gz") | 463 | bb.note("Creating .diff.gz in ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.gz") |
463 | cmd = "LC_ALL=C TZ=UTC0 diff --exclude-from=" + work_dir + "/temp/exclude-from-file -Naur " + s + '.org' + ' ' + s + " | gzip -c > " + diff_file | 464 | cmd = "LC_ALL=C TZ=UTC0 diff --exclude-from=" + work_dir + "/temp/exclude-from-file -Naur " + s + '.org' + ' ' + s + " | gzip -c > " + diff_file |
464 | d.setVar('DIFF', cmd + "\n") | 465 | d.setVar('DIFF', cmd + "\n") |
465 | d.setVarFlag('DIFF', 'func', '1') | 466 | d.setVarFlag('DIFF', 'func', '1') |
466 | bb.build.exec_func('DIFF', d) | 467 | bb.build.exec_func('DIFF', d) |
467 | shutil.rmtree(s + '.org', ignore_errors=True) | 468 | shutil.rmtree(s + '.org', ignore_errors=True) |
468 | 469 | ||
# Task: archive the pristine sources and patches right after do_unpack,
# before any patch has been applied ('prepatch' stage).
python do_archive_original_sources_patches() {
    archive_sources_patches(d, 'prepatch')
}
473 | 474 | ||
# Task: archive the source tree after do_patch, i.e. with all patches
# applied ('patched' stage).
python do_archive_patched_sources() {
    archive_sources_patches(d, 'patched')
}
478 | 479 | ||
479 | # This function will run when user want to get tarball for configured sources after do_configure | 480 | # This function will run when user want to get tarball for configured sources after do_configure |
480 | python do_archive_configured_sources(){ | 481 | python do_archive_configured_sources(){ |
481 | archive_sources_patches(d,'configured') | 482 | archive_sources_patches(d,'configured') |
482 | } | 483 | } |
483 | 484 | ||
484 | # This function will run when user want to get tarball for logs or both logs and scripts(.bb and .inc files) | 485 | # This function will run when user want to get tarball for logs or both logs and scripts(.bb and .inc files) |
485 | python do_archive_scripts_logs(){ | 486 | python do_archive_scripts_logs(){ |
486 | archive_scripts_logs(d) | 487 | archive_scripts_logs(d) |
487 | } | 488 | } |
488 | 489 | ||
489 | # This function will run when user want to know what variable and functions in a running task are and also can get a diff file including | 490 | # This function will run when user want to know what variable and functions in a running task are and also can get a diff file including |
490 | # all content a package should include. | 491 | # all content a package should include. |
491 | python do_dumpdata_create_diff_gz(){ | 492 | python do_dumpdata_create_diff_gz(){ |
492 | dumpdata(d) | 493 | dumpdata(d) |
493 | create_diff_gz(d) | 494 | create_diff_gz(d) |
494 | } | 495 | } |
495 | 496 | ||
496 | # This functions prepare for archiving "linux-yocto" because this package create directory 's' before do_patch instead of after do_unpack. | 497 | # This functions prepare for archiving "linux-yocto" because this package create directory 's' before do_patch instead of after do_unpack. |
497 | # This is special control for archiving linux-yocto only. | 498 | # This is special control for archiving linux-yocto only. |
498 | python do_archive_linux_yocto(){ | 499 | python do_archive_linux_yocto(){ |
499 | s = d.getVar('S', True) | 500 | s = d.getVar('S', True) |
500 | if 'linux-yocto' in s: | 501 | if 'linux-yocto' in s: |
501 | source_tar_name = archive_sources(d,'') | 502 | source_tar_name = archive_sources(d,'') |
502 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in 'SRPM': | 503 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in 'SRPM': |
503 | move_tarball_deploy(d,[source_tar_name,'']) | 504 | move_tarball_deploy(d,[source_tar_name,'']) |
504 | } | 505 | } |
505 | do_kernel_checkout[postfuncs] += "do_archive_linux_yocto " | 506 | do_kernel_checkout[postfuncs] += "do_archive_linux_yocto " |
506 | 507 | ||
507 | # remove tarball for sources, patches and logs after creating srpm. | 508 | # remove tarball for sources, patches and logs after creating srpm. |
508 | python do_remove_tarball(){ | 509 | python do_remove_tarball(){ |
509 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM': | 510 | if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM': |
510 | work_dir = d.getVar('WORKDIR', True) | 511 | work_dir = d.getVar('WORKDIR', True) |
511 | try: | 512 | try: |
512 | for file in os.listdir(os.getcwd()): | 513 | for file in os.listdir(os.getcwd()): |
513 | if file in get_package(d): | 514 | if file in get_package(d): |
514 | os.remove(file) | 515 | os.remove(file) |
515 | os.remove(os.path.join(work_dir,'tar-package')) | 516 | os.remove(os.path.join(work_dir,'tar-package')) |
516 | except (TypeError,OSError): | 517 | except (TypeError,OSError): |
517 | pass | 518 | pass |
518 | } | 519 | } |
519 | do_remove_taball[deptask] = "do_archive_scripts_logs" | 520 | do_remove_taball[deptask] = "do_archive_scripts_logs" |
520 | do_package_write_rpm[postfuncs] += "do_remove_tarball " | 521 | do_package_write_rpm[postfuncs] += "do_remove_tarball " |
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass index 9abad5e093..58a9aac779 100644 --- a/meta/classes/package_rpm.bbclass +++ b/meta/classes/package_rpm.bbclass | |||
@@ -1152,7 +1152,6 @@ python do_package_rpm () { | |||
1152 | d.setVarFlag('SBUILDSPEC', 'func', '1') | 1152 | d.setVarFlag('SBUILDSPEC', 'func', '1') |
1153 | bb.build.exec_func('SBUILDSPEC', d) | 1153 | bb.build.exec_func('SBUILDSPEC', d) |
1154 | 1154 | ||
1155 | |||
1156 | # Build the rpm package! | 1155 | # Build the rpm package! |
1157 | d.setVar('BUILDSPEC', cmd + "\n") | 1156 | d.setVar('BUILDSPEC', cmd + "\n") |
1158 | d.setVarFlag('BUILDSPEC', 'func', '1') | 1157 | d.setVarFlag('BUILDSPEC', 'func', '1') |