author      Robert Yang <liezhi.yang@windriver.com>                  2012-08-23 23:08:22 +0800
committer   Richard Purdie <richard.purdie@linuxfoundation.org>      2012-08-29 16:02:07 -0700
commit      2ba95cc79e44bb2bf94d07b90cd305c3198fee68 (patch)
tree        fef8c42f2738b2a9a35259fc28d28c842993c9ea /meta/classes
parent      c96f6ee659cd4c20fd826f633e634ba0ef2f340b (diff)
download    poky-2ba95cc79e44bb2bf94d07b90cd305c3198fee68.tar.gz
archiver.bbclass: fix the coding style
* Make the comment line under 80 characters.
* Use the triple double quotes for the docstring.
* Add a whitespace behind the comma (,).
* Other minor fixes.
[YOCTO #2619]
(From OE-Core rev: 885a95992abe11ebef7a8e4363e6002ee80403bf)
Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
Signed-off-by: Saul Wold <sgw@linux.intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
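
For illustration, the before/after shape of the conventions listed above, taken from the archive_logs() change in the patch below (function body elided):

    # Before: no space after commas, single-quoted one-line docstring
    def archive_logs(d,logdir,bbinc=False):
        '''archive logs in temp to tarball and .bb and .inc files if bbinc is True '''

    # After: a space after each comma, triple double quotes for the
    # docstring, comment lines kept under 80 characters
    def archive_logs(d, logdir, bbinc=False):
        """
        archive logs in temp to tarball and .bb and .inc files if bbinc is True
        """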
Diffstat (limited to 'meta/classes')
-rw-r--r--  meta/classes/archive-configured-source.bbclass    9
-rw-r--r--  meta/classes/archive-original-source.bbclass       9
-rw-r--r--  meta/classes/archive-patched-source.bbclass        9
-rw-r--r--  meta/classes/archiver.bbclass                    288
4 files changed, 190 insertions, 125 deletions
diff --git a/meta/classes/archive-configured-source.bbclass b/meta/classes/archive-configured-source.bbclass
index 1a609b36db..1eaaf4cf25 100644
--- a/meta/classes/archive-configured-source.bbclass
+++ b/meta/classes/archive-configured-source.bbclass
@@ -1,6 +1,9 @@
-# This file is for getting archiving packages with configured sources(archive 's' after configure stage),logs(archive 'temp' after package_write_rpm),dump data
-# and creating diff file(get all environment variables and functions in building and mapping all content in 's' including patches to xxx.diff.gz.
-# All archived packages will be deployed in ${DEPLOY_DIR}/sources
+# This file is for getting archiving packages with configured
+# sources(archive ${S} after configure stage), logs(archive 'temp' after
+# package_write_rpm), dump data and creating diff file(get all
+# environment variables and functions in building and mapping all
+# content in ${S} including patches to xxx.diff.gz. All archived
+# packages will be deployed in ${DEPLOY_DIR}/sources

 inherit archiver

diff --git a/meta/classes/archive-original-source.bbclass b/meta/classes/archive-original-source.bbclass
index b08553365c..1b3f8d0b34 100644
--- a/meta/classes/archive-original-source.bbclass
+++ b/meta/classes/archive-original-source.bbclass
@@ -1,6 +1,9 @@
-# This file is for getting archiving packages with original sources(archive 's' after unpack stage),patches,logs(archive 'temp' after package_write_rpm),dump data and
-# creating diff file(get all environment variables and functions in building and mapping all content in 's' including patches to xxx.diff.gz.
-# All archived packages will be deployed in ${DEPLOY_DIR}/sources
+# This file is for getting archiving packages with original
+# sources(archive ${S} after unpack stage), patches, logs(archive 'temp'
+# after package_write_rpm), dump data and creating diff file(get all
+# environment variables and functions in building and mapping all
+# content in ${S} including patches to xxx.diff.gz. All archived packages
+# will be deployed in ${DEPLOY_DIR}/sources

 inherit archiver

diff --git a/meta/classes/archive-patched-source.bbclass b/meta/classes/archive-patched-source.bbclass
index a6d368f2ca..40b2dcb0ea 100644
--- a/meta/classes/archive-patched-source.bbclass
+++ b/meta/classes/archive-patched-source.bbclass
@@ -1,6 +1,9 @@
-# This file is for getting archiving packages with patched sources(archive 's' before do_patch stage),logs(archive 'temp' after package_write_rpm),dump data and
-# creating diff file(get all environment variables and functions in building and mapping all content in 's' including patches to xxx.diff.gz.
-# All archived packages will be deployed in ${DEPLOY_DIR}/sources
+# This file is for getting archiving packages with patched
+# sources(archive ${S} before do_patch stage), logs(archive 'temp' after
+# package_write_rpm), dump data and creating diff file(get all
+# environment variables and functions in building and mapping all
+# content in ${S} including patches to xxx.diff.gz. All archived
+# packages will be deployed in ${DEPLOY_DIR}/sources

 inherit archiver

diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index 117ad02da4..b01b0784cd 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -1,9 +1,9 @@
 # ex:ts=4:sw=4:sts=4:et
 # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
-# This file is used for archiving sources ,patches,and logs to tarball.
-# It also output building environment to xxx.dump.data and create xxx.diff.gz to record
-# all content in ${S} to a diff file.
+# This file is used for archiving sources, patches, and logs to a
+# tarball. It also output building environment to xxx.dump.data and
+# create xxx.diff.gz to record all content in ${S} to a diff file.
 #

 ARCHIVE_EXCLUDE_FROM ?= ".pc autom4te.cache"
@@ -11,11 +11,11 @@ ARCHIVE_TYPE ?= "TAR SRPM"
 DISTRO ?= "poky"
 PATCHES_ARCHIVE_WITH_SERIES = 'TRUE'
 SOURCE_ARCHIVE_LOG_WITH_SCRIPTS ?= '${@d.getVarFlag('ARCHIVER_MODE', 'log_type') \
        if d.getVarFlag('ARCHIVER_MODE', 'log_type') != 'none' else 'logs_with_scripts'}'
-SOURCE_ARCHIVE_PACKAGE_TYPE ?= '${@d.getVarFlag('ARCHIVER_MODE','type') \
-       if d.getVarFlag('ARCHIVER_MODE', 'log_type')!= 'none' else 'tar'}'
-FILTER ?= '${@d.getVarFlag('ARCHIVER_MODE','filter') \
+SOURCE_ARCHIVE_PACKAGE_TYPE ?= '${@d.getVarFlag('ARCHIVER_MODE', 'type') \
+       if d.getVarFlag('ARCHIVER_MODE', 'log_type') != 'none' else 'tar'}'
+FILTER ?= '${@d.getVarFlag('ARCHIVER_MODE', 'filter') \
        if d.getVarFlag('ARCHIVER_MODE', 'filter')!= 'none' else 'no'}'


 COPYLEFT_LICENSE_INCLUDE ?= 'GPL* LGPL*'
@@ -44,7 +44,9 @@ def copyleft_recipe_type(d):
     return 'target'

 def copyleft_should_include(d):
-    """Determine if this recipe's sources should be deployed for compliance"""
+    """
+    Determine if this recipe's sources should be deployed for compliance
+    """
     import ast
     import oe.license
     from fnmatch import fnmatchcase as fnmatch
@@ -67,7 +69,11 @@ def copyleft_should_include(d):
         return False, 'recipe has excluded licenses: %s' % ', '.join(reason)

 def tar_filter(d):
-    """Only tarball the packages belonging to COPYLEFT_LICENSE_INCLUDE and miss packages in COPYLEFT_LICENSE_EXCLUDE. Don't tarball any packages when \"FILTER\" is \"no\""""
+    """
+    Only archive the package belongs to COPYLEFT_LICENSE_INCLUDE
+    and ignore the one in COPYLEFT_LICENSE_EXCLUDE. Don't exclude any
+    packages when \"FILTER\" is \"no\"
+    """
     if d.getVar('FILTER', True).upper() == "YES":
         included, reason = copyleft_should_include(d)
         if not included:
@@ -78,7 +84,9 @@ def tar_filter(d):
     return False

 def get_bb_inc(d):
-    '''create a directory "script-logs" including .bb and .inc file in ${WORKDIR}'''
+    """
+    create a directory "script-logs" including .bb and .inc file in ${WORKDIR}
+    """
     import re
     import shutil

@@ -87,26 +95,26 @@ def get_bb_inc(d):
     work_dir = d.getVar('WORKDIR', True)
     bbfile = d.getVar('FILE', True)
     bbdir = os.path.dirname(bbfile)
-    script_logs = os.path.join(work_dir,'script-logs')
-    bb_inc = os.path.join(script_logs,'bb_inc')
+    script_logs = os.path.join(work_dir, 'script-logs')
+    bb_inc = os.path.join(script_logs, 'bb_inc')
     bb.mkdirhier(script_logs)
     bb.mkdirhier(bb_inc)

-    def find_file(dir,file):
+    def find_file(dir, file):
         for root, dirs, files in os.walk(dir):
             if file in files:
-                return os.path.join(root,file)
+                return os.path.join(root, file)

     def get_inc (file):
-        f = open(file,'r')
+        f = open(file, 'r')
         for line in f.readlines():
             if 'require' not in line:
                 bbinc.append(file)
             else:
                 try:
                     incfile = pat.match(line).group(1)
-                    incfile = bb.data.expand(os.path.basename(incfile),d)
-                    abs_incfile = find_file(bbdir,incfile)
+                    incfile = bb.data.expand(os.path.basename(incfile), d)
+                    abs_incfile = find_file(bbdir, incfile)
                     if abs_incfile:
                         bbinc.append(abs_incfile)
                         get_inc(abs_incfile)
@@ -115,23 +123,26 @@ def get_bb_inc(d):
     get_inc(bbfile)
     bbinc = list(set(bbinc))
     for bbincfile in bbinc:
-        shutil.copy(bbincfile,bb_inc)
+        shutil.copy(bbincfile, bb_inc)

     try:
-        bb.mkdirhier(os.path.join(script_logs,'temp'))
-        oe.path.copytree(os.path.join(work_dir,'temp'), os.path.join(script_logs,'temp'))
-    except (IOError,AttributeError):
+        bb.mkdirhier(os.path.join(script_logs, 'temp'))
+        oe.path.copytree(os.path.join(work_dir, 'temp'), os.path.join(script_logs, 'temp'))
+    except (IOError, AttributeError):
         pass
     return script_logs

 def get_series(d):
-    '''copy patches and series file to a pointed directory which will be archived to tarball in ${WORKDIR}'''
+    """
+    copy patches and series file to a pointed directory which will be
+    archived to tarball in ${WORKDIR}
+    """
     import shutil

     src_patches=[]
     pf = d.getVar('PF', True)
     work_dir = d.getVar('WORKDIR', True)
-    s = d.getVar('S',True)
+    s = d.getVar('S', True)
     dest = os.path.join(work_dir, pf + '-series')
     shutil.rmtree(dest, ignore_errors=True)
     bb.mkdirhier(dest)
@@ -141,22 +152,25 @@ def get_series(d):
     locals = (fetch.localpath(url) for url in fetch.urls)
     for local in locals:
         src_patches.append(local)
-    if not cmp(work_dir,s):
+    if not cmp(work_dir, s):
         tmp_list = src_patches
     else:
         tmp_list = src_patches[1:]

     for patch in tmp_list:
         try:
-            shutil.copy(patch,dest)
+            shutil.copy(patch, dest)
         except IOError:
             if os.path.isdir(patch):
-                bb.mkdirhier(os.path.join(dest,patch))
-                oe.path.copytree(patch, os.path.join(dest,patch))
+                bb.mkdirhier(os.path.join(dest, patch))
+                oe.path.copytree(patch, os.path.join(dest, patch))
     return dest

 def get_applying_patches(d):
-    """only copy applying patches to a pointed directory which will be archived to tarball"""
+    """
+    only copy applying patches to a pointed directory which will be
+    archived to tarball
+    """
     import shutil

     pf = d.getVar('PF', True)
@@ -169,35 +183,40 @@ def get_applying_patches(d):
     for patch in patches:
         _, _, local, _, _, parm = bb.decodeurl(patch)
         if local:
-            shutil.copy(local,dest)
+            shutil.copy(local, dest)
     return dest

 def not_tarball(d):
-    '''packages including key words 'work-shared','native', 'task-' will be passed'''
-
-    workdir = d.getVar('WORKDIR',True)
-    s = d.getVar('S',True)
+    """
+    packages including key words 'work-shared', 'native', 'task-' will be passed
+    """
+    workdir = d.getVar('WORKDIR', True)
+    s = d.getVar('S', True)
     if 'work-shared' in s or 'task-' in workdir or 'native' in workdir:
         return True
     else:
         return False

-def get_source_from_downloads(d,stage_name):
-    '''copy tarball of $P to $WORKDIR when this tarball exists in $DL_DIR'''
+def get_source_from_downloads(d, stage_name):
+    """
+    copy tarball of $P to $WORKDIR when this tarball exists in $DL_DIR
+    """
     if stage_name in 'patched' 'configured':
         return
     pf = d.getVar('PF', True)
-    dl_dir = d.getVar('DL_DIR',True)
+    dl_dir = d.getVar('DL_DIR', True)
     try:
-        source = os.path.join(dl_dir,os.path.basename(d.getVar('SRC_URI', True).split()[0]))
+        source = os.path.join(dl_dir, os.path.basename(d.getVar('SRC_URI', True).split()[0]))
         if os.path.exists(source) and not os.path.isdir(source):
             return source
     except (IndexError, OSError):
         pass
     return ''

-def do_tarball(workdir,srcdir,tarname):
-    '''tar "srcdir" under "workdir" to "tarname"'''
+def do_tarball(workdir, srcdir, tarname):
+    """
+    tar "srcdir" under "workdir" to "tarname"
+    """
     import tarfile

     sav_dir = os.getcwd()
@@ -211,54 +230,66 @@ def do_tarball(workdir,srcdir,tarname):
     os.chdir(sav_dir)
     return tarname

-def archive_sources_from_directory(d,stage_name):
-    '''archive sources codes tree to tarball when tarball of $P doesn't exist in $DL_DIR'''
+def archive_sources_from_directory(d, stage_name):
+    """
+    archive sources codes tree to tarball when tarball of $P doesn't
+    exist in $DL_DIR
+    """
     import shutil

-    s = d.getVar('S',True)
+    s = d.getVar('S', True)
     work_dir=d.getVar('WORKDIR', True)
-    PF = d.getVar('PF',True)
+    PF = d.getVar('PF', True)
     tarname = PF + '-' + stage_name + ".tar.gz"

     if os.path.exists(s) and work_dir in s:
         try:
-            source_dir = os.path.join(work_dir,[ i for i in s.replace(work_dir,'').split('/') if i][0])
+            source_dir = os.path.join(work_dir, [ i for i in s.replace(work_dir, '').split('/') if i][0])
         except IndexError:
-            if not cmp(s,work_dir):
+            if not cmp(s, work_dir):
                 return ''
     else:
         return ''
     source = os.path.basename(source_dir)
-    return do_tarball(work_dir,source,tarname)
+    return do_tarball(work_dir, source, tarname)

-def archive_sources(d,stage_name):
-    '''copy tarball from $DL_DIR to $WORKDIR if have tarball, archive source codes tree in $WORKDIR if $P is directory instead of tarball'''
+def archive_sources(d, stage_name):
+    """
+    copy tarball from $DL_DIR to $WORKDIR if have tarball, archive
+    source codes tree in $WORKDIR if $P is directory instead of tarball
+    """
     import shutil
-    work_dir = d.getVar('WORKDIR',True)
-    file = get_source_from_downloads(d,stage_name)
+
+    work_dir = d.getVar('WORKDIR', True)
+    file = get_source_from_downloads(d, stage_name)
     if file:
-        shutil.copy(file,work_dir)
+        shutil.copy(file, work_dir)
         file = os.path.basename(file)
     else:
-        file = archive_sources_from_directory(d,stage_name)
+        file = archive_sources_from_directory(d, stage_name)
     return file

-def archive_patches(d,patchdir,series):
-    '''archive patches to tarball and also include series files if 'series' is True'''
+def archive_patches(d, patchdir, series):
+    """
+    archive patches to tarball and also include series files if 'series' is True
+    """
     import shutil

-    s = d.getVar('S',True)
+    s = d.getVar('S', True)
     work_dir = d.getVar('WORKDIR', True)
     patch_dir = os.path.basename(patchdir)
     tarname = patch_dir + ".tar.gz"
-    if series == 'all' and os.path.exists(os.path.join(s,'patches/series')):
-        shutil.copy(os.path.join(s,'patches/series'),patchdir)
-    tarname = do_tarball(work_dir,patch_dir,tarname)
+    if series == 'all' and os.path.exists(os.path.join(s, 'patches/series')):
+        shutil.copy(os.path.join(s, 'patches/series'), patchdir)
+    tarname = do_tarball(work_dir, patch_dir, tarname)
     shutil.rmtree(patchdir, ignore_errors=True)
     return tarname

-def select_archive_patches(d,option):
-    '''select to archive all patches including non-applying and series or applying patches '''
+def select_archive_patches(d, option):
+    """
+    select to archive all patches including non-applying and series or
+    applying patches
+    """
     if option == "all":
         patchdir = get_series(d)
     elif option == "applying":
@@ -266,25 +297,27 @@ def select_archive_patches(d,option):
     try:
         os.rmdir(patchdir)
     except OSError:
-        tarpatch = archive_patches(d,patchdir,option)
+        tarpatch = archive_patches(d, patchdir, option)
         return tarpatch
     return

-def archive_logs(d,logdir,bbinc=False):
-    '''archive logs in temp to tarball and .bb and .inc files if bbinc is True '''
+def archive_logs(d, logdir, bbinc=False):
+    """
+    archive logs in temp to tarball and .bb and .inc files if bbinc is True
+    """
     import shutil

-    pf = d.getVar('PF',True)
-    work_dir = d.getVar('WORKDIR',True)
+    pf = d.getVar('PF', True)
+    work_dir = d.getVar('WORKDIR', True)
     log_dir = os.path.basename(logdir)
     tarname = pf + '-' + log_dir + ".tar.gz"
-    tarname = do_tarball(work_dir,log_dir,tarname)
+    tarname = do_tarball(work_dir, log_dir, tarname)
     if bbinc:
         shutil.rmtree(logdir, ignore_errors=True)
     return tarname

 def get_licenses(d):
-    '''get licenses for running .bb file'''
+    """get licenses for running .bb file"""
     import oe.license

     licenses_type = d.getVar('LICENSE', True) or ""
@@ -299,8 +332,8 @@ def get_licenses(d):
     return lice


-def move_tarball_deploy(d,tarball_list):
-    '''move tarball in location to ${DEPLOY_DIR}/sources'''
+def move_tarball_deploy(d, tarball_list):
+    """move tarball in location to ${DEPLOY_DIR}/sources"""
     import shutil

     if tarball_list is []:
@@ -308,100 +341,114 @@ def move_tarball_deploy(d,tarball_list):
     target_sys = d.getVar('TARGET_SYS', True)
     pf = d.getVar('PF', True)
     licenses = get_licenses(d)
-    work_dir = d.getVar('WORKDIR',True)
+    work_dir = d.getVar('WORKDIR', True)
     tar_sources = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf
     if not os.path.exists(tar_sources):
         bb.mkdirhier(tar_sources)
     for source in tarball_list:
         if source:
             if os.path.exists(os.path.join(tar_sources, source)):
-                os.remove(os.path.join(tar_sources,source))
-            shutil.move(os.path.join(work_dir,source),tar_sources)
+                os.remove(os.path.join(tar_sources, source))
+            shutil.move(os.path.join(work_dir, source), tar_sources)

 def check_archiving_type(d):
-    '''check the type for archiving package('tar' or 'srpm')'''
+    """check the type for archiving package('tar' or 'srpm')"""
     try:
         if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in d.getVar('ARCHIVE_TYPE', True).split():
             raise AttributeError
     except AttributeError:
         bb.fatal("\"SOURCE_ARCHIVE_PACKAGE_TYPE\" is \'tar\' or \'srpm\', no other types")

-def store_package(d,package_name):
-    '''store tarbablls name to file "tar-package"'''
+def store_package(d, package_name):
+    """
+    store tarbablls name to file "tar-package"
+    """
     try:
-        f = open(os.path.join(d.getVar('WORKDIR',True),'tar-package'),'a')
+        f = open(os.path.join(d.getVar('WORKDIR', True), 'tar-package'), 'a')
         f.write(package_name + ' ')
         f.close()
     except IOError:
         pass

 def get_package(d):
-    '''get tarballs name from "tar-package"'''
+    """
+    get tarballs name from "tar-package"
+    """
     work_dir = (d.getVar('WORKDIR', True))
-    tarpackage = os.path.join(work_dir,'tar-package')
+    tarpackage = os.path.join(work_dir, 'tar-package')
     try:
-        f = open(tarpackage,'r')
-        line = list(set(f.readline().replace('\n','').split()))
-    except UnboundLocalError,IOError:
+        f = open(tarpackage, 'r')
+        line = list(set(f.readline().replace('\n', '').split()))
+    except UnboundLocalError, IOError:
         pass
     f.close()
     return line


-def archive_sources_patches(d,stage_name):
-    '''archive sources and patches to tarball. stage_name will append strings ${stage_name} to ${PR} as middle name. for example, zlib-1.4.6-prepatch(stage_name).tar.gz '''
+def archive_sources_patches(d, stage_name):
+    """
+    archive sources and patches to tarball. stage_name will append
+    strings ${stage_name} to ${PR} as middle name. for example,
+    zlib-1.4.6-prepatch(stage_name).tar.gz
+    """
     import shutil

     check_archiving_type(d)
     if not_tarball(d) or tar_filter(d):
         return

-    source_tar_name = archive_sources(d,stage_name)
+    source_tar_name = archive_sources(d, stage_name)
     if stage_name == "prepatch":
-        if d.getVar('PATCHES_ARCHIVE_WITH_SERIES',True).upper() == 'TRUE':
-            patch_tar_name = select_archive_patches(d,"all")
-        elif d.getVar('PATCHES_ARCHIVE_WITH_SERIES',True).upper() == 'FALSE':
-            patch_tar_name = select_archive_patches(d,"applying")
+        if d.getVar('PATCHES_ARCHIVE_WITH_SERIES', True).upper() == 'TRUE':
+            patch_tar_name = select_archive_patches(d, "all")
+        elif d.getVar('PATCHES_ARCHIVE_WITH_SERIES', True).upper() == 'FALSE':
+            patch_tar_name = select_archive_patches(d, "applying")
         else:
             bb.fatal("Please define 'PATCHES_ARCHIVE_WITH_SERIES' is strings 'True' or 'False' ")
     else:
         patch_tar_name = ''

     if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in 'SRPM':
-        move_tarball_deploy(d,[source_tar_name,patch_tar_name])
+        move_tarball_deploy(d, [source_tar_name, patch_tar_name])
     else:
-        tarpackage = os.path.join(d.getVar('WORKDIR', True),'tar-package')
+        tarpackage = os.path.join(d.getVar('WORKDIR', True), 'tar-package')
         if os.path.exists(tarpackage):
             os.remove(tarpackage)
         for package in os.path.basename(source_tar_name), patch_tar_name:
             if package:
-                store_package(d,str(package) + ' ')
+                store_package(d, str(package) + ' ')

 def archive_scripts_logs(d):
-    '''archive scripts and logs. scripts include .bb and .inc files and logs include stuff in "temp".'''
+    """
+    archive scripts and logs. scripts include .bb and .inc files and
+    logs include stuff in "temp".
+    """

     if tar_filter(d):
         return
     work_dir = d.getVar('WORKDIR', True)
-    temp_dir = os.path.join(work_dir,'temp')
+    temp_dir = os.path.join(work_dir, 'temp')
     source_archive_log_with_scripts = d.getVar('SOURCE_ARCHIVE_LOG_WITH_SCRIPTS', True)
     if source_archive_log_with_scripts == 'logs_with_scripts':
         logdir = get_bb_inc(d)
-        tarlog = archive_logs(d,logdir,True)
+        tarlog = archive_logs(d, logdir, True)
     elif source_archive_log_with_scripts == 'logs':
         if os.path.exists(temp_dir):
-            tarlog = archive_logs(d,temp_dir,False)
+            tarlog = archive_logs(d, temp_dir, False)
     else:
         return

     if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in 'SRPM':
-        move_tarball_deploy(d,[tarlog])
+        move_tarball_deploy(d, [tarlog])

     else:
-        store_package(d,tarlog)
+        store_package(d, tarlog)

 def dumpdata(d):
-    '''dump environment to "${P}-${PR}.showdata.dump" including all kinds of variables and functions when running a task'''
+    """
+    dump environment to "${P}-${PR}.showdata.dump" including all
+    kinds of variables and functions when running a task
+    """

     if tar_filter(d):
         return
@@ -415,20 +462,23 @@ def dumpdata(d):
     if not os.path.exists(dumpdir):
         bb.mkdirhier(dumpdir)

-    dumpfile = os.path.join(dumpdir, bb.data.expand("${P}-${PR}.showdata.dump",d))
+    dumpfile = os.path.join(dumpdir, bb.data.expand("${P}-${PR}.showdata.dump", d))

     bb.note("Dumping metadata into '%s'" % dumpfile)
     f = open(dumpfile, "w")
     # emit variables and shell functions
     bb.data.emit_env(f, d, True)
-    # emit the metadata which isnt valid shell
+    # emit the metadata which isn't valid shell
     for e in d.keys():
         if bb.data.getVarFlag(e, 'python', d):
             f.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1)))
     f.close()

 def create_diff_gz(d):
-    '''creating .diff.gz in ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.g gz for mapping all content in 's' including patches to xxx.diff.gz'''
+    """
+    creating .diff.gz in ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.g gz for
+    mapping all content in 's' including patches to xxx.diff.gz
+    """
     import shutil
     import subprocess

@@ -467,41 +517,47 @@ def create_diff_gz(d):
     bb.build.exec_func('DIFF', d)
     shutil.rmtree(s + '.org', ignore_errors=True)

-# This function will run when user want to get tarball for sources and patches after do_unpack
+# This function will run when user want to get tarball for sources and
+# patches after do_unpack
 python do_archive_original_sources_patches(){
-    archive_sources_patches(d,'prepatch')
+    archive_sources_patches(d, 'prepatch')
 }

-# This function will run when user want to get tarball for patched sources after do_patch
+# This function will run when user want to get tarball for patched
+# sources after do_patch
 python do_archive_patched_sources(){
-    archive_sources_patches(d,'patched')
+    archive_sources_patches(d, 'patched')
 }

-# This function will run when user want to get tarball for configured sources after do_configure
+# This function will run when user want to get tarball for configured
+# sources after do_configure
 python do_archive_configured_sources(){
-    archive_sources_patches(d,'configured')
+    archive_sources_patches(d, 'configured')
 }

-# This function will run when user want to get tarball for logs or both logs and scripts(.bb and .inc files)
+# This function will run when user want to get tarball for logs or both
+# logs and scripts(.bb and .inc files)
 python do_archive_scripts_logs(){
     archive_scripts_logs(d)
 }

-# This function will run when user want to know what variable and functions in a running task are and also can get a diff file including
+# This function will run when user want to know what variable and
+# functions in a running task are and also can get a diff file including
 # all content a package should include.
 python do_dumpdata_create_diff_gz(){
     dumpdata(d)
     create_diff_gz(d)
 }

-# This functions prepare for archiving "linux-yocto" because this package create directory 's' before do_patch instead of after do_unpack.
-# This is special control for archiving linux-yocto only.
+# This functions prepare for archiving "linux-yocto" because this
+# package create directory 's' before do_patch instead of after
+# do_unpack. This is special control for archiving linux-yocto only.
 python do_archive_linux_yocto(){
     s = d.getVar('S', True)
     if 'linux-yocto' in s:
-        source_tar_name = archive_sources(d,'')
+        source_tar_name = archive_sources(d, '')
         if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in 'SRPM':
-            move_tarball_deploy(d,[source_tar_name,''])
+            move_tarball_deploy(d, [source_tar_name, ''])
 }
 do_kernel_checkout[postfuncs] += "do_archive_linux_yocto "

@@ -513,8 +569,8 @@ python do_remove_tarball(){
             for file in os.listdir(os.getcwd()):
                 if file in get_package(d):
                     os.remove(file)
-            os.remove(os.path.join(work_dir,'tar-package'))
-        except (TypeError,OSError):
+            os.remove(os.path.join(work_dir, 'tar-package'))
+        except (TypeError, OSError):
             pass
 }
 do_remove_taball[deptask] = "do_archive_scripts_logs"
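
A note on the exception-handling style this commit touches: in the Python 2 that BitBake used at the time, the parenthesized tuple form catches every listed type, while the bare comma form (left unchanged in get_package() above) is the old "except ... as ..." spelling, so it catches only the first type and rebinds the second name to the caught instance. A minimal sketch of the difference; risky() is a hypothetical placeholder:

    try:
        risky()
    except (UnboundLocalError, IOError):  # catches both exception types
        pass

    try:
        risky()
    except UnboundLocalError, IOError:    # Python 2 only: catches UnboundLocalError alone,
        pass                              # binding the instance to the name IOError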