 meta/classes/archiver.bbclass | 829 ++++++++++++-----------------------
 1 file changed, 275 insertions(+), 554 deletions(-)

diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index 43373ae4f1..9d4b158a4c 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -1,20 +1,38 @@
 # ex:ts=4:sw=4:sts=4:et
 # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
-# This file is used for archiving sources, patches, and logs to a
-# tarball. It also output building environment to xxx.dump.data and
-# create xxx.diff.gz to record all content in ${S} to a diff file.
+# This bbclass is used for creating archives of:
+# 1) the original (or unpacked) source: ARCHIVER_MODE[src] = "original"
+# 2) the patched source: ARCHIVER_MODE[src] = "patched" (default)
+# 3) the configured source: ARCHIVER_MODE[src] = "configured"
+# 4) the patches between do_unpack and do_patch:
+#    ARCHIVER_MODE[diff] = "1"
+#    You can also set the ones you'd like to exclude from the diff:
+#    ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
+# 5) the environment data, similar to 'bitbake -e recipe':
+#    ARCHIVER_MODE[dumpdata] = "1"
+# 6) the recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
 #
+# All of the above can be packed into a .src.rpm package (when PACKAGES != ""):
+# ARCHIVER_MODE[srpm] = "1"
 
-ARCHIVE_EXCLUDE_FROM ?= ".pc autom4te.cache"
-ARCHIVE_TYPE ?= "tar srpm"
-PATCHES_ARCHIVE_WITH_SERIES = 'yes'
-SOURCE_ARCHIVE_LOG_WITH_SCRIPTS ?= '${@d.getVarFlag('ARCHIVER_MODE', 'log_type') \
-    if d.getVarFlag('ARCHIVER_MODE', 'log_type') != 'none' else 'logs_with_scripts'}'
-SOURCE_ARCHIVE_PACKAGE_TYPE ?= '${@d.getVarFlag('ARCHIVER_MODE', 'type') \
-    if d.getVarFlag('ARCHIVER_MODE', 'log_type') != 'none' else 'tar'}'
-FILTER ?= '${@d.getVarFlag('ARCHIVER_MODE', 'filter') \
-    if d.getVarFlag('ARCHIVER_MODE', 'filter')!= 'none' else 'no'}'
+ARCHIVER_MODE[srpm] ?= "0"
+ARCHIVER_MODE[src] ?= "patched"
+ARCHIVER_MODE[diff] ?= "0"
+ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
+ARCHIVER_MODE[dumpdata] ?= "0"
+ARCHIVER_MODE[recipe] ?= "0"
+
+DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
+ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources"
+ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
+ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"
+
+do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
+do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
+do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
+
+# This is a convenience for the shell script to use it
 
 
 COPYLEFT_LICENSE_INCLUDE ?= 'GPL* LGPL*'
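The ARCHIVER_MODE settings above are varflags on a single variable, so the class is driven entirely from a configuration file rather than per recipe. A minimal local.conf sketch for exercising the new class (values are illustrative; "patched" is already the default for src):

    INHERIT += "archiver"
    ARCHIVER_MODE[src] = "original"
    ARCHIVER_MODE[diff] = "1"
    ARCHIVER_MODE[dumpdata] = "1"
    ARCHIVER_MODE[recipe] = "1"

Setting ARCHIVER_MODE[srpm] = "1" additionally packs the outputs into a .src.rpm; as the anonymous python in the last hunk shows, that path is only wired up when PACKAGES is non-empty and IMAGE_PKGTYPE is rpm.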
@@ -23,7 +41,7 @@ COPYLEFT_LICENSE_INCLUDE[doc] = 'Space separated list of globs which include lic
 
 COPYLEFT_LICENSE_EXCLUDE ?= 'CLOSED Proprietary'
 COPYLEFT_LICENSE_EXCLUDE[type] = 'list'
-COPYLEFT_LICENSE_INCLUDE[doc] = 'Space separated list of globs which exclude licenses'
+COPYLEFT_LICENSE_EXCLUDE[doc] = 'Space separated list of globs which exclude licenses'
 
 COPYLEFT_RECIPE_TYPE ?= '${@copyleft_recipe_type(d)}'
 COPYLEFT_RECIPE_TYPE[doc] = 'The "type" of the current recipe (e.g. target, native, cross)'
@@ -38,593 +56,296 @@ COPYLEFT_AVAILABLE_RECIPE_TYPES[doc] = 'Space separated list of available recipe
 
 python () {
     pn = d.getVar('PN', True)
-    packaging = d.getVar('IMAGE_PKGTYPE', True)
 
-    if tar_filter(d):
-        return
-
-    if d.getVar('PACKAGES', True) != '':
-        d.appendVarFlag('do_dumpdata_create_diff_gz', 'depends', ' %s:do_package_write_%s' % (pn, packaging))
+    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src', True)
+    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata', True)
+    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe', True)
+
+    if ar_src == "original":
+        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
+    elif ar_src == "patched":
+        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
+    elif ar_src == "configured":
+        # We can't use "addtask do_ar_configured after do_configure" since it
+        # will cause the deptask of do_populate_sysroot to run no matter what
+        # archives we need, so we add the depends here.
+        d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
+        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)
+    elif ar_src:
+        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)
+
+    if ar_dumpdata == "1":
+        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)
+
+    if ar_recipe == "1":
+        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)
+
+    # Output the srpm package
+    ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm', True)
+    if ar_srpm == "1":
+        if d.getVar('PACKAGES', True) != '' and d.getVar('IMAGE_PKGTYPE', True) == 'rpm':
+            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
+            if ar_dumpdata == "1":
+                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
+            if ar_recipe == "1":
+                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
+            if ar_src == "original":
+                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
+            elif ar_src == "patched":
+                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
+            elif ar_src == "configured":
+                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
+
+    # The gcc stuff uses shared source
+    flag = d.getVarFlag("do_unpack", "stamp-base", True)
+    if flag:
+        if ar_src in [ 'original', 'patched' ]:
+            ar_outdir = os.path.join(d.getVar('ARCHIVER_TOPDIR', True), 'work-shared')
+            d.setVar('ARCHIVER_OUTDIR', ar_outdir)
+        d.setVarFlag('do_ar_original', 'stamp-base', flag)
+        d.setVarFlag('do_ar_patched', 'stamp-base', flag)
+        d.setVarFlag('do_unpack_and_patch', 'stamp-base', flag)
+        d.setVarFlag('do_ar_original', 'vardepsexclude', 'PN PF ARCHIVER_OUTDIR WORKDIR')
+        d.setVarFlag('do_unpack_and_patch', 'vardepsexclude', 'PN PF ARCHIVER_OUTDIR WORKDIR')
+        d.setVarFlag('do_ar_patched', 'vardepsexclude', 'PN PF ARCHIVER_OUTDIR WORKDIR')
+        d.setVarFlag('create_diff_gz', 'vardepsexclude', 'PF')
+        d.setVarFlag('create_tarball', 'vardepsexclude', 'PF')
+
+        flag_clean = d.getVarFlag('do_unpack', 'stamp-base-clean', True)
+        if flag_clean:
+            d.setVarFlag('do_ar_original', 'stamp-base-clean', flag_clean)
+            d.setVarFlag('do_ar_patched', 'stamp-base-clean', flag_clean)
+            d.setVarFlag('do_unpack_and_patch', 'stamp-base-clean', flag_clean)
+}
 
-    build_deps = ' %s:do_dumpdata_create_diff_gz' % pn
+# Takes all the sources for a recipe and puts them in WORKDIR/archiver-work/.
+# Files in SRC_URI are copied directly, anything that's a directory
+# (e.g. git repositories) is "unpacked" and then put into a tarball.
+python do_ar_original() {
 
-    if d.getVar('SOURCE_ARCHIVE_LOG_WITH_SCRIPTS', True) == 'logs_with_scripts':
-        d.appendVarFlag('do_archive_scripts_logs', 'depends', ' %s:do_package_write_%s' % (pn, packaging))
-        build_deps += ' %s:do_archive_scripts_logs' % pn
+    import shutil, tarfile, tempfile
 
-    if not not_tarball(d):
-        archiver_mode = d.getVar('ARCHIVER_MODE')
-        d.appendVarFlag('do_compile', 'depends', ' %s:do_archive_%s_sources' % (pn, archiver_mode))
-        build_deps += ' %s:do_archive_%s_sources' % (pn, archiver_mode)
+    if d.getVarFlag('ARCHIVER_MODE', 'src', True) != "original":
+        return
 
-    if bb.data.inherits_class('image', d):
-        d.appendVarFlag('do_rootfs', 'depends', build_deps)
-    else:
-        d.appendVarFlag('do_build', 'depends', build_deps)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
+    bb.note('Archiving the original source...')
+    fetch = bb.fetch2.Fetch([], d)
+    for url in fetch.urls:
+        local = fetch.localpath(url)
+        if os.path.isfile(local):
+            shutil.copy(local, ar_outdir)
+        elif os.path.isdir(local):
+            basename = os.path.basename(local)
+
+            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR', True))
+            fetch.unpack(tmpdir, (url,))
+
+            os.chdir(tmpdir)
+            tarname = os.path.join(ar_outdir, basename + '.tar.gz')
+            tar = tarfile.open(tarname, 'w:gz')
+            tar.add('.')
+            tar.close()
 }
 
-def copyleft_recipe_type(d):
-    for recipe_type in oe.data.typed_value('COPYLEFT_AVAILABLE_RECIPE_TYPES', d):
-        if oe.utils.inherits(d, recipe_type):
-            return recipe_type
-    return 'target'
-
-def copyleft_should_include(d):
-    """
-    Determine if this recipe's sources should be deployed for compliance
-    """
-    import ast
-    import oe.license
-    from fnmatch import fnmatchcase as fnmatch
+python do_ar_patched() {
 
-    recipe_type = d.getVar('COPYLEFT_RECIPE_TYPE', True)
-    if recipe_type not in oe.data.typed_value('COPYLEFT_RECIPE_TYPES', d):
-        return False, 'recipe type "%s" is excluded' % recipe_type
-
-    include = oe.data.typed_value('COPYLEFT_LICENSE_INCLUDE', d)
-    exclude = oe.data.typed_value('COPYLEFT_LICENSE_EXCLUDE', d)
+    if d.getVarFlag('ARCHIVER_MODE', 'src', True) != 'patched':
+        return
 
-    try:
-        is_included, reason = oe.license.is_included(d.getVar('LICENSE', True), include, exclude)
-    except oe.license.LicenseError as exc:
-        bb.fatal('%s: %s' % (d.getVar('PF', True), exc))
-    else:
-        if is_included:
-            if reason:
-                return True, 'recipe has included licenses: %s' % ', '.join(reason)
-            else:
-                return False, 'recipe does not include a copyleft license'
-        else:
-            return False, 'recipe has excluded licenses: %s' % ', '.join(reason)
-
-def tar_filter(d):
-    """
-    Only archive the package belongs to COPYLEFT_LICENSE_INCLUDE
-    and ignore the one in COPYLEFT_LICENSE_EXCLUDE. Don't exclude any
-    packages when \"FILTER\" is \"no\"
-    """
-    if d.getVar('FILTER', True) == "yes":
-        included, reason = copyleft_should_include(d)
-        return not included
+    # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
+    bb.note('Archiving the patched source...')
+    d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
+    # The gcc stuff uses shared source
+    flag = d.getVarFlag('do_unpack', 'stamp-base', True)
+    if flag:
+        create_tarball(d, d.getVar('S', True), 'patched', ar_outdir, 'gcc')
     else:
-        return False
-
-def get_bb_inc(d):
-    """
-    create a directory "script-logs" including .bb and .inc file in ${WORKDIR}
-    """
-    import re
-    import shutil
-
-    bbinc = []
-    pat=re.compile('require\s*([^\s]*\.*)(.*)')
-    work_dir = d.getVar('WORKDIR', True)
-    bbfile = d.getVar('FILE', True)
-    bbdir = os.path.dirname(bbfile)
-    target_sys = d.getVar('TARGET_SYS', True)
-    pf = d.getVar('PF', True)
-    licenses = get_licenses(d)
-    script_logs = os.path.join(work_dir, 'script-logs/'+ target_sys + '/' + licenses + '/' + pf + '/script-logs')
-    bb_inc = os.path.join(script_logs, 'bb_inc')
-    bb.utils.mkdirhier(bb_inc)
-
-    def find_file(dir, file):
-        for root, dirs, files in os.walk(dir):
-            if file in files:
-                return os.path.join(root, file)
-
-    def get_inc (file):
-        f = open(file, 'r')
-        for line in f.readlines():
-            if 'require' not in line:
-                bbinc.append(file)
-            else:
-                try:
-                    incfile = pat.match(line).group(1)
-                    incfile = bb.data.expand(os.path.basename(incfile), d)
-                    abs_incfile = find_file(bbdir, incfile)
-                    if abs_incfile:
-                        bbinc.append(abs_incfile)
-                        get_inc(abs_incfile)
-                except AttributeError:
-                    pass
-    get_inc(bbfile)
-    bbinc = list(set(bbinc))
-    for bbincfile in bbinc:
-        shutil.copy(bbincfile, bb_inc)
-
-    return script_logs
-
-def get_logs(d):
-    """
-    create a directory "script-logs" in ${WORKDIR}
-    """
-    work_dir = d.getVar('WORKDIR', True)
-    target_sys = d.getVar('TARGET_SYS', True)
-    pf = d.getVar('PF', True)
-    licenses = get_licenses(d)
-    script_logs = os.path.join(work_dir, 'script-logs/'+ target_sys + '/' + licenses + '/' + pf + '/script-logs')
-
-    try:
-        bb.utils.mkdirhier(os.path.join(script_logs, 'temp'))
-        oe.path.copytree(os.path.join(work_dir, 'temp'), os.path.join(script_logs, 'temp'))
-    except (IOError, AttributeError):
-        pass
-    return script_logs
-
-def get_series(d):
-    """
-    copy patches and series file to a pointed directory which will be
-    archived to tarball in ${WORKDIR}
-    """
-    import shutil
+        create_tarball(d, d.getVar('S', True), 'patched', ar_outdir)
+}
 
-    src_patches=[]
-    pf = d.getVar('PF', True)
-    work_dir = d.getVar('WORKDIR', True)
-    s = d.getVar('S', True)
-    dest = os.path.join(work_dir, pf + '-series')
-    shutil.rmtree(dest, ignore_errors=True)
-    bb.utils.mkdirhier(dest)
-
-    src_uri = d.getVar('SRC_URI', True).split()
-    fetch = bb.fetch2.Fetch(src_uri, d)
-    locals = (fetch.localpath(url) for url in fetch.urls)
-    for local in locals:
-        src_patches.append(local)
-    if not cmp(work_dir, s):
-        tmp_list = src_patches
-    else:
-        tmp_list = src_patches[1:]
-
-    for patch in tmp_list:
-        try:
-            shutil.copy(patch, dest)
-        except IOError:
-            if os.path.isdir(patch):
-                bb.utils.mkdirhier(os.path.join(dest, patch))
-                oe.path.copytree(patch, os.path.join(dest, patch))
-    return dest
-
-def get_applying_patches(d):
-    """
-    only copy applying patches to a pointed directory which will be
-    archived to tarball
-    """
+python do_ar_configured() {
     import shutil
 
-    pf = d.getVar('PF', True)
-    work_dir = d.getVar('WORKDIR', True)
-    dest = os.path.join(work_dir, pf + '-patches')
-    shutil.rmtree(dest, ignore_errors=True)
-    bb.utils.mkdirhier(dest)
-
-    patches = src_patches(d)
-    for patch in patches:
-        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
-        if local:
-            shutil.copy(local, dest)
-    return dest
-
-def not_tarball(d):
-    """
-    packages including key words 'work-shared', 'native', 'packagegroup-' will be passed
-    """
-    workdir = d.getVar('WORKDIR', True)
-    s = d.getVar('S', True)
-    if 'work-shared' in s or 'packagegroup-' in workdir or 'native' in workdir:
-        return True
-    else:
-        return False
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
+    if d.getVarFlag('ARCHIVER_MODE', 'src', True) == 'configured':
+        bb.note('Archiving the configured source...')
+        # The libtool-native's do_configure will remove the
+        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run
+        # do_configure; we archive the already configured ${S}
+        # instead.
+        if d.getVar('PN', True) != 'libtool-native':
+            # Change the WORKDIR to make do_configure run in another dir.
+            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
+            if bb.data.inherits_class('kernel-yocto', d):
+                bb.build.exec_func('do_kernel_configme', d)
+            if bb.data.inherits_class('cmake', d):
+                bb.build.exec_func('do_generate_toolchain_file', d)
+            prefuncs = d.getVarFlag('do_configure', 'prefuncs', True)
+            for func in (prefuncs or '').split():
+                if func != "sysroot_cleansstate":
+                    bb.build.exec_func(func, d)
+            bb.build.exec_func('do_configure', d)
+            postfuncs = d.getVarFlag('do_configure', 'postfuncs', True)
+            for func in (postfuncs or '').split():
+                if func != "do_qa_configure":
+                    bb.build.exec_func(func, d)
+        srcdir = d.getVar('S', True)
+        builddir = d.getVar('B', True)
+        if srcdir != builddir:
+            if os.path.exists(builddir):
+                oe.path.copytree(builddir, os.path.join(srcdir, \
+                    'build.%s.ar_configured' % d.getVar('PF', True)))
+        create_tarball(d, srcdir, 'configured', ar_outdir)
+}
 
-def get_source_from_downloads(d, stage_name):
+def create_tarball(d, srcdir, suffix, ar_outdir, pf=None):
     """
-    copy tarball of $P to $WORKDIR when this tarball exists in $DL_DIR
-    """
-    if stage_name in 'patched' 'configured':
-        return
-    pf = d.getVar('PF', True)
-    dl_dir = d.getVar('DL_DIR', True)
-    try:
-        source = os.path.join(dl_dir, os.path.basename(d.getVar('SRC_URI', True).split()[0]))
-        if os.path.exists(source) and not os.path.isdir(source):
-            return source
-    except (IndexError, OSError):
-        pass
-    return ''
-
-def do_tarball(workdir, srcdir, tarname):
-    """
-    tar "srcdir" under "workdir" to "tarname"
+    create the tarball from srcdir
     """
     import tarfile
 
-    sav_dir = os.getcwd()
-    os.chdir(workdir)
-    if (len(os.listdir(srcdir))) != 0:
-        tar = tarfile.open(tarname, "w:gz")
-        tar.add(srcdir)
-        tar.close()
-    else:
-        tarname = ''
-    os.chdir(sav_dir)
-    return tarname
-
-def archive_sources_from_directory(d, stage_name):
-    """
-    archive sources codes tree to tarball when tarball of $P doesn't
-    exist in $DL_DIR
-    """
-
-    s = d.getVar('S', True)
-    work_dir=d.getVar('WORKDIR', True)
-    PF = d.getVar('PF', True)
-    tarname = PF + '-' + stage_name + ".tar.gz"
-
-    if os.path.exists(s) and work_dir in s:
-        try:
-            source_dir = os.path.join(work_dir, [ i for i in s.replace(work_dir, '').split('/') if i][0])
-        except IndexError:
-            if not cmp(s, work_dir):
-                return ''
-    else:
-        return ''
-    source = os.path.basename(source_dir)
-    return do_tarball(work_dir, source, tarname)
-
-def archive_sources(d, stage_name):
-    """
-    copy tarball from $DL_DIR to $WORKDIR if have tarball, archive
-    source codes tree in $WORKDIR if $P is directory instead of tarball
-    """
-    import shutil
-
-    work_dir = d.getVar('WORKDIR', True)
-    file = get_source_from_downloads(d, stage_name)
-    if file:
-        shutil.copy(file, work_dir)
-        file = os.path.basename(file)
+    bb.utils.mkdirhier(ar_outdir)
+    if pf:
+        tarname = os.path.join(ar_outdir, '%s-%s.tar.gz' % (pf, suffix))
     else:
-        file = archive_sources_from_directory(d, stage_name)
-    return file
+        tarname = os.path.join(ar_outdir, '%s-%s.tar.gz' % \
+                    (d.getVar('PF', True), suffix))
 
-def archive_patches(d, patchdir, series):
-    """
-    archive patches to tarball and also include series files if 'series' is True
-    """
-    import shutil
+    srcdir = srcdir.rstrip('/')
+    dirname = os.path.dirname(srcdir)
+    basename = os.path.basename(srcdir)
+    os.chdir(dirname)
+    bb.note('Creating %s' % tarname)
+    tar = tarfile.open(tarname, 'w:gz')
+    tar.add(basename)
+    tar.close()
 
-    s = d.getVar('S', True)
-    work_dir = d.getVar('WORKDIR', True)
-    patch_dir = os.path.basename(patchdir)
-    tarname = patch_dir + ".tar.gz"
-    if series == 'all' and os.path.exists(os.path.join(s, 'patches/series')):
-        shutil.copy(os.path.join(s, 'patches/series'), patchdir)
-    tarname = do_tarball(work_dir, patch_dir, tarname)
-    shutil.rmtree(patchdir, ignore_errors=True)
-    return tarname
-
-def select_archive_patches(d, option):
-    """
-    select to archive all patches including non-applying and series or
-    applying patches
-    """
-    if option == "all":
-        patchdir = get_series(d)
-    elif option == "applying":
-        patchdir = get_applying_patches(d)
-    try:
-        os.rmdir(patchdir)
-    except OSError:
-        tarpatch = archive_patches(d, patchdir, option)
-        return tarpatch
-    return
-
-def archive_logs(d, logdir, bbinc=False):
-    """
-    archive logs in temp to tarball and .bb and .inc files if bbinc is True
-    """
-    import shutil
+# creating .diff.gz between source.orig and source
+def create_diff_gz(d, src_orig, src, ar_outdir):
 
-    pf = d.getVar('PF', True)
-    work_dir = d.getVar('WORKDIR', True)
-    log_dir = os.path.basename(logdir)
-    tarname = pf + '-' + log_dir + ".tar.gz"
-    archive_dir = os.path.join( logdir, '..' )
-    tarname = do_tarball(archive_dir, log_dir, tarname)
-    if bbinc:
-        shutil.rmtree(logdir, ignore_errors=True)
-    return tarname
-
-def get_licenses(d):
-    """get licenses for running .bb file"""
-    import oe.license
-
-    licenses_type = d.getVar('LICENSE', True) or ""
-    lics = oe.license.is_included(licenses_type)[1:][0]
-    lice = ''
-    for lic in lics:
-        licens = d.getVarFlag('SPDXLICENSEMAP', lic)
-        if licens != None:
-            lice += licens
-        else:
-            lice += lic
-    return lice
-
-
-def move_tarball_deploy(d, tarball_list):
-    """move tarball in location to ${DEPLOY_DIR}/sources"""
-    import shutil
+    import subprocess
 
-    if tarball_list is []:
+    if not os.path.isdir(src) or not os.path.isdir(src_orig):
         return
-    target_sys = d.getVar('TARGET_SYS', True)
-    pf = d.getVar('PF', True)
-    licenses = get_licenses(d)
-    work_dir = d.getVar('WORKDIR', True)
-    tar_sources = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf
-    if not os.path.exists(tar_sources):
-        bb.utils.mkdirhier(tar_sources)
-    for source in tarball_list:
-        if source:
-            if os.path.exists(os.path.join(tar_sources, source)):
-                os.remove(os.path.join(tar_sources, source))
-            shutil.move(os.path.join(work_dir, source), tar_sources)
-
-def check_archiving_type(d):
-    """check the type for archiving package('tar' or 'srpm')"""
-    if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) not in d.getVar('ARCHIVE_TYPE', True).split():
-        bb.fatal("\"SOURCE_ARCHIVE_PACKAGE_TYPE\" is \'tar\' or \'srpm\', no other types")
-
-def store_package(d, package_name):
-    """
-    store tarbablls name to file "tar-package"
-    """
-    f = open(os.path.join(d.getVar('WORKDIR', True), 'tar-package'), 'a')
-    f.write(package_name + ' ')
-    f.close()
-
-def get_package(d):
-    """
-    get tarballs name from "tar-package"
-    """
-    work_dir = (d.getVar('WORKDIR', True))
-    tarlist = os.path.join(work_dir, 'tar-package')
-    if os.path.exists(tarlist):
-        f = open(tarlist, 'r')
-        line = f.readline().rstrip('\n').split()
-        f.close()
-        return line
-    return []
 
+    # The diff --exclude can't exclude the file with path, so we copy
+    # the patched source, and remove the files that we'd like to
+    # exclude.
+    src_patched = src + '.patched'
+    oe.path.copyhardlinktree(src, src_patched)
+    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude', True).split():
+        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
+        bb.utils.remove(os.path.join(src_patched, i), recurse=True)
+
+    dirname = os.path.dirname(src)
+    basename = os.path.basename(src)
+    os.chdir(dirname)
+    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF', True))
+    diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
+    subprocess.call(diff_cmd, shell=True)
+    bb.utils.remove(src_patched, recurse=True)
+
+# Run do_unpack and do_patch
+python do_unpack_and_patch() {
+    if d.getVarFlag('ARCHIVER_MODE', 'src', True) not in \
+            [ 'patched', 'configured'] and \
+            d.getVarFlag('ARCHIVER_MODE', 'diff', True) != '1':
+        return
 
-def archive_sources_patches(d, stage_name):
-    """
-    archive sources and patches to tarball. stage_name will append
-    strings ${stage_name} to ${PR} as middle name. for example,
-    zlib-1.4.6-prepatch(stage_name).tar.gz
-    """
-    import shutil
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
 
-    check_archiving_type(d)
+    # Change the WORKDIR to make do_unpack and do_patch run in another dir.
+    d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
 
-    source_tar_name = archive_sources(d, stage_name)
-    if stage_name == "prepatch":
-        if d.getVar('PATCHES_ARCHIVE_WITH_SERIES', True) == 'yes':
-            patch_tar_name = select_archive_patches(d, "all")
-        elif d.getVar('PATCHES_ARCHIVE_WITH_SERIES', True) == 'no':
-            patch_tar_name = select_archive_patches(d, "applying")
-        else:
-            bb.fatal("Please define 'PATCHES_ARCHIVE_WITH_SERIES' to 'yes' or 'no' ")
+    # The kernel source is ready after do_validate_branches
+    if bb.data.inherits_class('kernel-yocto', d):
+        bb.build.exec_func('do_unpack', d)
+        bb.build.exec_func('do_kernel_checkout', d)
+        bb.build.exec_func('do_validate_branches', d)
     else:
-        patch_tar_name = ''
+        bb.build.exec_func('do_unpack', d)
+
+    # Save the original source for creating the patches
+    if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
+        src = d.getVar('S', True).rstrip('/')
+        src_orig = '%s.orig' % src
+        oe.path.copytree(src, src_orig)
+    bb.build.exec_func('do_patch', d)
+    # Create the patches
+    if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
+        bb.note('Creating diff gz...')
+        create_diff_gz(d, src_orig, src, ar_outdir)
+        bb.utils.remove(src_orig, recurse=True)
+}
 
-    if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) != 'srpm':
-        move_tarball_deploy(d, [source_tar_name, patch_tar_name])
-    else:
-        tarlist = os.path.join(d.getVar('WORKDIR', True), 'tar-package')
-        if os.path.exists(tarlist):
-            os.remove(tarlist)
-        for package in os.path.basename(source_tar_name), patch_tar_name:
-            if package:
-                store_package(d, str(package) + ' ')
-
-def archive_scripts_logs(d):
+python do_ar_recipe () {
     """
-    archive scripts and logs. scripts include .bb and .inc files and
-    logs include stuff in "temp".
+    archive the recipe, including .bb and .inc.
     """
+    import re
     import shutil
 
-    work_dir = d.getVar('WORKDIR', True)
-    temp_dir = os.path.join(work_dir, 'temp')
-    source_archive_log_with_scripts = d.getVar('SOURCE_ARCHIVE_LOG_WITH_SCRIPTS', True)
-    if source_archive_log_with_scripts == 'logs_with_scripts':
-        logdir = get_logs(d)
-        logdir = get_bb_inc(d)
-    elif source_archive_log_with_scripts == 'logs':
-        logdir = get_logs(d)
-    else:
-        return
-
-    tarlog = archive_logs(d, logdir, True)
-
-    if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) == 'srpm':
-        store_package(d, tarlog)
+    require_re = re.compile( r"require\s+(.+)" )
+    include_re = re.compile( r"include\s+(.+)" )
+    bbfile = d.getVar('FILE', True)
+    outdir = os.path.join(d.getVar('WORKDIR', True), \
+            '%s-recipe' % d.getVar('PF', True))
+    bb.utils.mkdirhier(outdir)
+    shutil.copy(bbfile, outdir)
+
+    dirname = os.path.dirname(bbfile)
+    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH', True))
+    f = open(bbfile, 'r')
+    for line in f.readlines():
+        incfile = None
+        if require_re.match(line):
+            incfile = require_re.match(line).group(1)
+        elif include_re.match(line):
+            incfile = include_re.match(line).group(1)
+        if incfile:
+            incfile = bb.data.expand(incfile, d)
+            incfile = bb.utils.which(bbpath, incfile)
+            if incfile:
+                shutil.copy(incfile, outdir)
+
+    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR', True))
+    bb.utils.remove(outdir, recurse=True)
+}
 
-def dumpdata(d):
+python do_dumpdata () {
     """
-    dump environment to "${P}-${PR}.showdata.dump" including all
-    kinds of variables and functions when running a task
+    dump environment data to ${PF}-showdata.dump
     """
 
-    workdir = bb.data.getVar('WORKDIR', d, 1)
-    distro = bb.data.getVar('DISTRO', d, 1)
-    s = d.getVar('S', True)
-    pf = d.getVar('PF', True)
-    target_sys = d.getVar('TARGET_SYS', True)
-    licenses = get_licenses(d)
-    dumpdir = os.path.join(workdir, 'diffgz-envdata/'+ target_sys + '/' + licenses + '/' + pf )
-    if not os.path.exists(dumpdir):
-        bb.utils.mkdirhier(dumpdir)
-
-    dumpfile = os.path.join(dumpdir, bb.data.expand("${P}-${PR}.showdata.dump", d))
-
-    bb.note("Dumping metadata into '%s'" % dumpfile)
-    f = open(dumpfile, "w")
+    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR', True), \
+        '%s-showdata.dump' % d.getVar('PF', True))
+    bb.note('Dumping metadata into %s' % dumpfile)
+    f = open(dumpfile, 'w')
     # emit variables and shell functions
     bb.data.emit_env(f, d, True)
     # emit the metadata which isn't valid shell
     for e in d.keys():
         if bb.data.getVarFlag(e, 'python', d):
-            f.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1)))
-    f.close()
-
-def create_diff_gz(d):
-    """
-    creating .diff.gz in ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.g gz for
-    mapping all content in 's' including patches to xxx.diff.gz
-    """
-    import shutil
-    import subprocess
-
-    work_dir = d.getVar('WORKDIR', True)
-    exclude_from = d.getVar('ARCHIVE_EXCLUDE_FROM', True).split()
-    pf = d.getVar('PF', True)
-    licenses = get_licenses(d)
-    target_sys = d.getVar('TARGET_SYS', True)
-    diff_dir = os.path.join(work_dir, 'diffgz-envdata/'+ target_sys + '/' + licenses + '/' + pf )
-    diff_file = os.path.join(diff_dir, bb.data.expand("${P}-${PR}.diff.gz",d))
-
-    f = open(os.path.join(work_dir,'temp/exclude-from-file'), 'a')
-    for i in exclude_from:
-        f.write(i)
-        f.write("\n")
-    f.close()
+            f.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, True)))
     f.close()
-
-    s=d.getVar('S', True)
-    distro = d.getVar('DISTRO',True) or ""
-    dest = s + '/' + distro + '/files'
-    if not os.path.exists(dest):
-        bb.utils.mkdirhier(dest)
-    for i in os.listdir(os.getcwd()):
-        if os.path.isfile(i):
-            try:
-                shutil.copy(i, dest)
-            except IOError:
-                subprocess.call('fakeroot cp -rf ' + i + " " + dest, shell=True)
-
-    bb.note("Creating .diff.gz in ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.gz")
-    cmd = "LC_ALL=C TZ=UTC0 diff --exclude-from=" + work_dir + "/temp/exclude-from-file -Naur " + s + '.org' + ' ' + s + " | gzip -c > " + diff_file
-    d.setVar('DIFF', cmd + "\n")
-    d.setVarFlag('DIFF', 'func', '1')
-    bb.build.exec_func('DIFF', d)
-    shutil.rmtree(s + '.org', ignore_errors=True)
-
-# This function will run when user want to get tarball for sources and
-# patches after do_unpack
-python do_archive_original_sources(){
-    archive_sources_patches(d, 'prepatch')
-}
-
-# This function will run when user want to get tarball for patched
-# sources after do_patch
-python do_archive_patched_sources(){
-    archive_sources_patches(d, 'patched')
-}
-
-# This function will run when user want to get tarball for configured
-# sources after do_configure
-python do_archive_configured_sources(){
-    archive_sources_patches(d, 'configured')
-}
-
-# This function will run when user want to get tarball for logs or both
-# logs and scripts(.bb and .inc files)
-python do_archive_scripts_logs(){
-    archive_scripts_logs(d)
-}
-
-# This function will run when user want to know what variable and
-# functions in a running task are and also can get a diff file including
-# all content a package should include.
-python do_dumpdata_create_diff_gz(){
-    dumpdata(d)
-    create_diff_gz(d)
-}
-
-# This functions prepare for archiving "linux-yocto" because this
-# package create directory 's' before do_patch instead of after
-# do_unpack. This is special control for archiving linux-yocto only.
-python do_archive_linux_yocto(){
-    s = d.getVar('S', True)
-    if 'linux-yocto' in s:
-        source_tar_name = archive_sources(d, '')
-    if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) != 'srpm':
-        move_tarball_deploy(d, [source_tar_name, ''])
-}
-do_kernel_checkout[postfuncs] += "do_archive_linux_yocto "
-
-# remove tarball for sources, patches and logs after creating srpm.
-python do_delete_tarlist(){
-    work_dir = d.getVar('WORKDIR', True)
-    tarlist = os.path.join(work_dir, 'tar-package')
-    if os.path.exists(tarlist):
-        os.remove(tarlist)
 }
-do_delete_tarlist[deptask] = "do_archive_scripts_logs"
-do_package_write_rpm[postfuncs] += "do_delete_tarlist "
-
-# Get archiving package with temp(logs) and scripts(.bb and .inc files)
-addtask do_archive_scripts_logs
-
-# Get dump date and create diff file
-addtask do_dumpdata_create_diff_gz
-
-ARCHIVE_SSTATE_OUTDIR = "${DEPLOY_DIR}/sources/"
-ARCHIVE_SSTATE_SCRIPTS_LOGS_INDIR = "${WORKDIR}/script-logs/"
-ARCHIVE_SSTATE_DIFFGZ_ENVDATA_INDIR = "${WORKDIR}/diffgz-envdata/"
 
-SSTATETASKS += "do_archive_scripts_logs"
-do_archive_scripts_logs[sstate-inputdirs] = "${ARCHIVE_SSTATE_SCRIPTS_LOGS_INDIR}"
-do_archive_scripts_logs[sstate-outputdirs] = "${ARCHIVE_SSTATE_OUTDIR}"
-
-python do_archive_scripts_logs_setscene () {
-    sstate_setscene(d)
+SSTATETASKS += "do_deploy_archives"
+do_deploy_archives () {
+    echo "Deploying source archive files ..."
 }
-
-addtask do_archive_scripts_logs_setscene
-
-SSTATETASKS += "do_dumpdata_create_diff_gz"
-do_dumpdata_create_diff_gz[sstate-inputdirs] = "${ARCHIVE_SSTATE_DIFFGZ_ENVDATA_INDIR}"
-do_dumpdata_create_diff_gz[sstate-outputdirs] = "${ARCHIVE_SSTATE_OUTDIR}"
-
-python do_dumpdata_create_diff_gz_setscene () {
+python do_deploy_archives_setscene () {
     sstate_setscene(d)
 }
-
-addtask do_dumpdata_create_diff_gz_setscene
+do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
+do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
 
-addtask do_archive_original_sources after do_unpack
-addtask do_archive_patched_sources after do_patch
-addtask do_archive_configured_sources after do_configure
+addtask do_ar_original after do_unpack
+addtask do_unpack_and_patch after do_patch
+addtask do_ar_patched after do_unpack_and_patch
+addtask do_ar_configured after do_unpack_and_patch
+addtask do_dumpdata
+addtask do_ar_recipe
+addtask do_deploy_archives before do_build
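With the rewrite, every enabled archiving task feeds do_deploy_archives, whose sstate input/output mapping publishes ${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/ into ${DEPLOY_DIR_SRC} (by default ${DEPLOY_DIR}/sources). A sketch of the result for a hypothetical target recipe zlib 1.2.8-r0 with ARCHIVER_MODE[diff] and [dumpdata] enabled (the recipe, version, and target triplet are illustrative, and the path assumes the stock DEPLOY_DIR under tmp/deploy; the filenames follow the '%s-%s' % (PF, suffix) patterns in create_tarball(), create_diff_gz(), and do_dumpdata above):

    $ bitbake zlib
    $ ls tmp/deploy/sources/i586-poky-linux/zlib-1.2.8-r0/
    zlib-1.2.8-r0-patched.tar.gz    # do_ar_patched, via create_tarball()
    zlib-1.2.8-r0-diff.gz           # do_unpack_and_patch, via create_diff_gz()
    zlib-1.2.8-r0-showdata.dump     # do_dumpdata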