diff options
Diffstat (limited to 'meta/classes-recipe/externalsrc.bbclass')
-rw-r--r-- | meta/classes-recipe/externalsrc.bbclass | 269 |
1 file changed, 269 insertions, 0 deletions
diff --git a/meta/classes-recipe/externalsrc.bbclass b/meta/classes-recipe/externalsrc.bbclass new file mode 100644 index 0000000000..51dbe9ea5a --- /dev/null +++ b/meta/classes-recipe/externalsrc.bbclass | |||
@@ -0,0 +1,269 @@ | |||
1 | # Copyright (C) 2012 Linux Foundation | ||
2 | # Author: Richard Purdie | ||
3 | # Some code and influence taken from srctree.bbclass: | ||
4 | # Copyright (C) 2009 Chris Larson <clarson@kergoth.com> | ||
5 | # | ||
6 | # SPDX-License-Identifier: MIT | ||
7 | # | ||
8 | # externalsrc.bbclass enables use of an existing source tree, usually external to | ||
9 | # the build system to build a piece of software rather than the usual fetch/unpack/patch | ||
10 | # process. | ||
11 | # | ||
12 | # To use, add externalsrc to the global inherit and set EXTERNALSRC to point at the | ||
13 | # directory you want to use containing the sources e.g. from local.conf for a recipe | ||
14 | # called "myrecipe" you would do: | ||
15 | # | ||
16 | # INHERIT += "externalsrc" | ||
17 | # EXTERNALSRC:pn-myrecipe = "/path/to/my/source/tree" | ||
18 | # | ||
19 | # In order to make this class work for both target and native versions (or with | ||
20 | # multilibs/cross or other BBCLASSEXTEND variants), B is set to point to a separate | ||
21 | # directory under the work directory (split source and build directories). This is | ||
22 | # the default, but the build directory can be set to the source directory if | ||
23 | # circumstances dictate by setting EXTERNALSRC_BUILD to the same value, e.g.: | ||
24 | # | ||
25 | # EXTERNALSRC_BUILD:pn-myrecipe = "/path/to/my/source/tree" | ||
26 | # | ||
27 | |||
# Tasks made redundant by an already-present source tree; they are deleted in
# the anonymous python below (do_fetch/do_unpack are kept when SRC_URI still
# contains local entries such as file:// URLs)
SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch"
# Convenience symlinks (link-name:target) created inside the source tree by
# externalsrc_configure_prefunc so developers can find the work/log dirs
EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}"
30 | |||
# Anonymous function, run at recipe parse time: reconfigures the recipe to
# build from EXTERNALSRC instead of the normal fetch/unpack/patch flow.
python () {
    externalsrc = d.getVar('EXTERNALSRC')
    externalsrcbuild = d.getVar('EXTERNALSRC_BUILD')

    if externalsrc and not externalsrc.startswith("/"):
        bb.error("EXTERNALSRC must be an absolute path")
    if externalsrcbuild and not externalsrcbuild.startswith("/"):
        bb.error("EXTERNALSRC_BUILD must be an absolute path")

    # If this is the base recipe and EXTERNALSRC is set for it or any of its
    # derivatives, then enable BB_DONT_CACHE to force the recipe to always be
    # re-parsed so that the file-checksums function for do_compile is run every
    # time.
    bpn = d.getVar('BPN')
    classextend = (d.getVar('BBCLASSEXTEND') or '').split()
    if bpn == d.getVar('PN') or not classextend:
        if (externalsrc or
                ('native' in classextend and
                 d.getVar('EXTERNALSRC:pn-%s-native' % bpn)) or
                ('nativesdk' in classextend and
                 d.getVar('EXTERNALSRC:pn-nativesdk-%s' % bpn)) or
                ('cross' in classextend and
                 d.getVar('EXTERNALSRC:pn-%s-cross' % bpn))):
            d.setVar('BB_DONT_CACHE', '1')

    if externalsrc:
        import oe.recipeutils
        import oe.path

        # Point S at the external tree; B defaults to a separate directory
        # under WORKDIR so multiple variants (native/multilib/...) can build,
        # unless EXTERNALSRC_BUILD overrides it
        d.setVar('S', externalsrc)
        if externalsrcbuild:
            d.setVar('B', externalsrcbuild)
        else:
            d.setVar('B', '${WORKDIR}/${BPN}-${PV}/')

        # Keep only the SRC_URI entries that are still meaningful alongside an
        # external tree: local files, npm shrinkwrap, crates, and kmeta
        local_srcuri = []
        fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
        for url in fetch.urls:
            url_data = fetch.ud[url]
            parm = url_data.parm
            if (url_data.type == 'file' or
                    url_data.type == 'npmsw' or url_data.type == 'crate' or
                    'type' in parm and parm['type'] == 'kmeta'):
                local_srcuri.append(url)

        d.setVar('SRC_URI', ' '.join(local_srcuri))

        # Dummy value because the default function can't be called with blank SRC_URI
        d.setVar('SRCPV', '999')

        if d.getVar('CONFIGUREOPT_DEPTRACK') == '--disable-dependency-tracking':
            d.setVar('CONFIGUREOPT_DEPTRACK', '')

        # All datastore keys flagged as tasks
        tasks = filter(lambda k: d.getVarFlag(k, "task"), d.keys())

        for task in tasks:
            if task.endswith("_setscene"):
                # sstate is never going to work for external source trees, disable it
                bb.build.deltask(task, d)
            elif os.path.realpath(d.getVar('S')) == os.path.realpath(d.getVar('B')):
                # Since configure will likely touch ${S}, ensure only we lock so one task has access at a time
                d.appendVarFlag(task, "lockfiles", " ${S}/singletask.lock")

            for funcname in [task, "base_" + task, "kernel_" + task]:
                # We do not want our source to be wiped out, ever (kernel.bbclass does this for do_clean)
                cleandirs = oe.recipeutils.split_var_value(d.getVarFlag(funcname, 'cleandirs', False) or '')
                setvalue = False
                for cleandir in cleandirs[:]:
                    if oe.path.is_path_parent(externalsrc, d.expand(cleandir)):
                        cleandirs.remove(cleandir)
                        setvalue = True
                if setvalue:
                    d.setVarFlag(funcname, 'cleandirs', ' '.join(cleandirs))

        fetch_tasks = ['do_fetch', 'do_unpack']
        # If we deltask do_patch, there's no dependency to ensure do_unpack gets run, so add one
        # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string
        d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack'])

        for task in d.getVar("SRCTREECOVEREDTASKS").split():
            # Keep fetch/unpack when local SRC_URI entries survived the filter
            if local_srcuri and task in fetch_tasks:
                continue
            bb.build.deltask(task, d)
            if task == 'do_unpack':
                # The reproducible build create_source_date_epoch_stamp function must
                # be run after the source is available and before the
                # do_deploy_source_date_epoch task. In the normal case, it's attached
                # to do_unpack as a postfuncs, but since we removed do_unpack (above)
                # we need to move the function elsewhere. The easiest thing to do is
                # move it into the prefuncs of the do_deploy_source_date_epoch task.
                # This is safe, as externalsrc runs with the source already unpacked.
                d.prependVarFlag('do_deploy_source_date_epoch', 'prefuncs', 'create_source_date_epoch_stamp ')

        d.prependVarFlag('do_compile', 'prefuncs', "externalsrc_compile_prefunc ")
        d.prependVarFlag('do_configure', 'prefuncs', "externalsrc_configure_prefunc ")

        # Re-run compile/configure whenever files in the external tree change;
        # the helpers below compute the file-checksum inputs
        d.setVarFlag('do_compile', 'file-checksums', '${@srctree_hash_files(d)}')
        d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')

        # We don't want the workdir to go away
        d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))

        # do_buildclean only runs as part of do_clean when building in-tree (S == B)
        bb.build.addtask('do_buildclean',
                         'do_clean' if d.getVar('S') == d.getVar('B') else None,
                         None, d)

        # If B=S the same builddir is used even for different architectures.
        # Thus, use a shared CONFIGURESTAMPFILE and STAMP directory so that
        # change of do_configure task hash is correctly detected and stamps are
        # invalidated if e.g. MACHINE changes.
        if d.getVar('S') == d.getVar('B'):
            configstamp = '${TMPDIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}/configure.sstate'
            d.setVar('CONFIGURESTAMPFILE', configstamp)
            d.setVar('STAMP', '${STAMPS_DIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}')
            d.setVar('STAMPCLEAN', '${STAMPS_DIR}/work-shared/${PN}/*-*')
}
147 | |||
# Prefunc for do_configure: create the EXTERNALSRC_SYMLINKS convenience links
# (e.g. oe-workdir, oe-logs) inside the source tree and hide them from git.
python externalsrc_configure_prefunc() {
    s_dir = d.getVar('S')
    # Create desired symlinks
    symlinks = (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()
    newlinks = []
    for symlink in symlinks:
        symsplit = symlink.split(':', 1)
        lnkfile = os.path.join(s_dir, symsplit[0])
        if len(symsplit) > 1:
            # Only expand the target once we know one was actually specified:
            # previously symsplit[1] was accessed before this guard, raising
            # IndexError for entries without a ':'
            target = d.expand(symsplit[1])
            if os.path.islink(lnkfile):
                # Link already exists, leave it if it points to the right location already
                if os.readlink(lnkfile) == target:
                    continue
                os.unlink(lnkfile)
            elif os.path.exists(lnkfile):
                # File/dir exists with same name as link, just leave it alone
                continue
            os.symlink(target, lnkfile)
            newlinks.append(symsplit[0])
    # Hide the symlinks from git
    try:
        git_exclude_file = os.path.join(s_dir, '.git/info/exclude')
        if os.path.exists(git_exclude_file):
            with open(git_exclude_file, 'r+') as efile:
                elines = efile.readlines()
                for link in newlinks:
                    # Compare against stripped lines: readlines() keeps the
                    # trailing newline, so the previous plain membership test
                    # never matched and entries were re-appended on every run
                    if any(eline.strip() in (link, '/' + link) for eline in elines):
                        continue
                    # File position is at EOF after readlines(), so this appends
                    efile.write('/' + link + '\n')
    except IOError:
        # Best-effort only; a missing/unwritable exclude file is not fatal
        bb.note('Failed to hide EXTERNALSRC_SYMLINKS from git')
}
181 | |||
# Prefunc for do_compile: announce loudly that an external source tree is in
# use, since forgetting about it could lead to much confusion.
python externalsrc_compile_prefunc() {
    recipe_name = d.getVar('PN')
    src_tree = d.getVar('EXTERNALSRC')
    bb.plain('NOTE: %s: compiling from external source tree %s' % (recipe_name, src_tree))
}
186 | |||
# Run from ${B} (the last entry in [dirs] is the task's working directory);
# nostamp because there is no meaningful input to checksum for a clean
do_buildclean[dirs] = "${S} ${B}"
do_buildclean[nostamp] = "1"
do_buildclean[doc] = "Call 'make clean' or equivalent in ${B}"
externalsrc_do_buildclean() {
	if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
		# Remove any EXTERNALSRC_SYMLINKS link names from the current
		# directory (NOTE(review): the links are created in ${S}, so this
		# only finds them when B == S — confirm that is intended)
		rm -f ${@' '.join([x.split(':')[0] for x in (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()])}
		if [ "${CLEANBROKEN}" != "1" ]; then
			oe_runmake clean || die "make failed"
		fi
	else
		bbnote "nothing to do - no makefile found"
	fi
}
200 | |||
def srctree_hash_files(d, srcdir=None):
    """
    Compute the do_compile file-checksums entry for the external source tree.

    If the tree is a git repository (and not the same repository that covers
    TOPDIR), hash the current tree contents — including submodules — via
    'git write-tree' against a temporary index, write the sha1 into a marker
    file inside the .git directory and return that file as the checksum input.
    Otherwise fall back to a glob over the whole source directory.

    Returns a string of the form '<path>:True' suitable for the
    'file-checksums' varflag.
    """
    import shutil
    import subprocess
    import tempfile
    import hashlib

    s_dir = srcdir or d.getVar('EXTERNALSRC')
    git_dir = None

    try:
        # Resolve the source tree's git dir; if it is the same git dir that
        # covers TOPDIR, treat the tree as non-git so we don't hash the
        # build directory's own repository
        git_dir = os.path.join(s_dir,
            subprocess.check_output(['git', '-C', s_dir, 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
        top_git_dir = os.path.join(s_dir, subprocess.check_output(['git', '-C', d.getVar("TOPDIR"), 'rev-parse', '--git-dir'],
            stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
        if git_dir == top_git_dir:
            git_dir = None
    except subprocess.CalledProcessError:
        # Not inside a git work tree (or git unavailable) — fall back below
        pass

    ret = " "
    if git_dir is not None:
        oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1-%s' % d.getVar('PN'))
        with tempfile.NamedTemporaryFile(prefix='oe-devtool-index') as tmp_index:
            # Clone index
            shutil.copyfile(os.path.join(git_dir, 'index'), tmp_index.name)
            # Update our custom index
            env = os.environ.copy()
            env['GIT_INDEX_FILE'] = tmp_index.name
            subprocess.check_output(['git', 'add', '-A', '.'], cwd=s_dir, env=env)
            git_sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
            # Fold each submodule's tree hash into the overall hash as well;
            # errors from submodule handling are deliberately ignored
            submodule_helper = subprocess.check_output(['git', 'submodule--helper', 'list'], cwd=s_dir, env=env).decode("utf-8")
            for line in submodule_helper.splitlines():
                module_dir = os.path.join(s_dir, line.rsplit(maxsplit=1)[1])
                if os.path.isdir(module_dir):
                    proc = subprocess.Popen(['git', 'add', '-A', '.'], cwd=module_dir, env=env, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
                    proc.communicate()
                    proc = subprocess.Popen(['git', 'write-tree'], cwd=module_dir, env=env, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
                    stdout, _ = proc.communicate()
                    git_sha1 += stdout.decode("utf-8")
            sha1 = hashlib.sha1(git_sha1.encode("utf-8")).hexdigest()
        with open(oe_hash_file, 'w') as fobj:
            fobj.write(sha1)
        ret = oe_hash_file + ':True'
    else:
        # No usable git repository: depend on everything under the source dir
        ret = s_dir + '/*:True'
    return ret
247 | |||
def srctree_configure_hash_files(d):
    """
    Get the list of files that should trigger do_configure to re-execute,
    based on the value of CONFIGURE_FILES
    """
    entries = (d.getVar('CONFIGURE_FILES') or '').split()
    # Absolute entries are checked directly; bare names are searched for
    # anywhere under the external source tree
    absolute = [entry for entry in entries if entry.startswith('/')]
    wanted_names = {entry for entry in entries if not entry.startswith('/')}
    items = ['%s:%s' % (path, os.path.exists(path)) for path in absolute]
    if wanted_names:
        src_root = d.getVar('EXTERNALSRC')
        for dirpath, _, filenames in os.walk(src_root):
            items.extend('%s:True' % os.path.join(dirpath, name)
                         for name in filenames if name in wanted_names)
    return ' '.join(items)
268 | |||
# Expose externalsrc_do_buildclean as this class's do_buildclean implementation
EXPORT_FUNCTIONS do_buildclean