Diffstat (limited to 'meta/classes/externalsrc.bbclass')
-rw-r--r--  meta/classes/externalsrc.bbclass | 131
1 file changed, 77 insertions(+), 54 deletions(-)
diff --git a/meta/classes/externalsrc.bbclass b/meta/classes/externalsrc.bbclass
index c7b2bf2f49..527c99ab69 100644
--- a/meta/classes/externalsrc.bbclass
+++ b/meta/classes/externalsrc.bbclass
@@ -2,7 +2,8 @@
 # Author: Richard Purdie
 # Some code and influence taken from srctree.bbclass:
 # Copyright (C) 2009 Chris Larson <clarson@kergoth.com>
-# Released under the MIT license (see COPYING.MIT for the terms)
+#
+# SPDX-License-Identifier: MIT
 #
 # externalsrc.bbclass enables use of an existing source tree, usually external to
 # the build system to build a piece of software rather than the usual fetch/unpack/patch
@@ -13,7 +14,7 @@
 # called "myrecipe" you would do:
 #
 # INHERIT += "externalsrc"
-# EXTERNALSRC_pn-myrecipe = "/path/to/my/source/tree"
+# EXTERNALSRC:pn-myrecipe = "/path/to/my/source/tree"
 #
 # In order to make this class work for both target and native versions (or with
 # multilibs/cross or other BBCLASSEXTEND variants), B is set to point to a separate
@@ -21,12 +22,26 @@
 # the default, but the build directory can be set to the source directory if
 # circumstances dictate by setting EXTERNALSRC_BUILD to the same value, e.g.:
 #
-# EXTERNALSRC_BUILD_pn-myrecipe = "/path/to/my/source/tree"
+# EXTERNALSRC_BUILD:pn-myrecipe = "/path/to/my/source/tree"
 #
 
 SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch"
 EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}"
 
+def find_git_dir(d, s_dir):
+    import subprocess
+    git_dir = None
+    try:
+        git_dir = os.path.join(s_dir,
+            subprocess.check_output(['git', '-C', s_dir, 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
+        top_git_dir = os.path.join(d.getVar("TOPDIR"),
+            subprocess.check_output(['git', '-C', d.getVar("TOPDIR"), 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
+        if git_dir == top_git_dir:
+            git_dir = None
+    except subprocess.CalledProcessError:
+        pass
+    return git_dir
+
 python () {
     externalsrc = d.getVar('EXTERNALSRC')
     externalsrcbuild = d.getVar('EXTERNALSRC_BUILD')
@@ -45,11 +60,11 @@ python () {
     if bpn == d.getVar('PN') or not classextend:
         if (externalsrc or
                 ('native' in classextend and
-                 d.getVar('EXTERNALSRC_pn-%s-native' % bpn)) or
+                 d.getVar('EXTERNALSRC:pn-%s-native' % bpn)) or
                 ('nativesdk' in classextend and
-                 d.getVar('EXTERNALSRC_pn-nativesdk-%s' % bpn)) or
+                 d.getVar('EXTERNALSRC:pn-nativesdk-%s' % bpn)) or
                 ('cross' in classextend and
-                 d.getVar('EXTERNALSRC_pn-%s-cross' % bpn))):
+                 d.getVar('EXTERNALSRC:pn-%s-cross' % bpn))):
             d.setVar('BB_DONT_CACHE', '1')
 
     if externalsrc:
@@ -60,22 +75,21 @@ python () {
         if externalsrcbuild:
             d.setVar('B', externalsrcbuild)
         else:
-            d.setVar('B', '${WORKDIR}/${BPN}-${PV}/')
+            d.setVar('B', '${WORKDIR}/${BPN}-${PV}')
 
+        bb.fetch.get_hashvalue(d)
         local_srcuri = []
         fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
         for url in fetch.urls:
             url_data = fetch.ud[url]
             parm = url_data.parm
-            if (url_data.type == 'file' or
-                    url_data.type == 'npmsw' or
-                    'type' in parm and parm['type'] == 'kmeta'):
+            if url_data.type in ['file', 'npmsw', 'crate'] or parm.get('type') in ['kmeta', 'git-dependency']:
                 local_srcuri.append(url)
 
         d.setVar('SRC_URI', ' '.join(local_srcuri))
 
-        # Dummy value because the default function can't be called with blank SRC_URI
-        d.setVar('SRCPV', '999')
+        # sstate is never going to work for external source trees, disable it
+        d.setVar('SSTATE_SKIP_CREATION', '1')
 
         if d.getVar('CONFIGUREOPT_DEPTRACK') == '--disable-dependency-tracking':
             d.setVar('CONFIGUREOPT_DEPTRACK', '')
@@ -83,32 +97,42 @@ python () {
         tasks = filter(lambda k: d.getVarFlag(k, "task"), d.keys())
 
         for task in tasks:
-            if task.endswith("_setscene"):
-                # sstate is never going to work for external source trees, disable it
-                bb.build.deltask(task, d)
-            elif os.path.realpath(d.getVar('S')) == os.path.realpath(d.getVar('B')):
+            if os.path.realpath(d.getVar('S')) == os.path.realpath(d.getVar('B')):
                 # Since configure will likely touch ${S}, ensure only we lock so one task has access at a time
                 d.appendVarFlag(task, "lockfiles", " ${S}/singletask.lock")
 
-            # We do not want our source to be wiped out, ever (kernel.bbclass does this for do_clean)
-            cleandirs = oe.recipeutils.split_var_value(d.getVarFlag(task, 'cleandirs', False) or '')
-            setvalue = False
-            for cleandir in cleandirs[:]:
-                if oe.path.is_path_parent(externalsrc, d.expand(cleandir)):
-                    cleandirs.remove(cleandir)
-                    setvalue = True
-            if setvalue:
-                d.setVarFlag(task, 'cleandirs', ' '.join(cleandirs))
+        for v in d.keys():
+            cleandirs = d.getVarFlag(v, "cleandirs", False)
+            if cleandirs:
+                # We do not want our source to be wiped out, ever (kernel.bbclass does this for do_clean)
+                cleandirs = oe.recipeutils.split_var_value(cleandirs)
+                setvalue = False
+                for cleandir in cleandirs[:]:
+                    if oe.path.is_path_parent(externalsrc, d.expand(cleandir)):
+                        cleandirs.remove(cleandir)
+                        setvalue = True
+                if setvalue:
+                    d.setVarFlag(v, 'cleandirs', ' '.join(cleandirs))
 
         fetch_tasks = ['do_fetch', 'do_unpack']
         # If we deltask do_patch, there's no dependency to ensure do_unpack gets run, so add one
         # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string
         d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack'])
+        d.setVarFlag('do_populate_lic', 'deps', (d.getVarFlag('do_populate_lic', 'deps', False) or []) + ['do_unpack'])
 
         for task in d.getVar("SRCTREECOVEREDTASKS").split():
             if local_srcuri and task in fetch_tasks:
                 continue
             bb.build.deltask(task, d)
+            if task == 'do_unpack':
+                # The reproducible build create_source_date_epoch_stamp function must
+                # be run after the source is available and before the
+                # do_deploy_source_date_epoch task. In the normal case, it's attached
+                # to do_unpack as a postfuncs, but since we removed do_unpack (above)
+                # we need to move the function elsewhere. The easiest thing to do is
+                # move it into the prefuncs of the do_deploy_source_date_epoch task.
+                # This is safe, as externalsrc runs with the source already unpacked.
+                d.prependVarFlag('do_deploy_source_date_epoch', 'prefuncs', 'create_source_date_epoch_stamp ')
 
         d.prependVarFlag('do_compile', 'prefuncs', "externalsrc_compile_prefunc ")
         d.prependVarFlag('do_configure', 'prefuncs', "externalsrc_configure_prefunc ")
@@ -116,6 +140,9 @@ python () {
         d.setVarFlag('do_compile', 'file-checksums', '${@srctree_hash_files(d)}')
         d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')
 
+        d.appendVarFlag('do_compile', 'prefuncs', ' fetcher_hashes_dummyfunc')
+        d.appendVarFlag('do_configure', 'prefuncs', ' fetcher_hashes_dummyfunc')
+
         # We don't want the workdir to go away
         d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))
 
@@ -156,14 +183,16 @@ python externalsrc_configure_prefunc() {
             newlinks.append(symsplit[0])
     # Hide the symlinks from git
     try:
-        git_exclude_file = os.path.join(s_dir, '.git/info/exclude')
-        if os.path.exists(git_exclude_file):
-            with open(git_exclude_file, 'r+') as efile:
-                elines = efile.readlines()
-                for link in newlinks:
-                    if link in elines or '/'+link in elines:
-                        continue
-                    efile.write('/' + link + '\n')
+        git_dir = find_git_dir(d, s_dir)
+        if git_dir:
+            git_exclude_file = os.path.join(git_dir, 'info/exclude')
+            if os.path.exists(git_exclude_file):
+                with open(git_exclude_file, 'r+') as efile:
+                    elines = efile.readlines()
+                    for link in newlinks:
+                        if link in elines or '/'+link in elines:
+                            continue
+                        efile.write('/' + link + '\n')
     except IOError as ioe:
         bb.note('Failed to hide EXTERNALSRC_SYMLINKS from git')
 }
@@ -194,17 +223,7 @@ def srctree_hash_files(d, srcdir=None):
     import hashlib
 
     s_dir = srcdir or d.getVar('EXTERNALSRC')
-    git_dir = None
-
-    try:
-        git_dir = os.path.join(s_dir,
-            subprocess.check_output(['git', '-C', s_dir, 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
-        top_git_dir = os.path.join(s_dir, subprocess.check_output(['git', '-C', d.getVar("TOPDIR"), 'rev-parse', '--git-dir'],
-            stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
-        if git_dir == top_git_dir:
-            git_dir = None
-    except subprocess.CalledProcessError:
-        pass
+    git_dir = find_git_dir(d, s_dir)
 
     ret = " "
     if git_dir is not None:
@@ -217,14 +236,16 @@ def srctree_hash_files(d, srcdir=None):
             env['GIT_INDEX_FILE'] = tmp_index.name
             subprocess.check_output(['git', 'add', '-A', '.'], cwd=s_dir, env=env)
             git_sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
-            submodule_helper = subprocess.check_output(['git', 'submodule--helper', 'list'], cwd=s_dir, env=env).decode("utf-8")
-            for line in submodule_helper.splitlines():
-                module_dir = os.path.join(s_dir, line.rsplit(maxsplit=1)[1])
-                proc = subprocess.Popen(['git', 'add', '-A', '.'], cwd=module_dir, env=env, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
-                proc.communicate()
-                proc = subprocess.Popen(['git', 'write-tree'], cwd=module_dir, env=env, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
-                stdout, _ = proc.communicate()
-                git_sha1 += stdout.decode("utf-8")
+            if os.path.exists(os.path.join(s_dir, ".gitmodules")) and os.path.getsize(os.path.join(s_dir, ".gitmodules")) > 0:
+                submodule_helper = subprocess.check_output(["git", "config", "--file", ".gitmodules", "--get-regexp", "path"], cwd=s_dir, env=env).decode("utf-8")
+                for line in submodule_helper.splitlines():
+                    module_dir = os.path.join(s_dir, line.rsplit(maxsplit=1)[1])
+                    if os.path.isdir(module_dir):
+                        proc = subprocess.Popen(['git', 'add', '-A', '.'], cwd=module_dir, env=env, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+                        proc.communicate()
+                        proc = subprocess.Popen(['git', 'write-tree'], cwd=module_dir, env=env, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
+                        stdout, _ = proc.communicate()
+                        git_sha1 += stdout.decode("utf-8")
             sha1 = hashlib.sha1(git_sha1.encode("utf-8")).hexdigest()
             with open(oe_hash_file, 'w') as fobj:
                 fobj.write(sha1)
@@ -238,6 +259,8 @@ def srctree_configure_hash_files(d):
     Get the list of files that should trigger do_configure to re-execute,
     based on the value of CONFIGURE_FILES
     """
+    import fnmatch
+
     in_files = (d.getVar('CONFIGURE_FILES') or '').split()
     out_items = []
     search_files = []
@@ -249,8 +272,8 @@ def srctree_configure_hash_files(d):
     if search_files:
         s_dir = d.getVar('EXTERNALSRC')
         for root, _, files in os.walk(s_dir):
-            for f in files:
-                if f in search_files:
+            for p in search_files:
+                for f in fnmatch.filter(files, p):
                     out_items.append('%s:True' % os.path.join(root, f))
     return ' '.join(out_items)
 