Diffstat (limited to 'scripts/lib/devtool')
-rw-r--r--  scripts/lib/devtool/__init__.py                27
-rw-r--r--  scripts/lib/devtool/build_image.py              2
-rw-r--r--  scripts/lib/devtool/build_sdk.py                2
-rw-r--r--  scripts/lib/devtool/deploy.py                 240
-rw-r--r--  scripts/lib/devtool/ide_plugins/__init__.py   282
-rw-r--r--  scripts/lib/devtool/ide_plugins/ide_code.py   463
-rw-r--r--  scripts/lib/devtool/ide_plugins/ide_none.py    53
-rwxr-xr-x  scripts/lib/devtool/ide_sdk.py               1070
-rw-r--r--  scripts/lib/devtool/menuconfig.py               4
-rw-r--r--  scripts/lib/devtool/sdk.py                      5
-rw-r--r--  scripts/lib/devtool/search.py                   5
-rw-r--r--  scripts/lib/devtool/standard.py               540
-rw-r--r--  scripts/lib/devtool/upgrade.py                196
13 files changed, 2493 insertions, 396 deletions
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
index 702db669de..6133c1c5b4 100644
--- a/scripts/lib/devtool/__init__.py
+++ b/scripts/lib/devtool/__init__.py
@@ -78,12 +78,15 @@ def exec_fakeroot(d, cmd, **kwargs):
     """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions"""
     # Grab the command and check it actually exists
     fakerootcmd = d.getVar('FAKEROOTCMD')
+    fakerootenv = d.getVar('FAKEROOTENV')
+    exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs)
+
+def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs):
     if not os.path.exists(fakerootcmd):
         logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built' % fakerootcmd)
         return 2
     # Set up the appropriate environment
     newenv = dict(os.environ)
-    fakerootenv = d.getVar('FAKEROOTENV')
     for varvalue in fakerootenv.split():
         if '=' in varvalue:
             splitval = varvalue.split('=', 1)
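The split above leaves exec_fakeroot() as a thin wrapper that pulls FAKEROOTCMD and FAKEROOTENV out of the datastore, while exec_fakeroot_no_d() does the actual work from plain strings. A minimal sketch of what the no_d variant has to do with FAKEROOTENV (the subprocess call is an assumption; the tail of the function lies outside this hunk):

```python
import os
import subprocess

def run_under_pseudo(fakerootcmd, fakerootenv, cmd):
    """Sketch (hypothetical helper): run cmd wrapped by the pseudo binary."""
    # FAKEROOTENV is a space-separated list of VAR=value assignments,
    # e.g. PSEUDO_PREFIX=... PSEUDO_LOCALSTATEDIR=...
    newenv = dict(os.environ)
    for varvalue in fakerootenv.split():
        if '=' in varvalue:
            var, value = varvalue.split('=', 1)
            newenv[var] = value
    return subprocess.call('%s %s' % (fakerootcmd, cmd), shell=True, env=newenv)
```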
@@ -233,6 +236,28 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
     bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
     bb.process.run('git tag -f %s' % basetag, cwd=repodir)
 
+    # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now,
+    # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe
+    stdout, _ = bb.process.run("git status --porcelain", cwd=repodir)
+    found = False
+    for line in stdout.splitlines():
+        if line.endswith("/"):
+            new_dir = line.split()[1]
+            for root, dirs, files in os.walk(os.path.join(repodir, new_dir)):
+                if ".git" in dirs + files:
+                    (stdout, _) = bb.process.run('git remote', cwd=root)
+                    remote = stdout.splitlines()[0]
+                    (stdout, _) = bb.process.run('git remote get-url %s' % remote, cwd=root)
+                    remote_url = stdout.splitlines()[0]
+                    logger.error(os.path.relpath(os.path.join(root, ".."), root))
+                    bb.process.run('git submodule add %s %s' % (remote_url, os.path.relpath(root, os.path.join(root, ".."))), cwd=os.path.join(root, ".."))
+                    found = True
+            if found:
+                oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d)
+                found = False
+
+    if os.path.exists(os.path.join(repodir, '.gitmodules')):
+        bb.process.run('git submodule foreach --recursive "git tag -f %s"' % basetag, cwd=repodir)
+
 def recipe_to_append(recipefile, config, wildcard=False):
     """
     Convert a recipe file to a bbappend file path within the workspace.
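The submodule handling added above keys off `git status --porcelain`, where an untracked directory is reported as a line ending in a slash (for example `?? libfoo/`). The detection step in isolation, as a sketch (the repo path is a placeholder):

```python
import os
import subprocess

def find_nested_git_repos(repodir):
    """Sketch: list untracked directories that contain a .git entry."""
    out = subprocess.check_output(['git', 'status', '--porcelain'],
                                  cwd=repodir, text=True)
    nested = []
    for line in out.splitlines():
        if line.endswith('/'):  # untracked directory, e.g. "?? libfoo/"
            new_dir = line.split()[1]
            for root, dirs, files in os.walk(os.path.join(repodir, new_dir)):
                # ".git" may be a directory (full clone) or a file (gitlink)
                if '.git' in dirs + files:
                    nested.append(root)
    return nested
```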
diff --git a/scripts/lib/devtool/build_image.py b/scripts/lib/devtool/build_image.py
index 9388abbacf..980f90ddd6 100644
--- a/scripts/lib/devtool/build_image.py
+++ b/scripts/lib/devtool/build_image.py
@@ -113,7 +113,7 @@ def build_image_task(config, basepath, workspace, image, add_packages=None, task
     with open(appendfile, 'w') as afile:
         if packages:
             # include packages from workspace recipes into the image
-            afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(packages))
+            afile.write('IMAGE_INSTALL:append = " %s"\n' % ' '.join(packages))
         if not task:
             logger.info('Building image %s with the following '
                         'additional packages: %s', image, ' '.join(packages))
diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py
index 6fe02fff2a..1cd4831d2b 100644
--- a/scripts/lib/devtool/build_sdk.py
+++ b/scripts/lib/devtool/build_sdk.py
@@ -13,7 +13,7 @@ import shutil
 import errno
 import sys
 import tempfile
-from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
+from devtool import DevtoolError
 from devtool import build_image
 
 logger = logging.getLogger('devtool')
diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py
index e5af2c95ae..b5ca8f2c2f 100644
--- a/scripts/lib/devtool/deploy.py
+++ b/scripts/lib/devtool/deploy.py
@@ -16,7 +16,7 @@ import bb.utils
 import argparse_oe
 import oe.types
 
-from devtool import exec_fakeroot, setup_tinfoil, check_workspace_recipe, DevtoolError
+from devtool import exec_fakeroot_no_d, setup_tinfoil, check_workspace_recipe, DevtoolError
 
 logger = logging.getLogger('devtool')
 
@@ -133,16 +133,38 @@ def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=Fals
 
     return '\n'.join(lines)
 
-
-
 def deploy(args, config, basepath, workspace):
     """Entry point for the devtool 'deploy' subcommand"""
-    import math
-    import oe.recipeutils
-    import oe.package
+    import oe.utils
 
     check_workspace_recipe(workspace, args.recipename, checksrc=False)
 
+    tinfoil = setup_tinfoil(basepath=basepath)
+    try:
+        try:
+            rd = tinfoil.parse_recipe(args.recipename)
+        except Exception as e:
+            raise DevtoolError('Exception parsing recipe %s: %s' %
+                               (args.recipename, e))
+
+        srcdir = rd.getVar('D')
+        workdir = rd.getVar('WORKDIR')
+        path = rd.getVar('PATH')
+        strip_cmd = rd.getVar('STRIP')
+        libdir = rd.getVar('libdir')
+        base_libdir = rd.getVar('base_libdir')
+        max_process = oe.utils.get_bb_number_threads(rd)
+        fakerootcmd = rd.getVar('FAKEROOTCMD')
+        fakerootenv = rd.getVar('FAKEROOTENV')
+    finally:
+        tinfoil.shutdown()
+
+    return deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args)
+
+def deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args):
+    import math
+    import oe.package
+
     try:
         host, destdir = args.target.split(':')
     except ValueError:
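Note the shape of the refactoring: deploy() reads every needed variable while tinfoil is alive, shuts the server down, and hands plain strings to deploy_no_d(), which has no BitBake dependency at all; this is what allows the new ide-sdk code paths to reuse it. The pattern, reduced to a sketch with illustrative names:

```python
def gather_params(rd):
    """Step 1: extract plain values while the datastore is available."""
    return {
        'srcdir': rd.getVar('D'),
        'workdir': rd.getVar('WORKDIR'),
        'fakerootcmd': rd.getVar('FAKEROOTCMD'),
        'fakerootenv': rd.getVar('FAKEROOTENV'),
    }

def do_deploy_no_d(srcdir, workdir, fakerootcmd, fakerootenv):
    """Step 2: work only on plain strings - no tinfoil, no datastore."""
    print('deploying from %s (workdir %s)' % (srcdir, workdir))

# Usage: params = gather_params(rd); tinfoil.shutdown(); do_deploy_no_d(**params)
```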
@@ -152,118 +174,108 @@ def deploy(args, config, basepath, workspace):
     if not destdir.endswith('/'):
         destdir += '/'
 
-    tinfoil = setup_tinfoil(basepath=basepath)
-    try:
-        try:
-            rd = tinfoil.parse_recipe(args.recipename)
-        except Exception as e:
-            raise DevtoolError('Exception parsing recipe %s: %s' %
-                               (args.recipename, e))
-        recipe_outdir = rd.getVar('D')
-        if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
-            raise DevtoolError('No files to deploy - have you built the %s '
-                               'recipe? If so, the install step has not installed '
-                               'any files.' % args.recipename)
-
-        if args.strip and not args.dry_run:
-            # Fakeroot copy to new destination
-            srcdir = recipe_outdir
-            recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'deploy-target-stripped')
-            if os.path.isdir(recipe_outdir):
-                bb.utils.remove(recipe_outdir, True)
-            exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
-            os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or ''])
-            oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'),
-                                   rd.getVar('base_libdir'), rd)
-
-        filelist = []
-        inodes = set({})
-        ftotalsize = 0
-        for root, _, files in os.walk(recipe_outdir):
-            for fn in files:
-                fstat = os.lstat(os.path.join(root, fn))
-                # Get the size in kiB (since we'll be comparing it to the output of du -k)
-                # MUST use lstat() here not stat() or getfilesize() since we don't want to
-                # dereference symlinks
-                if fstat.st_ino in inodes:
-                    fsize = 0
-                else:
-                    fsize = int(math.ceil(float(fstat.st_size)/1024))
-                    inodes.add(fstat.st_ino)
-                ftotalsize += fsize
-                # The path as it would appear on the target
-                fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
-                filelist.append((fpath, fsize))
-
-        if args.dry_run:
-            print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
-            for item, _ in filelist:
-                print(' %s' % item)
-            return 0
-
-        extraoptions = ''
-        if args.no_host_check:
-            extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
-        if not args.show_status:
-            extraoptions += ' -q'
-
-        scp_sshexec = ''
-        ssh_sshexec = 'ssh'
-        if args.ssh_exec:
-            scp_sshexec = "-S %s" % args.ssh_exec
-            ssh_sshexec = args.ssh_exec
-        scp_port = ''
-        ssh_port = ''
-        if args.port:
-            scp_port = "-P %s" % args.port
-            ssh_port = "-p %s" % args.port
-
-        if args.key:
-            extraoptions += ' -i %s' % args.key
-
-        # In order to delete previously deployed files and have the manifest file on
-        # the target, we write out a shell script and then copy it to the target
-        # so we can then run it (piping tar output to it).
-        # (We cannot use scp here, because it doesn't preserve symlinks.)
-        tmpdir = tempfile.mkdtemp(prefix='devtool')
-        try:
-            tmpscript = '/tmp/devtool_deploy.sh'
-            tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
-            shellscript = _prepare_remote_script(deploy=True,
-                                                 verbose=args.show_status,
-                                                 nopreserve=args.no_preserve,
-                                                 nocheckspace=args.no_check_space)
-            # Write out the script to a file
-            with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
-                f.write(shellscript)
-            # Write out the file list
-            with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
-                f.write('%d\n' % ftotalsize)
-                for fpath, fsize in filelist:
-                    f.write('%s %d\n' % (fpath, fsize))
-            # Copy them to the target
-            ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
-            if ret != 0:
-                raise DevtoolError('Failed to copy script to %s - rerun with -s to '
-                                   'get a complete error message' % args.target)
-        finally:
-            shutil.rmtree(tmpdir)
-
-        # Now run the script
-        ret = exec_fakeroot(rd, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
-        if ret != 0:
-            raise DevtoolError('Deploy failed - rerun with -s to get a complete '
-                               'error message')
-
-        logger.info('Successfully deployed %s' % recipe_outdir)
-
-        files_list = []
-        for root, _, files in os.walk(recipe_outdir):
-            for filename in files:
-                filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
-                files_list.append(os.path.join(destdir, filename))
-    finally:
-        tinfoil.shutdown()
+    recipe_outdir = srcdir
+    if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
+        raise DevtoolError('No files to deploy - have you built the %s '
+                           'recipe? If so, the install step has not installed '
+                           'any files.' % args.recipename)
+
+    if args.strip and not args.dry_run:
+        # Fakeroot copy to new destination
+        srcdir = recipe_outdir
+        recipe_outdir = os.path.join(workdir, 'devtool-deploy-target-stripped')
+        if os.path.isdir(recipe_outdir):
+            exec_fakeroot_no_d(fakerootcmd, fakerootenv, "rm -rf %s" % recipe_outdir, shell=True)
+        exec_fakeroot_no_d(fakerootcmd, fakerootenv, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
+        os.environ['PATH'] = ':'.join([os.environ['PATH'], path or ''])
+        oe.package.strip_execs(args.recipename, recipe_outdir, strip_cmd, libdir, base_libdir, max_process)
+
+    filelist = []
+    inodes = set({})
+    ftotalsize = 0
+    for root, _, files in os.walk(recipe_outdir):
+        for fn in files:
+            fstat = os.lstat(os.path.join(root, fn))
+            # Get the size in kiB (since we'll be comparing it to the output of du -k)
+            # MUST use lstat() here not stat() or getfilesize() since we don't want to
+            # dereference symlinks
+            if fstat.st_ino in inodes:
+                fsize = 0
+            else:
+                fsize = int(math.ceil(float(fstat.st_size)/1024))
+                inodes.add(fstat.st_ino)
+            ftotalsize += fsize
+            # The path as it would appear on the target
+            fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
+            filelist.append((fpath, fsize))
+
+    if args.dry_run:
+        print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
+        for item, _ in filelist:
+            print(' %s' % item)
+        return 0
+
+    extraoptions = ''
+    if args.no_host_check:
+        extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+    if not args.show_status:
+        extraoptions += ' -q'
+
+    scp_sshexec = ''
+    ssh_sshexec = 'ssh'
+    if args.ssh_exec:
+        scp_sshexec = "-S %s" % args.ssh_exec
+        ssh_sshexec = args.ssh_exec
+    scp_port = ''
+    ssh_port = ''
+    if args.port:
+        scp_port = "-P %s" % args.port
+        ssh_port = "-p %s" % args.port
+
+    if args.key:
+        extraoptions += ' -i %s' % args.key
+
+    # In order to delete previously deployed files and have the manifest file on
+    # the target, we write out a shell script and then copy it to the target
+    # so we can then run it (piping tar output to it).
+    # (We cannot use scp here, because it doesn't preserve symlinks.)
+    tmpdir = tempfile.mkdtemp(prefix='devtool')
+    try:
+        tmpscript = '/tmp/devtool_deploy.sh'
+        tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
+        shellscript = _prepare_remote_script(deploy=True,
+                                             verbose=args.show_status,
+                                             nopreserve=args.no_preserve,
+                                             nocheckspace=args.no_check_space)
+        # Write out the script to a file
+        with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
+            f.write(shellscript)
+        # Write out the file list
+        with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
+            f.write('%d\n' % ftotalsize)
+            for fpath, fsize in filelist:
+                f.write('%s %d\n' % (fpath, fsize))
+        # Copy them to the target
+        ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
+        if ret != 0:
+            raise DevtoolError('Failed to copy script to %s - rerun with -s to '
+                               'get a complete error message' % args.target)
+    finally:
+        shutil.rmtree(tmpdir)
+
+    # Now run the script
+    ret = exec_fakeroot_no_d(fakerootcmd, fakerootenv, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
+    if ret != 0:
+        raise DevtoolError('Deploy failed - rerun with -s to get a complete '
+                           'error message')
+
+    logger.info('Successfully deployed %s' % recipe_outdir)
+
+    files_list = []
+    for root, _, files in os.walk(recipe_outdir):
+        for filename in files:
+            filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
+            files_list.append(os.path.join(destdir, filename))
 
     return 0
 
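The transfer itself streams a tarball of the staged files into a shell script on the target (`tar cf - . | ssh ... 'sh devtool_deploy.sh ...'`) because scp would not preserve symlinks. Stripped of the manifest and space checks, the mechanism looks roughly like this (host and paths are placeholders, and a plain `tar xf -` stands in for the generated remote script):

```python
import subprocess

def push_tree(local_dir, target='root@192.168.7.2', destdir='/'):
    """Sketch: stream a directory tree to a remote host, preserving symlinks."""
    cmd = "tar cf - . | ssh %s 'tar xf - -C %s'" % (target, destdir)
    return subprocess.call(cmd, cwd=local_dir, shell=True)
```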
diff --git a/scripts/lib/devtool/ide_plugins/__init__.py b/scripts/lib/devtool/ide_plugins/__init__.py
new file mode 100644
index 0000000000..19c2f61c5f
--- /dev/null
+++ b/scripts/lib/devtool/ide_plugins/__init__.py
@@ -0,0 +1,282 @@
+#
+# Copyright (C) 2023-2024 Siemens AG
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+"""Devtool ide-sdk IDE plugin interface definition and helper functions"""
+
+import errno
+import json
+import logging
+import os
+import stat
+from enum import Enum, auto
+from devtool import DevtoolError
+from bb.utils import mkdirhier
+
+logger = logging.getLogger('devtool')
+
+
+class BuildTool(Enum):
+    UNDEFINED = auto()
+    CMAKE = auto()
+    MESON = auto()
+
+    @property
+    def is_c_ccp(self):
+        if self is BuildTool.CMAKE:
+            return True
+        if self is BuildTool.MESON:
+            return True
+        return False
+
+
+class GdbCrossConfig:
+    """Base class defining the GDB configuration generator interface
+
+    Generate a GDB configuration for a binary on the target device.
+    Only one instance per binary is allowed. This allows to assign unique port
+    numbers for all gdbserver instances.
+    """
+    _gdbserver_port_next = 1234
+    _binaries = []
+
+    def __init__(self, image_recipe, modified_recipe, binary, gdbserver_multi=True):
+        self.image_recipe = image_recipe
+        self.modified_recipe = modified_recipe
+        self.gdb_cross = modified_recipe.gdb_cross
+        self.binary = binary
+        if binary in GdbCrossConfig._binaries:
+            raise DevtoolError(
+                "gdbserver config for binary %s is already generated" % binary)
+        GdbCrossConfig._binaries.append(binary)
+        self.script_dir = modified_recipe.ide_sdk_scripts_dir
+        self.gdbinit_dir = os.path.join(self.script_dir, 'gdbinit')
+        self.gdbserver_multi = gdbserver_multi
+        self.binary_pretty = self.binary.replace(os.sep, '-').lstrip('-')
+        self.gdbserver_port = GdbCrossConfig._gdbserver_port_next
+        GdbCrossConfig._gdbserver_port_next += 1
+        self.id_pretty = "%d_%s" % (self.gdbserver_port, self.binary_pretty)
+        # gdbserver start script
+        gdbserver_script_file = 'gdbserver_' + self.id_pretty
+        if self.gdbserver_multi:
+            gdbserver_script_file += "_m"
+        self.gdbserver_script = os.path.join(
+            self.script_dir, gdbserver_script_file)
+        # gdbinit file
+        self.gdbinit = os.path.join(
+            self.gdbinit_dir, 'gdbinit_' + self.id_pretty)
+        # gdb start script
+        self.gdb_script = os.path.join(
+            self.script_dir, 'gdb_' + self.id_pretty)
+
+    def _gen_gdbserver_start_script(self):
+        """Generate a shell command starting the gdbserver on the remote device via ssh
+
+        GDB supports two modes:
+        multi: gdbserver remains running over several debug sessions
+        once: gdbserver terminates after the debugged process terminates
+        """
+        cmd_lines = ['#!/bin/sh']
+        if self.gdbserver_multi:
+            temp_dir = "TEMP_DIR=/tmp/gdbserver_%s; " % self.id_pretty
+            gdbserver_cmd_start = temp_dir
+            gdbserver_cmd_start += "test -f \\$TEMP_DIR/pid && exit 0; "
+            gdbserver_cmd_start += "mkdir -p \\$TEMP_DIR; "
+            gdbserver_cmd_start += "%s --multi :%s > \\$TEMP_DIR/log 2>&1 & " % (
+                self.gdb_cross.gdbserver_path, self.gdbserver_port)
+            gdbserver_cmd_start += "echo \\$! > \\$TEMP_DIR/pid;"
+
+            gdbserver_cmd_stop = temp_dir
+            gdbserver_cmd_stop += "test -f \\$TEMP_DIR/pid && kill \\$(cat \\$TEMP_DIR/pid); "
+            gdbserver_cmd_stop += "rm -rf \\$TEMP_DIR; "
+
+            gdbserver_cmd_l = []
+            gdbserver_cmd_l.append('if [ "$1" = "stop" ]; then')
+            gdbserver_cmd_l.append(' shift')
+            gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % (
+                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_stop))
+            gdbserver_cmd_l.append('else')
+            gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % (
+                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start))
+            gdbserver_cmd_l.append('fi')
+            gdbserver_cmd = os.linesep.join(gdbserver_cmd_l)
+        else:
+            gdbserver_cmd_start = "%s --once :%s %s" % (
+                self.gdb_cross.gdbserver_path, self.gdbserver_port, self.binary)
+            gdbserver_cmd = "%s %s %s %s 'sh -c \"%s\"'" % (
+                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start)
+        cmd_lines.append(gdbserver_cmd)
+        GdbCrossConfig.write_file(self.gdbserver_script, cmd_lines, True)
+
+    def _gen_gdbinit_config(self):
+        """Generate a gdbinit file for this binary and the corresponding gdbserver configuration"""
+        gdbinit_lines = ['# This file is generated by devtool ide-sdk']
+        if self.gdbserver_multi:
+            target_help = '# gdbserver --multi :%d' % self.gdbserver_port
+            remote_cmd = 'target extended-remote'
+        else:
+            target_help = '# gdbserver :%d %s' % (
+                self.gdbserver_port, self.binary)
+            remote_cmd = 'target remote'
+        gdbinit_lines.append('# On the remote target:')
+        gdbinit_lines.append(target_help)
+        gdbinit_lines.append('# On the build machine:')
+        gdbinit_lines.append('# cd ' + self.modified_recipe.real_srctree)
+        gdbinit_lines.append(
+            '# ' + self.gdb_cross.gdb + ' -ix ' + self.gdbinit)
+
+        gdbinit_lines.append('set sysroot ' + self.modified_recipe.d)
+        gdbinit_lines.append('set substitute-path "/usr/include" "' +
+                             os.path.join(self.modified_recipe.recipe_sysroot, 'usr', 'include') + '"')
+        # Disable debuginfod for now, the IDE configuration uses rootfs-dbg from the image workdir.
+        gdbinit_lines.append('set debuginfod enabled off')
+        if self.image_recipe.rootfs_dbg:
+            gdbinit_lines.append(
+                'set solib-search-path "' + self.modified_recipe.solib_search_path_str(self.image_recipe) + '"')
+            # First: Search for sources of this recipe in the workspace folder
+            if self.modified_recipe.pn in self.modified_recipe.target_dbgsrc_dir:
+                gdbinit_lines.append('set substitute-path "%s" "%s"' %
+                                     (self.modified_recipe.target_dbgsrc_dir, self.modified_recipe.real_srctree))
+            else:
+                logger.error(
+                    "TARGET_DBGSRC_DIR must contain the recipe name PN.")
+            # Second: Search for sources of other recipes in the rootfs-dbg
+            if self.modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
+                gdbinit_lines.append('set substitute-path "/usr/src/debug" "%s"' % os.path.join(
+                    self.image_recipe.rootfs_dbg, "usr", "src", "debug"))
+            else:
+                logger.error(
+                    "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
+        else:
+            logger.warning(
+                "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
+        gdbinit_lines.append(
+            '%s %s:%d' % (remote_cmd, self.gdb_cross.host, self.gdbserver_port))
+        gdbinit_lines.append('set remote exec-file ' + self.binary)
+        gdbinit_lines.append(
+            'run ' + os.path.join(self.modified_recipe.d, self.binary))
+
+        GdbCrossConfig.write_file(self.gdbinit, gdbinit_lines)
+
+    def _gen_gdb_start_script(self):
+        """Generate a script starting GDB with the corresponding gdbinit configuration."""
+        cmd_lines = ['#!/bin/sh']
+        cmd_lines.append('cd ' + self.modified_recipe.real_srctree)
+        cmd_lines.append(self.gdb_cross.gdb + ' -ix ' +
+                         self.gdbinit + ' "$@"')
+        GdbCrossConfig.write_file(self.gdb_script, cmd_lines, True)
+
+    def initialize(self):
+        self._gen_gdbserver_start_script()
+        self._gen_gdbinit_config()
+        self._gen_gdb_start_script()
+
+    @staticmethod
+    def write_file(script_file, cmd_lines, executable=False):
+        script_dir = os.path.dirname(script_file)
+        mkdirhier(script_dir)
+        with open(script_file, 'w') as script_f:
+            script_f.write(os.linesep.join(cmd_lines))
+            script_f.write(os.linesep)
+        if executable:
+            st = os.stat(script_file)
+            os.chmod(script_file, st.st_mode | stat.S_IEXEC)
+        logger.info("Created: %s" % script_file)
+
+
+class IdeBase:
+    """Base class defining the interface for IDE plugins"""
+
+    def __init__(self):
+        self.ide_name = 'undefined'
+        self.gdb_cross_configs = []
+
+    @classmethod
+    def ide_plugin_priority(cls):
+        """Used to find the default ide handler if --ide is not passed"""
+        return 10
+
+    def setup_shared_sysroots(self, shared_env):
+        logger.warn("Shared sysroot mode is not supported for IDE %s" %
+                    self.ide_name)
+
+    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
+        logger.warn("Modified recipe mode is not supported for IDE %s" %
+                    self.ide_name)
+
+    def initialize_gdb_cross_configs(self, image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfig):
+        binaries = modified_recipe.find_installed_binaries()
+        for binary in binaries:
+            gdb_cross_config = gdb_cross_config_class(
+                image_recipe, modified_recipe, binary)
+            gdb_cross_config.initialize()
+            self.gdb_cross_configs.append(gdb_cross_config)
+
+    @staticmethod
+    def gen_oe_scrtips_sym_link(modified_recipe):
+        # create a sym-link from sources to the scripts directory
+        if os.path.isdir(modified_recipe.ide_sdk_scripts_dir):
+            IdeBase.symlink_force(modified_recipe.ide_sdk_scripts_dir,
+                                  os.path.join(modified_recipe.real_srctree, 'oe-scripts'))
+
+    @staticmethod
+    def update_json_file(json_dir, json_file, update_dict):
+        """Update a json file
+
+        By default it uses the dict.update function. If this is not suitable
+        the update function might be passed via update_func parameter.
+        """
+        json_path = os.path.join(json_dir, json_file)
+        logger.info("Updating IDE config file: %s (%s)" %
+                    (json_file, json_path))
+        if not os.path.exists(json_dir):
+            os.makedirs(json_dir)
+        try:
+            with open(json_path) as f:
+                orig_dict = json.load(f)
+        except json.decoder.JSONDecodeError:
+            logger.info(
+                "Decoding %s failed. Probably because of comments in the json file" % json_path)
+            orig_dict = {}
+        except FileNotFoundError:
+            orig_dict = {}
+        orig_dict.update(update_dict)
+        with open(json_path, 'w') as f:
+            json.dump(orig_dict, f, indent=4)
+
+    @staticmethod
+    def symlink_force(tgt, dst):
+        try:
+            os.symlink(tgt, dst)
+        except OSError as err:
+            if err.errno == errno.EEXIST:
+                if os.readlink(dst) != tgt:
+                    os.remove(dst)
+                    os.symlink(tgt, dst)
+            else:
+                raise err
+
+
+def get_devtool_deploy_opts(args):
+    """Filter args for devtool deploy-target args"""
+    if not args.target:
+        return None
+    devtool_deploy_opts = [args.target]
+    if args.no_host_check:
+        devtool_deploy_opts += ["-c"]
+    if args.show_status:
+        devtool_deploy_opts += ["-s"]
+    if args.no_preserve:
+        devtool_deploy_opts += ["-p"]
+    if args.no_check_space:
+        devtool_deploy_opts += ["--no-check-space"]
+    if args.ssh_exec:
+        devtool_deploy_opts += ["-e", args.ssh_exec]
+    if args.port:
+        devtool_deploy_opts += ["-P", args.port]
+    if args.key:
+        devtool_deploy_opts += ["-I", args.key]
+    if args.strip is False:
+        devtool_deploy_opts += ["--no-strip"]
+    return devtool_deploy_opts
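register_ide_plugin() appears to be the discovery hook: each module in this directory defines one, and ide-sdk calls it to fill its plugin dict, while ide_plugin_priority() picks the default when --ide is not given. A skeleton for a further plugin built on the interface above (the editor name is hypothetical):

```python
from devtool.ide_plugins import IdeBase

class IdeMyEditor(IdeBase):
    """Sketch of a plugin for a hypothetical editor."""

    @classmethod
    def ide_plugin_priority(cls):
        return 20  # below VSCode's 100, above IdeBase's default of 10

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        # Emit editor-specific config here; the GDB helper scripts and
        # gdbinit files come from the shared base-class machinery.
        if args.target:
            self.initialize_gdb_cross_configs(image_recipe, modified_recipe)

def register_ide_plugin(ide_plugins):
    ide_plugins['myeditor'] = IdeMyEditor
```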
diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py
new file mode 100644
index 0000000000..a62b93224e
--- /dev/null
+++ b/scripts/lib/devtool/ide_plugins/ide_code.py
@@ -0,0 +1,463 @@
+#
+# Copyright (C) 2023-2024 Siemens AG
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+"""Devtool ide-sdk IDE plugin for VSCode and VSCodium"""
+
+import json
+import logging
+import os
+import shutil
+from devtool.ide_plugins import BuildTool, IdeBase, GdbCrossConfig, get_devtool_deploy_opts
+
+logger = logging.getLogger('devtool')
+
+
+class GdbCrossConfigVSCode(GdbCrossConfig):
+    def __init__(self, image_recipe, modified_recipe, binary):
+        super().__init__(image_recipe, modified_recipe, binary, False)
+
+    def initialize(self):
+        self._gen_gdbserver_start_script()
+
+
+class IdeVSCode(IdeBase):
+    """Manage IDE configurations for VSCode
+
+    Modified recipe mode:
+    - cmake: use the cmake-preset generated by devtool ide-sdk
+    - meson: meson is called via a wrapper script generated by devtool ide-sdk
+
+    Shared sysroot mode:
+    In shared sysroot mode, the cross tool-chain is exported to the user's global configuration.
+    A workspace cannot be created because there is no recipe that defines how a workspace could
+    be set up.
+    - cmake: adds a cmake-kit to .local/share/CMakeTools/cmake-tools-kits.json
+             The cmake-kit uses the environment script and the tool-chain file
+             generated by meta-ide-support.
+    - meson: Meson needs manual workspace configuration.
+    """
+
+    @classmethod
+    def ide_plugin_priority(cls):
+        """If --ide is not passed this is the default plugin"""
+        if shutil.which('code'):
+            return 100
+        return 0
+
+    def setup_shared_sysroots(self, shared_env):
+        """Expose the toolchain of the shared sysroots SDK"""
+        datadir = shared_env.ide_support.datadir
+        deploy_dir_image = shared_env.ide_support.deploy_dir_image
+        real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
+        standalone_sysroot_native = shared_env.build_sysroots.standalone_sysroot_native
+        vscode_ws_path = os.path.join(
+            os.environ['HOME'], '.local', 'share', 'CMakeTools')
+        cmake_kits_path = os.path.join(vscode_ws_path, 'cmake-tools-kits.json')
+        oecmake_generator = "Ninja"
+        env_script = os.path.join(
+            deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)
+
+        if not os.path.isdir(vscode_ws_path):
+            os.makedirs(vscode_ws_path)
+        cmake_kits_old = []
+        if os.path.exists(cmake_kits_path):
+            with open(cmake_kits_path, 'r', encoding='utf-8') as cmake_kits_file:
+                cmake_kits_old = json.load(cmake_kits_file)
+        cmake_kits = cmake_kits_old.copy()
+
+        cmake_kit_new = {
+            "name": "OE " + real_multimach_target_sys,
+            "environmentSetupScript": env_script,
+            "toolchainFile": standalone_sysroot_native + datadir + "/cmake/OEToolchainConfig.cmake",
+            "preferredGenerator": {
+                "name": oecmake_generator
+            }
+        }
+
+        def merge_kit(cmake_kits, cmake_kit_new):
+            i = 0
+            while i < len(cmake_kits):
+                if 'environmentSetupScript' in cmake_kits[i] and \
+                        cmake_kits[i]['environmentSetupScript'] == cmake_kit_new['environmentSetupScript']:
+                    cmake_kits[i] = cmake_kit_new
+                    return
+                i += 1
+            cmake_kits.append(cmake_kit_new)
+        merge_kit(cmake_kits, cmake_kit_new)
+
+        if cmake_kits != cmake_kits_old:
+            logger.info("Updating: %s" % cmake_kits_path)
+            with open(cmake_kits_path, 'w', encoding='utf-8') as cmake_kits_file:
+                json.dump(cmake_kits, cmake_kits_file, indent=4)
+        else:
+            logger.info("Already up to date: %s" % cmake_kits_path)
+
+        cmake_native = os.path.join(
+            shared_env.build_sysroots.standalone_sysroot_native, 'usr', 'bin', 'cmake')
+        if os.path.isfile(cmake_native):
+            logger.info('cmake-kits call cmake by default. If the cmake provided by this SDK should be used, please add the following line to ".vscode/settings.json" file: "cmake.cmakePath": "%s"' % cmake_native)
+        else:
+            logger.error("Cannot find cmake native at: %s" % cmake_native)
+
+    def dot_code_dir(self, modified_recipe):
+        return os.path.join(modified_recipe.srctree, '.vscode')
+
+    def __vscode_settings_meson(self, settings_dict, modified_recipe):
+        if modified_recipe.build_tool is not BuildTool.MESON:
+            return
+        settings_dict["mesonbuild.mesonPath"] = modified_recipe.meson_wrapper
+
+        confopts = modified_recipe.mesonopts.split()
+        confopts += modified_recipe.meson_cross_file.split()
+        confopts += modified_recipe.extra_oemeson.split()
+        settings_dict["mesonbuild.configureOptions"] = confopts
+        settings_dict["mesonbuild.buildFolder"] = modified_recipe.b
+
+    def __vscode_settings_cmake(self, settings_dict, modified_recipe):
+        """Add cmake specific settings to settings.json.
+
+        Note: most settings are passed to the cmake preset.
+        """
+        if modified_recipe.build_tool is not BuildTool.CMAKE:
+            return
+        settings_dict["cmake.configureOnOpen"] = True
+        settings_dict["cmake.sourceDirectory"] = modified_recipe.real_srctree
+
+    def vscode_settings(self, modified_recipe, image_recipe):
+        files_excludes = {
+            "**/.git/**": True,
+            "**/oe-logs/**": True,
+            "**/oe-workdir/**": True,
+            "**/source-date-epoch/**": True
+        }
+        python_exclude = [
+            "**/.git/**",
+            "**/oe-logs/**",
+            "**/oe-workdir/**",
+            "**/source-date-epoch/**"
+        ]
+        files_readonly = {
+            modified_recipe.recipe_sysroot + '/**': True,
+            modified_recipe.recipe_sysroot_native + '/**': True,
+        }
+        if image_recipe.rootfs_dbg is not None:
+            files_readonly[image_recipe.rootfs_dbg + '/**'] = True
+        settings_dict = {
+            "files.watcherExclude": files_excludes,
+            "files.exclude": files_excludes,
+            "files.readonlyInclude": files_readonly,
+            "python.analysis.exclude": python_exclude
+        }
+        self.__vscode_settings_cmake(settings_dict, modified_recipe)
+        self.__vscode_settings_meson(settings_dict, modified_recipe)
+
+        settings_file = 'settings.json'
+        IdeBase.update_json_file(
+            self.dot_code_dir(modified_recipe), settings_file, settings_dict)
+
+    def __vscode_extensions_cmake(self, modified_recipe, recommendations):
+        if modified_recipe.build_tool is not BuildTool.CMAKE:
+            return
+        recommendations += [
+            "twxs.cmake",
+            "ms-vscode.cmake-tools",
+            "ms-vscode.cpptools",
+            "ms-vscode.cpptools-extension-pack",
+            "ms-vscode.cpptools-themes"
+        ]
+
+    def __vscode_extensions_meson(self, modified_recipe, recommendations):
+        if modified_recipe.build_tool is not BuildTool.MESON:
+            return
+        recommendations += [
+            'mesonbuild.mesonbuild',
+            "ms-vscode.cpptools",
+            "ms-vscode.cpptools-extension-pack",
+            "ms-vscode.cpptools-themes"
+        ]
+
+    def vscode_extensions(self, modified_recipe):
+        recommendations = []
+        self.__vscode_extensions_cmake(modified_recipe, recommendations)
+        self.__vscode_extensions_meson(modified_recipe, recommendations)
+        extensions_file = 'extensions.json'
+        IdeBase.update_json_file(
+            self.dot_code_dir(modified_recipe), extensions_file, {"recommendations": recommendations})
+
+    def vscode_c_cpp_properties(self, modified_recipe):
+        properties_dict = {
+            "name": modified_recipe.recipe_id_pretty,
+        }
+        if modified_recipe.build_tool is BuildTool.CMAKE:
+            properties_dict["configurationProvider"] = "ms-vscode.cmake-tools"
+        elif modified_recipe.build_tool is BuildTool.MESON:
+            properties_dict["configurationProvider"] = "mesonbuild.mesonbuild"
+            properties_dict["compilerPath"] = os.path.join(modified_recipe.staging_bindir_toolchain, modified_recipe.cxx.split()[0])
+        else:  # no C/C++ build
+            return
+
+        properties_dicts = {
+            "configurations": [
+                properties_dict
+            ],
+            "version": 4
+        }
+        prop_file = 'c_cpp_properties.json'
+        IdeBase.update_json_file(
+            self.dot_code_dir(modified_recipe), prop_file, properties_dicts)
+
+    def vscode_launch_bin_dbg(self, gdb_cross_config):
+        modified_recipe = gdb_cross_config.modified_recipe
+
+        launch_config = {
+            "name": gdb_cross_config.id_pretty,
+            "type": "cppdbg",
+            "request": "launch",
+            "program": os.path.join(modified_recipe.d, gdb_cross_config.binary.lstrip('/')),
+            "stopAtEntry": True,
+            "cwd": "${workspaceFolder}",
+            "environment": [],
+            "externalConsole": False,
+            "MIMode": "gdb",
+            "preLaunchTask": gdb_cross_config.id_pretty,
+            "miDebuggerPath": modified_recipe.gdb_cross.gdb,
+            "miDebuggerServerAddress": "%s:%d" % (modified_recipe.gdb_cross.host, gdb_cross_config.gdbserver_port)
+        }
+
+        # Search for header files in recipe-sysroot.
+        src_file_map = {
+            "/usr/include": os.path.join(modified_recipe.recipe_sysroot, "usr", "include")
+        }
+        # First of all search for not stripped binaries in the image folder.
+        # These binaries are copied (and optionally stripped) by deploy-target
+        setup_commands = [
+            {
+                "description": "sysroot",
+                "text": "set sysroot " + modified_recipe.d
+            }
+        ]
+
+        if gdb_cross_config.image_recipe.rootfs_dbg:
+            launch_config['additionalSOLibSearchPath'] = modified_recipe.solib_search_path_str(
+                gdb_cross_config.image_recipe)
+            # First: Search for sources of this recipe in the workspace folder
+            if modified_recipe.pn in modified_recipe.target_dbgsrc_dir:
+                src_file_map[modified_recipe.target_dbgsrc_dir] = "${workspaceFolder}"
+            else:
+                logger.error(
+                    "TARGET_DBGSRC_DIR must contain the recipe name PN.")
+            # Second: Search for sources of other recipes in the rootfs-dbg
+            if modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
+                src_file_map["/usr/src/debug"] = os.path.join(
+                    gdb_cross_config.image_recipe.rootfs_dbg, "usr", "src", "debug")
+            else:
+                logger.error(
+                    "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
+        else:
+            logger.warning(
+                "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
+
+        launch_config['sourceFileMap'] = src_file_map
+        launch_config['setupCommands'] = setup_commands
+        return launch_config
+
+    def vscode_launch(self, modified_recipe):
+        """GDB Launch configuration for binaries (elf files)"""
+
+        configurations = []
+        for gdb_cross_config in self.gdb_cross_configs:
+            if gdb_cross_config.modified_recipe is modified_recipe:
+                configurations.append(self.vscode_launch_bin_dbg(gdb_cross_config))
+        launch_dict = {
+            "version": "0.2.0",
+            "configurations": configurations
+        }
+        launch_file = 'launch.json'
+        IdeBase.update_json_file(
+            self.dot_code_dir(modified_recipe), launch_file, launch_dict)
+
+    def vscode_tasks_cpp(self, args, modified_recipe):
+        run_install_deploy = modified_recipe.gen_install_deploy_script(args)
+        install_task_name = "install && deploy-target %s" % modified_recipe.recipe_id_pretty
+        tasks_dict = {
+            "version": "2.0.0",
+            "tasks": [
+                {
+                    "label": install_task_name,
+                    "type": "shell",
+                    "command": run_install_deploy,
+                    "problemMatcher": []
+                }
+            ]
+        }
+        for gdb_cross_config in self.gdb_cross_configs:
+            if gdb_cross_config.modified_recipe is not modified_recipe:
+                continue
+            tasks_dict['tasks'].append(
+                {
+                    "label": gdb_cross_config.id_pretty,
+                    "type": "shell",
+                    "isBackground": True,
+                    "dependsOn": [
+                        install_task_name
+                    ],
+                    "command": gdb_cross_config.gdbserver_script,
+                    "problemMatcher": [
+                        {
+                            "pattern": [
+                                {
+                                    "regexp": ".",
+                                    "file": 1,
+                                    "location": 2,
+                                    "message": 3
+                                }
+                            ],
+                            "background": {
+                                "activeOnStart": True,
+                                "beginsPattern": ".",
+                                "endsPattern": ".",
+                            }
+                        }
+                    ]
+                })
+        tasks_file = 'tasks.json'
+        IdeBase.update_json_file(
+            self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)
+
+    def vscode_tasks_fallback(self, args, modified_recipe):
+        oe_init_dir = modified_recipe.oe_init_dir
+        oe_init = ". %s %s > /dev/null && " % (modified_recipe.oe_init_build_env, modified_recipe.topdir)
+        dt_build = "devtool build "
+        dt_build_label = dt_build + modified_recipe.recipe_id_pretty
+        dt_build_cmd = dt_build + modified_recipe.bpn
+        clean_opt = " --clean"
+        dt_build_clean_label = dt_build + modified_recipe.recipe_id_pretty + clean_opt
+        dt_build_clean_cmd = dt_build + modified_recipe.bpn + clean_opt
+        dt_deploy = "devtool deploy-target "
+        dt_deploy_label = dt_deploy + modified_recipe.recipe_id_pretty
+        dt_deploy_cmd = dt_deploy + modified_recipe.bpn
+        dt_build_deploy_label = "devtool build & deploy-target %s" % modified_recipe.recipe_id_pretty
+        deploy_opts = ' '.join(get_devtool_deploy_opts(args))
+        tasks_dict = {
+            "version": "2.0.0",
+            "tasks": [
+                {
+                    "label": dt_build_label,
+                    "type": "shell",
+                    "command": "bash",
+                    "linux": {
+                        "options": {
+                            "cwd": oe_init_dir
+                        }
+                    },
+                    "args": [
+                        "--login",
+                        "-c",
+                        "%s%s" % (oe_init, dt_build_cmd)
+                    ],
+                    "problemMatcher": []
+                },
+                {
+                    "label": dt_deploy_label,
+                    "type": "shell",
+                    "command": "bash",
+                    "linux": {
+                        "options": {
+                            "cwd": oe_init_dir
+                        }
+                    },
+                    "args": [
+                        "--login",
+                        "-c",
+                        "%s%s %s" % (
+                            oe_init, dt_deploy_cmd, deploy_opts)
+                    ],
+                    "problemMatcher": []
+                },
+                {
+                    "label": dt_build_deploy_label,
+                    "dependsOrder": "sequence",
+                    "dependsOn": [
+                        dt_build_label,
+                        dt_deploy_label
+                    ],
+                    "problemMatcher": [],
+                    "group": {
+                        "kind": "build",
+                        "isDefault": True
+                    }
+                },
+                {
+                    "label": dt_build_clean_label,
+                    "type": "shell",
+                    "command": "bash",
+                    "linux": {
+                        "options": {
+                            "cwd": oe_init_dir
+                        }
+                    },
+                    "args": [
+                        "--login",
+                        "-c",
+                        "%s%s" % (oe_init, dt_build_clean_cmd)
+                    ],
+                    "problemMatcher": []
+                }
+            ]
+        }
+        if modified_recipe.gdb_cross:
+            for gdb_cross_config in self.gdb_cross_configs:
+                if gdb_cross_config.modified_recipe is not modified_recipe:
+                    continue
+                tasks_dict['tasks'].append(
+                    {
+                        "label": gdb_cross_config.id_pretty,
+                        "type": "shell",
+                        "isBackground": True,
+                        "dependsOn": [
+                            dt_build_deploy_label
+                        ],
+                        "command": gdb_cross_config.gdbserver_script,
+                        "problemMatcher": [
+                            {
+                                "pattern": [
+                                    {
+                                        "regexp": ".",
+                                        "file": 1,
+                                        "location": 2,
+                                        "message": 3
+                                    }
+                                ],
+                                "background": {
+                                    "activeOnStart": True,
+                                    "beginsPattern": ".",
+                                    "endsPattern": ".",
+                                }
+                            }
+                        ]
+                    })
+        tasks_file = 'tasks.json'
+        IdeBase.update_json_file(
+            self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)
+
+    def vscode_tasks(self, args, modified_recipe):
+        if modified_recipe.build_tool.is_c_ccp:
+            self.vscode_tasks_cpp(args, modified_recipe)
+        else:
+            self.vscode_tasks_fallback(args, modified_recipe)
+
+    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
+        self.vscode_settings(modified_recipe, image_recipe)
+        self.vscode_extensions(modified_recipe)
+        self.vscode_c_cpp_properties(modified_recipe)
+        if args.target:
+            self.initialize_gdb_cross_configs(
+                image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfigVSCode)
+            self.vscode_launch(modified_recipe)
+        self.vscode_tasks(args, modified_recipe)
+
+
+def register_ide_plugin(ide_plugins):
+    ide_plugins['code'] = IdeVSCode
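All of the .vscode files above go through IdeBase.update_json_file(), which merges with a shallow dict.update(): top-level keys that devtool owns are replaced wholesale, while unrelated keys a user added by hand survive. The behaviour in isolation, with illustrative data:

```python
import json

# Shallow merge as done by IdeBase.update_json_file:
existing = {"editor.formatOnSave": True, "cmake.configureOnOpen": False}
update = {"cmake.configureOnOpen": True, "cmake.sourceDirectory": "/work/src"}
existing.update(update)
print(json.dumps(existing, indent=4))
# "editor.formatOnSave" is kept; both "cmake.*" keys now hold devtool's values
```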
diff --git a/scripts/lib/devtool/ide_plugins/ide_none.py b/scripts/lib/devtool/ide_plugins/ide_none.py
new file mode 100644
index 0000000000..f106c5a026
--- /dev/null
+++ b/scripts/lib/devtool/ide_plugins/ide_none.py
@@ -0,0 +1,53 @@
+#
+# Copyright (C) 2023-2024 Siemens AG
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+"""Devtool ide-sdk generic IDE plugin"""
+
+import os
+import logging
+from devtool.ide_plugins import IdeBase, GdbCrossConfig
+
+logger = logging.getLogger('devtool')
+
+
+class IdeNone(IdeBase):
+    """Generate some generic helpers for other IDEs
+
+    Modified recipe mode:
+    Generate some helper scripts for remote debugging with GDB
+
+    Shared sysroot mode:
+    A wrapper for bitbake meta-ide-support and bitbake build-sysroots
+    """
+
+    def __init__(self):
+        super().__init__()
+
+    def setup_shared_sysroots(self, shared_env):
+        real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
+        deploy_dir_image = shared_env.ide_support.deploy_dir_image
+        env_script = os.path.join(
+            deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)
+        logger.info(
+            "To use this SDK please source this: %s" % env_script)
+
+    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
+        """generate some helper scripts and config files
+
+        - Execute the do_install task
+        - Execute devtool deploy-target
+        - Generate a gdbinit file per executable
+        - Generate the oe-scripts sym-link
+        """
+        script_path = modified_recipe.gen_install_deploy_script(args)
+        logger.info("Created: %s" % script_path)
+
+        self.initialize_gdb_cross_configs(image_recipe, modified_recipe)
+
+        IdeBase.gen_oe_scrtips_sym_link(modified_recipe)
+
+
+def register_ide_plugin(ide_plugins):
+    ide_plugins['none'] = IdeNone
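Both plugins above lean on GdbCrossConfig from ide_plugins/__init__.py, which hands out one gdbserver port per binary from a class-level counter starting at 1234; that counter is what makes generated script names such as gdb_1234_usr-bin-foo unique. The allocation scheme in isolation, as a sketch:

```python
class PortAllocator:
    """Sketch of GdbCrossConfig's class-level port/ID bookkeeping."""
    _next_port = 1234
    _binaries = []

    def __init__(self, binary):
        if binary in PortAllocator._binaries:
            raise ValueError('config for %s already generated' % binary)
        PortAllocator._binaries.append(binary)
        self.port = PortAllocator._next_port
        PortAllocator._next_port += 1
        self.id_pretty = '%d_%s' % (self.port,
                                    binary.replace('/', '-').lstrip('-'))

# PortAllocator('/usr/bin/foo').id_pretty -> '1234_usr-bin-foo'
# PortAllocator('/usr/bin/bar').id_pretty -> '1235_usr-bin-bar'
```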
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py new file mode 100755 index 0000000000..7807b322b3 --- /dev/null +++ b/scripts/lib/devtool/ide_sdk.py | |||
@@ -0,0 +1,1070 @@ | |||
1 | # Development tool - ide-sdk command plugin | ||
2 | # | ||
3 | # Copyright (C) 2023-2024 Siemens AG | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | # | ||
7 | """Devtool ide-sdk plugin""" | ||
8 | |||
9 | import json | ||
10 | import logging | ||
11 | import os | ||
12 | import re | ||
13 | import shutil | ||
14 | import stat | ||
15 | import subprocess | ||
16 | import sys | ||
17 | from argparse import RawTextHelpFormatter | ||
18 | from enum import Enum | ||
19 | |||
20 | import scriptutils | ||
21 | import bb | ||
22 | from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError, parse_recipe | ||
23 | from devtool.standard import get_real_srctree | ||
24 | from devtool.ide_plugins import BuildTool | ||
25 | |||
26 | |||
27 | logger = logging.getLogger('devtool') | ||
28 | |||
29 | # dict of classes derived from IdeBase | ||
30 | ide_plugins = {} | ||
31 | |||
32 | |||
33 | class DevtoolIdeMode(Enum): | ||
34 | """Different modes are supported by the ide-sdk plugin. | ||
35 | |||
36 | The enum might be extended by more advanced modes in the future. Some ideas: | ||
37 | - auto: modified if all recipes are modified, shared if none of the recipes is modified. | ||
38 | - mixed: modified mode for modified recipes, shared mode for all other recipes. | ||
39 | """ | ||
40 | |||
41 | modified = 'modified' | ||
42 | shared = 'shared' | ||
43 | |||
44 | |||
45 | class TargetDevice: | ||
46 | """SSH remote login parameters""" | ||
47 | |||
48 | def __init__(self, args): | ||
49 | self.extraoptions = '' | ||
50 | if args.no_host_check: | ||
51 | self.extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' | ||
52 | self.ssh_sshexec = 'ssh' | ||
53 | if args.ssh_exec: | ||
54 | self.ssh_sshexec = args.ssh_exec | ||
55 | self.ssh_port = '' | ||
56 | if args.port: | ||
57 | self.ssh_port = "-p %s" % args.port | ||
58 | if args.key: | ||
59 | self.extraoptions += ' -i %s' % args.key | ||
60 | |||
61 | self.target = args.target | ||
62 | target_sp = args.target.split('@') | ||
63 | if len(target_sp) == 1: | ||
64 | self.login = "" | ||
65 | self.host = target_sp[0] | ||
66 | elif len(target_sp) == 2: | ||
67 | self.login = target_sp[0] | ||
68 | self.host = target_sp[1] | ||
69 | else: | ||
70 | logger.error("Invalid target argument: %s" % args.target) | ||
71 | |||
72 | |||
73 | class RecipeNative: | ||
74 | """Base class for calling bitbake to provide a -native recipe""" | ||
75 | |||
76 | def __init__(self, name, target_arch=None): | ||
77 | self.name = name | ||
78 | self.target_arch = target_arch | ||
79 | self.bootstrap_tasks = [self.name + ':do_addto_recipe_sysroot'] | ||
80 | self.staging_bindir_native = None | ||
81 | self.target_sys = None | ||
82 | self.__native_bin = None | ||
83 | |||
84 | def _initialize(self, config, workspace, tinfoil): | ||
85 | """Get the parsed recipe""" | ||
86 | recipe_d = parse_recipe( | ||
87 | config, tinfoil, self.name, appends=True, filter_workspace=False) | ||
88 | if not recipe_d: | ||
89 | raise DevtoolError("Parsing %s recipe failed" % self.name) | ||
90 | self.staging_bindir_native = os.path.realpath( | ||
91 | recipe_d.getVar('STAGING_BINDIR_NATIVE')) | ||
92 | self.target_sys = recipe_d.getVar('TARGET_SYS') | ||
93 | return recipe_d | ||
94 | |||
95 | def initialize(self, config, workspace, tinfoil): | ||
96 | """Basic initialization that can be overridden by a derived class""" | ||
97 | self._initialize(config, workspace, tinfoil) | ||
98 | |||
99 | @property | ||
100 | def native_bin(self): | ||
101 | if not self.__native_bin: | ||
102 | raise DevtoolError("native binary name is not defined.") | ||
103 | return self.__native_bin | ||
104 | |||
105 | |||
106 | class RecipeGdbCross(RecipeNative): | ||
107 | """Handle handle gdb-cross on the host and the gdbserver on the target device""" | ||
108 | |||
109 | def __init__(self, args, target_arch, target_device): | ||
110 | super().__init__('gdb-cross-' + target_arch, target_arch) | ||
111 | self.target_device = target_device | ||
112 | self.gdb = None | ||
113 | self.gdbserver_port_next = int(args.gdbserver_port_start) | ||
114 | self.config_db = {} | ||
115 | |||
116 | def __find_gdbserver(self, config, tinfoil): | ||
117 | """Absolute path of the gdbserver""" | ||
118 | recipe_d_gdb = parse_recipe( | ||
119 | config, tinfoil, 'gdb', appends=True, filter_workspace=False) | ||
120 | if not recipe_d_gdb: | ||
121 | raise DevtoolError("Parsing gdb recipe failed") | ||
122 | return os.path.join(recipe_d_gdb.getVar('bindir'), 'gdbserver') | ||
123 | |||
124 | def initialize(self, config, workspace, tinfoil): | ||
125 | super()._initialize(config, workspace, tinfoil) | ||
126 | gdb_bin = self.target_sys + '-gdb' | ||
127 | gdb_path = os.path.join( | ||
128 | self.staging_bindir_native, self.target_sys, gdb_bin) | ||
129 | self.gdb = gdb_path | ||
130 | self.gdbserver_path = self.__find_gdbserver(config, tinfoil) | ||
131 | |||
132 | @property | ||
133 | def host(self): | ||
134 | return self.target_device.host | ||
135 | |||
136 | |||
137 | class RecipeImage: | ||
138 | """Handle some image recipe related properties | ||
139 | |||
140 | Most workflows require firmware that runs on the target device. | ||
141 | This firmware must be consistent with the setup of the host system. | ||
142 | In particular, the debug symbols must be compatible. For this, the | ||
143 | rootfs must be created as part of the SDK. | ||
144 | """ | ||
145 | |||
146 | def __init__(self, name): | ||
147 | self.combine_dbg_image = False | ||
148 | self.gdbserver_missing = False | ||
149 | self.name = name | ||
150 | self.rootfs = None | ||
151 | self.__rootfs_dbg = None | ||
152 | self.bootstrap_tasks = [self.name + ':do_build'] | ||
153 | |||
154 | def initialize(self, config, tinfoil): | ||
155 | image_d = parse_recipe( | ||
156 | config, tinfoil, self.name, appends=True, filter_workspace=False) | ||
157 | if not image_d: | ||
158 | raise DevtoolError( | ||
159 | "Parsing image recipe %s failed" % self.name) | ||
160 | |||
161 | self.combine_dbg_image = bb.data.inherits_class( | ||
162 | 'image-combined-dbg', image_d) | ||
163 | |||
164 | workdir = image_d.getVar('WORKDIR') | ||
165 | self.rootfs = os.path.join(workdir, 'rootfs') | ||
166 | if image_d.getVar('IMAGE_GEN_DEBUGFS') == "1": | ||
167 | self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg') | ||
168 | |||
169 | self.gdbserver_missing = 'gdbserver' not in image_d.getVar( | ||
170 | 'IMAGE_INSTALL') | ||
171 | |||
172 | @property | ||
173 | def debug_support(self): | ||
174 | return bool(self.rootfs_dbg) | ||
175 | |||
176 | @property | ||
177 | def rootfs_dbg(self): | ||
178 | if self.__rootfs_dbg and os.path.isdir(self.__rootfs_dbg): | ||
179 | return self.__rootfs_dbg | ||
180 | return None | ||
181 | |||
182 | |||
183 | class RecipeMetaIdeSupport: | ||
184 | """For the shared sysroots mode meta-ide-support is needed | ||
185 | |||
186 | For use cases where just a cross tool-chain is required but | ||
187 | no recipe is used, devtool ide-sdk abstracts calling bitbake meta-ide-support | ||
188 | and bitbake build-sysroots. This also allows exposing the cross-toolchains | ||
189 | to IDEs. For example, VSCode supports different tool-chains via cmake-kits. | ||
190 | """ | ||
191 | |||
192 | def __init__(self): | ||
193 | self.bootstrap_tasks = ['meta-ide-support:do_build'] | ||
194 | self.topdir = None | ||
195 | self.datadir = None | ||
196 | self.deploy_dir_image = None | ||
197 | self.build_sys = None | ||
198 | # From toolchain-scripts | ||
199 | self.real_multimach_target_sys = None | ||
200 | |||
201 | def initialize(self, config, tinfoil): | ||
202 | meta_ide_support_d = parse_recipe( | ||
203 | config, tinfoil, 'meta-ide-support', appends=True, filter_workspace=False) | ||
204 | if not meta_ide_support_d: | ||
205 | raise DevtoolError("Parsing meta-ide-support recipe failed") | ||
206 | |||
207 | self.topdir = meta_ide_support_d.getVar('TOPDIR') | ||
208 | self.datadir = meta_ide_support_d.getVar('datadir') | ||
209 | self.deploy_dir_image = meta_ide_support_d.getVar( | ||
210 | 'DEPLOY_DIR_IMAGE') | ||
211 | self.build_sys = meta_ide_support_d.getVar('BUILD_SYS') | ||
212 | self.real_multimach_target_sys = meta_ide_support_d.getVar( | ||
213 | 'REAL_MULTIMACH_TARGET_SYS') | ||
214 | |||
215 | |||
216 | class RecipeBuildSysroots: | ||
217 | """For the shared sysroots mode build-sysroots is needed""" | ||
218 | |||
219 | def __init__(self): | ||
220 | self.standalone_sysroot = None | ||
221 | self.standalone_sysroot_native = None | ||
222 | self.bootstrap_tasks = [ | ||
223 | 'build-sysroots:do_build_target_sysroot', | ||
224 | 'build-sysroots:do_build_native_sysroot' | ||
225 | ] | ||
226 | |||
227 | def initialize(self, config, tinfoil): | ||
228 | build_sysroots_d = parse_recipe( | ||
229 | config, tinfoil, 'build-sysroots', appends=True, filter_workspace=False) | ||
230 | if not build_sysroots_d: | ||
231 | raise DevtoolError("Parsing build-sysroots recipe failed") | ||
232 | self.standalone_sysroot = build_sysroots_d.getVar( | ||
233 | 'STANDALONE_SYSROOT') | ||
234 | self.standalone_sysroot_native = build_sysroots_d.getVar( | ||
235 | 'STANDALONE_SYSROOT_NATIVE') | ||
236 | |||
237 | |||
238 | class SharedSysrootsEnv: | ||
239 | """Handle the shared sysroots based workflow | ||
240 | |||
241 | Support the workflow with just a tool-chain without a recipe. | ||
242 | It's basically like: | ||
243 | bitbake some-dependencies | ||
244 | bitbake meta-ide-support | ||
245 | bitbake build-sysroots | ||
246 | Use the environment-* file found in the deploy folder | ||
247 | """ | ||
248 | |||
249 | def __init__(self): | ||
250 | self.ide_support = None | ||
251 | self.build_sysroots = None | ||
252 | |||
253 | def initialize(self, ide_support, build_sysroots): | ||
254 | self.ide_support = ide_support | ||
255 | self.build_sysroots = build_sysroots | ||
256 | |||
257 | def setup_ide(self, ide): | ||
258 | ide.setup(self) | ||
259 | |||
260 | |||
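A hedged end-to-end sketch of the workflow this class wraps; the recipe names are hypothetical examples:

    # Shell usage sketch for the shared sysroots mode:
    #   devtool ide-sdk -m shared zlib boost
    #   . <DEPLOY_DIR_IMAGE>/environment-setup-<REAL_MULTIMACH_TARGET_SYS>
    # After sourcing the environment file, the cross-toolchain is on PATH.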
261 | class RecipeNotModified: | ||
262 | """Handling of recipes added to the Direct DSK shared sysroots.""" | ||
263 | |||
264 | def __init__(self, name): | ||
265 | self.name = name | ||
266 | self.bootstrap_tasks = [name + ':do_populate_sysroot'] | ||
267 | |||
268 | |||
269 | class RecipeModified: | ||
270 | """Handling of recipes in the workspace created by devtool modify""" | ||
271 | OE_INIT_BUILD_ENV = 'oe-init-build-env' | ||
272 | |||
273 | VALID_BASH_ENV_NAME_CHARS = re.compile(r"^[a-zA-Z0-9_]*$") | ||
274 | |||
275 | def __init__(self, name): | ||
276 | self.name = name | ||
277 | self.bootstrap_tasks = [name + ':do_install'] | ||
278 | self.gdb_cross = None | ||
279 | # workspace | ||
280 | self.real_srctree = None | ||
281 | self.srctree = None | ||
282 | self.ide_sdk_dir = None | ||
283 | self.ide_sdk_scripts_dir = None | ||
284 | self.bbappend = None | ||
285 | # recipe variables from d.getVar | ||
286 | self.b = None | ||
287 | self.base_libdir = None | ||
288 | self.bblayers = None | ||
289 | self.bpn = None | ||
290 | self.d = None | ||
291 | self.fakerootcmd = None | ||
292 | self.fakerootenv = None | ||
293 | self.libdir = None | ||
294 | self.max_process = None | ||
295 | self.package_arch = None | ||
296 | self.package_debug_split_style = None | ||
297 | self.path = None | ||
298 | self.pn = None | ||
299 | self.recipe_sysroot = None | ||
300 | self.recipe_sysroot_native = None | ||
301 | self.staging_incdir = None | ||
302 | self.strip_cmd = None | ||
303 | self.target_arch = None | ||
304 | self.target_dbgsrc_dir = None | ||
305 | self.topdir = None | ||
306 | self.workdir = None | ||
307 | self.recipe_id = None | ||
308 | # replicate bitbake build environment | ||
309 | self.exported_vars = None | ||
310 | self.cmd_compile = None | ||
311 | self.__oe_init_dir = None | ||
312 | # main build tool used by this recipe | ||
313 | self.build_tool = BuildTool.UNDEFINED | ||
314 | # build_tool = cmake | ||
315 | self.oecmake_generator = None | ||
316 | self.cmake_cache_vars = None | ||
317 | # build_tool = meson | ||
318 | self.meson_buildtype = None | ||
319 | self.meson_wrapper = None | ||
320 | self.mesonopts = None | ||
321 | self.extra_oemeson = None | ||
322 | self.meson_cross_file = None | ||
323 | |||
324 | def initialize(self, config, workspace, tinfoil): | ||
325 | recipe_d = parse_recipe( | ||
326 | config, tinfoil, self.name, appends=True, filter_workspace=False) | ||
327 | if not recipe_d: | ||
328 | raise DevtoolError("Parsing %s recipe failed" % self.name) | ||
329 | |||
330 | # Verify this recipe is built as externalsrc setup by devtool modify | ||
331 | workspacepn = check_workspace_recipe( | ||
332 | workspace, self.name, bbclassextend=True) | ||
333 | self.srctree = workspace[workspacepn]['srctree'] | ||
334 | # Need to grab this here in case the source is within a subdirectory | ||
335 | self.real_srctree = get_real_srctree( | ||
336 | self.srctree, recipe_d.getVar('S'), recipe_d.getVar('WORKDIR')) | ||
337 | self.bbappend = workspace[workspacepn]['bbappend'] | ||
338 | |||
339 | self.ide_sdk_dir = os.path.join( | ||
340 | config.workspace_path, 'ide-sdk', self.name) | ||
341 | if os.path.exists(self.ide_sdk_dir): | ||
342 | shutil.rmtree(self.ide_sdk_dir) | ||
343 | self.ide_sdk_scripts_dir = os.path.join(self.ide_sdk_dir, 'scripts') | ||
344 | |||
345 | self.b = recipe_d.getVar('B') | ||
346 | self.base_libdir = recipe_d.getVar('base_libdir') | ||
347 | self.bblayers = recipe_d.getVar('BBLAYERS').split() | ||
348 | self.bpn = recipe_d.getVar('BPN') | ||
349 | self.cxx = recipe_d.getVar('CXX') | ||
350 | self.d = recipe_d.getVar('D') | ||
351 | self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD') | ||
352 | self.fakerootenv = recipe_d.getVar('FAKEROOTENV') | ||
353 | self.libdir = recipe_d.getVar('libdir') | ||
354 | self.max_process = int(recipe_d.getVar( | ||
355 | "BB_NUMBER_THREADS") or os.cpu_count() or 1) | ||
356 | self.package_arch = recipe_d.getVar('PACKAGE_ARCH') | ||
357 | self.package_debug_split_style = recipe_d.getVar( | ||
358 | 'PACKAGE_DEBUG_SPLIT_STYLE') | ||
359 | self.path = recipe_d.getVar('PATH') | ||
360 | self.pn = recipe_d.getVar('PN') | ||
361 | self.recipe_sysroot = os.path.realpath( | ||
362 | recipe_d.getVar('RECIPE_SYSROOT')) | ||
363 | self.recipe_sysroot_native = os.path.realpath( | ||
364 | recipe_d.getVar('RECIPE_SYSROOT_NATIVE')) | ||
365 | self.staging_bindir_toolchain = os.path.realpath( | ||
366 | recipe_d.getVar('STAGING_BINDIR_TOOLCHAIN')) | ||
367 | self.staging_incdir = os.path.realpath( | ||
368 | recipe_d.getVar('STAGING_INCDIR')) | ||
369 | self.strip_cmd = recipe_d.getVar('STRIP') | ||
370 | self.target_arch = recipe_d.getVar('TARGET_ARCH') | ||
371 | self.target_dbgsrc_dir = recipe_d.getVar('TARGET_DBGSRC_DIR') | ||
372 | self.topdir = recipe_d.getVar('TOPDIR') | ||
373 | self.workdir = os.path.realpath(recipe_d.getVar('WORKDIR')) | ||
374 | |||
375 | self.__init_exported_variables(recipe_d) | ||
376 | |||
377 | if bb.data.inherits_class('cmake', recipe_d): | ||
378 | self.oecmake_generator = recipe_d.getVar('OECMAKE_GENERATOR') | ||
379 | self.__init_cmake_preset_cache(recipe_d) | ||
380 | self.build_tool = BuildTool.CMAKE | ||
381 | elif bb.data.inherits_class('meson', recipe_d): | ||
382 | self.meson_buildtype = recipe_d.getVar('MESON_BUILDTYPE') | ||
383 | self.mesonopts = recipe_d.getVar('MESONOPTS') | ||
384 | self.extra_oemeson = recipe_d.getVar('EXTRA_OEMESON') | ||
385 | self.meson_cross_file = recipe_d.getVar('MESON_CROSS_FILE') | ||
386 | self.build_tool = BuildTool.MESON | ||
387 | |||
388 | # Recipe ID is the identifier for IDE config sections | ||
389 | self.recipe_id = self.bpn + "-" + self.package_arch | ||
390 | self.recipe_id_pretty = self.bpn + ": " + self.package_arch | ||
391 | |||
392 | def append_to_bbappend(self, append_text): | ||
393 | with open(self.bbappend, 'a') as bbap: | ||
394 | bbap.write(append_text) | ||
395 | |||
396 | def remove_from_bbappend(self, append_text): | ||
397 | with open(self.bbappend, 'r') as bbap: | ||
398 | text = bbap.read() | ||
399 | new_text = text.replace(append_text, '') | ||
400 | with open(self.bbappend, 'w') as bbap: | ||
401 | bbap.write(new_text) | ||
402 | |||
403 | @staticmethod | ||
404 | def is_valid_shell_variable(var): | ||
405 | """Skip strange shell variables like systemd | ||
406 | |||
407 | prevent from strange bugs because of strange variables which | ||
408 | are not used in this context but break various tools. | ||
409 | """ | ||
410 | if RecipeModified.VALID_BASH_ENV_NAME_CHARS.match(var): | ||
411 | return True | ||
412 | bb.debug(1, "ignoring variable: %s" % var) | ||
413 | return False | ||
414 | |||
415 | def debug_build_config(self, args): | ||
416 | """Explicitely set for example CMAKE_BUILD_TYPE to Debug if not defined otherwise""" | ||
417 | if self.build_tool is BuildTool.CMAKE: | ||
418 | append_text = os.linesep + \ | ||
419 | 'OECMAKE_ARGS:append = " -DCMAKE_BUILD_TYPE:STRING=Debug"' + os.linesep | ||
420 | if args.debug_build_config and 'CMAKE_BUILD_TYPE' not in self.cmake_cache_vars: | ||
421 | self.cmake_cache_vars['CMAKE_BUILD_TYPE'] = { | ||
422 | "type": "STRING", | ||
423 | "value": "Debug", | ||
424 | } | ||
425 | self.append_to_bbappend(append_text) | ||
426 | elif 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars: | ||
427 | del self.cmake_cache_vars['CMAKE_BUILD_TYPE'] | ||
428 | self.remove_from_bbappend(append_text) | ||
429 | elif self.build_tool is BuildTool.MESON: | ||
430 | append_text = os.linesep + 'MESON_BUILDTYPE = "debug"' + os.linesep | ||
431 | if args.debug_build_config and self.meson_buildtype != "debug": | ||
432 | self.mesonopts = self.mesonopts.replace( | ||
433 | '--buildtype ' + self.meson_buildtype, '--buildtype debug') | ||
434 | self.append_to_bbappend(append_text) | ||
435 | elif self.meson_buildtype == "debug": | ||
436 | self.mesonopts = self.mesonopts.replace( | ||
437 | '--buildtype debug', '--buildtype plain') | ||
438 | self.remove_from_bbappend(append_text) | ||
439 | elif args.debug_build_config: | ||
440 | logger.warning( | ||
441 | "--debug-build-config is not implemented for this build tool yet.") | ||
442 | |||
443 | def solib_search_path(self, image): | ||
444 | """Search for debug symbols in the rootfs and rootfs-dbg | ||
445 | |||
446 | The debug symbols of shared libraries which are provided by other packages | ||
447 | are grabbed from the -dbg packages in the rootfs-dbg. | ||
448 | |||
449 | But most cross debugging tools like gdb, perf, and systemtap need to find | ||
450 | the executable/library first and then locate the corresponding symbols file | ||
451 | through its debuglink note. Therefore the library paths from the rootfs are added as well. | ||
452 | |||
453 | Note: For the devtool modified recipe compiled from the IDE, the debug | ||
454 | symbols are taken from the unstripped binaries in the image folder. | ||
455 | Also, devtool deploy-target takes the files from the image folder. | ||
456 | Debug symbols in the image folder refer to the corresponding source files | ||
457 | with absolute paths of the build machine. Debug symbols found in the | ||
458 | rootfs-dbg are relocated and contain paths which refer to the source files | ||
459 | installed on the target device e.g. /usr/src/... | ||
460 | """ | ||
461 | base_libdir = self.base_libdir.lstrip('/') | ||
462 | libdir = self.libdir.lstrip('/') | ||
463 | so_paths = [ | ||
464 | # debug symbols for package_debug_split_style: debug-with-srcpkg or .debug | ||
465 | os.path.join(image.rootfs_dbg, base_libdir, ".debug"), | ||
466 | os.path.join(image.rootfs_dbg, libdir, ".debug"), | ||
467 | # debug symbols for package_debug_split_style: debug-file-directory | ||
468 | os.path.join(image.rootfs_dbg, "usr", "lib", "debug"), | ||
469 | |||
470 | # The binaries are required as well, the debug packages are not enough | ||
471 | # With image-combined-dbg.bbclass the binaries are copied into rootfs-dbg | ||
472 | os.path.join(image.rootfs_dbg, base_libdir), | ||
473 | os.path.join(image.rootfs_dbg, libdir), | ||
474 | # Without image-combined-dbg.bbclass the binaries are only in rootfs. | ||
475 | # Note: Stepping into source files located in rootfs-dbg does not | ||
476 | # work without image-combined-dbg.bbclass yet. | ||
477 | os.path.join(image.rootfs, base_libdir), | ||
478 | os.path.join(image.rootfs, libdir) | ||
479 | ] | ||
480 | return so_paths | ||
481 | |||
482 | def solib_search_path_str(self, image): | ||
483 | """Return a : separated list of paths usable by GDB's set solib-search-path""" | ||
484 | return ':'.join(self.solib_search_path(image)) | ||
485 | |||
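A minimal sketch of how an IDE plugin could consume this helper; recipe_modified and recipe_image are assumed to be initialized instances of the classes above, and the example paths are hypothetical:

    # Build the GDB init command from the joined search path:
    gdb_cmd = 'set solib-search-path ' + recipe_modified.solib_search_path_str(recipe_image)
    # e.g. 'set solib-search-path /work/rootfs-dbg/lib/.debug:/work/rootfs-dbg/usr/lib/.debug:...'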
486 | def __init_exported_variables(self, d): | ||
487 | """Find all variables with export flag set. | ||
488 | |||
489 | This makes it possible to generate IDE configurations which compile with the same | ||
490 | environment as bitbake does. That's at least a reasonable default behavior. | ||
491 | """ | ||
492 | exported_vars = {} | ||
493 | |||
494 | vars = (key for key in d.keys() if not key.startswith( | ||
495 | "__") and not d.getVarFlag(key, "func", False)) | ||
496 | for var in vars: | ||
497 | func = d.getVarFlag(var, "func", False) | ||
498 | if d.getVarFlag(var, 'python', False) and func: | ||
499 | continue | ||
500 | export = d.getVarFlag(var, "export", False) | ||
501 | unexport = d.getVarFlag(var, "unexport", False) | ||
502 | if not export and not unexport and not func: | ||
503 | continue | ||
504 | if unexport: | ||
505 | continue | ||
506 | |||
507 | val = d.getVar(var) | ||
508 | if val is None: | ||
509 | continue | ||
510 | if set(var) & set("-.{}+"): | ||
511 | logger.warning( | ||
512 | "Found invalid character in variable name %s", str(var)) | ||
513 | continue | ||
514 | varExpanded = d.expand(var) | ||
515 | val = str(val) | ||
516 | |||
517 | if not RecipeModified.is_valid_shell_variable(varExpanded): | ||
518 | continue | ||
519 | |||
520 | if func: | ||
521 | code_line = "line: {0}, file: {1}\n".format( | ||
522 | d.getVarFlag(var, "lineno", False), | ||
523 | d.getVarFlag(var, "filename", False)) | ||
524 | val = val.rstrip('\n') | ||
525 | logger.warn("Warning: exported shell function %s() is not exported (%s)" % | ||
526 | (varExpanded, code_line)) | ||
527 | continue | ||
528 | |||
529 | if export: | ||
530 | exported_vars[varExpanded] = val.strip() | ||
531 | continue | ||
532 | |||
533 | self.exported_vars = exported_vars | ||
534 | |||
535 | def __init_cmake_preset_cache(self, d): | ||
536 | """Get the arguments passed to cmake | ||
537 | |||
538 | Replicate the cmake configure arguments with all details to | ||
539 | share the build folder between bitbake and the SDK. | ||
540 | """ | ||
541 | site_file = os.path.join(self.workdir, 'site-file.cmake') | ||
542 | if os.path.exists(site_file): | ||
543 | print("Warning: site-file.cmake is not supported") | ||
544 | |||
545 | cache_vars = {} | ||
546 | oecmake_args = d.getVar('OECMAKE_ARGS').split() | ||
547 | extra_oecmake = d.getVar('EXTRA_OECMAKE').split() | ||
548 | for param in oecmake_args + extra_oecmake: | ||
549 | d_pref = "-D" | ||
550 | if param.startswith(d_pref): | ||
551 | param = param[len(d_pref):] | ||
552 | else: | ||
553 | print("Error: expected a -D") | ||
554 | param_s = param.split('=', 1) | ||
555 | param_nt = param_s[0].split(':', 1) | ||
556 | |||
557 | def handle_undefined_variable(var): | ||
558 | if var.startswith('${') and var.endswith('}'): | ||
559 | return '' | ||
560 | else: | ||
561 | return var | ||
562 | # Example: FOO=ON | ||
563 | if len(param_nt) == 1: | ||
564 | cache_vars[param_s[0]] = handle_undefined_variable(param_s[1]) | ||
565 | # Example: FOO:PATH=/tmp | ||
566 | elif len(param_nt) == 2: | ||
567 | cache_vars[param_nt[0]] = { | ||
568 | "type": param_nt[1], | ||
569 | "value": handle_undefined_variable(param_s[1]), | ||
570 | } | ||
571 | else: | ||
572 | print("Error: cannot parse %s" % param) | ||
573 | self.cmake_cache_vars = cache_vars | ||
574 | |||
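For illustration, a hypothetical argument string maps into the cache dictionary built above as follows (a sketch, not taken from a real recipe):

    # Assuming EXTRA_OECMAKE = "-DFOO=ON -DBAR:PATH=/tmp", the loop yields:
    cmake_cache_vars = {
        'FOO': 'ON',                               # untyped entry from FOO=ON
        'BAR': {'type': 'PATH', 'value': '/tmp'},  # typed entry from BAR:PATH=/tmp
    }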
575 | def cmake_preset(self): | ||
576 | """Create a preset for cmake that mimics how bitbake calls cmake""" | ||
577 | toolchain_file = os.path.join(self.workdir, 'toolchain.cmake') | ||
578 | cmake_executable = os.path.join( | ||
579 | self.recipe_sysroot_native, 'usr', 'bin', 'cmake') | ||
580 | self.cmd_compile = cmake_executable + " --build --preset " + self.recipe_id | ||
581 | |||
582 | preset_dict_configure = { | ||
583 | "name": self.recipe_id, | ||
584 | "displayName": self.recipe_id_pretty, | ||
585 | "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch), | ||
586 | "binaryDir": self.b, | ||
587 | "generator": self.oecmake_generator, | ||
588 | "toolchainFile": toolchain_file, | ||
589 | "cacheVariables": self.cmake_cache_vars, | ||
590 | "environment": self.exported_vars, | ||
591 | "cmakeExecutable": cmake_executable | ||
592 | } | ||
593 | |||
594 | preset_dict_build = { | ||
595 | "name": self.recipe_id, | ||
596 | "displayName": self.recipe_id_pretty, | ||
597 | "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch), | ||
598 | "configurePreset": self.recipe_id, | ||
599 | "inheritConfigureEnvironment": True | ||
600 | } | ||
601 | |||
602 | preset_dict_test = { | ||
603 | "name": self.recipe_id, | ||
604 | "displayName": self.recipe_id_pretty, | ||
605 | "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch), | ||
606 | "configurePreset": self.recipe_id, | ||
607 | "inheritConfigureEnvironment": True | ||
608 | } | ||
609 | |||
610 | preset_dict = { | ||
611 | "version": 3, # cmake 3.21, backward compatible with kirkstone | ||
612 | "configurePresets": [preset_dict_configure], | ||
613 | "buildPresets": [preset_dict_build], | ||
614 | "testPresets": [preset_dict_test] | ||
615 | } | ||
616 | |||
617 | # Finally write the json file | ||
618 | json_file = 'CMakeUserPresets.json' | ||
619 | json_path = os.path.join(self.real_srctree, json_file) | ||
620 | logger.info("Updating CMake preset: %s (%s)" % (json_file, json_path)) | ||
621 | if not os.path.exists(self.real_srctree): | ||
622 | os.makedirs(self.real_srctree) | ||
623 | try: | ||
624 | with open(json_path) as f: | ||
625 | orig_dict = json.load(f) | ||
626 | except json.decoder.JSONDecodeError: | ||
627 | logger.info( | ||
628 | "Decoding %s failed. Probably because of comments in the json file" % json_path) | ||
629 | orig_dict = {} | ||
630 | except FileNotFoundError: | ||
631 | orig_dict = {} | ||
632 | |||
633 | # Add or update the presets for the recipe and keep other presets | ||
634 | for k, v in preset_dict.items(): | ||
635 | if isinstance(v, list): | ||
636 | update_preset = v[0] | ||
637 | preset_added = False | ||
638 | if k in orig_dict: | ||
639 | for index, orig_preset in enumerate(orig_dict[k]): | ||
640 | if 'name' in orig_preset: | ||
641 | if orig_preset['name'] == update_preset['name']: | ||
642 | logger.debug("Updating preset: %s" % | ||
643 | orig_preset['name']) | ||
644 | orig_dict[k][index] = update_preset | ||
645 | preset_added = True | ||
646 | break | ||
647 | else: | ||
648 | logger.debug("keeping preset: %s" % | ||
649 | orig_preset['name']) | ||
650 | else: | ||
651 | logger.warn("preset without a name found") | ||
652 | if not preset_added: | ||
653 | if k not in orig_dict: | ||
654 | orig_dict[k] = [] | ||
655 | orig_dict[k].append(update_preset) | ||
656 | logger.debug("Added preset: %s" % | ||
657 | update_preset['name']) | ||
658 | else: | ||
659 | orig_dict[k] = v | ||
660 | |||
661 | with open(json_path, 'w') as f: | ||
662 | json.dump(orig_dict, f, indent=4) | ||
663 | |||
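Once CMakeUserPresets.json is written, the presets can be exercised directly from the source tree; the recipe id below is a hypothetical example of the BPN-PACKAGE_ARCH scheme used above:

    # Shell usage sketch, run from the source tree:
    #   cmake --list-presets
    #   cmake --build --preset cmake-example-cortexa57
    # The build command matches the cmd_compile string assembled above.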
664 | def gen_meson_wrapper(self): | ||
665 | """Generate a wrapper script to call meson with the cross environment""" | ||
666 | bb.utils.mkdirhier(self.ide_sdk_scripts_dir) | ||
667 | meson_wrapper = os.path.join(self.ide_sdk_scripts_dir, 'meson') | ||
668 | meson_real = os.path.join( | ||
669 | self.recipe_sysroot_native, 'usr', 'bin', 'meson.real') | ||
670 | with open(meson_wrapper, 'w') as mwrap: | ||
671 | mwrap.write("#!/bin/sh" + os.linesep) | ||
672 | for var, val in self.exported_vars.items(): | ||
673 | mwrap.write('export %s="%s"' % (var, val) + os.linesep) | ||
674 | mwrap.write("unset CC CXX CPP LD AR NM STRIP" + os.linesep) | ||
675 | private_temp = os.path.join(self.b, "meson-private", "tmp") | ||
676 | mwrap.write('mkdir -p "%s"' % private_temp + os.linesep) | ||
677 | mwrap.write('export TMPDIR="%s"' % private_temp + os.linesep) | ||
678 | mwrap.write('exec "%s" "$@"' % meson_real + os.linesep) | ||
679 | st = os.stat(meson_wrapper) | ||
680 | os.chmod(meson_wrapper, st.st_mode | stat.S_IEXEC) | ||
681 | self.meson_wrapper = meson_wrapper | ||
682 | self.cmd_compile = meson_wrapper + " compile -C " + self.b | ||
683 | |||
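The generated wrapper is a small shell script; a sketch of its expected content, with hypothetical paths, looks like this:

    #!/bin/sh
    export PKG_CONFIG_PATH="/work/recipe-sysroot/usr/lib/pkgconfig"   # one export line per exported variable
    unset CC CXX CPP LD AR NM STRIP
    mkdir -p "/work/build/meson-private/tmp"
    export TMPDIR="/work/build/meson-private/tmp"
    exec "/work/recipe-sysroot-native/usr/bin/meson.real" "$@"

The unset line ensures the toolchain is taken from the meson cross file rather than from the exported compiler variables.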
684 | def which(self, executable): | ||
685 | bin_path = shutil.which(executable, path=self.path) | ||
686 | if not bin_path: | ||
687 | raise DevtoolError( | ||
688 | 'Cannot find %s. Probably the recipe %s is not built yet.' % (executable, self.bpn)) | ||
689 | return bin_path | ||
690 | |||
691 | @staticmethod | ||
692 | def is_elf_file(file_path): | ||
693 | with open(file_path, "rb") as f: | ||
694 | data = f.read(4) | ||
695 | if data == b'\x7fELF': | ||
696 | return True | ||
697 | return False | ||
698 | |||
699 | def find_installed_binaries(self): | ||
700 | """find all executable elf files in the image directory""" | ||
701 | binaries = [] | ||
702 | d_len = len(self.d) | ||
703 | re_so = re.compile(r'.*\.so[.0-9]*$') | ||
704 | for root, _, files in os.walk(self.d, followlinks=False): | ||
705 | for file in files: | ||
706 | abs_name = os.path.join(root, file) | ||
707 | if os.path.islink(abs_name): | ||
708 | continue | ||
709 | if re_so.match(file): | ||
710 | continue | ||
711 | if os.access(abs_name, os.X_OK) and RecipeModified.is_elf_file(abs_name): | ||
712 | binaries.append(abs_name[d_len:]) | ||
713 | return sorted(binaries) | ||
714 | |||
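A hedged usage sketch; the binary path is a hypothetical example:

    # Paths are relative to ${D} and suitable as IDE debug/launch targets:
    for binary in recipe_modified.find_installed_binaries():
        print(binary)  # e.g. '/usr/bin/cpp-example'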
715 | def gen_delete_package_dirs(self): | ||
716 | """delete folders of package tasks | ||
717 | |||
718 | This is a workaround for and issue with recipes having their sources | ||
719 | downloaded as file:// | ||
720 | This likely breaks pseudo like: | ||
721 | path mismatch [3 links]: ino 79147802 db | ||
722 | .../build/tmp/.../cmake-example/1.0/package/usr/src/debug/ | ||
723 | cmake-example/1.0-r0/oe-local-files/cpp-example-lib.cpp | ||
724 | .../build/workspace/sources/cmake-example/oe-local-files/cpp-example-lib.cpp | ||
725 | Since the files are anyway outdated lets deleted them (also from pseudo's db) to workaround this issue. | ||
726 | """ | ||
727 | cmd_lines = ['#!/bin/sh'] | ||
728 | |||
729 | # Set up the appropriate environment | ||
730 | newenv = dict(os.environ) | ||
731 | for varvalue in self.fakerootenv.split(): | ||
732 | if '=' in varvalue: | ||
733 | splitval = varvalue.split('=', 1) | ||
734 | newenv[splitval[0]] = splitval[1] | ||
735 | |||
736 | # Replicate the environment variables from bitbake | ||
737 | for var, val in newenv.items(): | ||
738 | if not RecipeModified.is_valid_shell_variable(var): | ||
739 | continue | ||
740 | cmd_lines.append('%s="%s"' % (var, val)) | ||
741 | cmd_lines.append('export %s' % var) | ||
742 | |||
743 | # Delete the folders | ||
744 | pkg_dirs = ' '.join([os.path.join(self.workdir, d) for d in [ | ||
745 | "package", "packages-split", "pkgdata", "sstate-install-package", "debugsources.list", "*.spec"]]) | ||
746 | cmd = "%s rm -rf %s" % (self.fakerootcmd, pkg_dirs) | ||
747 | cmd_lines.append('%s || { echo "%s failed"; exit 1; }' % (cmd, cmd)) | ||
748 | |||
749 | return self.write_script(cmd_lines, 'delete_package_dirs') | ||
750 | |||
751 | def gen_deploy_target_script(self, args): | ||
752 | """Generate a script which does what devtool deploy-target does | ||
753 | |||
754 | This script is much quicker than devtool deploy-target because it | ||
755 | does not need to start a bitbake server. All information from tinfoil | ||
756 | is hard-coded in the generated script. | ||
757 | """ | ||
758 | cmd_lines = ['#!%s' % str(sys.executable)] | ||
759 | cmd_lines.append('import sys') | ||
760 | cmd_lines.append('devtool_sys_path = %s' % str(sys.path)) | ||
761 | cmd_lines.append('devtool_sys_path.reverse()') | ||
762 | cmd_lines.append('for p in devtool_sys_path:') | ||
763 | cmd_lines.append(' if p not in sys.path:') | ||
764 | cmd_lines.append(' sys.path.insert(0, p)') | ||
765 | cmd_lines.append('from devtool.deploy import deploy_no_d') | ||
766 | args_filter = ['debug', 'dry_run', 'key', 'no_check_space', 'no_host_check', | ||
767 | 'no_preserve', 'port', 'show_status', 'ssh_exec', 'strip', 'target'] | ||
768 | filtered_args_dict = {key: value for key, value in vars( | ||
769 | args).items() if key in args_filter} | ||
770 | cmd_lines.append('filtered_args_dict = %s' % str(filtered_args_dict)) | ||
771 | cmd_lines.append('class Dict2Class(object):') | ||
772 | cmd_lines.append(' def __init__(self, my_dict):') | ||
773 | cmd_lines.append(' for key in my_dict:') | ||
774 | cmd_lines.append(' setattr(self, key, my_dict[key])') | ||
775 | cmd_lines.append('filtered_args = Dict2Class(filtered_args_dict)') | ||
776 | cmd_lines.append( | ||
777 | 'setattr(filtered_args, "recipename", "%s")' % self.bpn) | ||
778 | cmd_lines.append('deploy_no_d("%s", "%s", "%s", "%s", "%s", "%s", %d, "%s", "%s", filtered_args)' % | ||
779 | (self.d, self.workdir, self.path, self.strip_cmd, | ||
780 | self.libdir, self.base_libdir, self.max_process, | ||
781 | self.fakerootcmd, self.fakerootenv)) | ||
782 | return self.write_script(cmd_lines, 'deploy_target') | ||
783 | |||
784 | def gen_install_deploy_script(self, args): | ||
785 | """Generate a script which does install and deploy""" | ||
786 | cmd_lines = ['#!/bin/bash'] | ||
787 | |||
788 | cmd_lines.append(self.gen_delete_package_dirs()) | ||
789 | |||
790 | # . oe-init-build-env $BUILDDIR | ||
791 | # Note: Sourcing scripts with arguments requires bash | ||
792 | cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % ( | ||
793 | self.oe_init_dir, self.oe_init_dir)) | ||
794 | cmd_lines.append('. "%s" "%s" || { echo ". %s %s failed"; exit 1; }' % ( | ||
795 | self.oe_init_build_env, self.topdir, self.oe_init_build_env, self.topdir)) | ||
796 | |||
797 | # bitbake -c install | ||
798 | cmd_lines.append( | ||
799 | 'bitbake %s -c install --force || { echo "bitbake %s -c install --force failed"; exit 1; }' % (self.bpn, self.bpn)) | ||
800 | |||
801 | # Self contained devtool deploy-target | ||
802 | cmd_lines.append(self.gen_deploy_target_script(args)) | ||
803 | |||
804 | return self.write_script(cmd_lines, 'install_and_deploy') | ||
805 | |||
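Putting the generator methods together, the resulting install_and_deploy_<recipe_id> script is roughly the following sketch; all paths and the recipe name are hypothetical:

    #!/bin/bash
    /work/workspace/ide-sdk/cmake-example/scripts/delete_package_dirs_cmake-example-cortexa57
    cd "/work/poky" || { echo "cd /work/poky failed"; exit 1; }
    . "/work/poky/oe-init-build-env" "/work/build" || { echo ". /work/poky/oe-init-build-env /work/build failed"; exit 1; }
    bitbake cmake-example -c install --force || { echo "bitbake cmake-example -c install --force failed"; exit 1; }
    /work/workspace/ide-sdk/cmake-example/scripts/deploy_target_cmake-example-cortexa57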
806 | def write_script(self, cmd_lines, script_name): | ||
807 | bb.utils.mkdirhier(self.ide_sdk_scripts_dir) | ||
808 | script_name_arch = script_name + '_' + self.recipe_id | ||
809 | script_file = os.path.join(self.ide_sdk_scripts_dir, script_name_arch) | ||
810 | with open(script_file, 'w') as script_f: | ||
811 | script_f.write(os.linesep.join(cmd_lines)) | ||
812 | st = os.stat(script_file) | ||
813 | os.chmod(script_file, st.st_mode | stat.S_IEXEC) | ||
814 | return script_file | ||
815 | |||
816 | @property | ||
817 | def oe_init_build_env(self): | ||
818 | """Find the oe-init-build-env used for this setup""" | ||
819 | oe_init_dir = self.oe_init_dir | ||
820 | if oe_init_dir: | ||
821 | return os.path.join(oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV) | ||
822 | return None | ||
823 | |||
824 | @property | ||
825 | def oe_init_dir(self): | ||
826 | """Find the directory where the oe-init-build-env is located | ||
827 | |||
828 | Assumption: There might be a layer with higher priority than poky | ||
829 | which provides the oe-init-build-env in the layer's toplevel folder. | ||
830 | """ | ||
831 | if not self.__oe_init_dir: | ||
832 | for layer in reversed(self.bblayers): | ||
833 | result = subprocess.run( | ||
834 | ['git', 'rev-parse', '--show-toplevel'], cwd=layer, capture_output=True) | ||
835 | if result.returncode == 0: | ||
836 | oe_init_dir = result.stdout.decode('utf-8').strip() | ||
837 | oe_init_path = os.path.join( | ||
838 | oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV) | ||
839 | if os.path.exists(oe_init_path): | ||
840 | logger.debug("Using %s from: %s" % ( | ||
841 | RecipeModified.OE_INIT_BUILD_ENV, oe_init_path)) | ||
842 | self.__oe_init_dir = oe_init_dir | ||
843 | break | ||
844 | if not self.__oe_init_dir: | ||
845 | logger.error("Cannot find the bitbake top level folder") | ||
846 | return self.__oe_init_dir | ||
847 | |||
848 | |||
849 | def ide_setup(args, config, basepath, workspace): | ||
850 | """Generate the IDE configuration for the workspace""" | ||
851 | |||
852 | # Explicitly passing some special recipes does not make sense | ||
853 | for recipe in args.recipenames: | ||
854 | if recipe in ['meta-ide-support', 'build-sysroots']: | ||
855 | raise DevtoolError("Invalid recipe: %s." % recipe) | ||
856 | |||
857 | # Collect information about tasks which need to be bitbaked | ||
858 | bootstrap_tasks = [] | ||
859 | bootstrap_tasks_late = [] | ||
860 | tinfoil = setup_tinfoil(config_only=False, basepath=basepath) | ||
861 | try: | ||
862 | # define mode depending on recipes which need to be processed | ||
863 | recipes_image_names = [] | ||
864 | recipes_modified_names = [] | ||
865 | recipes_other_names = [] | ||
866 | for recipe in args.recipenames: | ||
867 | try: | ||
868 | check_workspace_recipe( | ||
869 | workspace, recipe, bbclassextend=True) | ||
870 | recipes_modified_names.append(recipe) | ||
871 | except DevtoolError: | ||
872 | recipe_d = parse_recipe( | ||
873 | config, tinfoil, recipe, appends=True, filter_workspace=False) | ||
874 | if not recipe_d: | ||
875 | raise DevtoolError("Parsing recipe %s failed" % recipe) | ||
876 | if bb.data.inherits_class('image', recipe_d): | ||
877 | recipes_image_names.append(recipe) | ||
878 | else: | ||
879 | recipes_other_names.append(recipe) | ||
880 | |||
881 | invalid_params = False | ||
882 | if args.mode == DevtoolIdeMode.shared: | ||
883 | if len(recipes_modified_names): | ||
884 | logger.error("In shared sysroots mode modified recipes %s cannot be handled." % str( | ||
885 | recipes_modified_names)) | ||
886 | invalid_params = True | ||
887 | if args.mode == DevtoolIdeMode.modified: | ||
888 | if len(recipes_other_names): | ||
889 | logger.error("Only in shared sysroots mode not modified recipes %s can be handled." % str( | ||
890 | recipes_other_names)) | ||
891 | invalid_params = True | ||
892 | if len(recipes_image_names) != 1: | ||
893 | logger.error( | ||
894 | "One image recipe is required as the rootfs for the remote development.") | ||
895 | invalid_params = True | ||
896 | for modified_recipe_name in recipes_modified_names: | ||
897 | if modified_recipe_name.startswith('nativesdk-') or modified_recipe_name.endswith('-native'): | ||
898 | logger.error( | ||
899 | "Only cross compiled recipes are support. %s is not cross." % modified_recipe_name) | ||
900 | invalid_params = True | ||
901 | |||
902 | if invalid_params: | ||
903 | raise DevtoolError("Invalid parameters are passed.") | ||
904 | |||
905 | # For the shared sysroots mode, add all dependencies of all the images to the sysroots | ||
906 | # For the modified mode provide one rootfs and the corresponding debug symbols via rootfs-dbg | ||
907 | recipes_images = [] | ||
908 | for recipes_image_name in recipes_image_names: | ||
909 | logger.info("Using image: %s" % recipes_image_name) | ||
910 | recipe_image = RecipeImage(recipes_image_name) | ||
911 | recipe_image.initialize(config, tinfoil) | ||
912 | bootstrap_tasks += recipe_image.bootstrap_tasks | ||
913 | recipes_images.append(recipe_image) | ||
914 | |||
915 | # Provide a Direct SDK with shared sysroots | ||
916 | recipes_not_modified = [] | ||
917 | if args.mode == DevtoolIdeMode.shared: | ||
918 | ide_support = RecipeMetaIdeSupport() | ||
919 | ide_support.initialize(config, tinfoil) | ||
920 | bootstrap_tasks += ide_support.bootstrap_tasks | ||
921 | |||
922 | logger.info("Adding %s to the Direct SDK sysroots." % | ||
923 | str(recipes_other_names)) | ||
924 | for recipe_name in recipes_other_names: | ||
925 | recipe_not_modified = RecipeNotModified(recipe_name) | ||
926 | bootstrap_tasks += recipe_not_modified.bootstrap_tasks | ||
927 | recipes_not_modified.append(recipe_not_modified) | ||
928 | |||
929 | build_sysroots = RecipeBuildSysroots() | ||
930 | build_sysroots.initialize(config, tinfoil) | ||
931 | bootstrap_tasks_late += build_sysroots.bootstrap_tasks | ||
932 | shared_env = SharedSysrootsEnv() | ||
933 | shared_env.initialize(ide_support, build_sysroots) | ||
934 | |||
935 | recipes_modified = [] | ||
936 | if args.mode == DevtoolIdeMode.modified: | ||
937 | logger.info("Setting up workspaces for modified recipe: %s" % | ||
938 | str(recipes_modified_names)) | ||
939 | gdbs_cross = {} | ||
940 | for recipe_name in recipes_modified_names: | ||
941 | recipe_modified = RecipeModified(recipe_name) | ||
942 | recipe_modified.initialize(config, workspace, tinfoil) | ||
943 | bootstrap_tasks += recipe_modified.bootstrap_tasks | ||
944 | recipes_modified.append(recipe_modified) | ||
945 | |||
946 | if recipe_modified.target_arch not in gdbs_cross: | ||
947 | target_device = TargetDevice(args) | ||
948 | gdb_cross = RecipeGdbCross( | ||
949 | args, recipe_modified.target_arch, target_device) | ||
950 | gdb_cross.initialize(config, workspace, tinfoil) | ||
951 | bootstrap_tasks += gdb_cross.bootstrap_tasks | ||
952 | gdbs_cross[recipe_modified.target_arch] = gdb_cross | ||
953 | recipe_modified.gdb_cross = gdbs_cross[recipe_modified.target_arch] | ||
954 | |||
955 | finally: | ||
956 | tinfoil.shutdown() | ||
957 | |||
958 | if not args.skip_bitbake: | ||
959 | bb_cmd = 'bitbake ' | ||
960 | if args.bitbake_k: | ||
961 | bb_cmd += "-k " | ||
962 | bb_cmd_early = bb_cmd + ' '.join(bootstrap_tasks) | ||
963 | exec_build_env_command( | ||
964 | config.init_path, basepath, bb_cmd_early, watch=True) | ||
965 | if bootstrap_tasks_late: | ||
966 | bb_cmd_late = bb_cmd + ' '.join(bootstrap_tasks_late) | ||
967 | exec_build_env_command( | ||
968 | config.init_path, basepath, bb_cmd_late, watch=True) | ||
969 | |||
970 | for recipe_image in recipes_images: | ||
971 | if recipe_image.gdbserver_missing: | ||
972 | logger.warning( | ||
973 | "gdbserver not installed in image %s. Remote debugging will not be available" % recipe_image) | ||
974 | |||
975 | if recipe_image.combine_dbg_image is False: | ||
976 | logger.warning( | ||
977 | 'IMAGE_CLASSES += "image-combined-dbg" is missing for image %s. Remote debugging will not find debug symbols from rootfs-dbg.' % recipe_image.name) | ||
978 | |||
979 | # Instantiate the active IDE plugin | ||
980 | ide = ide_plugins[args.ide]() | ||
981 | if args.mode == DevtoolIdeMode.shared: | ||
982 | ide.setup_shared_sysroots(shared_env) | ||
983 | elif args.mode == DevtoolIdeMode.modified: | ||
984 | for recipe_modified in recipes_modified: | ||
985 | if recipe_modified.build_tool is BuildTool.CMAKE: | ||
986 | recipe_modified.cmake_preset() | ||
987 | if recipe_modified.build_tool is BuildTool.MESON: | ||
988 | recipe_modified.gen_meson_wrapper() | ||
989 | ide.setup_modified_recipe( | ||
990 | args, recipe_image, recipe_modified) | ||
991 | else: | ||
992 | raise DevtoolError("Must not end up here.") | ||
993 | |||
994 | |||
995 | def register_commands(subparsers, context): | ||
996 | """Register devtool subcommands from this plugin""" | ||
997 | |||
998 | global ide_plugins | ||
999 | |||
1000 | # Search for IDE plugins in all sub-folders named ide_plugins where devtool searches for plugins. | ||
1001 | pluginpaths = [os.path.join(path, 'ide_plugins') | ||
1002 | for path in context.pluginpaths] | ||
1003 | ide_plugin_modules = [] | ||
1004 | for pluginpath in pluginpaths: | ||
1005 | scriptutils.load_plugins(logger, ide_plugin_modules, pluginpath) | ||
1006 | |||
1007 | for ide_plugin_module in ide_plugin_modules: | ||
1008 | if hasattr(ide_plugin_module, 'register_ide_plugin'): | ||
1009 | ide_plugin_module.register_ide_plugin(ide_plugins) | ||
1010 | # Sort plugins according to their priority. The first entry is the default IDE plugin. | ||
1011 | ide_plugins = dict(sorted(ide_plugins.items(), | ||
1012 | key=lambda p: p[1].ide_plugin_priority(), reverse=True)) | ||
1013 | |||
1014 | parser_ide_sdk = subparsers.add_parser('ide-sdk', group='working', order=50, formatter_class=RawTextHelpFormatter, | ||
1015 | help='Setup the SDK and configure the IDE') | ||
1016 | parser_ide_sdk.add_argument( | ||
1017 | 'recipenames', nargs='+', help='Generate an IDE configuration suitable for working on the given recipes.\n' | ||
1018 | 'Depending on the --mode parameter, different types of SDKs and IDE configurations are generated.') | ||
1019 | parser_ide_sdk.add_argument( | ||
1020 | '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified, | ||
1021 | help='Different SDK types are supported:\n' | ||
1022 | '- "' + DevtoolIdeMode.modified.name + '" (default):\n' | ||
1023 | ' devtool modify creates a workspace to work on the source code of a recipe.\n' | ||
1024 | ' devtool ide-sdk builds the SDK and generates the IDE configuration(s) in the workspace directories\n' | ||
1025 | ' Usage example:\n' | ||
1026 | ' devtool modify cmake-example\n' | ||
1027 | ' devtool ide-sdk cmake-example core-image-minimal\n' | ||
1028 | ' Start the IDE in the workspace folder\n' | ||
1029 | ' At least one devtool modified recipe plus one image recipe are required:\n' | ||
1030 | ' The image recipe is used to generate the target image and the remote debug configuration.\n' | ||
1031 | '- "' + DevtoolIdeMode.shared.name + '":\n' | ||
1032 | ' Usage example:\n' | ||
1033 | ' devtool ide-sdk -m ' + DevtoolIdeMode.shared.name + ' recipe(s)\n' | ||
1034 | ' This command generates a cross-toolchain as well as the corresponding shared sysroot directories.\n' | ||
1035 | ' To use this tool-chain the environment-* file found in the deploy directory (DEPLOY_DIR_IMAGE) needs to be sourced into a shell.\n' | ||
1036 | ' In case of VSCode and cmake the tool-chain is also exposed as a cmake-kit') | ||
1037 | default_ide = list(ide_plugins.keys())[0] | ||
1038 | parser_ide_sdk.add_argument( | ||
1039 | '-i', '--ide', choices=ide_plugins.keys(), default=default_ide, | ||
1040 | help='Setup the configuration for this IDE (default: %s)' % default_ide) | ||
1041 | parser_ide_sdk.add_argument( | ||
1042 | '-t', '--target', default='root@192.168.7.2', | ||
1043 | help='Live target machine running an ssh server: user@hostname.') | ||
1044 | parser_ide_sdk.add_argument( | ||
1045 | '-G', '--gdbserver-port-start', default="1234", help='Port where gdbserver is listening.') | ||
1046 | parser_ide_sdk.add_argument( | ||
1047 | '-c', '--no-host-check', help='Disable ssh host key checking', action='store_true') | ||
1048 | parser_ide_sdk.add_argument( | ||
1049 | '-e', '--ssh-exec', help='Executable to use in place of ssh') | ||
1050 | parser_ide_sdk.add_argument( | ||
1051 | '-P', '--port', help='Specify ssh port to use for connection to the target') | ||
1052 | parser_ide_sdk.add_argument( | ||
1053 | '-I', '--key', help='Specify ssh private key for connection to the target') | ||
1054 | parser_ide_sdk.add_argument( | ||
1055 | '--skip-bitbake', help='Generate IDE configuration but skip calling bitbake to update the SDK.', action='store_true') | ||
1056 | parser_ide_sdk.add_argument( | ||
1057 | '-k', '--bitbake-k', help='Pass -k parameter to bitbake', action='store_true') | ||
1058 | parser_ide_sdk.add_argument( | ||
1059 | '--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false') | ||
1060 | parser_ide_sdk.add_argument( | ||
1061 | '-n', '--dry-run', help='List files to be deployed only', action='store_true') | ||
1062 | parser_ide_sdk.add_argument( | ||
1063 | '-s', '--show-status', help='Show progress/status output', action='store_true') | ||
1064 | parser_ide_sdk.add_argument( | ||
1065 | '-p', '--no-preserve', help='Do not preserve existing files', action='store_true') | ||
1066 | parser_ide_sdk.add_argument( | ||
1067 | '--no-check-space', help='Do not check for available space before deploying', action='store_true') | ||
1068 | parser_ide_sdk.add_argument( | ||
1069 | '--debug-build-config', help='Use debug build flags, for example set CMAKE_BUILD_TYPE=Debug', action='store_true') | ||
1070 | parser_ide_sdk.set_defaults(func=ide_setup) | ||
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py index 95384c5333..18daef30c3 100644 --- a/scripts/lib/devtool/menuconfig.py +++ b/scripts/lib/devtool/menuconfig.py | |||
@@ -3,6 +3,8 @@ | |||
3 | # Copyright (C) 2018 Xilinx | 3 | # Copyright (C) 2018 Xilinx |
4 | # Written by: Chandana Kalluri <ckalluri@xilinx.com> | 4 | # Written by: Chandana Kalluri <ckalluri@xilinx.com> |
5 | # | 5 | # |
6 | # SPDX-License-Identifier: MIT | ||
7 | # | ||
6 | # This program is free software; you can redistribute it and/or modify | 8 | # This program is free software; you can redistribute it and/or modify |
7 | # it under the terms of the GNU General Public License version 2 as | 9 | # it under the terms of the GNU General Public License version 2 as |
8 | # published by the Free Software Foundation. | 10 | # published by the Free Software Foundation. |
@@ -43,7 +45,7 @@ def menuconfig(args, config, basepath, workspace): | |||
43 | return 1 | 45 | return 1 |
44 | 46 | ||
45 | check_workspace_recipe(workspace, args.component) | 47 | check_workspace_recipe(workspace, args.component) |
46 | pn = rd.getVar('PN', True) | 48 | pn = rd.getVar('PN') |
47 | 49 | ||
48 | if not rd.getVarFlag('do_menuconfig','task'): | 50 | if not rd.getVarFlag('do_menuconfig','task'): |
49 | raise DevtoolError("This recipe does not support menuconfig option") | 51 | raise DevtoolError("This recipe does not support menuconfig option") |
diff --git a/scripts/lib/devtool/sdk.py b/scripts/lib/devtool/sdk.py index 3aa42a1466..9aefd7e354 100644 --- a/scripts/lib/devtool/sdk.py +++ b/scripts/lib/devtool/sdk.py | |||
@@ -207,7 +207,7 @@ def sdk_update(args, config, basepath, workspace): | |||
207 | if not sstate_mirrors: | 207 | if not sstate_mirrors: |
208 | with open(os.path.join(conf_dir, 'site.conf'), 'a') as f: | 208 | with open(os.path.join(conf_dir, 'site.conf'), 'a') as f: |
209 | f.write('SCONF_VERSION = "%s"\n' % site_conf_version) | 209 | f.write('SCONF_VERSION = "%s"\n' % site_conf_version) |
210 | f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver) | 210 | f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH"\n' % updateserver) |
211 | finally: | 211 | finally: |
212 | shutil.rmtree(tmpsdk_dir) | 212 | shutil.rmtree(tmpsdk_dir) |
213 | 213 | ||
@@ -300,7 +300,8 @@ def sdk_install(args, config, basepath, workspace): | |||
300 | return 2 | 300 | return 2 |
301 | 301 | ||
302 | try: | 302 | try: |
303 | exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots', watch=True) | 303 | exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_native_sysroot', watch=True) |
304 | exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_target_sysroot', watch=True) | ||
304 | except bb.process.ExecutionError as e: | 305 | except bb.process.ExecutionError as e: |
305 | raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e))) | 306 | raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e))) |
306 | 307 | ||
diff --git a/scripts/lib/devtool/search.py b/scripts/lib/devtool/search.py index d24040df37..70b81cac5e 100644 --- a/scripts/lib/devtool/search.py +++ b/scripts/lib/devtool/search.py | |||
@@ -62,10 +62,11 @@ def search(args, config, basepath, workspace): | |||
62 | with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f: | 62 | with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f: |
63 | for line in f: | 63 | for line in f: |
64 | if ': ' in line: | 64 | if ': ' in line: |
65 | splitline = line.split(':', 1) | 65 | splitline = line.split(': ', 1) |
66 | key = splitline[0] | 66 | key = splitline[0] |
67 | value = splitline[1].strip() | 67 | value = splitline[1].strip() |
68 | if key in ['PKG_%s' % pkg, 'DESCRIPTION', 'FILES_INFO'] or key.startswith('FILERPROVIDES_'): | 68 | key = key.replace(":" + pkg, "") |
69 | if key in ['PKG', 'DESCRIPTION', 'FILES_INFO', 'FILERPROVIDES']: | ||
69 | if keyword_rc.search(value): | 70 | if keyword_rc.search(value): |
70 | match = True | 71 | match = True |
71 | break | 72 | break |
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py index 7b62b7e7b8..bd009f44b1 100644 --- a/scripts/lib/devtool/standard.py +++ b/scripts/lib/devtool/standard.py | |||
@@ -147,6 +147,8 @@ def add(args, config, basepath, workspace): | |||
147 | extracmdopts += ' -a' | 147 | extracmdopts += ' -a' |
148 | if args.npm_dev: | 148 | if args.npm_dev: |
149 | extracmdopts += ' --npm-dev' | 149 | extracmdopts += ' --npm-dev' |
150 | if args.no_pypi: | ||
151 | extracmdopts += ' --no-pypi' | ||
150 | if args.mirrors: | 152 | if args.mirrors: |
151 | extracmdopts += ' --mirrors' | 153 | extracmdopts += ' --mirrors' |
152 | if args.srcrev: | 154 | if args.srcrev: |
@@ -234,10 +236,14 @@ def add(args, config, basepath, workspace): | |||
234 | if args.fetchuri and not args.no_git: | 236 | if args.fetchuri and not args.no_git: |
235 | setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data) | 237 | setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data) |
236 | 238 | ||
237 | initial_rev = None | 239 | initial_rev = {} |
238 | if os.path.exists(os.path.join(srctree, '.git')): | 240 | if os.path.exists(os.path.join(srctree, '.git')): |
239 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) | 241 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) |
240 | initial_rev = stdout.rstrip() | 242 | initial_rev["."] = stdout.rstrip() |
243 | (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse HEAD` $PWD\'', cwd=srctree) | ||
244 | for line in stdout.splitlines(): | ||
245 | (rev, submodule) = line.split() | ||
246 | initial_rev[os.path.relpath(submodule, srctree)] = rev | ||
241 | 247 | ||
242 | if args.src_subdir: | 248 | if args.src_subdir: |
243 | srctree = os.path.join(srctree, args.src_subdir) | 249 | srctree = os.path.join(srctree, args.src_subdir) |
@@ -251,16 +257,17 @@ def add(args, config, basepath, workspace): | |||
251 | if b_is_s: | 257 | if b_is_s: |
252 | f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree) | 258 | f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree) |
253 | if initial_rev: | 259 | if initial_rev: |
254 | f.write('\n# initial_rev: %s\n' % initial_rev) | 260 | for key, value in initial_rev.items(): |
261 | f.write('\n# initial_rev %s: %s\n' % (key, value)) | ||
255 | 262 | ||
256 | if args.binary: | 263 | if args.binary: |
257 | f.write('do_install_append() {\n') | 264 | f.write('do_install:append() {\n') |
258 | f.write(' rm -rf ${D}/.git\n') | 265 | f.write(' rm -rf ${D}/.git\n') |
259 | f.write(' rm -f ${D}/singletask.lock\n') | 266 | f.write(' rm -f ${D}/singletask.lock\n') |
260 | f.write('}\n') | 267 | f.write('}\n') |
261 | 268 | ||
262 | if bb.data.inherits_class('npm', rd): | 269 | if bb.data.inherits_class('npm', rd): |
263 | f.write('python do_configure_append() {\n') | 270 | f.write('python do_configure:append() {\n') |
264 | f.write(' pkgdir = d.getVar("NPM_PACKAGE")\n') | 271 | f.write(' pkgdir = d.getVar("NPM_PACKAGE")\n') |
265 | f.write(' lockfile = os.path.join(pkgdir, "singletask.lock")\n') | 272 | f.write(' lockfile = os.path.join(pkgdir, "singletask.lock")\n') |
266 | f.write(' bb.utils.remove(lockfile)\n') | 273 | f.write(' bb.utils.remove(lockfile)\n') |
@@ -318,10 +325,6 @@ def _check_compatible_recipe(pn, d): | |||
318 | raise DevtoolError("The %s recipe is a packagegroup, and therefore is " | 325 | raise DevtoolError("The %s recipe is a packagegroup, and therefore is " |
319 | "not supported by this tool" % pn, 4) | 326 | "not supported by this tool" % pn, 4) |
320 | 327 | ||
321 | if bb.data.inherits_class('meta', d): | ||
322 | raise DevtoolError("The %s recipe is a meta-recipe, and therefore is " | ||
323 | "not supported by this tool" % pn, 4) | ||
324 | |||
325 | if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'): | 328 | if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'): |
326 | # Not an incompatibility error per se, so we don't pass the error code | 329 | # Not an incompatibility error per se, so we don't pass the error code |
327 | raise DevtoolError("externalsrc is currently enabled for the %s " | 330 | raise DevtoolError("externalsrc is currently enabled for the %s " |
@@ -357,7 +360,7 @@ def _move_file(src, dst, dry_run_outdir=None, base_outdir=None): | |||
357 | bb.utils.mkdirhier(dst_d) | 360 | bb.utils.mkdirhier(dst_d) |
358 | shutil.move(src, dst) | 361 | shutil.move(src, dst) |
359 | 362 | ||
360 | def _copy_file(src, dst, dry_run_outdir=None): | 363 | def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None): |
361 | """Copy a file. Creates all the directory components of destination path.""" | 364 | """Copy a file. Creates all the directory components of destination path.""" |
362 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' | 365 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' |
363 | logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix)) | 366 | logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix)) |
@@ -457,7 +460,7 @@ def sync(args, config, basepath, workspace): | |||
457 | finally: | 460 | finally: |
458 | tinfoil.shutdown() | 461 | tinfoil.shutdown() |
459 | 462 | ||
460 | def symlink_oelocal_files_srctree(rd,srctree): | 463 | def symlink_oelocal_files_srctree(rd, srctree): |
461 | import oe.patch | 464 | import oe.patch |
462 | if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')): | 465 | if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')): |
463 | # If recipe extracts to ${WORKDIR}, symlink the files into the srctree | 466 | # If recipe extracts to ${WORKDIR}, symlink the files into the srctree |
@@ -481,11 +484,7 @@ def symlink_oelocal_files_srctree(rd,srctree): | |||
481 | os.symlink('oe-local-files/%s' % fn, destpth) | 484 | os.symlink('oe-local-files/%s' % fn, destpth) |
482 | addfiles.append(os.path.join(relpth, fn)) | 485 | addfiles.append(os.path.join(relpth, fn)) |
483 | if addfiles: | 486 | if addfiles: |
484 | bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree) | 487 | oe.patch.GitApplyTree.commitIgnored("Add local file symlinks", dir=srctree, files=addfiles, d=rd) |
485 | useroptions = [] | ||
486 | oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) | ||
487 | bb.process.run('git %s commit -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree) | ||
488 | |||
489 | 488 | ||
490 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): | 489 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): |
491 | """Extract sources of a recipe""" | 490 | """Extract sources of a recipe""" |
@@ -523,8 +522,10 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
523 | history = d.varhistory.variable('SRC_URI') | 522 | history = d.varhistory.variable('SRC_URI') |
524 | for event in history: | 523 | for event in history: |
525 | if not 'flag' in event: | 524 | if not 'flag' in event: |
526 | if event['op'].startswith(('_append[', '_prepend[')): | 525 | if event['op'].startswith((':append[', ':prepend[')): |
527 | extra_overrides.append(event['op'].split('[')[1].split(']')[0]) | 526 | override = event['op'].split('[')[1].split(']')[0] |
527 | if not override.startswith('pn-'): | ||
528 | extra_overrides.append(override) | ||
528 | # We want to remove duplicate overrides. If a recipe had multiple | 529 | # We want to remove duplicate overrides. If a recipe had multiple |
529 | # SRC_URI_override += values it would cause multiple instances of | 530 |
530 | # overrides. This doesn't play nicely with things like creating a | 531 | # overrides. This doesn't play nicely with things like creating a |
@@ -569,6 +570,9 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
569 | logger.debug('writing append file %s' % appendfile) | 570 | logger.debug('writing append file %s' % appendfile) |
570 | with open(appendfile, 'a') as f: | 571 | with open(appendfile, 'a') as f: |
571 | f.write('###--- _extract_source\n') | 572 | f.write('###--- _extract_source\n') |
573 | f.write('deltask do_recipe_qa\n') | ||
574 | f.write('deltask do_recipe_qa_setscene\n') | ||
575 | f.write('ERROR_QA:remove = "patch-fuzz"\n') | ||
572 | f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir) | 576 | f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir) |
573 | f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch) | 577 | f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch) |
574 | if not is_kernel_yocto: | 578 | if not is_kernel_yocto: |
@@ -586,6 +590,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
586 | preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps') | 590 | preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps') |
587 | with open(preservestampfile, 'w') as f: | 591 | with open(preservestampfile, 'w') as f: |
588 | f.write(d.getVar('STAMP')) | 592 | f.write(d.getVar('STAMP')) |
593 | tinfoil.modified_files() | ||
589 | try: | 594 | try: |
590 | if is_kernel_yocto: | 595 | if is_kernel_yocto: |
591 | # We need to generate the kernel config | 596 | # We need to generate the kernel config |
@@ -648,23 +653,34 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
648 | 653 | ||
649 | if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer): | 654 | if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer): |
650 | shutil.rmtree(workshareddir) | 655 | shutil.rmtree(workshareddir) |
651 | oe.path.copyhardlinktree(srcsubdir,workshareddir) | 656 | oe.path.copyhardlinktree(srcsubdir, workshareddir) |
652 | elif not os.path.exists(workshareddir): | 657 | elif not os.path.exists(workshareddir): |
653 | oe.path.copyhardlinktree(srcsubdir,workshareddir) | 658 | oe.path.copyhardlinktree(srcsubdir, workshareddir) |
654 | 659 | ||
655 | tempdir_localdir = os.path.join(tempdir, 'oe-local-files') | 660 | tempdir_localdir = os.path.join(tempdir, 'oe-local-files') |
656 | srctree_localdir = os.path.join(srctree, 'oe-local-files') | 661 | srctree_localdir = os.path.join(srctree, 'oe-local-files') |
657 | 662 | ||
658 | if sync: | 663 | if sync: |
659 | bb.process.run('git fetch file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree) | 664 | try: |
660 | 665 | logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch)) | |
661 | # Move oe-local-files directory to srctree | 666 | bb.process.run('git branch -f ' + devbranch + '.bak', cwd=srctree) |
662 | # As the oe-local-files is not part of the constructed git tree, | 667 | |
663 | # remove them directly during the synchrounizating might surprise | 668 | # Use git fetch to update the source with the current recipe |
664 | # the users. Instead, we move it to oe-local-files.bak and remind | 669 | # To be able to update the currently checked out branch with |
665 | # user in the log message. | 670 | # possibly new history (no fast-forward) git needs to be told |
671 | # that's ok | ||
672 | logger.info('Syncing source files including patches to git branch: %s' % devbranch) | ||
673 | bb.process.run('git fetch --update-head-ok --force file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree) | ||
674 | except bb.process.ExecutionError as e: | ||
675 | raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e)) | ||
676 | |||
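The key git detail in the sync path above: fetching into the currently checked-out branch is refused unless --update-head-ok is passed, and replacing it with rewritten (non-fast-forward) history additionally needs --force. A standalone sketch with hypothetical paths:

    import bb.process

    srctree = '/work/sources/foo'        # user's checkout (hypothetical)
    srcsubdir = '/tmp/devtoolXYZ/foo'    # freshly extracted tree (hypothetical)
    devbranch = 'devtool'
    # Keep an escape hatch, then force-update the checked-out branch
    bb.process.run('git branch -f %s.bak' % devbranch, cwd=srctree)
    bb.process.run('git fetch --update-head-ok --force file://%s %s:%s'
                   % (srcsubdir, devbranch, devbranch), cwd=srctree)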
677 | # Move the oe-local-files directory to srctree. | ||
678 | # As oe-local-files is not part of the constructed git tree, | ||
679 | # removing it directly during the synchronization might surprise | ||
680 | # the user. Instead, we move it to oe-local-files.bak and remind | ||
681 | # the user in the log message. | ||
666 | if os.path.exists(srctree_localdir + '.bak'): | 682 | if os.path.exists(srctree_localdir + '.bak'): |
667 | shutil.rmtree(srctree_localdir, srctree_localdir + '.bak') | 683 | shutil.rmtree(srctree_localdir + '.bak') |
668 | 684 | ||
669 | if os.path.exists(srctree_localdir): | 685 | if os.path.exists(srctree_localdir): |
670 | logger.info('Backing up current local file directory %s' % srctree_localdir) | 686 | logger.info('Backing up current local file directory %s' % srctree_localdir) |
@@ -680,7 +696,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
680 | shutil.move(tempdir_localdir, srcsubdir) | 696 | shutil.move(tempdir_localdir, srcsubdir) |
681 | 697 | ||
682 | shutil.move(srcsubdir, srctree) | 698 | shutil.move(srcsubdir, srctree) |
683 | symlink_oelocal_files_srctree(d,srctree) | 699 | symlink_oelocal_files_srctree(d, srctree) |
684 | 700 | ||
685 | if is_kernel_yocto: | 701 | if is_kernel_yocto: |
686 | logger.info('Copying kernel config to srctree') | 702 | logger.info('Copying kernel config to srctree') |
@@ -746,14 +762,14 @@ def _check_preserve(config, recipename): | |||
746 | os.remove(removefile) | 762 | os.remove(removefile) |
747 | else: | 763 | else: |
748 | tf.write(line) | 764 | tf.write(line) |
749 | os.rename(newfile, origfile) | 765 | bb.utils.rename(newfile, origfile) |
750 | 766 | ||
751 | def get_staging_kver(srcdir): | 767 | def get_staging_kver(srcdir): |
752 | # Kernel version from work-shared | 768 | # Kernel version from work-shared |
753 | kerver = [] | 769 | kerver = [] |
754 | staging_kerVer="" | 770 | staging_kerVer="" |
755 | if os.path.exists(srcdir) and os.listdir(srcdir): | 771 | if os.path.exists(srcdir) and os.listdir(srcdir): |
756 | with open(os.path.join(srcdir,"Makefile")) as f: | 772 | with open(os.path.join(srcdir, "Makefile")) as f: |
757 | version = [next(f) for x in range(5)][1:4] | 773 | version = [next(f) for x in range(5)][1:4] |
758 | for word in version: | 774 | for word in version: |
759 | kerver.append(word.split('= ')[1].split('\n')[0]) | 775 | kerver.append(word.split('= ')[1].split('\n')[0]) |
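A worked example of the Makefile parsing in get_staging_kver() above: lines 2-4 of a kernel top-level Makefile carry VERSION, PATCHLEVEL and SUBLEVEL, which the loop strips down to the bare numbers (values illustrative).

    lines = ['# SPDX-License-Identifier: GPL-2.0\n',
             'VERSION = 6\n',
             'PATCHLEVEL = 1\n',
             'SUBLEVEL = 38\n',
             'EXTRAVERSION =\n']
    version = lines[1:4]
    kerver = [word.split('= ')[1].split('\n')[0] for word in version]
    assert kerver == ['6', '1', '38']    # presumably joined as '6.1.38'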
@@ -763,10 +779,20 @@ def get_staging_kver(srcdir): | |||
763 | def get_staging_kbranch(srcdir): | 779 | def get_staging_kbranch(srcdir): |
764 | staging_kbranch = "" | 780 | staging_kbranch = "" |
765 | if os.path.exists(srcdir) and os.listdir(srcdir): | 781 | if os.path.exists(srcdir) and os.listdir(srcdir): |
766 | (branch, _) = bb.process.run('git branch | grep \* | cut -d \' \' -f2', cwd=srcdir) | 782 | (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir) |
767 | staging_kbranch = "".join(branch.split('\n')[0]) | 783 | staging_kbranch = "".join(branch.split('\n')[0]) |
768 | return staging_kbranch | 784 | return staging_kbranch |
769 | 785 | ||
786 | def get_real_srctree(srctree, s, workdir): | ||
787 | # Check that recipe isn't using a shared workdir | ||
788 | s = os.path.abspath(s) | ||
789 | workdir = os.path.abspath(workdir) | ||
790 | if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir: | ||
791 | # Handle if S is set to a subdirectory of the source | ||
792 | srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1] | ||
793 | srctree = os.path.join(srctree, srcsubdir) | ||
794 | return srctree | ||
795 | |||
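Usage sketch for the new get_real_srctree() helper, with hypothetical paths: when S points below the unpacked source directory, the same relative subpath is re-applied to the user's source tree.

    srctree = get_real_srctree('/work/sources/foo',
                               '/build/tmp/work/foo/1.0/foo-1.0/src',
                               '/build/tmp/work/foo/1.0')
    # S is '${WORKDIR}/foo-1.0/src', one level below the unpack directory,
    # so the result is '/work/sources/foo/src'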
770 | def modify(args, config, basepath, workspace): | 796 | def modify(args, config, basepath, workspace): |
771 | """Entry point for the devtool 'modify' subcommand""" | 797 | """Entry point for the devtool 'modify' subcommand""" |
772 | import bb | 798 | import bb |
@@ -811,8 +837,8 @@ def modify(args, config, basepath, workspace): | |||
811 | 837 | ||
812 | _check_compatible_recipe(pn, rd) | 838 | _check_compatible_recipe(pn, rd) |
813 | 839 | ||
814 | initial_rev = None | 840 | initial_revs = {} |
815 | commits = [] | 841 | commits = {} |
816 | check_commits = False | 842 | check_commits = False |
817 | 843 | ||
818 | if bb.data.inherits_class('kernel-yocto', rd): | 844 | if bb.data.inherits_class('kernel-yocto', rd): |
@@ -824,10 +850,10 @@ def modify(args, config, basepath, workspace): | |||
824 | staging_kerVer = get_staging_kver(srcdir) | 850 | staging_kerVer = get_staging_kver(srcdir) |
825 | staging_kbranch = get_staging_kbranch(srcdir) | 851 | staging_kbranch = get_staging_kbranch(srcdir) |
826 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): | 852 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): |
827 | oe.path.copyhardlinktree(srcdir,srctree) | 853 | oe.path.copyhardlinktree(srcdir, srctree) |
828 | workdir = rd.getVar('WORKDIR') | 854 | workdir = rd.getVar('WORKDIR') |
829 | srcsubdir = rd.getVar('S') | 855 | srcsubdir = rd.getVar('S') |
830 | localfilesdir = os.path.join(srctree,'oe-local-files') | 856 | localfilesdir = os.path.join(srctree, 'oe-local-files') |
831 | # Move local source files into separate subdir | 857 | # Move local source files into separate subdir |
832 | recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)] | 858 | recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)] |
833 | local_files = oe.recipeutils.get_recipe_local_files(rd) | 859 | local_files = oe.recipeutils.get_recipe_local_files(rd) |
@@ -851,9 +877,9 @@ def modify(args, config, basepath, workspace): | |||
851 | for fname in local_files: | 877 | for fname in local_files: |
852 | _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname)) | 878 | _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname)) |
853 | with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f: | 879 | with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f: |
854 | f.write('# Ignore local files, by default. Remove this file ''if you want to commit the directory to Git\n*\n') | 880 | f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n*\n') |
855 | 881 | ||
856 | symlink_oelocal_files_srctree(rd,srctree) | 882 | symlink_oelocal_files_srctree(rd, srctree) |
857 | 883 | ||
858 | task = 'do_configure' | 884 | task = 'do_configure' |
859 | res = tinfoil.build_targets(pn, task, handle_events=True) | 885 | res = tinfoil.build_targets(pn, task, handle_events=True) |
@@ -861,22 +887,30 @@ def modify(args, config, basepath, workspace): | |||
861 | # Copy .config to workspace | 887 | # Copy .config to workspace |
862 | kconfpath = rd.getVar('B') | 888 | kconfpath = rd.getVar('B') |
863 | logger.info('Copying kernel config to workspace') | 889 | logger.info('Copying kernel config to workspace') |
864 | shutil.copy2(os.path.join(kconfpath, '.config'),srctree) | 890 | shutil.copy2(os.path.join(kconfpath, '.config'), srctree) |
865 | 891 | ||
866 | # Set this to true, we still need to get initial_rev | 892 | # Set this to true, we still need to get initial_rev |
867 | # by parsing the git repo | 893 | # by parsing the git repo |
868 | args.no_extract = True | 894 | args.no_extract = True |
869 | 895 | ||
870 | if not args.no_extract: | 896 | if not args.no_extract: |
871 | initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) | 897 | initial_revs["."], _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) |
872 | if not initial_rev: | 898 | if not initial_revs["."]: |
873 | return 1 | 899 | return 1 |
874 | logger.info('Source tree extracted to %s' % srctree) | 900 | logger.info('Source tree extracted to %s' % srctree) |
901 | |||
875 | if os.path.exists(os.path.join(srctree, '.git')): | 902 | if os.path.exists(os.path.join(srctree, '.git')): |
876 | # Get list of commits since this revision | 903 | # Get list of commits since this revision |
877 | (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=srctree) | 904 | (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree) |
878 | commits = stdout.split() | 905 | commits["."] = stdout.split() |
879 | check_commits = True | 906 | check_commits = True |
907 | (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree) | ||
908 | for line in stdout.splitlines(): | ||
909 | (rev, submodule_path) = line.split() | ||
910 | submodule = os.path.relpath(submodule_path, srctree) | ||
911 | initial_revs[submodule] = rev | ||
912 | (stdout, _) = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=submodule_path) | ||
913 | commits[submodule] = stdout.split() | ||
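Illustration of the submodule scan above, with hypothetical output: 'git submodule foreach' prints one '&lt;devtool-base rev&gt; &lt;absolute path&gt;' pair per (recursively nested) submodule, which is then keyed by its path relative to the source tree.

    import os

    srctree = '/work/sources/foo'
    stdout = '1a2b3c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f9a0b /work/sources/foo/bundled/libbar\n'
    for line in stdout.splitlines():
        (rev, submodule_path) = line.split()
        submodule = os.path.relpath(submodule_path, srctree)
        # -> initial_revs['bundled/libbar'] = '1a2b3c4d...'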
880 | else: | 914 | else: |
881 | if os.path.exists(os.path.join(srctree, '.git')): | 915 | if os.path.exists(os.path.join(srctree, '.git')): |
882 | # Check if it's a tree previously extracted by us. This is done | 916 | # Check if it's a tree previously extracted by us. This is done |
@@ -893,11 +927,11 @@ def modify(args, config, basepath, workspace): | |||
893 | for line in stdout.splitlines(): | 927 | for line in stdout.splitlines(): |
894 | if line.startswith('*'): | 928 | if line.startswith('*'): |
895 | (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree) | 929 | (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree) |
896 | initial_rev = stdout.rstrip() | 930 | initial_revs["."] = stdout.rstrip() |
897 | if not initial_rev: | 931 | if "." not in initial_revs: |
898 | # Otherwise, just grab the head revision | 932 | # Otherwise, just grab the head revision |
899 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) | 933 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) |
900 | initial_rev = stdout.rstrip() | 934 | initial_revs["."] = stdout.rstrip() |
901 | 935 | ||
902 | branch_patches = {} | 936 | branch_patches = {} |
903 | if check_commits: | 937 | if check_commits: |
@@ -914,62 +948,81 @@ def modify(args, config, basepath, workspace): | |||
914 | seen_patches = [] | 948 | seen_patches = [] |
915 | for branch in branches: | 949 | for branch in branches: |
916 | branch_patches[branch] = [] | 950 | branch_patches[branch] = [] |
917 | (stdout, _) = bb.process.run('git log devtool-base..%s' % branch, cwd=srctree) | 951 | (stdout, _) = bb.process.run('git rev-list devtool-base..%s' % branch, cwd=srctree) |
918 | for line in stdout.splitlines(): | 952 | for sha1 in stdout.splitlines(): |
919 | line = line.strip() | 953 | notes = oe.patch.GitApplyTree.getNotes(srctree, sha1.strip()) |
920 | if line.startswith(oe.patch.GitApplyTree.patch_line_prefix): | 954 | origpatch = notes.get(oe.patch.GitApplyTree.original_patch) |
921 | origpatch = line[len(oe.patch.GitApplyTree.patch_line_prefix):].split(':', 1)[-1].strip() | 955 | if origpatch and origpatch not in seen_patches: |
922 | if not origpatch in seen_patches: | 956 | seen_patches.append(origpatch) |
923 | seen_patches.append(origpatch) | 957 | branch_patches[branch].append(origpatch) |
924 | branch_patches[branch].append(origpatch) | ||
925 | 958 | ||
926 | # Need to grab this here in case the source is within a subdirectory | 959 | # Need to grab this here in case the source is within a subdirectory |
927 | srctreebase = srctree | 960 | srctreebase = srctree |
928 | 961 | srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) | |
929 | # Check that recipe isn't using a shared workdir | ||
930 | s = os.path.abspath(rd.getVar('S')) | ||
931 | workdir = os.path.abspath(rd.getVar('WORKDIR')) | ||
932 | if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir: | ||
933 | # Handle if S is set to a subdirectory of the source | ||
934 | srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1] | ||
935 | srctree = os.path.join(srctree, srcsubdir) | ||
936 | 962 | ||
937 | bb.utils.mkdirhier(os.path.dirname(appendfile)) | 963 | bb.utils.mkdirhier(os.path.dirname(appendfile)) |
938 | with open(appendfile, 'w') as f: | 964 | with open(appendfile, 'w') as f: |
939 | f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n') | 965 | # if not present, add type=git-dependency to the secondary sources |
966 | # (non local files) so they can be extracted correctly when building a recipe after | ||
967 | # doing a devtool modify on it | ||
968 | src_uri = rd.getVar('SRC_URI').split() | ||
969 | src_uri_append = [] | ||
970 | src_uri_remove = [] | ||
971 | |||
972 | # Assume first entry is main source extracted in ${S} so skip it | ||
973 | src_uri = src_uri[1::] | ||
974 | |||
975 | # Add "type=git-dependency" to all non local sources | ||
976 | for url in src_uri: | ||
977 | if not url.startswith('file://') and not 'type=' in url: | ||
978 | src_uri_remove.append(url) | ||
979 | src_uri_append.append('%s;type=git-dependency' % url) | ||
980 | |||
981 | if src_uri_remove: | ||
982 | f.write('SRC_URI:remove = "%s"\n' % ' '.join(src_uri_remove)) | ||
983 | f.write('SRC_URI:append = " %s"\n\n' % ' '.join(src_uri_append)) | ||
984 | |||
985 | f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n') | ||
940 | # Local files can be modified/tracked in separate subdir under srctree | 986 | # Local files can be modified/tracked in separate subdir under srctree |
941 | # Mostly useful for packages with S != WORKDIR | 987 | # Mostly useful for packages with S != WORKDIR |
942 | f.write('FILESPATH_prepend := "%s:"\n' % | 988 | f.write('FILESPATH:prepend := "%s:"\n' % |
943 | os.path.join(srctreebase, 'oe-local-files')) | 989 | os.path.join(srctreebase, 'oe-local-files')) |
944 | f.write('# srctreebase: %s\n' % srctreebase) | 990 | f.write('# srctreebase: %s\n' % srctreebase) |
945 | 991 | ||
946 | f.write('\ninherit externalsrc\n') | 992 | f.write('\ninherit externalsrc\n') |
947 | f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n') | 993 | f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n') |
948 | f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree)) | 994 | f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree)) |
949 | 995 | ||
950 | b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd) | 996 | b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd) |
951 | if b_is_s: | 997 | if b_is_s: |
952 | f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree)) | 998 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) |
953 | 999 | ||
954 | if bb.data.inherits_class('kernel', rd): | 1000 | if bb.data.inherits_class('kernel', rd): |
955 | f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout ' | 1001 | f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout ' |
956 | 'do_fetch do_unpack do_kernel_configme do_kernel_configcheck"\n') | 1002 | 'do_fetch do_unpack do_kernel_configcheck"\n') |
957 | f.write('\ndo_patch[noexec] = "1"\n') | 1003 | f.write('\ndo_patch[noexec] = "1"\n') |
958 | f.write('\ndo_configure_append() {\n' | 1004 | f.write('\ndo_configure:append() {\n' |
959 | ' cp ${B}/.config ${S}/.config.baseline\n' | 1005 | ' cp ${B}/.config ${S}/.config.baseline\n' |
960 | ' ln -sfT ${B}/.config ${S}/.config.new\n' | 1006 | ' ln -sfT ${B}/.config ${S}/.config.new\n' |
961 | '}\n') | 1007 | '}\n') |
962 | if rd.getVarFlag('do_menuconfig','task'): | 1008 | f.write('\ndo_kernel_configme:prepend() {\n' |
963 | f.write('\ndo_configure_append() {\n' | 1009 | ' if [ -e ${S}/.config ]; then\n' |
964 | ' if [ ! ${DEVTOOL_DISABLE_MENUCONFIG} ]; then\n' | 1010 | ' mv ${S}/.config ${S}/.config.old\n' |
965 | ' cp ${B}/.config ${S}/.config.baseline\n' | 1011 | ' fi\n' |
966 | ' ln -sfT ${B}/.config ${S}/.config.new\n' | 1012 | '}\n') |
1013 | if rd.getVarFlag('do_menuconfig', 'task'): | ||
1014 | f.write('\ndo_configure:append() {\n' | ||
1015 | ' if [ ${@oe.types.boolean(d.getVar("KCONFIG_CONFIG_ENABLE_MENUCONFIG"))} = True ]; then\n' | ||
1016 | ' cp ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.baseline\n' | ||
1017 | ' ln -sfT ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.new\n' | ||
967 | ' fi\n' | 1018 | ' fi\n' |
968 | '}\n') | 1019 | '}\n') |
969 | if initial_rev: | 1020 | if initial_revs: |
970 | f.write('\n# initial_rev: %s\n' % initial_rev) | 1021 | for name, rev in initial_revs.items(): |
971 | for commit in commits: | 1022 | f.write('\n# initial_rev %s: %s\n' % (name, rev)) |
972 | f.write('# commit: %s\n' % commit) | 1023 | if name in commits: |
1024 | for commit in commits[name]: | ||
1025 | f.write('# commit %s: %s\n' % (name, commit)) | ||
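The loop above records one comment line per repository in the append file - the top-level tree is named '.', submodules by their relative path - for example (hypothetical values):

    # initial_rev .: 0123456789abcdef0123456789abcdef01234567
    # commit .: 89abcdef89abcdef89abcdef89abcdef89abcdef
    # initial_rev bundled/libbar: 4567890123456789012345678901234567890123

_get_patchset_revs() later parses these lines back out of the append file.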
973 | if branch_patches: | 1026 | if branch_patches: |
974 | for branch in branch_patches: | 1027 | for branch in branch_patches: |
975 | if branch == args.branch: | 1028 | if branch == args.branch: |
@@ -1089,10 +1142,10 @@ def rename(args, config, basepath, workspace): | |||
1089 | 1142 | ||
1090 | # Rename bbappend | 1143 | # Rename bbappend |
1091 | logger.info('Renaming %s to %s' % (append, newappend)) | 1144 | logger.info('Renaming %s to %s' % (append, newappend)) |
1092 | os.rename(append, newappend) | 1145 | bb.utils.rename(append, newappend) |
1093 | # Rename recipe file | 1146 | # Rename recipe file |
1094 | logger.info('Renaming %s to %s' % (recipefile, newfile)) | 1147 | logger.info('Renaming %s to %s' % (recipefile, newfile)) |
1095 | os.rename(recipefile, newfile) | 1148 | bb.utils.rename(recipefile, newfile) |
1096 | 1149 | ||
1097 | # Rename source tree if it's the default path | 1150 | # Rename source tree if it's the default path |
1098 | appendmd5 = None | 1151 | appendmd5 = None |
@@ -1192,44 +1245,56 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre | |||
1192 | branchname = stdout.rstrip() | 1245 | branchname = stdout.rstrip() |
1193 | 1246 | ||
1194 | # Parse initial rev from recipe if not specified | 1247 | # Parse initial rev from recipe if not specified |
1195 | commits = [] | 1248 | commits = {} |
1196 | patches = [] | 1249 | patches = [] |
1250 | initial_revs = {} | ||
1197 | with open(recipe_path, 'r') as f: | 1251 | with open(recipe_path, 'r') as f: |
1198 | for line in f: | 1252 | for line in f: |
1199 | if line.startswith('# initial_rev:'): | 1253 | pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$' |
1200 | if not initial_rev: | 1254 | match = re.search(pattern, line) |
1201 | initial_rev = line.split(':')[-1].strip() | 1255 | if match: |
1202 | elif line.startswith('# commit:') and not force_patch_refresh: | 1256 | name = match.group(1) |
1203 | commits.append(line.split(':')[-1].strip()) | 1257 | rev = match.group(2) |
1204 | elif line.startswith('# patches_%s:' % branchname): | 1258 | if line.startswith('# initial_rev'): |
1205 | patches = line.split(':')[-1].strip().split(',') | 1259 | if not (name == "." and initial_rev): |
1206 | 1260 | initial_revs[name] = rev | |
1207 | update_rev = initial_rev | 1261 | elif line.startswith('# commit') and not force_patch_refresh: |
1208 | changed_revs = None | 1262 | if name not in commits: |
1209 | if initial_rev: | 1263 | commits[name] = [rev] |
1264 | else: | ||
1265 | commits[name].append(rev) | ||
1266 | elif line.startswith('# patches_%s:' % branchname): | ||
1267 | patches = line.split(':')[-1].strip().split(',') | ||
1268 | |||
1269 | update_revs = dict(initial_revs) | ||
1270 | changed_revs = {} | ||
1271 | for name, rev in initial_revs.items(): | ||
1210 | # Find first actually changed revision | 1272 | # Find first actually changed revision |
1211 | stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' % | 1273 | stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' % |
1212 | initial_rev, cwd=srctree) | 1274 | rev, cwd=os.path.join(srctree, name)) |
1213 | newcommits = stdout.split() | 1275 | newcommits = stdout.split() |
1214 | for i in range(min(len(commits), len(newcommits))): | 1276 | if name in commits: |
1215 | if newcommits[i] == commits[i]: | 1277 | for i in range(min(len(commits[name]), len(newcommits))): |
1216 | update_rev = commits[i] | 1278 | if newcommits[i] == commits[name][i]: |
1279 | update_revs[name] = commits[name][i] | ||
1217 | 1280 | ||
1218 | try: | 1281 | try: |
1219 | stdout, _ = bb.process.run('git cherry devtool-patched', | 1282 | stdout, _ = bb.process.run('git cherry devtool-patched', |
1220 | cwd=srctree) | 1283 | cwd=os.path.join(srctree, name)) |
1221 | except bb.process.ExecutionError as err: | 1284 | except bb.process.ExecutionError as err: |
1222 | stdout = None | 1285 | stdout = None |
1223 | 1286 | ||
1224 | if stdout is not None and not force_patch_refresh: | 1287 | if stdout is not None and not force_patch_refresh: |
1225 | changed_revs = [] | ||
1226 | for line in stdout.splitlines(): | 1288 | for line in stdout.splitlines(): |
1227 | if line.startswith('+ '): | 1289 | if line.startswith('+ '): |
1228 | rev = line.split()[1] | 1290 | rev = line.split()[1] |
1229 | if rev in newcommits: | 1291 | if rev in newcommits: |
1230 | changed_revs.append(rev) | 1292 | if name not in changed_revs: |
1293 | changed_revs[name] = [rev] | ||
1294 | else: | ||
1295 | changed_revs[name].append(rev) | ||
1231 | 1296 | ||
1232 | return initial_rev, update_rev, changed_revs, patches | 1297 | return initial_revs, update_revs, changed_revs, patches |
1233 | 1298 | ||
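A worked example of the parsing above: the pattern recovers the repository name and revision from the comment lines that 'devtool modify' now writes into the append file.

    import re

    line = '# initial_rev .: 0123456789abcdef0123456789abcdef01234567'
    match = re.search(r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$', line)
    assert match.group(1) == '.'     # repository name ('.' = top level)
    assert match.group(2) == '0123456789abcdef0123456789abcdef01234567'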
1234 | def _remove_file_entries(srcuri, filelist): | 1299 | def _remove_file_entries(srcuri, filelist): |
1235 | """Remove file:// entries from SRC_URI""" | 1300 | """Remove file:// entries from SRC_URI""" |
@@ -1284,14 +1349,17 @@ def _remove_source_files(append, files, destpath, no_report_remove=False, dry_ru | |||
1284 | raise | 1349 | raise |
1285 | 1350 | ||
1286 | 1351 | ||
1287 | def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None): | 1352 | def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None): |
1288 | """Export patches from srctree to given location. | 1353 | """Export patches from srctree to given location. |
1289 | Returns three-tuple of dicts: | 1354 | Returns three-tuple of dicts: |
1290 | 1. updated - patches that already exist in SRCURI | 1355 | 1. updated - patches that already exist in SRCURI |
1291 | 2. added - new patches that don't exist in SRCURI | 1356 | 2. added - new patches that don't exist in SRCURI |
1292 | 3. removed - patches that exist in SRCURI but not in exported patches | 1357 | 3. removed - patches that exist in SRCURI but not in exported patches |
1293 | In each dict the key is the 'basepath' of the URI and value is the | 1358 | In each dict the key is the 'basepath' of the URI and value is: |
1294 | absolute path to the existing file in recipe space (if any). | 1359 | - for updated and added dicts, a dict with 2 optional keys: |
1360 | - 'path': the absolute path to the existing file in recipe space (if any) | ||
1361 | - 'patchdir': the directory in which the patch should be applied (if any) | ||
1362 | - for removed dict, the absolute path to the existing file in recipe space | ||
1295 | """ | 1363 | """ |
1296 | import oe.recipeutils | 1364 | import oe.recipeutils |
1297 | from oe.patch import GitApplyTree | 1365 | from oe.patch import GitApplyTree |
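Hypothetical return values illustrating the dict shape documented above: entries that originate from a submodule carry a 'patchdir' so the caller can emit SRC_URI entries with the matching ;patchdir= parameter.

    updated = {'0001-fix.patch': {'path': '/layer/recipes-foo/foo/0001-fix.patch'}}
    added = {'0002-new.patch': {'patchdir': 'bundled/libbar'}}
    removed = {'0003-old.patch': '/layer/recipes-foo/foo/0003-old.patch'}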
@@ -1305,54 +1373,60 @@ def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None): | |||
1305 | 1373 | ||
1306 | # Generate patches from Git, exclude local files directory | 1374 | # Generate patches from Git, exclude local files directory |
1307 | patch_pathspec = _git_exclude_path(srctree, 'oe-local-files') | 1375 | patch_pathspec = _git_exclude_path(srctree, 'oe-local-files') |
1308 | GitApplyTree.extractPatches(srctree, start_rev, destdir, patch_pathspec) | 1376 | GitApplyTree.extractPatches(srctree, start_revs, destdir, patch_pathspec) |
1309 | 1377 | for dirpath, dirnames, filenames in os.walk(destdir): | |
1310 | new_patches = sorted(os.listdir(destdir)) | 1378 | new_patches = filenames |
1311 | for new_patch in new_patches: | 1379 | reldirpath = os.path.relpath(dirpath, destdir) |
1312 | # Strip numbering from patch names. If it's a git sequence named patch, | 1380 | for new_patch in new_patches: |
1313 | # the numbers might not match up since we are starting from a different | 1381 | # Strip numbering from patch names. If it's a git sequence named patch, |
1314 | # revision This does assume that people are using unique shortlog | 1382 | # the numbers might not match up since we are starting from a different |
1315 | # values, but they ought to be anyway... | 1383 | # revision This does assume that people are using unique shortlog |
1316 | new_basename = seqpatch_re.match(new_patch).group(2) | 1384 | # values, but they ought to be anyway... |
1317 | match_name = None | 1385 | new_basename = seqpatch_re.match(new_patch).group(2) |
1318 | for old_patch in existing_patches: | 1386 | match_name = None |
1319 | old_basename = seqpatch_re.match(old_patch).group(2) | 1387 | for old_patch in existing_patches: |
1320 | old_basename_splitext = os.path.splitext(old_basename) | 1388 | old_basename = seqpatch_re.match(old_patch).group(2) |
1321 | if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename: | 1389 | old_basename_splitext = os.path.splitext(old_basename) |
1322 | old_patch_noext = os.path.splitext(old_patch)[0] | 1390 | if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename: |
1323 | match_name = old_patch_noext | 1391 | old_patch_noext = os.path.splitext(old_patch)[0] |
1324 | break | 1392 | match_name = old_patch_noext |
1325 | elif new_basename == old_basename: | 1393 | break |
1326 | match_name = old_patch | 1394 | elif new_basename == old_basename: |
1327 | break | 1395 | match_name = old_patch |
1328 | if match_name: | 1396 | break |
1329 | # Rename patch files | 1397 | if match_name: |
1330 | if new_patch != match_name: | 1398 | # Rename patch files |
1331 | os.rename(os.path.join(destdir, new_patch), | 1399 | if new_patch != match_name: |
1332 | os.path.join(destdir, match_name)) | 1400 | bb.utils.rename(os.path.join(destdir, new_patch), |
1333 | # Need to pop it off the list now before checking changed_revs | 1401 | os.path.join(destdir, match_name)) |
1334 | oldpath = existing_patches.pop(old_patch) | 1402 | # Need to pop it off the list now before checking changed_revs |
1335 | if changed_revs is not None: | 1403 | oldpath = existing_patches.pop(old_patch) |
1336 | # Avoid updating patches that have not actually changed | 1404 | if changed_revs is not None and dirpath in changed_revs: |
1337 | with open(os.path.join(destdir, match_name), 'r') as f: | 1405 | # Avoid updating patches that have not actually changed |
1338 | firstlineitems = f.readline().split() | 1406 | with open(os.path.join(dirpath, match_name), 'r') as f: |
1339 | # Looking for "From <hash>" line | 1407 | firstlineitems = f.readline().split() |
1340 | if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40: | 1408 | # Looking for "From <hash>" line |
1341 | if not firstlineitems[1] in changed_revs: | 1409 | if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40: |
1342 | continue | 1410 | if not firstlineitems[1] in changed_revs[dirpath]: |
1343 | # Recompress if necessary | 1411 | continue |
1344 | if oldpath.endswith(('.gz', '.Z')): | 1412 | # Recompress if necessary |
1345 | bb.process.run(['gzip', match_name], cwd=destdir) | 1413 | if oldpath.endswith(('.gz', '.Z')): |
1346 | if oldpath.endswith('.gz'): | 1414 | bb.process.run(['gzip', match_name], cwd=destdir) |
1347 | match_name += '.gz' | 1415 | if oldpath.endswith('.gz'): |
1348 | else: | 1416 | match_name += '.gz' |
1349 | match_name += '.Z' | 1417 | else: |
1350 | elif oldpath.endswith('.bz2'): | 1418 | match_name += '.Z' |
1351 | bb.process.run(['bzip2', match_name], cwd=destdir) | 1419 | elif oldpath.endswith('.bz2'): |
1352 | match_name += '.bz2' | 1420 | bb.process.run(['bzip2', match_name], cwd=destdir) |
1353 | updated[match_name] = oldpath | 1421 | match_name += '.bz2' |
1354 | else: | 1422 | updated[match_name] = {'path' : oldpath} |
1355 | added[new_patch] = None | 1423 | if reldirpath != ".": |
1424 | updated[match_name]['patchdir'] = reldirpath | ||
1425 | else: | ||
1426 | added[new_patch] = {} | ||
1427 | if reldirpath != ".": | ||
1428 | added[new_patch]['patchdir'] = reldirpath | ||
1429 | |||
1356 | return (updated, added, existing_patches) | 1430 | return (updated, added, existing_patches) |
1357 | 1431 | ||
1358 | 1432 | ||
@@ -1389,8 +1463,10 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1389 | 1. updated - files that already exist in SRCURI | 1463 | 1. updated - files that already exist in SRCURI |
1390 | 2. added - new files that don't exist in SRCURI | 1464 | 2. added - new files that don't exist in SRCURI |
1391 | 3. removed - files that exist in SRCURI but not in exported files | 1465 | 3. removed - files that exist in SRCURI but not in exported files |
1392 | In each dict the key is the 'basepath' of the URI and value is the | 1466 | In each dict the key is the 'basepath' of the URI and value is: |
1393 | absolute path to the existing file in recipe space (if any). | 1467 | - for updated and added dicts, a dict with 1 optional key: |
1468 | - 'path': the absolute path to the existing file in recipe space (if any) | ||
1469 | - for removed dict, the absolute path to the existing file in recipe space | ||
1394 | """ | 1470 | """ |
1395 | import oe.recipeutils | 1471 | import oe.recipeutils |
1396 | 1472 | ||
@@ -1403,6 +1479,18 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1403 | updated = OrderedDict() | 1479 | updated = OrderedDict() |
1404 | added = OrderedDict() | 1480 | added = OrderedDict() |
1405 | removed = OrderedDict() | 1481 | removed = OrderedDict() |
1482 | |||
1483 | # Get current branch and return early with empty lists | ||
1484 | # if on one of the override branches | ||
1485 | # (local files are provided only for the main branch and processing | ||
1486 | # them against lists from recipe overrides will result in mismatches | ||
1487 | # and broken modifications to recipes). | ||
1488 | stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', | ||
1489 | cwd=srctree) | ||
1490 | branchname = stdout.rstrip() | ||
1491 | if branchname.startswith(override_branch_prefix): | ||
1492 | return (updated, added, removed) | ||
1493 | |||
1406 | local_files_dir = os.path.join(srctreebase, 'oe-local-files') | 1494 | local_files_dir = os.path.join(srctreebase, 'oe-local-files') |
1407 | git_files = _git_ls_tree(srctree) | 1495 | git_files = _git_ls_tree(srctree) |
1408 | if 'oe-local-files' in git_files: | 1496 | if 'oe-local-files' in git_files: |
@@ -1460,9 +1548,9 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1460 | origpath = existing_files.pop(fname) | 1548 | origpath = existing_files.pop(fname) |
1461 | workpath = os.path.join(local_files_dir, fname) | 1549 | workpath = os.path.join(local_files_dir, fname) |
1462 | if not filecmp.cmp(origpath, workpath): | 1550 | if not filecmp.cmp(origpath, workpath): |
1463 | updated[fname] = origpath | 1551 | updated[fname] = {'path' : origpath} |
1464 | elif fname != '.gitignore': | 1552 | elif fname != '.gitignore': |
1465 | added[fname] = None | 1553 | added[fname] = {} |
1466 | 1554 | ||
1467 | workdir = rd.getVar('WORKDIR') | 1555 | workdir = rd.getVar('WORKDIR') |
1468 | s = rd.getVar('S') | 1556 | s = rd.getVar('S') |
@@ -1479,7 +1567,7 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1479 | if os.path.exists(fpath): | 1567 | if os.path.exists(fpath): |
1480 | origpath = existing_files.pop(fname) | 1568 | origpath = existing_files.pop(fname) |
1481 | if not filecmp.cmp(origpath, fpath): | 1569 | if not filecmp.cmp(origpath, fpath): |
1482 | updated[fpath] = origpath | 1570 | updated[fpath] = {'path' : origpath} |
1483 | 1571 | ||
1484 | removed = existing_files | 1572 | removed = existing_files |
1485 | return (updated, added, removed) | 1573 | return (updated, added, removed) |
@@ -1508,6 +1596,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1508 | recipedir = os.path.basename(recipefile) | 1596 | recipedir = os.path.basename(recipefile) |
1509 | logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix)) | 1597 | logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix)) |
1510 | 1598 | ||
1599 | # Get original SRCREV | ||
1600 | old_srcrev = rd.getVar('SRCREV') or '' | ||
1601 | if old_srcrev == "INVALID": | ||
1602 | raise DevtoolError('Update mode srcrev is only valid for recipe fetched from an SCM repository') | ||
1603 | old_srcrev = {'.': old_srcrev} | ||
1604 | |||
1511 | # Get HEAD revision | 1605 | # Get HEAD revision |
1512 | try: | 1606 | try: |
1513 | stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree) | 1607 | stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree) |
@@ -1534,13 +1628,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1534 | if not no_remove: | 1628 | if not no_remove: |
1535 | # Find list of existing patches in recipe file | 1629 | # Find list of existing patches in recipe file |
1536 | patches_dir = tempfile.mkdtemp(dir=tempdir) | 1630 | patches_dir = tempfile.mkdtemp(dir=tempdir) |
1537 | old_srcrev = rd.getVar('SRCREV') or '' | ||
1538 | upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev, | 1631 | upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev, |
1539 | patches_dir) | 1632 | patches_dir) |
1540 | logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p))) | 1633 | logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p))) |
1541 | 1634 | ||
1542 | # Remove deleted local files and "overlapping" patches | 1635 | # Remove deleted local files and "overlapping" patches |
1543 | remove_files = list(del_f.values()) + list(upd_p.values()) + list(del_p.values()) | 1636 | remove_files = list(del_f.values()) + [value["path"] for value in upd_p.values() if "path" in value] + [value["path"] for value in del_p.values() if "path" in value] |
1544 | if remove_files: | 1637 | if remove_files: |
1545 | removedentries = _remove_file_entries(srcuri, remove_files)[0] | 1638 | removedentries = _remove_file_entries(srcuri, remove_files)[0] |
1546 | update_srcuri = True | 1639 | update_srcuri = True |
@@ -1554,14 +1647,14 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1554 | patchfields['SRC_URI'] = '\\\n '.join(srcuri) | 1647 | patchfields['SRC_URI'] = '\\\n '.join(srcuri) |
1555 | if dry_run_outdir: | 1648 | if dry_run_outdir: |
1556 | logger.info('Creating bbappend (dry-run)') | 1649 | logger.info('Creating bbappend (dry-run)') |
1557 | else: | 1650 | appendfile, destpath = oe.recipeutils.bbappend_recipe( |
1558 | appendfile, destpath = oe.recipeutils.bbappend_recipe( | 1651 | rd, appendlayerdir, files, wildcardver=wildcard_version, |
1559 | rd, appendlayerdir, files, wildcardver=wildcard_version, | 1652 | extralines=patchfields, removevalues=removevalues, |
1560 | extralines=patchfields, removevalues=removevalues, | 1653 | redirect_output=dry_run_outdir) |
1561 | redirect_output=dry_run_outdir) | ||
1562 | else: | 1654 | else: |
1563 | files_dir = _determine_files_dir(rd) | 1655 | files_dir = _determine_files_dir(rd) |
1564 | for basepath, path in upd_f.items(): | 1656 | for basepath, param in upd_f.items(): |
1657 | path = param['path'] | ||
1565 | logger.info('Updating file %s%s' % (basepath, dry_run_suffix)) | 1658 | logger.info('Updating file %s%s' % (basepath, dry_run_suffix)) |
1566 | if os.path.isabs(basepath): | 1659 | if os.path.isabs(basepath): |
1567 | # Original file (probably with subdir pointing inside source tree) | 1660 | # Original file (probably with subdir pointing inside source tree) |
@@ -1571,7 +1664,8 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1571 | _move_file(os.path.join(local_files_dir, basepath), path, | 1664 | _move_file(os.path.join(local_files_dir, basepath), path, |
1572 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1665 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1573 | update_srcuri= True | 1666 | update_srcuri= True |
1574 | for basepath, path in new_f.items(): | 1667 | for basepath, param in new_f.items(): |
1668 | path = param['path'] | ||
1575 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) | 1669 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) |
1576 | _move_file(os.path.join(local_files_dir, basepath), | 1670 | _move_file(os.path.join(local_files_dir, basepath), |
1577 | os.path.join(files_dir, basepath), | 1671 | os.path.join(files_dir, basepath), |
@@ -1603,9 +1697,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1603 | if not os.path.exists(append): | 1697 | if not os.path.exists(append): |
1604 | raise DevtoolError('unable to find workspace bbappend for recipe %s' % | 1698 | raise DevtoolError('unable to find workspace bbappend for recipe %s' % |
1605 | recipename) | 1699 | recipename) |
1700 | srctreebase = workspace[recipename]['srctreebase'] | ||
1701 | relpatchdir = os.path.relpath(srctreebase, srctree) | ||
1702 | if relpatchdir == '.': | ||
1703 | patchdir_params = {} | ||
1704 | else: | ||
1705 | patchdir_params = {'patchdir': relpatchdir} | ||
1606 | 1706 | ||
1607 | initial_rev, update_rev, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh) | 1707 | def srcuri_entry(basepath, patchdir_params): |
1608 | if not initial_rev: | 1708 | if patchdir_params: |
1709 | paramstr = ';' + ';'.join('%s=%s' % (k,v) for k,v in patchdir_params.items()) | ||
1710 | else: | ||
1711 | paramstr = '' | ||
1712 | return 'file://%s%s' % (basepath, paramstr) | ||
1713 | |||
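Usage sketch for the srcuri_entry() helper above: any parameters become ;key=value suffixes on the file:// URI.

    assert srcuri_entry('0001-fix.patch', {}) == 'file://0001-fix.patch'
    assert (srcuri_entry('0001-fix.patch', {'patchdir': 'bundled/libbar'})
            == 'file://0001-fix.patch;patchdir=bundled/libbar')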
1714 | initial_revs, update_revs, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh) | ||
1715 | if not initial_revs: | ||
1609 | raise DevtoolError('Unable to find initial revision - please specify ' | 1716 | raise DevtoolError('Unable to find initial revision - please specify ' |
1610 | 'it with --initial-rev') | 1717 | 'it with --initial-rev') |
1611 | 1718 | ||
@@ -1619,61 +1726,69 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1619 | tempdir = tempfile.mkdtemp(prefix='devtool') | 1726 | tempdir = tempfile.mkdtemp(prefix='devtool') |
1620 | try: | 1727 | try: |
1621 | local_files_dir = tempfile.mkdtemp(dir=tempdir) | 1728 | local_files_dir = tempfile.mkdtemp(dir=tempdir) |
1622 | if filter_patches: | 1729 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) |
1623 | upd_f = {} | ||
1624 | new_f = {} | ||
1625 | del_f = {} | ||
1626 | else: | ||
1627 | srctreebase = workspace[recipename]['srctreebase'] | ||
1628 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) | ||
1629 | |||
1630 | remove_files = [] | ||
1631 | if not no_remove: | ||
1632 | # Get all patches from source tree and check if any should be removed | ||
1633 | all_patches_dir = tempfile.mkdtemp(dir=tempdir) | ||
1634 | _, _, del_p = _export_patches(srctree, rd, initial_rev, | ||
1635 | all_patches_dir) | ||
1636 | # Remove deleted local files and patches | ||
1637 | remove_files = list(del_f.values()) + list(del_p.values()) | ||
1638 | 1730 | ||
1639 | # Get updated patches from source tree | 1731 | # Get updated patches from source tree |
1640 | patches_dir = tempfile.mkdtemp(dir=tempdir) | 1732 | patches_dir = tempfile.mkdtemp(dir=tempdir) |
1641 | upd_p, new_p, _ = _export_patches(srctree, rd, update_rev, | 1733 | upd_p, new_p, _ = _export_patches(srctree, rd, update_revs, |
1642 | patches_dir, changed_revs) | 1734 | patches_dir, changed_revs) |
1735 | # Get all patches from source tree and check if any should be removed | ||
1736 | all_patches_dir = tempfile.mkdtemp(dir=tempdir) | ||
1737 | _, _, del_p = _export_patches(srctree, rd, initial_revs, | ||
1738 | all_patches_dir) | ||
1643 | logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p))) | 1739 | logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p))) |
1644 | if filter_patches: | 1740 | if filter_patches: |
1645 | new_p = OrderedDict() | 1741 | new_p = OrderedDict() |
1646 | upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches) | 1742 | upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches) |
1647 | remove_files = [f for f in remove_files if f in filter_patches] | 1743 | del_p = OrderedDict((k,v) for k,v in del_p.items() if k in filter_patches) |
1744 | remove_files = [] | ||
1745 | if not no_remove: | ||
1746 | # Remove deleted local files and patches | ||
1747 | remove_files = list(del_f.values()) + list(del_p.values()) | ||
1648 | updatefiles = False | 1748 | updatefiles = False |
1649 | updaterecipe = False | 1749 | updaterecipe = False |
1650 | destpath = None | 1750 | destpath = None |
1651 | srcuri = (rd.getVar('SRC_URI', False) or '').split() | 1751 | srcuri = (rd.getVar('SRC_URI', False) or '').split() |
1752 | |||
1652 | if appendlayerdir: | 1753 | if appendlayerdir: |
1653 | files = OrderedDict((os.path.join(local_files_dir, key), val) for | 1754 | files = OrderedDict((os.path.join(local_files_dir, key), val) for |
1654 | key, val in list(upd_f.items()) + list(new_f.items())) | 1755 | key, val in list(upd_f.items()) + list(new_f.items())) |
1655 | files.update(OrderedDict((os.path.join(patches_dir, key), val) for | 1756 | files.update(OrderedDict((os.path.join(patches_dir, key), val) for |
1656 | key, val in list(upd_p.items()) + list(new_p.items()))) | 1757 | key, val in list(upd_p.items()) + list(new_p.items()))) |
1758 | |||
1759 | params = [] | ||
1760 | for file, param in files.items(): | ||
1761 | patchdir_param = dict(patchdir_params) | ||
1762 | patchdir = param.get('patchdir', ".") | ||
1763 | if patchdir != "." : | ||
1764 | if patchdir_param: | ||
1765 | patchdir_param['patchdir'] += patchdir | ||
1766 | else: | ||
1767 | patchdir_param['patchdir'] = patchdir | ||
1768 | params.append(patchdir_param) | ||
1769 | |||
1657 | if files or remove_files: | 1770 | if files or remove_files: |
1658 | removevalues = None | 1771 | removevalues = None |
1659 | if remove_files: | 1772 | if remove_files: |
1660 | removedentries, remaining = _remove_file_entries( | 1773 | removedentries, remaining = _remove_file_entries( |
1661 | srcuri, remove_files) | 1774 | srcuri, remove_files) |
1662 | if removedentries or remaining: | 1775 | if removedentries or remaining: |
1663 | remaining = ['file://' + os.path.basename(item) for | 1776 | remaining = [srcuri_entry(os.path.basename(item), patchdir_params) for |
1664 | item in remaining] | 1777 | item in remaining] |
1665 | removevalues = {'SRC_URI': removedentries + remaining} | 1778 | removevalues = {'SRC_URI': removedentries + remaining} |
1666 | appendfile, destpath = oe.recipeutils.bbappend_recipe( | 1779 | appendfile, destpath = oe.recipeutils.bbappend_recipe( |
1667 | rd, appendlayerdir, files, | 1780 | rd, appendlayerdir, files, |
1668 | wildcardver=wildcard_version, | 1781 | wildcardver=wildcard_version, |
1669 | removevalues=removevalues, | 1782 | removevalues=removevalues, |
1670 | redirect_output=dry_run_outdir) | 1783 | redirect_output=dry_run_outdir, |
1784 | params=params) | ||
1671 | else: | 1785 | else: |
1672 | logger.info('No patches or local source files needed updating') | 1786 | logger.info('No patches or local source files needed updating') |
1673 | else: | 1787 | else: |
1674 | # Update existing files | 1788 | # Update existing files |
1675 | files_dir = _determine_files_dir(rd) | 1789 | files_dir = _determine_files_dir(rd) |
1676 | for basepath, path in upd_f.items(): | 1790 | for basepath, param in upd_f.items(): |
1791 | path = param['path'] | ||
1677 | logger.info('Updating file %s' % basepath) | 1792 | logger.info('Updating file %s' % basepath) |
1678 | if os.path.isabs(basepath): | 1793 | if os.path.isabs(basepath): |
1679 | # Original file (probably with subdir pointing inside source tree) | 1794 | # Original file (probably with subdir pointing inside source tree) |
@@ -1684,14 +1799,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1684 | _move_file(os.path.join(local_files_dir, basepath), path, | 1799 | _move_file(os.path.join(local_files_dir, basepath), path, |
1685 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1800 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1686 | updatefiles = True | 1801 | updatefiles = True |
1687 | for basepath, path in upd_p.items(): | 1802 | for basepath, param in upd_p.items(): |
1688 | patchfn = os.path.join(patches_dir, basepath) | 1803 | path = param['path'] |
1804 | patchdir = param.get('patchdir', ".") | ||
1805 | if patchdir != "." : | ||
1806 | patchdir_param = dict(patchdir_params) | ||
1807 | if patchdir_param: | ||
1808 | patchdir_param['patchdir'] += patchdir | ||
1809 | else: | ||
1810 | patchdir_param['patchdir'] = patchdir | ||
1811 | patchfn = os.path.join(patches_dir, patchdir, basepath) | ||
1689 | if os.path.dirname(path) + '/' == dl_dir: | 1812 | if os.path.dirname(path) + '/' == dl_dir: |
1690 | # This is a downloaded patch file - we now need to | 1813 | # This is a downloaded patch file - we now need to |
1691 | # replace the entry in SRC_URI with our local version | 1814 | # replace the entry in SRC_URI with our local version |
1692 | logger.info('Replacing remote patch %s with updated local version' % basepath) | 1815 | logger.info('Replacing remote patch %s with updated local version' % basepath) |
1693 | path = os.path.join(files_dir, basepath) | 1816 | path = os.path.join(files_dir, basepath) |
1694 | _replace_srcuri_entry(srcuri, basepath, 'file://%s' % basepath) | 1817 | _replace_srcuri_entry(srcuri, basepath, srcuri_entry(basepath, patchdir_param)) |
1695 | updaterecipe = True | 1818 | updaterecipe = True |
1696 | else: | 1819 | else: |
1697 | logger.info('Updating patch %s%s' % (basepath, dry_run_suffix)) | 1820 | logger.info('Updating patch %s%s' % (basepath, dry_run_suffix)) |
@@ -1699,21 +1822,29 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1699 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1822 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1700 | updatefiles = True | 1823 | updatefiles = True |
1701 | # Add any new files | 1824 | # Add any new files |
1702 | for basepath, path in new_f.items(): | 1825 | for basepath, param in new_f.items(): |
1703 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) | 1826 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) |
1704 | _move_file(os.path.join(local_files_dir, basepath), | 1827 | _move_file(os.path.join(local_files_dir, basepath), |
1705 | os.path.join(files_dir, basepath), | 1828 | os.path.join(files_dir, basepath), |
1706 | dry_run_outdir=dry_run_outdir, | 1829 | dry_run_outdir=dry_run_outdir, |
1707 | base_outdir=recipedir) | 1830 | base_outdir=recipedir) |
1708 | srcuri.append('file://%s' % basepath) | 1831 | srcuri.append(srcuri_entry(basepath, patchdir_params)) |
1709 | updaterecipe = True | 1832 | updaterecipe = True |
1710 | for basepath, path in new_p.items(): | 1833 | for basepath, param in new_p.items(): |
1834 | patchdir = param.get('patchdir', ".") | ||
1711 | logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix)) | 1835 | logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix)) |
1712 | _move_file(os.path.join(patches_dir, basepath), | 1836 | _move_file(os.path.join(patches_dir, patchdir, basepath), |
1713 | os.path.join(files_dir, basepath), | 1837 | os.path.join(files_dir, basepath), |
1714 | dry_run_outdir=dry_run_outdir, | 1838 | dry_run_outdir=dry_run_outdir, |
1715 | base_outdir=recipedir) | 1839 | base_outdir=recipedir) |
1716 | srcuri.append('file://%s' % basepath) | 1840 | params = dict(patchdir_params) |
1841 | if patchdir != "." : | ||
1842 | if params: | ||
1843 | params['patchdir'] += patchdir | ||
1844 | else: | ||
1845 | params['patchdir'] = patchdir | ||
1846 | |||
1847 | srcuri.append(srcuri_entry(basepath, params)) | ||
1717 | updaterecipe = True | 1848 | updaterecipe = True |
1718 | # Update recipe, if needed | 1849 | # Update recipe, if needed |
1719 | if _remove_file_entries(srcuri, remove_files)[0]: | 1850 | if _remove_file_entries(srcuri, remove_files)[0]: |
@@ -1770,6 +1901,8 @@ def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_ver | |||
1770 | for line in stdout.splitlines(): | 1901 | for line in stdout.splitlines(): |
1771 | branchname = line[2:] | 1902 | branchname = line[2:] |
1772 | if line.startswith('* '): | 1903 | if line.startswith('* '): |
1904 | if 'HEAD' in line: | ||
1905 | raise DevtoolError('Detached HEAD - please check out a branch, e.g., "devtool"') | ||
1773 | startbranch = branchname | 1906 | startbranch = branchname |
1774 | if branchname.startswith(override_branch_prefix): | 1907 | if branchname.startswith(override_branch_prefix): |
1775 | override_branches.append(branchname) | 1908 | override_branches.append(branchname) |
@@ -1959,9 +2092,19 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
1959 | shutil.rmtree(srctreebase) | 2092 | shutil.rmtree(srctreebase) |
1960 | else: | 2093 | else: |
1961 | # We don't want to risk wiping out any work in progress | 2094 | # We don't want to risk wiping out any work in progress |
1962 | logger.info('Leaving source tree %s as-is; if you no ' | 2095 | if srctreebase.startswith(os.path.join(config.workspace_path, 'sources')): |
1963 | 'longer need it then please delete it manually' | 2096 | from datetime import datetime |
1964 | % srctreebase) | 2097 | preservesrc = os.path.join(config.workspace_path, 'attic', 'sources', "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S"))) |
2098 | logger.info('Preserving source tree in %s\nIf you no ' | ||
2099 | 'longer need it then please delete it manually.\n' | ||
2100 | 'It is also possible to reuse it via the devtool source tree argument.' | ||
2101 | % preservesrc) | ||
2102 | bb.utils.mkdirhier(os.path.dirname(preservesrc)) | ||
2103 | shutil.move(srctreebase, preservesrc) | ||
2104 | else: | ||
2105 | logger.info('Leaving source tree %s as-is; if you no ' | ||
2106 | 'longer need it then please delete it manually' | ||
2107 | % srctreebase) | ||
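Illustrative result of the preservation above, with a hypothetical recipe name and timestamp: a tree for recipe 'foo' reset at 2024-01-02 13:37:00 moves into the workspace 'attic' instead of being left in place.

    from datetime import datetime

    pn = 'foo'    # hypothetical recipe name
    stamp = datetime(2024, 1, 2, 13, 37).strftime("%Y%m%d%H%M%S")
    assert 'attic/sources/{}.{}'.format(pn, stamp) \
        == 'attic/sources/foo.20240102133700'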
1965 | else: | 2108 | else: |
1966 | # This is unlikely, but if it's empty we can just remove it | 2109 | # This is unlikely, but if it's empty we can just remove it |
1967 | os.rmdir(srctreebase) | 2110 | os.rmdir(srctreebase) |
@@ -2221,6 +2364,7 @@ def register_commands(subparsers, context): | |||
2221 | group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true") | 2364 | group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true") |
2222 | parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI') | 2365 | parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI') |
2223 | parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true") | 2366 | parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true") |
2367 | parser_add.add_argument('--no-pypi', help='Do not inherit pypi class', action="store_true") | ||
2224 | parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)') | 2368 | parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)') |
2225 | parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true") | 2369 | parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true") |
2226 | group = parser_add.add_mutually_exclusive_group() | 2370 | group = parser_add.add_mutually_exclusive_group() |
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py index 5a057e95f5..fa5b8ef3c7 100644 --- a/scripts/lib/devtool/upgrade.py +++ b/scripts/lib/devtool/upgrade.py | |||
@@ -35,6 +35,8 @@ def _get_srctree(tmpdir): | |||
35 | dirs = scriptutils.filter_src_subdirs(tmpdir) | 35 | dirs = scriptutils.filter_src_subdirs(tmpdir) |
36 | if len(dirs) == 1: | 36 | if len(dirs) == 1: |
37 | srctree = os.path.join(tmpdir, dirs[0]) | 37 | srctree = os.path.join(tmpdir, dirs[0]) |
38 | else: | ||
39 | raise DevtoolError("Cannot determine where the source tree is after unpacking in {}: {}".format(tmpdir, dirs)) | ||
38 | return srctree | 40 | return srctree |
39 | 41 | ||
40 | def _copy_source_code(orig, dest): | 42 | def _copy_source_code(orig, dest): |
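
_get_srctree() relies on the unpack step producing exactly one subdirectory; previously anything else fell through and returned an unbound variable, whereas it is now a hard error. The heuristic in isolation (a plain directory listing stands in for scriptutils.filter_src_subdirs):

    import os

    def get_srctree(tmpdir):
        """Expect exactly one unpacked source subdirectory under tmpdir."""
        dirs = [d for d in os.listdir(tmpdir)
                if os.path.isdir(os.path.join(tmpdir, d))]
        if len(dirs) != 1:
            raise RuntimeError("Cannot determine where the source tree is "
                               "after unpacking in {}: {}".format(tmpdir, dirs))
        return os.path.join(tmpdir, dirs[0])
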
@@ -71,7 +73,8 @@ def _rename_recipe_dirs(oldpv, newpv, path): | |||
71 | if oldfile.find(oldpv) != -1: | 73 | if oldfile.find(oldpv) != -1: |
72 | newfile = oldfile.replace(oldpv, newpv) | 74 | newfile = oldfile.replace(oldpv, newpv) |
73 | if oldfile != newfile: | 75 | if oldfile != newfile: |
74 | os.rename(os.path.join(path, oldfile), os.path.join(path, newfile)) | 76 | bb.utils.rename(os.path.join(path, oldfile), |
77 | os.path.join(path, newfile)) | ||
75 | 78 | ||
76 | def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path): | 79 | def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path): |
77 | oldrecipe = os.path.basename(oldrecipe) | 80 | oldrecipe = os.path.basename(oldrecipe) |
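
os.rename() fails with EXDEV when source and destination sit on different filesystems (for example when TMPDIR is on tmpfs), which is the usual reason for switching to bb.utils.rename(). A sketch of the fallback pattern, assuming bb.utils.rename has these semantics:

    import errno
    import os
    import shutil

    def safe_rename(src, dst):
        """Rename, falling back to a copy-based move across filesystems."""
        try:
            os.rename(src, dst)
        except OSError as e:
            if e.errno == errno.EXDEV:   # cross-device link not permitted
                shutil.move(src, dst)
            else:
                raise
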
@@ -87,7 +90,7 @@ def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path): | |||
87 | _rename_recipe_dirs(oldpv, newpv, path) | 90 | _rename_recipe_dirs(oldpv, newpv, path) |
88 | return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path) | 91 | return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path) |
89 | 92 | ||
90 | def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d): | 93 | def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d): |
91 | """Writes an append file""" | 94 | """Writes an append file""" |
92 | if not os.path.exists(rc): | 95 | if not os.path.exists(rc): |
93 | raise DevtoolError("bbappend not created because %s does not exist" % rc) | 96 | raise DevtoolError("bbappend not created because %s does not exist" % rc) |
@@ -102,36 +105,38 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d) | |||
102 | pn = d.getVar('PN') | 105 | pn = d.getVar('PN') |
103 | af = os.path.join(appendpath, '%s.bbappend' % brf) | 106 | af = os.path.join(appendpath, '%s.bbappend' % brf) |
104 | with open(af, 'w') as f: | 107 | with open(af, 'w') as f: |
105 | f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n\n') | 108 | f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n') |
109 | # Local files can be modified/tracked in separate subdir under srctree | ||
110 | # Mostly useful for packages with S != WORKDIR | ||
111 | f.write('FILESPATH:prepend := "%s:"\n' % | ||
112 | os.path.join(srctreebase, 'oe-local-files')) | ||
113 | f.write('# srctreebase: %s\n' % srctreebase) | ||
106 | f.write('inherit externalsrc\n') | 114 | f.write('inherit externalsrc\n') |
107 | f.write(('# NOTE: We use pn- overrides here to avoid affecting ' | 115 | f.write(('# NOTE: We use pn- overrides here to avoid affecting ' |
108 | 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n')) | 116 | 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n')) |
109 | f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree)) | 117 | f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree)) |
110 | b_is_s = use_external_build(same_dir, no_same_dir, d) | 118 | b_is_s = use_external_build(same_dir, no_same_dir, d) |
111 | if b_is_s: | 119 | if b_is_s: |
112 | f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree)) | 120 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) |
113 | f.write('\n') | 121 | f.write('\n') |
114 | if rev: | 122 | if revs: |
115 | f.write('# initial_rev: %s\n' % rev) | 123 | for name, rev in revs.items(): |
124 | f.write('# initial_rev %s: %s\n' % (name, rev)) | ||
116 | if copied: | 125 | if copied: |
117 | f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE'))) | 126 | f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE'))) |
118 | f.write('# original_files: %s\n' % ' '.join(copied)) | 127 | f.write('# original_files: %s\n' % ' '.join(copied)) |
119 | return af | 128 | return af |
120 | 129 | ||
121 | def _cleanup_on_error(rf, srctree): | 130 | def _cleanup_on_error(rd, srctree): |
122 | rfp = os.path.split(rf)[0] # recipe folder | 131 | if os.path.exists(rd): |
123 | rfpp = os.path.split(rfp)[0] # recipes folder | 132 | shutil.rmtree(rd) |
124 | if os.path.exists(rfp): | ||
125 | shutil.rmtree(rfp) | ||
126 | if not len(os.listdir(rfpp)): | ||
127 | os.rmdir(rfpp) | ||
128 | srctree = os.path.abspath(srctree) | 133 | srctree = os.path.abspath(srctree) |
129 | if os.path.exists(srctree): | 134 | if os.path.exists(srctree): |
130 | shutil.rmtree(srctree) | 135 | shutil.rmtree(srctree) |
131 | 136 | ||
132 | def _upgrade_error(e, rf, srctree, keep_failure=False, extramsg=None): | 137 | def _upgrade_error(e, rd, srctree, keep_failure=False, extramsg=None): |
133 | if rf and not keep_failure: | 138 | if not keep_failure: |
134 | _cleanup_on_error(rf, srctree) | 139 | _cleanup_on_error(rd, srctree) |
135 | logger.error(e) | 140 | logger.error(e) |
136 | if extramsg: | 141 | if extramsg: |
137 | logger.error(extramsg) | 142 | logger.error(extramsg) |
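
Taken together, the _write_append() changes mean the generated .bbappend now prepends an oe-local-files directory under the source tree base to FILESPATH (useful when S != WORKDIR) and records one initial_rev comment per git tree: the top-level repository plus each submodule. An illustrative excerpt of the generated file, with paths and revisions invented for the example:

    FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"

    FILESPATH:prepend := "/path/to/workspace/sources/foo/oe-local-files:"
    # srctreebase: /path/to/workspace/sources/foo
    inherit externalsrc
    EXTERNALSRC:pn-foo = "/path/to/workspace/sources/foo"

    # initial_rev .: 0123456789abcdef0123456789abcdef01234567
    # initial_rev vendor/libbar: fedcba9876543210fedcba9876543210fedcba98
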
@@ -178,12 +183,16 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
178 | uri, rev = _get_uri(crd) | 183 | uri, rev = _get_uri(crd) |
179 | if srcrev: | 184 | if srcrev: |
180 | rev = srcrev | 185 | rev = srcrev |
186 | paths = [srctree] | ||
181 | if uri.startswith('git://') or uri.startswith('gitsm://'): | 187 | if uri.startswith('git://') or uri.startswith('gitsm://'): |
182 | __run('git fetch') | 188 | __run('git fetch') |
183 | __run('git checkout %s' % rev) | 189 | __run('git checkout %s' % rev) |
184 | __run('git tag -f devtool-base-new') | 190 | __run('git tag -f devtool-base-new') |
185 | md5 = None | 191 | __run('git submodule update --recursive') |
186 | sha256 = None | 192 | __run('git submodule foreach \'git tag -f devtool-base-new\'') |
193 | (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'') | ||
194 | paths += [os.path.join(srctree, p) for p in stdout.splitlines()] | ||
195 | checksums = {} | ||
187 | _, _, _, _, _, params = bb.fetch2.decodeurl(uri) | 196 | _, _, _, _, _, params = bb.fetch2.decodeurl(uri) |
188 | srcsubdir_rel = params.get('destsuffix', 'git') | 197 | srcsubdir_rel = params.get('destsuffix', 'git') |
189 | if not srcbranch: | 198 | if not srcbranch: |
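
The upgrade path now fetches, checks out, and tags submodules alongside the top-level repository, and it discovers their paths by asking git itself. A sketch of that enumeration (subprocess stands in for devtool's __run helper; git expands $sm_path inside the foreach shell, not the caller):

    import os
    import subprocess

    def list_git_trees(srctree):
        """Return srctree plus the absolute path of each initialized submodule."""
        out = subprocess.run(
            ['git', 'submodule', '--quiet', 'foreach', 'echo $sm_path'],
            cwd=srctree, capture_output=True, text=True, check=True).stdout
        return [srctree] + [os.path.join(srctree, p)
                            for p in out.splitlines() if p]
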
@@ -191,14 +200,15 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
191 | get_branch = [x.strip() for x in check_branch.splitlines()] | 200 | get_branch = [x.strip() for x in check_branch.splitlines()] |
192 | # Remove HEAD reference point and drop remote prefix | 201 | # Remove HEAD reference point and drop remote prefix |
193 | get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] | 202 | get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] |
194 | if 'master' in get_branch: | 203 | if len(get_branch) == 1: |
195 | # If it is master, we do not need to append 'branch=master' as this is default. | 204 | # If srcrev is on only ONE branch, then use that branch |
196 | # Even with the case where get_branch has multiple objects, if 'master' is one | ||
197 | # of them, we should default take from 'master' | ||
198 | srcbranch = '' | ||
199 | elif len(get_branch) == 1: | ||
200 | # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch' | ||
201 | srcbranch = get_branch[0] | 205 | srcbranch = get_branch[0] |
206 | elif 'main' in get_branch: | ||
207 | # If srcrev is on multiple branches, then choose 'main' if it is one of them | ||
208 | srcbranch = 'main' | ||
209 | elif 'master' in get_branch: | ||
210 | # Otherwise choose 'master' if it is one of the branches | ||
211 | srcbranch = 'master' | ||
202 | else: | 212 | else: |
203 | # If get_branch contains more than one object, then display an error and exit. | 213 | # If get_branch contains more than one object, then display an error and exit. |
204 | mbrch = '\n ' + '\n '.join(get_branch) | 214 | mbrch = '\n ' + '\n '.join(get_branch) |
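
The branch auto-detection no longer prefers 'master' unconditionally. The new precedence is: if the revision exists on exactly one branch, use that branch; otherwise prefer 'main', then 'master'; if none of those resolve the ambiguity, the user has to pass --srcbranch. The same logic in isolation:

    def pick_srcbranch(branches):
        """Choose the SRC_URI branch for a revision found on `branches`."""
        if len(branches) == 1:
            return branches[0]            # unambiguous
        for preferred in ('main', 'master'):
            if preferred in branches:
                return preferred          # conventional default branch names
        return None                       # ambiguous; caller raises an error

    assert pick_srcbranch(['release-1.2']) == 'release-1.2'
    assert pick_srcbranch(['master', 'main']) == 'main'
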
@@ -215,9 +225,6 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
215 | if ftmpdir and keep_temp: | 225 | if ftmpdir and keep_temp: |
216 | logger.info('Fetch temp directory is %s' % ftmpdir) | 226 | logger.info('Fetch temp directory is %s' % ftmpdir) |
217 | 227 | ||
218 | md5 = checksums['md5sum'] | ||
219 | sha256 = checksums['sha256sum'] | ||
220 | |||
221 | tmpsrctree = _get_srctree(tmpdir) | 228 | tmpsrctree = _get_srctree(tmpdir) |
222 | srctree = os.path.abspath(srctree) | 229 | srctree = os.path.abspath(srctree) |
223 | srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir) | 230 | srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir) |
@@ -251,30 +258,50 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
251 | __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv)) | 258 | __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv)) |
252 | __run('git tag -f devtool-base-%s' % newpv) | 259 | __run('git tag -f devtool-base-%s' % newpv) |
253 | 260 | ||
254 | (stdout, _) = __run('git rev-parse HEAD') | 261 | revs = {} |
255 | rev = stdout.rstrip() | 262 | for path in paths: |
263 | (stdout, _) = _run('git rev-parse HEAD', cwd=path) | ||
264 | revs[os.path.relpath(path, srctree)] = stdout.rstrip() | ||
256 | 265 | ||
257 | if no_patch: | 266 | if no_patch: |
258 | patches = oe.recipeutils.get_recipe_patches(crd) | 267 | patches = oe.recipeutils.get_recipe_patches(crd) |
259 | if patches: | 268 | if patches: |
260 | logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches])) | 269 | logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches])) |
261 | else: | 270 | else: |
262 | __run('git checkout devtool-patched -b %s' % branch) | 271 | for path in paths: |
263 | skiptag = False | 272 | _run('git checkout devtool-patched -b %s' % branch, cwd=path) |
264 | try: | 273 | (stdout, _) = _run('git branch --list devtool-override-*', cwd=path) |
265 | __run('git rebase %s' % rev) | 274 | branches_to_rebase = [branch] + stdout.split() |
266 | except bb.process.ExecutionError as e: | 275 | target_branch = revs[os.path.relpath(path, srctree)] |
267 | skiptag = True | 276 | |
268 | if 'conflict' in e.stdout: | 277 | # There is a bug (or feature?) in git rebase where if a commit with |
269 | logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip())) | 278 | # a note is fully rebased away by being part of an old commit, the |
270 | else: | 279 | # note is still attached to the old commit. Avoid this by making |
271 | logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout)) | 280 | # sure all old devtool related commits have a note attached to them |
272 | if not skiptag: | 281 | # (this assumes git config notes.rewriteMode is set to ignore). |
273 | if uri.startswith('git://') or uri.startswith('gitsm://'): | 282 | (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch) |
274 | suffix = 'new' | 283 | for rev in stdout.splitlines(): |
275 | else: | 284 | if not oe.patch.GitApplyTree.getNotes(path, rev): |
276 | suffix = newpv | 285 | oe.patch.GitApplyTree.addNote(path, rev, "dummy") |
277 | __run('git tag -f devtool-patched-%s' % suffix) | 286 | |
287 | for b in branches_to_rebase: | ||
288 | logger.info("Rebasing {} onto {}".format(b, target_branch)) | ||
289 | _run('git checkout %s' % b, cwd=path) | ||
290 | try: | ||
291 | _run('git rebase %s' % target_branch, cwd=path) | ||
292 | except bb.process.ExecutionError as e: | ||
293 | if 'conflict' in e.stdout: | ||
294 | logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip())) | ||
295 | _run('git rebase --abort', cwd=path) | ||
296 | else: | ||
297 | logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout)) | ||
298 | |||
299 | # Remove any dummy notes added above. | ||
300 | (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch) | ||
301 | for rev in stdout.splitlines(): | ||
302 | oe.patch.GitApplyTree.removeNote(path, rev, "dummy") | ||
303 | |||
304 | _run('git checkout %s' % branch, cwd=path) | ||
278 | 305 | ||
279 | if tmpsrctree: | 306 | if tmpsrctree: |
280 | if keep_temp: | 307 | if keep_temp: |
@@ -284,7 +311,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
284 | if tmpdir != tmpsrctree: | 311 | if tmpdir != tmpsrctree: |
285 | shutil.rmtree(tmpdir) | 312 | shutil.rmtree(tmpdir) |
286 | 313 | ||
287 | return (rev, md5, sha256, srcbranch, srcsubdir_rel) | 314 | return (revs, checksums, srcbranch, srcsubdir_rel) |
288 | 315 | ||
289 | def _add_license_diff_to_recipe(path, diff): | 316 | def _add_license_diff_to_recipe(path, diff): |
290 | notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'. | 317 | notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'. |
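
The "dummy" notes above work around git rebase carrying notes only for rewritten commits: with notes.rewriteMode set to ignore, pre-seeding every old devtool commit with a note guarantees that no stale note survives the rebase, and the placeholders are stripped again afterwards. The equivalent raw-git sequence, sketched with subprocess (oe.patch.GitApplyTree.getNotes/addNote are assumed to wrap 'git notes'):

    import subprocess

    def git(args, cwd):
        return subprocess.run(['git'] + args, cwd=cwd, capture_output=True,
                              text=True, check=True).stdout

    def seed_dummy_notes(path, target_branch):
        """Attach a throwaway note to each devtool commit before rebasing."""
        revs = git(['rev-list', 'devtool-base..%s' % target_branch],
                   cwd=path).splitlines()
        for rev in revs:
            try:
                git(['notes', 'show', rev], cwd=path)   # note already present?
            except subprocess.CalledProcessError:
                git(['notes', 'add', '-m', 'dummy', rev], cwd=path)
        return revs
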
@@ -305,7 +332,7 @@ def _add_license_diff_to_recipe(path, diff): | |||
305 | f.write("\n#\n\n".encode()) | 332 | f.write("\n#\n\n".encode()) |
306 | f.write(orig_content) | 333 | f.write(orig_content) |
307 | 334 | ||
308 | def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): | 335 | def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): |
309 | """Creates the new recipe under workspace""" | 336 | """Creates the new recipe under workspace""" |
310 | 337 | ||
311 | bpn = rd.getVar('BPN') | 338 | bpn = rd.getVar('BPN') |
@@ -336,7 +363,10 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src | |||
336 | replacing = True | 363 | replacing = True |
337 | new_src_uri = [] | 364 | new_src_uri = [] |
338 | for entry in src_uri: | 365 | for entry in src_uri: |
339 | scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry) | 366 | try: |
367 | scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry) | ||
368 | except bb.fetch2.MalformedUrl as e: | ||
369 | raise DevtoolError("Could not decode SRC_URI: {}".format(e)) | ||
340 | if replacing and scheme in ['git', 'gitsm']: | 370 | if replacing and scheme in ['git', 'gitsm']: |
341 | branch = params.get('branch', 'master') | 371 | branch = params.get('branch', 'master') |
342 | if rd.expand(branch) != srcbranch: | 372 | if rd.expand(branch) != srcbranch: |
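
Wrapping bb.fetch2.decodeurl() converts an unparseable SRC_URI entry into a DevtoolError instead of an unhandled traceback. For reference, decodeurl splits a fetch URI into a (scheme, host, path, user, password, params) six-tuple; the rough stand-in below only illustrates the shape of the result and is not BitBake's implementation:

    from urllib.parse import urlsplit

    def decode_fetch_uri(entry):
        """Approximate decodeurl: split off trailing ;key=value parameters."""
        base, *raw_params = entry.split(';')
        parts = urlsplit(base)
        params = dict(p.split('=', 1) for p in raw_params if '=' in p)
        return (parts.scheme, parts.netloc, parts.path,
                parts.username, parts.password, params)

    print(decode_fetch_uri('git://example.com/foo.git;branch=main;protocol=https'))
    # ('git', 'example.com', '/foo.git', None, None,
    #  {'branch': 'main', 'protocol': 'https'})
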
@@ -374,30 +404,39 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src | |||
374 | addnames.append(params['name']) | 404 | addnames.append(params['name']) |
375 | # Find what's been set in the original recipe | 405 | # Find what's been set in the original recipe |
376 | oldnames = [] | 406 | oldnames = [] |
407 | oldsums = [] | ||
377 | noname = False | 408 | noname = False |
378 | for varflag in rd.getVarFlags('SRC_URI'): | 409 | for varflag in rd.getVarFlags('SRC_URI'): |
379 | if varflag.endswith(('.md5sum', '.sha256sum')): | 410 | for checksum in checksums: |
380 | name = varflag.rsplit('.', 1)[0] | 411 | if varflag.endswith('.' + checksum): |
381 | if name not in oldnames: | 412 | name = varflag.rsplit('.', 1)[0] |
382 | oldnames.append(name) | 413 | if name not in oldnames: |
383 | elif varflag in ['md5sum', 'sha256sum']: | 414 | oldnames.append(name) |
384 | noname = True | 415 | oldsums.append(checksum) |
416 | elif varflag == checksum: | ||
417 | noname = True | ||
418 | oldsums.append(checksum) | ||
385 | # Even if SRC_URI has named entries it doesn't have to actually use the name | 419 | # Even if SRC_URI has named entries it doesn't have to actually use the name |
386 | if noname and addnames and addnames[0] not in oldnames: | 420 | if noname and addnames and addnames[0] not in oldnames: |
387 | addnames = [] | 421 | addnames = [] |
388 | # Drop any old names (the name actually might include ${PV}) | 422 | # Drop any old names (the name actually might include ${PV}) |
389 | for name in oldnames: | 423 | for name in oldnames: |
390 | if name not in newnames: | 424 | if name not in newnames: |
391 | newvalues['SRC_URI[%s.md5sum]' % name] = None | 425 | for checksum in oldsums: |
392 | newvalues['SRC_URI[%s.sha256sum]' % name] = None | 426 | newvalues['SRC_URI[%s.%s]' % (name, checksum)] = None |
393 | 427 | ||
394 | if sha256: | 428 | nameprefix = '%s.' % addnames[0] if addnames else '' |
395 | if addnames: | 429 | |
396 | nameprefix = '%s.' % addnames[0] | 430 | # md5sum is deprecated, remove any traces of it. If it was the only old |
397 | else: | 431 | # checksum, then replace it with the default checksums. |
398 | nameprefix = '' | 432 | if 'md5sum' in oldsums: |
399 | newvalues['SRC_URI[%smd5sum]' % nameprefix] = None | 433 | newvalues['SRC_URI[%smd5sum]' % nameprefix] = None |
400 | newvalues['SRC_URI[%ssha256sum]' % nameprefix] = sha256 | 434 | oldsums.remove('md5sum') |
435 | if not oldsums: | ||
436 | oldsums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST] | ||
437 | |||
438 | for checksum in oldsums: | ||
439 | newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum] | ||
401 | 440 | ||
402 | if srcsubdir_new != srcsubdir_old: | 441 | if srcsubdir_new != srcsubdir_old: |
403 | s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR')) | 442 | s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR')) |
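
The checksum handling is generalized from a hard-coded md5sum/sha256sum pair: whichever checksum varflags the old recipe used are refreshed from the fetcher's checksums dict, deprecated md5sum entries are deleted outright, and if md5sum was the only checksum present, the default set is written instead. A condensed sketch of the final rewrite step:

    SHOWN_CHECKSUM_LIST = ["sha256"]   # mirrors bb.fetch2.SHOWN_CHECKSUM_LIST

    def updated_checksum_values(nameprefix, oldsums, checksums):
        """Map SRC_URI varflag names to new values (None means delete)."""
        newvalues = {}
        oldsums = list(oldsums)
        if 'md5sum' in oldsums:                  # md5sum is deprecated
            newvalues['SRC_URI[%smd5sum]' % nameprefix] = None
            oldsums.remove('md5sum')
            if not oldsums:                      # md5sum was the only checksum
                oldsums = ["%ssum" % s for s in SHOWN_CHECKSUM_LIST]
        for checksum in oldsums:
            newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum]
        return newvalues

    # updated_checksum_values('', ['md5sum'], {'sha256sum': 'abc...'})
    #   -> {'SRC_URI[md5sum]': None, 'SRC_URI[sha256sum]': 'abc...'}
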
@@ -422,10 +461,11 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src | |||
422 | newvalues["LIC_FILES_CHKSUM"] = newlicchksum | 461 | newvalues["LIC_FILES_CHKSUM"] = newlicchksum |
423 | _add_license_diff_to_recipe(fullpath, license_diff) | 462 | _add_license_diff_to_recipe(fullpath, license_diff) |
424 | 463 | ||
464 | tinfoil.modified_files() | ||
425 | try: | 465 | try: |
426 | rd = tinfoil.parse_recipe_file(fullpath, False) | 466 | rd = tinfoil.parse_recipe_file(fullpath, False) |
427 | except bb.tinfoil.TinfoilCommandFailed as e: | 467 | except bb.tinfoil.TinfoilCommandFailed as e: |
428 | _upgrade_error(e, fullpath, srctree, keep_failure, 'Parsing of upgraded recipe failed') | 468 | _upgrade_error(e, os.path.dirname(fullpath), srctree, keep_failure, 'Parsing of upgraded recipe failed') |
429 | oe.recipeutils.patch_recipe(rd, fullpath, newvalues) | 469 | oe.recipeutils.patch_recipe(rd, fullpath, newvalues) |
430 | 470 | ||
431 | return fullpath, copied | 471 | return fullpath, copied |
@@ -434,7 +474,7 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src | |||
434 | def _check_git_config(): | 474 | def _check_git_config(): |
435 | def getconfig(name): | 475 | def getconfig(name): |
436 | try: | 476 | try: |
437 | value = bb.process.run('git config --global %s' % name)[0].strip() | 477 | value = bb.process.run('git config %s' % name)[0].strip() |
438 | except bb.process.ExecutionError as e: | 478 | except bb.process.ExecutionError as e: |
439 | if e.exitcode == 1: | 479 | if e.exitcode == 1: |
440 | value = None | 480 | value = None |
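
Dropping --global means a repository-local user.name/user.email now satisfies the check as well, since plain 'git config' consults every scope. The lookup pattern in isolation (exit code 1 from git config means "unset", not a real failure):

    import subprocess

    def get_git_config(name, cwd='.'):
        """Return a git config value, or None if it is simply unset."""
        result = subprocess.run(['git', 'config', name], cwd=cwd,
                                capture_output=True, text=True)
        if result.returncode == 1:       # key not set at any scope
            return None
        result.check_returncode()        # any other failure is a real error
        return result.stdout.strip()
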
@@ -521,6 +561,8 @@ def upgrade(args, config, basepath, workspace): | |||
521 | else: | 561 | else: |
522 | srctree = standard.get_default_srctree(config, pn) | 562 | srctree = standard.get_default_srctree(config, pn) |
523 | 563 | ||
564 | srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) | ||
565 | |||
524 | # try to automatically discover latest version and revision if not provided on command line | 566 | # try to automatically discover latest version and revision if not provided on command line |
525 | if not args.version and not args.srcrev: | 567 | if not args.version and not args.srcrev: |
526 | version_info = oe.recipeutils.get_recipe_upstream_version(rd) | 568 | version_info = oe.recipeutils.get_recipe_upstream_version(rd) |
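
License extraction now runs against srctree_s, the directory inside the checkout that corresponds to S when the recipe builds from a subdirectory (S != WORKDIR); otherwise the LIC_FILES_CHKSUM paths would not resolve. A close approximation of what such a mapping looks like (treat this as a sketch of standard.get_real_srctree, not its exact implementation):

    import os

    def real_srctree(srctree, s, workdir):
        """Map the external source tree to the subdirectory matching S."""
        s, workdir = os.path.abspath(s), os.path.abspath(workdir)
        if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
            # Drop the first component (the unpack dir itself), keep the rest,
            # e.g. S = ${WORKDIR}/foo-1.0/src  ->  <srctree>/src
            subdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
            return os.path.join(srctree, subdir)
        return srctree
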
@@ -550,21 +592,20 @@ def upgrade(args, config, basepath, workspace): | |||
550 | try: | 592 | try: |
551 | logger.info('Extracting current version source...') | 593 | logger.info('Extracting current version source...') |
552 | rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) | 594 | rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) |
553 | old_licenses = _extract_licenses(srctree, (rd.getVar('LIC_FILES_CHKSUM') or "")) | 595 | old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or "")) |
554 | logger.info('Extracting upgraded version source...') | 596 | logger.info('Extracting upgraded version source...') |
555 | rev2, md5, sha256, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch, | 597 | rev2, checksums, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch, |
556 | args.srcrev, args.srcbranch, args.branch, args.keep_temp, | 598 | args.srcrev, args.srcbranch, args.branch, args.keep_temp, |
557 | tinfoil, rd) | 599 | tinfoil, rd) |
558 | new_licenses = _extract_licenses(srctree, (rd.getVar('LIC_FILES_CHKSUM') or "")) | 600 | new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or "")) |
559 | license_diff = _generate_license_diff(old_licenses, new_licenses) | 601 | license_diff = _generate_license_diff(old_licenses, new_licenses) |
560 | rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) | 602 | rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) |
561 | except bb.process.CmdError as e: | 603 | except (bb.process.CmdError, DevtoolError) as e: |
562 | _upgrade_error(e, rf, srctree, args.keep_failure) | 604 | recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('BPN')) |
563 | except DevtoolError as e: | 605 | _upgrade_error(e, recipedir, srctree, args.keep_failure) |
564 | _upgrade_error(e, rf, srctree, args.keep_failure) | ||
565 | standard._add_md5(config, pn, os.path.dirname(rf)) | 606 | standard._add_md5(config, pn, os.path.dirname(rf)) |
566 | 607 | ||
567 | af = _write_append(rf, srctree, args.same_dir, args.no_same_dir, rev2, | 608 | af = _write_append(rf, srctree, srctree_s, args.same_dir, args.no_same_dir, rev2, |
568 | copied, config.workspace_path, rd) | 609 | copied, config.workspace_path, rd) |
569 | standard._add_md5(config, pn, af) | 610 | standard._add_md5(config, pn, af) |
570 | 611 | ||
@@ -574,6 +615,9 @@ def upgrade(args, config, basepath, workspace): | |||
574 | logger.info('New recipe is %s' % rf) | 615 | logger.info('New recipe is %s' % rf) |
575 | if license_diff: | 616 | if license_diff: |
576 | logger.info('License checksums have been updated in the new recipe; please refer to it for the difference between the old and the new license texts.') | 617 | logger.info('License checksums have been updated in the new recipe; please refer to it for the difference between the old and the new license texts.') |
618 | preferred_version = rd.getVar('PREFERRED_VERSION_%s' % rd.getVar('PN')) | ||
619 | if preferred_version: | ||
620 | logger.warning('Version is pinned to %s via PREFERRED_VERSION; it may need adjustment to match the new version before any further steps are taken' % preferred_version) | ||
577 | finally: | 621 | finally: |
578 | tinfoil.shutdown() | 622 | tinfoil.shutdown() |
579 | return 0 | 623 | return 0 |
@@ -605,7 +649,7 @@ def check_upgrade_status(args, config, basepath, workspace): | |||
605 | for result in results: | 649 | for result in results: |
606 | # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason | 650 | # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason |
607 | if args.all or result[1] != 'MATCH': | 651 | if args.all or result[1] != 'MATCH': |
608 | logger.info("{:25} {:15} {:15} {} {} {}".format( result[0], | 652 | print("{:25} {:15} {:15} {} {} {}".format( result[0], |
609 | result[2], | 653 | result[2], |
610 | result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"), | 654 | result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"), |
611 | result[4], | 655 | result[4], |