diff options
Diffstat (limited to 'scripts/lib/devtool')
-rw-r--r-- | scripts/lib/devtool/__init__.py | 29 | ||||
-rw-r--r-- | scripts/lib/devtool/build.py | 2 | ||||
-rw-r--r-- | scripts/lib/devtool/build_image.py | 2 | ||||
-rw-r--r-- | scripts/lib/devtool/build_sdk.py | 9 | ||||
-rw-r--r-- | scripts/lib/devtool/deploy.py | 240 | ||||
-rw-r--r-- | scripts/lib/devtool/ide_plugins/__init__.py | 282 | ||||
-rw-r--r-- | scripts/lib/devtool/ide_plugins/ide_code.py | 462 | ||||
-rw-r--r-- | scripts/lib/devtool/ide_plugins/ide_none.py | 53 | ||||
-rwxr-xr-x | scripts/lib/devtool/ide_sdk.py | 1009 | ||||
-rw-r--r-- | scripts/lib/devtool/menuconfig.py | 11 | ||||
-rw-r--r-- | scripts/lib/devtool/sdk.py | 5 | ||||
-rw-r--r-- | scripts/lib/devtool/search.py | 5 | ||||
-rw-r--r-- | scripts/lib/devtool/standard.py | 780 | ||||
-rw-r--r-- | scripts/lib/devtool/upgrade.py | 256 | ||||
-rw-r--r-- | scripts/lib/devtool/utilcmds.py | 2 |
15 files changed, 2560 insertions, 587 deletions
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py index 702db669de..fa6e1a34fd 100644 --- a/scripts/lib/devtool/__init__.py +++ b/scripts/lib/devtool/__init__.py | |||
@@ -78,12 +78,15 @@ def exec_fakeroot(d, cmd, **kwargs): | |||
78 | """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions""" | 78 | """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions""" |
79 | # Grab the command and check it actually exists | 79 | # Grab the command and check it actually exists |
80 | fakerootcmd = d.getVar('FAKEROOTCMD') | 80 | fakerootcmd = d.getVar('FAKEROOTCMD') |
81 | fakerootenv = d.getVar('FAKEROOTENV') | ||
82 | exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, kwargs) | ||
83 | |||
84 | def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs): | ||
81 | if not os.path.exists(fakerootcmd): | 85 | if not os.path.exists(fakerootcmd): |
82 | logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built') | 86 | logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built') |
83 | return 2 | 87 | return 2 |
84 | # Set up the appropriate environment | 88 | # Set up the appropriate environment |
85 | newenv = dict(os.environ) | 89 | newenv = dict(os.environ) |
86 | fakerootenv = d.getVar('FAKEROOTENV') | ||
87 | for varvalue in fakerootenv.split(): | 90 | for varvalue in fakerootenv.split(): |
88 | if '=' in varvalue: | 91 | if '=' in varvalue: |
89 | splitval = varvalue.split('=', 1) | 92 | splitval = varvalue.split('=', 1) |
@@ -231,7 +234,29 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None): | |||
231 | f.write(line) | 234 | f.write(line) |
232 | 235 | ||
233 | bb.process.run('git checkout -b %s' % devbranch, cwd=repodir) | 236 | bb.process.run('git checkout -b %s' % devbranch, cwd=repodir) |
234 | bb.process.run('git tag -f %s' % basetag, cwd=repodir) | 237 | bb.process.run('git tag -f --no-sign %s' % basetag, cwd=repodir) |
238 | |||
239 | # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now, | ||
240 | # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe | ||
241 | stdout, _ = bb.process.run("git status --porcelain", cwd=repodir) | ||
242 | found = False | ||
243 | for line in stdout.splitlines(): | ||
244 | if line.endswith("/"): | ||
245 | new_dir = line.split()[1] | ||
246 | for root, dirs, files in os.walk(os.path.join(repodir, new_dir)): | ||
247 | if ".git" in dirs + files: | ||
248 | (stdout, _) = bb.process.run('git remote', cwd=root) | ||
249 | remote = stdout.splitlines()[0] | ||
250 | (stdout, _) = bb.process.run('git remote get-url %s' % remote, cwd=root) | ||
251 | remote_url = stdout.splitlines()[0] | ||
252 | logger.error(os.path.relpath(os.path.join(root, ".."), root)) | ||
253 | bb.process.run('git submodule add %s %s' % (remote_url, os.path.relpath(root, os.path.join(root, ".."))), cwd=os.path.join(root, "..")) | ||
254 | found = True | ||
255 | if found: | ||
256 | oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d) | ||
257 | found = False | ||
258 | if os.path.exists(os.path.join(repodir, '.gitmodules')): | ||
259 | bb.process.run('git submodule foreach --recursive "git tag -f --no-sign %s"' % basetag, cwd=repodir) | ||
235 | 260 | ||
236 | def recipe_to_append(recipefile, config, wildcard=False): | 261 | def recipe_to_append(recipefile, config, wildcard=False): |
237 | """ | 262 | """ |
diff --git a/scripts/lib/devtool/build.py b/scripts/lib/devtool/build.py index 935ffab46c..0b2c3d33dc 100644 --- a/scripts/lib/devtool/build.py +++ b/scripts/lib/devtool/build.py | |||
@@ -49,7 +49,7 @@ def build(args, config, basepath, workspace): | |||
49 | rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False) | 49 | rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False) |
50 | if not rd: | 50 | if not rd: |
51 | return 1 | 51 | return 1 |
52 | deploytask = 'do_deploy' in rd.getVar('__BBTASKS') | 52 | deploytask = 'do_deploy' in bb.build.listtasks(rd) |
53 | finally: | 53 | finally: |
54 | tinfoil.shutdown() | 54 | tinfoil.shutdown() |
55 | 55 | ||
diff --git a/scripts/lib/devtool/build_image.py b/scripts/lib/devtool/build_image.py index 9388abbacf..980f90ddd6 100644 --- a/scripts/lib/devtool/build_image.py +++ b/scripts/lib/devtool/build_image.py | |||
@@ -113,7 +113,7 @@ def build_image_task(config, basepath, workspace, image, add_packages=None, task | |||
113 | with open(appendfile, 'w') as afile: | 113 | with open(appendfile, 'w') as afile: |
114 | if packages: | 114 | if packages: |
115 | # include packages from workspace recipes into the image | 115 | # include packages from workspace recipes into the image |
116 | afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(packages)) | 116 | afile.write('IMAGE_INSTALL:append = " %s"\n' % ' '.join(packages)) |
117 | if not task: | 117 | if not task: |
118 | logger.info('Building image %s with the following ' | 118 | logger.info('Building image %s with the following ' |
119 | 'additional packages: %s', image, ' '.join(packages)) | 119 | 'additional packages: %s', image, ' '.join(packages)) |
diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py index 6fe02fff2a..990303982c 100644 --- a/scripts/lib/devtool/build_sdk.py +++ b/scripts/lib/devtool/build_sdk.py | |||
@@ -5,15 +5,8 @@ | |||
5 | # SPDX-License-Identifier: GPL-2.0-only | 5 | # SPDX-License-Identifier: GPL-2.0-only |
6 | # | 6 | # |
7 | 7 | ||
8 | import os | ||
9 | import subprocess | ||
10 | import logging | 8 | import logging |
11 | import glob | 9 | from devtool import DevtoolError |
12 | import shutil | ||
13 | import errno | ||
14 | import sys | ||
15 | import tempfile | ||
16 | from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError | ||
17 | from devtool import build_image | 10 | from devtool import build_image |
18 | 11 | ||
19 | logger = logging.getLogger('devtool') | 12 | logger = logging.getLogger('devtool') |
diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py index e5af2c95ae..b5ca8f2c2f 100644 --- a/scripts/lib/devtool/deploy.py +++ b/scripts/lib/devtool/deploy.py | |||
@@ -16,7 +16,7 @@ import bb.utils | |||
16 | import argparse_oe | 16 | import argparse_oe |
17 | import oe.types | 17 | import oe.types |
18 | 18 | ||
19 | from devtool import exec_fakeroot, setup_tinfoil, check_workspace_recipe, DevtoolError | 19 | from devtool import exec_fakeroot_no_d, setup_tinfoil, check_workspace_recipe, DevtoolError |
20 | 20 | ||
21 | logger = logging.getLogger('devtool') | 21 | logger = logging.getLogger('devtool') |
22 | 22 | ||
@@ -133,16 +133,38 @@ def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=Fals | |||
133 | 133 | ||
134 | return '\n'.join(lines) | 134 | return '\n'.join(lines) |
135 | 135 | ||
136 | |||
137 | |||
138 | def deploy(args, config, basepath, workspace): | 136 | def deploy(args, config, basepath, workspace): |
139 | """Entry point for the devtool 'deploy' subcommand""" | 137 | """Entry point for the devtool 'deploy' subcommand""" |
140 | import math | 138 | import oe.utils |
141 | import oe.recipeutils | ||
142 | import oe.package | ||
143 | 139 | ||
144 | check_workspace_recipe(workspace, args.recipename, checksrc=False) | 140 | check_workspace_recipe(workspace, args.recipename, checksrc=False) |
145 | 141 | ||
142 | tinfoil = setup_tinfoil(basepath=basepath) | ||
143 | try: | ||
144 | try: | ||
145 | rd = tinfoil.parse_recipe(args.recipename) | ||
146 | except Exception as e: | ||
147 | raise DevtoolError('Exception parsing recipe %s: %s' % | ||
148 | (args.recipename, e)) | ||
149 | |||
150 | srcdir = rd.getVar('D') | ||
151 | workdir = rd.getVar('WORKDIR') | ||
152 | path = rd.getVar('PATH') | ||
153 | strip_cmd = rd.getVar('STRIP') | ||
154 | libdir = rd.getVar('libdir') | ||
155 | base_libdir = rd.getVar('base_libdir') | ||
156 | max_process = oe.utils.get_bb_number_threads(rd) | ||
157 | fakerootcmd = rd.getVar('FAKEROOTCMD') | ||
158 | fakerootenv = rd.getVar('FAKEROOTENV') | ||
159 | finally: | ||
160 | tinfoil.shutdown() | ||
161 | |||
162 | return deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args) | ||
163 | |||
164 | def deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args): | ||
165 | import math | ||
166 | import oe.package | ||
167 | |||
146 | try: | 168 | try: |
147 | host, destdir = args.target.split(':') | 169 | host, destdir = args.target.split(':') |
148 | except ValueError: | 170 | except ValueError: |
@@ -152,118 +174,108 @@ def deploy(args, config, basepath, workspace): | |||
152 | if not destdir.endswith('/'): | 174 | if not destdir.endswith('/'): |
153 | destdir += '/' | 175 | destdir += '/' |
154 | 176 | ||
155 | tinfoil = setup_tinfoil(basepath=basepath) | 177 | recipe_outdir = srcdir |
156 | try: | 178 | if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir): |
157 | try: | 179 | raise DevtoolError('No files to deploy - have you built the %s ' |
158 | rd = tinfoil.parse_recipe(args.recipename) | 180 | 'recipe? If so, the install step has not installed ' |
159 | except Exception as e: | 181 | 'any files.' % args.recipename) |
160 | raise DevtoolError('Exception parsing recipe %s: %s' % | 182 | |
161 | (args.recipename, e)) | 183 | if args.strip and not args.dry_run: |
162 | recipe_outdir = rd.getVar('D') | 184 | # Fakeroot copy to new destination |
163 | if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir): | 185 | srcdir = recipe_outdir |
164 | raise DevtoolError('No files to deploy - have you built the %s ' | 186 | recipe_outdir = os.path.join(workdir, 'devtool-deploy-target-stripped') |
165 | 'recipe? If so, the install step has not installed ' | 187 | if os.path.isdir(recipe_outdir): |
166 | 'any files.' % args.recipename) | 188 | exec_fakeroot_no_d(fakerootcmd, fakerootenv, "rm -rf %s" % recipe_outdir, shell=True) |
167 | 189 | exec_fakeroot_no_d(fakerootcmd, fakerootenv, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True) | |
168 | if args.strip and not args.dry_run: | 190 | os.environ['PATH'] = ':'.join([os.environ['PATH'], path or '']) |
169 | # Fakeroot copy to new destination | 191 | oe.package.strip_execs(args.recipename, recipe_outdir, strip_cmd, libdir, base_libdir, max_process) |
170 | srcdir = recipe_outdir | 192 | |
171 | recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'deploy-target-stripped') | 193 | filelist = [] |
172 | if os.path.isdir(recipe_outdir): | 194 | inodes = set({}) |
173 | bb.utils.remove(recipe_outdir, True) | 195 | ftotalsize = 0 |
174 | exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True) | 196 | for root, _, files in os.walk(recipe_outdir): |
175 | os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or '']) | 197 | for fn in files: |
176 | oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'), | 198 | fstat = os.lstat(os.path.join(root, fn)) |
177 | rd.getVar('base_libdir'), rd) | 199 | # Get the size in kiB (since we'll be comparing it to the output of du -k) |
178 | 200 | # MUST use lstat() here not stat() or getfilesize() since we don't want to | |
179 | filelist = [] | 201 | # dereference symlinks |
180 | inodes = set({}) | 202 | if fstat.st_ino in inodes: |
181 | ftotalsize = 0 | 203 | fsize = 0 |
182 | for root, _, files in os.walk(recipe_outdir): | 204 | else: |
183 | for fn in files: | 205 | fsize = int(math.ceil(float(fstat.st_size)/1024)) |
184 | fstat = os.lstat(os.path.join(root, fn)) | 206 | inodes.add(fstat.st_ino) |
185 | # Get the size in kiB (since we'll be comparing it to the output of du -k) | 207 | ftotalsize += fsize |
186 | # MUST use lstat() here not stat() or getfilesize() since we don't want to | 208 | # The path as it would appear on the target |
187 | # dereference symlinks | 209 | fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn) |
188 | if fstat.st_ino in inodes: | 210 | filelist.append((fpath, fsize)) |
189 | fsize = 0 | 211 | |
190 | else: | 212 | if args.dry_run: |
191 | fsize = int(math.ceil(float(fstat.st_size)/1024)) | 213 | print('Files to be deployed for %s on target %s:' % (args.recipename, args.target)) |
192 | inodes.add(fstat.st_ino) | 214 | for item, _ in filelist: |
193 | ftotalsize += fsize | 215 | print(' %s' % item) |
194 | # The path as it would appear on the target | 216 | return 0 |
195 | fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn) | ||
196 | filelist.append((fpath, fsize)) | ||
197 | |||
198 | if args.dry_run: | ||
199 | print('Files to be deployed for %s on target %s:' % (args.recipename, args.target)) | ||
200 | for item, _ in filelist: | ||
201 | print(' %s' % item) | ||
202 | return 0 | ||
203 | |||
204 | extraoptions = '' | ||
205 | if args.no_host_check: | ||
206 | extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' | ||
207 | if not args.show_status: | ||
208 | extraoptions += ' -q' | ||
209 | |||
210 | scp_sshexec = '' | ||
211 | ssh_sshexec = 'ssh' | ||
212 | if args.ssh_exec: | ||
213 | scp_sshexec = "-S %s" % args.ssh_exec | ||
214 | ssh_sshexec = args.ssh_exec | ||
215 | scp_port = '' | ||
216 | ssh_port = '' | ||
217 | if args.port: | ||
218 | scp_port = "-P %s" % args.port | ||
219 | ssh_port = "-p %s" % args.port | ||
220 | |||
221 | if args.key: | ||
222 | extraoptions += ' -i %s' % args.key | ||
223 | |||
224 | # In order to delete previously deployed files and have the manifest file on | ||
225 | # the target, we write out a shell script and then copy it to the target | ||
226 | # so we can then run it (piping tar output to it). | ||
227 | # (We cannot use scp here, because it doesn't preserve symlinks.) | ||
228 | tmpdir = tempfile.mkdtemp(prefix='devtool') | ||
229 | try: | ||
230 | tmpscript = '/tmp/devtool_deploy.sh' | ||
231 | tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list') | ||
232 | shellscript = _prepare_remote_script(deploy=True, | ||
233 | verbose=args.show_status, | ||
234 | nopreserve=args.no_preserve, | ||
235 | nocheckspace=args.no_check_space) | ||
236 | # Write out the script to a file | ||
237 | with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f: | ||
238 | f.write(shellscript) | ||
239 | # Write out the file list | ||
240 | with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f: | ||
241 | f.write('%d\n' % ftotalsize) | ||
242 | for fpath, fsize in filelist: | ||
243 | f.write('%s %d\n' % (fpath, fsize)) | ||
244 | # Copy them to the target | ||
245 | ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True) | ||
246 | if ret != 0: | ||
247 | raise DevtoolError('Failed to copy script to %s - rerun with -s to ' | ||
248 | 'get a complete error message' % args.target) | ||
249 | finally: | ||
250 | shutil.rmtree(tmpdir) | ||
251 | 217 | ||
252 | # Now run the script | 218 | extraoptions = '' |
253 | ret = exec_fakeroot(rd, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True) | 219 | if args.no_host_check: |
254 | if ret != 0: | 220 | extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' |
255 | raise DevtoolError('Deploy failed - rerun with -s to get a complete ' | 221 | if not args.show_status: |
256 | 'error message') | 222 | extraoptions += ' -q' |
257 | 223 | ||
258 | logger.info('Successfully deployed %s' % recipe_outdir) | 224 | scp_sshexec = '' |
225 | ssh_sshexec = 'ssh' | ||
226 | if args.ssh_exec: | ||
227 | scp_sshexec = "-S %s" % args.ssh_exec | ||
228 | ssh_sshexec = args.ssh_exec | ||
229 | scp_port = '' | ||
230 | ssh_port = '' | ||
231 | if args.port: | ||
232 | scp_port = "-P %s" % args.port | ||
233 | ssh_port = "-p %s" % args.port | ||
234 | |||
235 | if args.key: | ||
236 | extraoptions += ' -i %s' % args.key | ||
259 | 237 | ||
260 | files_list = [] | 238 | # In order to delete previously deployed files and have the manifest file on |
261 | for root, _, files in os.walk(recipe_outdir): | 239 | # the target, we write out a shell script and then copy it to the target |
262 | for filename in files: | 240 | # so we can then run it (piping tar output to it). |
263 | filename = os.path.relpath(os.path.join(root, filename), recipe_outdir) | 241 | # (We cannot use scp here, because it doesn't preserve symlinks.) |
264 | files_list.append(os.path.join(destdir, filename)) | 242 | tmpdir = tempfile.mkdtemp(prefix='devtool') |
243 | try: | ||
244 | tmpscript = '/tmp/devtool_deploy.sh' | ||
245 | tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list') | ||
246 | shellscript = _prepare_remote_script(deploy=True, | ||
247 | verbose=args.show_status, | ||
248 | nopreserve=args.no_preserve, | ||
249 | nocheckspace=args.no_check_space) | ||
250 | # Write out the script to a file | ||
251 | with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f: | ||
252 | f.write(shellscript) | ||
253 | # Write out the file list | ||
254 | with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f: | ||
255 | f.write('%d\n' % ftotalsize) | ||
256 | for fpath, fsize in filelist: | ||
257 | f.write('%s %d\n' % (fpath, fsize)) | ||
258 | # Copy them to the target | ||
259 | ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True) | ||
260 | if ret != 0: | ||
261 | raise DevtoolError('Failed to copy script to %s - rerun with -s to ' | ||
262 | 'get a complete error message' % args.target) | ||
265 | finally: | 263 | finally: |
266 | tinfoil.shutdown() | 264 | shutil.rmtree(tmpdir) |
265 | |||
266 | # Now run the script | ||
267 | ret = exec_fakeroot_no_d(fakerootcmd, fakerootenv, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True) | ||
268 | if ret != 0: | ||
269 | raise DevtoolError('Deploy failed - rerun with -s to get a complete ' | ||
270 | 'error message') | ||
271 | |||
272 | logger.info('Successfully deployed %s' % recipe_outdir) | ||
273 | |||
274 | files_list = [] | ||
275 | for root, _, files in os.walk(recipe_outdir): | ||
276 | for filename in files: | ||
277 | filename = os.path.relpath(os.path.join(root, filename), recipe_outdir) | ||
278 | files_list.append(os.path.join(destdir, filename)) | ||
267 | 279 | ||
268 | return 0 | 280 | return 0 |
269 | 281 | ||
diff --git a/scripts/lib/devtool/ide_plugins/__init__.py b/scripts/lib/devtool/ide_plugins/__init__.py new file mode 100644 index 0000000000..19c2f61c5f --- /dev/null +++ b/scripts/lib/devtool/ide_plugins/__init__.py | |||
@@ -0,0 +1,282 @@ | |||
1 | # | ||
2 | # Copyright (C) 2023-2024 Siemens AG | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | """Devtool ide-sdk IDE plugin interface definition and helper functions""" | ||
7 | |||
8 | import errno | ||
9 | import json | ||
10 | import logging | ||
11 | import os | ||
12 | import stat | ||
13 | from enum import Enum, auto | ||
14 | from devtool import DevtoolError | ||
15 | from bb.utils import mkdirhier | ||
16 | |||
17 | logger = logging.getLogger('devtool') | ||
18 | |||
19 | |||
20 | class BuildTool(Enum): | ||
21 | UNDEFINED = auto() | ||
22 | CMAKE = auto() | ||
23 | MESON = auto() | ||
24 | |||
25 | @property | ||
26 | def is_c_ccp(self): | ||
27 | if self is BuildTool.CMAKE: | ||
28 | return True | ||
29 | if self is BuildTool.MESON: | ||
30 | return True | ||
31 | return False | ||
32 | |||
33 | |||
34 | class GdbCrossConfig: | ||
35 | """Base class defining the GDB configuration generator interface | ||
36 | |||
37 | Generate a GDB configuration for a binary on the target device. | ||
38 | Only one instance per binary is allowed. This allows to assign unique port | ||
39 | numbers for all gdbserver instances. | ||
40 | """ | ||
41 | _gdbserver_port_next = 1234 | ||
42 | _binaries = [] | ||
43 | |||
44 | def __init__(self, image_recipe, modified_recipe, binary, gdbserver_multi=True): | ||
45 | self.image_recipe = image_recipe | ||
46 | self.modified_recipe = modified_recipe | ||
47 | self.gdb_cross = modified_recipe.gdb_cross | ||
48 | self.binary = binary | ||
49 | if binary in GdbCrossConfig._binaries: | ||
50 | raise DevtoolError( | ||
51 | "gdbserver config for binary %s is already generated" % binary) | ||
52 | GdbCrossConfig._binaries.append(binary) | ||
53 | self.script_dir = modified_recipe.ide_sdk_scripts_dir | ||
54 | self.gdbinit_dir = os.path.join(self.script_dir, 'gdbinit') | ||
55 | self.gdbserver_multi = gdbserver_multi | ||
56 | self.binary_pretty = self.binary.replace(os.sep, '-').lstrip('-') | ||
57 | self.gdbserver_port = GdbCrossConfig._gdbserver_port_next | ||
58 | GdbCrossConfig._gdbserver_port_next += 1 | ||
59 | self.id_pretty = "%d_%s" % (self.gdbserver_port, self.binary_pretty) | ||
60 | # gdbserver start script | ||
61 | gdbserver_script_file = 'gdbserver_' + self.id_pretty | ||
62 | if self.gdbserver_multi: | ||
63 | gdbserver_script_file += "_m" | ||
64 | self.gdbserver_script = os.path.join( | ||
65 | self.script_dir, gdbserver_script_file) | ||
66 | # gdbinit file | ||
67 | self.gdbinit = os.path.join( | ||
68 | self.gdbinit_dir, 'gdbinit_' + self.id_pretty) | ||
69 | # gdb start script | ||
70 | self.gdb_script = os.path.join( | ||
71 | self.script_dir, 'gdb_' + self.id_pretty) | ||
72 | |||
73 | def _gen_gdbserver_start_script(self): | ||
74 | """Generate a shell command starting the gdbserver on the remote device via ssh | ||
75 | |||
76 | GDB supports two modes: | ||
77 | multi: gdbserver remains running over several debug sessions | ||
78 | once: gdbserver terminates after the debugged process terminates | ||
79 | """ | ||
80 | cmd_lines = ['#!/bin/sh'] | ||
81 | if self.gdbserver_multi: | ||
82 | temp_dir = "TEMP_DIR=/tmp/gdbserver_%s; " % self.id_pretty | ||
83 | gdbserver_cmd_start = temp_dir | ||
84 | gdbserver_cmd_start += "test -f \\$TEMP_DIR/pid && exit 0; " | ||
85 | gdbserver_cmd_start += "mkdir -p \\$TEMP_DIR; " | ||
86 | gdbserver_cmd_start += "%s --multi :%s > \\$TEMP_DIR/log 2>&1 & " % ( | ||
87 | self.gdb_cross.gdbserver_path, self.gdbserver_port) | ||
88 | gdbserver_cmd_start += "echo \\$! > \\$TEMP_DIR/pid;" | ||
89 | |||
90 | gdbserver_cmd_stop = temp_dir | ||
91 | gdbserver_cmd_stop += "test -f \\$TEMP_DIR/pid && kill \\$(cat \\$TEMP_DIR/pid); " | ||
92 | gdbserver_cmd_stop += "rm -rf \\$TEMP_DIR; " | ||
93 | |||
94 | gdbserver_cmd_l = [] | ||
95 | gdbserver_cmd_l.append('if [ "$1" = "stop" ]; then') | ||
96 | gdbserver_cmd_l.append(' shift') | ||
97 | gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % ( | ||
98 | self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_stop)) | ||
99 | gdbserver_cmd_l.append('else') | ||
100 | gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % ( | ||
101 | self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start)) | ||
102 | gdbserver_cmd_l.append('fi') | ||
103 | gdbserver_cmd = os.linesep.join(gdbserver_cmd_l) | ||
104 | else: | ||
105 | gdbserver_cmd_start = "%s --once :%s %s" % ( | ||
106 | self.gdb_cross.gdbserver_path, self.gdbserver_port, self.binary) | ||
107 | gdbserver_cmd = "%s %s %s %s 'sh -c \"%s\"'" % ( | ||
108 | self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start) | ||
109 | cmd_lines.append(gdbserver_cmd) | ||
110 | GdbCrossConfig.write_file(self.gdbserver_script, cmd_lines, True) | ||
111 | |||
112 | def _gen_gdbinit_config(self): | ||
113 | """Generate a gdbinit file for this binary and the corresponding gdbserver configuration""" | ||
114 | gdbinit_lines = ['# This file is generated by devtool ide-sdk'] | ||
115 | if self.gdbserver_multi: | ||
116 | target_help = '# gdbserver --multi :%d' % self.gdbserver_port | ||
117 | remote_cmd = 'target extended-remote' | ||
118 | else: | ||
119 | target_help = '# gdbserver :%d %s' % ( | ||
120 | self.gdbserver_port, self.binary) | ||
121 | remote_cmd = 'target remote' | ||
122 | gdbinit_lines.append('# On the remote target:') | ||
123 | gdbinit_lines.append(target_help) | ||
124 | gdbinit_lines.append('# On the build machine:') | ||
125 | gdbinit_lines.append('# cd ' + self.modified_recipe.real_srctree) | ||
126 | gdbinit_lines.append( | ||
127 | '# ' + self.gdb_cross.gdb + ' -ix ' + self.gdbinit) | ||
128 | |||
129 | gdbinit_lines.append('set sysroot ' + self.modified_recipe.d) | ||
130 | gdbinit_lines.append('set substitute-path "/usr/include" "' + | ||
131 | os.path.join(self.modified_recipe.recipe_sysroot, 'usr', 'include') + '"') | ||
132 | # Disable debuginfod for now, the IDE configuration uses rootfs-dbg from the image workdir. | ||
133 | gdbinit_lines.append('set debuginfod enabled off') | ||
134 | if self.image_recipe.rootfs_dbg: | ||
135 | gdbinit_lines.append( | ||
136 | 'set solib-search-path "' + self.modified_recipe.solib_search_path_str(self.image_recipe) + '"') | ||
137 | # First: Search for sources of this recipe in the workspace folder | ||
138 | if self.modified_recipe.pn in self.modified_recipe.target_dbgsrc_dir: | ||
139 | gdbinit_lines.append('set substitute-path "%s" "%s"' % | ||
140 | (self.modified_recipe.target_dbgsrc_dir, self.modified_recipe.real_srctree)) | ||
141 | else: | ||
142 | logger.error( | ||
143 | "TARGET_DBGSRC_DIR must contain the recipe name PN.") | ||
144 | # Second: Search for sources of other recipes in the rootfs-dbg | ||
145 | if self.modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"): | ||
146 | gdbinit_lines.append('set substitute-path "/usr/src/debug" "%s"' % os.path.join( | ||
147 | self.image_recipe.rootfs_dbg, "usr", "src", "debug")) | ||
148 | else: | ||
149 | logger.error( | ||
150 | "TARGET_DBGSRC_DIR must start with /usr/src/debug.") | ||
151 | else: | ||
152 | logger.warning( | ||
153 | "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.") | ||
154 | gdbinit_lines.append( | ||
155 | '%s %s:%d' % (remote_cmd, self.gdb_cross.host, self.gdbserver_port)) | ||
156 | gdbinit_lines.append('set remote exec-file ' + self.binary) | ||
157 | gdbinit_lines.append( | ||
158 | 'run ' + os.path.join(self.modified_recipe.d, self.binary)) | ||
159 | |||
160 | GdbCrossConfig.write_file(self.gdbinit, gdbinit_lines) | ||
161 | |||
162 | def _gen_gdb_start_script(self): | ||
163 | """Generate a script starting GDB with the corresponding gdbinit configuration.""" | ||
164 | cmd_lines = ['#!/bin/sh'] | ||
165 | cmd_lines.append('cd ' + self.modified_recipe.real_srctree) | ||
166 | cmd_lines.append(self.gdb_cross.gdb + ' -ix ' + | ||
167 | self.gdbinit + ' "$@"') | ||
168 | GdbCrossConfig.write_file(self.gdb_script, cmd_lines, True) | ||
169 | |||
170 | def initialize(self): | ||
171 | self._gen_gdbserver_start_script() | ||
172 | self._gen_gdbinit_config() | ||
173 | self._gen_gdb_start_script() | ||
174 | |||
175 | @staticmethod | ||
176 | def write_file(script_file, cmd_lines, executable=False): | ||
177 | script_dir = os.path.dirname(script_file) | ||
178 | mkdirhier(script_dir) | ||
179 | with open(script_file, 'w') as script_f: | ||
180 | script_f.write(os.linesep.join(cmd_lines)) | ||
181 | script_f.write(os.linesep) | ||
182 | if executable: | ||
183 | st = os.stat(script_file) | ||
184 | os.chmod(script_file, st.st_mode | stat.S_IEXEC) | ||
185 | logger.info("Created: %s" % script_file) | ||
186 | |||
187 | |||
188 | class IdeBase: | ||
189 | """Base class defining the interface for IDE plugins""" | ||
190 | |||
191 | def __init__(self): | ||
192 | self.ide_name = 'undefined' | ||
193 | self.gdb_cross_configs = [] | ||
194 | |||
195 | @classmethod | ||
196 | def ide_plugin_priority(cls): | ||
197 | """Used to find the default ide handler if --ide is not passed""" | ||
198 | return 10 | ||
199 | |||
200 | def setup_shared_sysroots(self, shared_env): | ||
201 | logger.warn("Shared sysroot mode is not supported for IDE %s" % | ||
202 | self.ide_name) | ||
203 | |||
204 | def setup_modified_recipe(self, args, image_recipe, modified_recipe): | ||
205 | logger.warn("Modified recipe mode is not supported for IDE %s" % | ||
206 | self.ide_name) | ||
207 | |||
208 | def initialize_gdb_cross_configs(self, image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfig): | ||
209 | binaries = modified_recipe.find_installed_binaries() | ||
210 | for binary in binaries: | ||
211 | gdb_cross_config = gdb_cross_config_class( | ||
212 | image_recipe, modified_recipe, binary) | ||
213 | gdb_cross_config.initialize() | ||
214 | self.gdb_cross_configs.append(gdb_cross_config) | ||
215 | |||
216 | @staticmethod | ||
217 | def gen_oe_scrtips_sym_link(modified_recipe): | ||
218 | # create a sym-link from sources to the scripts directory | ||
219 | if os.path.isdir(modified_recipe.ide_sdk_scripts_dir): | ||
220 | IdeBase.symlink_force(modified_recipe.ide_sdk_scripts_dir, | ||
221 | os.path.join(modified_recipe.real_srctree, 'oe-scripts')) | ||
222 | |||
223 | @staticmethod | ||
224 | def update_json_file(json_dir, json_file, update_dict): | ||
225 | """Update a json file | ||
226 | |||
227 | By default it uses the dict.update function. If this is not sutiable | ||
228 | the update function might be passed via update_func parameter. | ||
229 | """ | ||
230 | json_path = os.path.join(json_dir, json_file) | ||
231 | logger.info("Updating IDE config file: %s (%s)" % | ||
232 | (json_file, json_path)) | ||
233 | if not os.path.exists(json_dir): | ||
234 | os.makedirs(json_dir) | ||
235 | try: | ||
236 | with open(json_path) as f: | ||
237 | orig_dict = json.load(f) | ||
238 | except json.decoder.JSONDecodeError: | ||
239 | logger.info( | ||
240 | "Decoding %s failed. Probably because of comments in the json file" % json_path) | ||
241 | orig_dict = {} | ||
242 | except FileNotFoundError: | ||
243 | orig_dict = {} | ||
244 | orig_dict.update(update_dict) | ||
245 | with open(json_path, 'w') as f: | ||
246 | json.dump(orig_dict, f, indent=4) | ||
247 | |||
248 | @staticmethod | ||
249 | def symlink_force(tgt, dst): | ||
250 | try: | ||
251 | os.symlink(tgt, dst) | ||
252 | except OSError as err: | ||
253 | if err.errno == errno.EEXIST: | ||
254 | if os.readlink(dst) != tgt: | ||
255 | os.remove(dst) | ||
256 | os.symlink(tgt, dst) | ||
257 | else: | ||
258 | raise err | ||
259 | |||
260 | |||
261 | def get_devtool_deploy_opts(args): | ||
262 | """Filter args for devtool deploy-target args""" | ||
263 | if not args.target: | ||
264 | return None | ||
265 | devtool_deploy_opts = [args.target] | ||
266 | if args.no_host_check: | ||
267 | devtool_deploy_opts += ["-c"] | ||
268 | if args.show_status: | ||
269 | devtool_deploy_opts += ["-s"] | ||
270 | if args.no_preserve: | ||
271 | devtool_deploy_opts += ["-p"] | ||
272 | if args.no_check_space: | ||
273 | devtool_deploy_opts += ["--no-check-space"] | ||
274 | if args.ssh_exec: | ||
275 | devtool_deploy_opts += ["-e", args.ssh.exec] | ||
276 | if args.port: | ||
277 | devtool_deploy_opts += ["-P", args.port] | ||
278 | if args.key: | ||
279 | devtool_deploy_opts += ["-I", args.key] | ||
280 | if args.strip is False: | ||
281 | devtool_deploy_opts += ["--no-strip"] | ||
282 | return devtool_deploy_opts | ||
diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py new file mode 100644 index 0000000000..ee5bb57265 --- /dev/null +++ b/scripts/lib/devtool/ide_plugins/ide_code.py | |||
@@ -0,0 +1,462 @@ | |||
1 | # | ||
2 | # Copyright (C) 2023-2024 Siemens AG | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | """Devtool ide-sdk IDE plugin for VSCode and VSCodium""" | ||
7 | |||
8 | import json | ||
9 | import logging | ||
10 | import os | ||
11 | import shutil | ||
12 | from devtool.ide_plugins import BuildTool, IdeBase, GdbCrossConfig, get_devtool_deploy_opts | ||
13 | |||
14 | logger = logging.getLogger('devtool') | ||
15 | |||
16 | |||
class GdbCrossConfigVSCode(GdbCrossConfig):
    """GDB cross-debug configuration specialised for VSCode.

    VSCode's cppdbg extension drives the gdb client itself, so this
    variant only generates the gdbserver start script for the target.
    The False passed to the base constructor presumably disables the
    generic gdbinit/helper generation — confirm against GdbCrossConfig.
    """

    def __init__(self, image_recipe, modified_recipe, binary):
        super().__init__(image_recipe, modified_recipe, binary, False)

    def initialize(self):
        # Only the script that launches gdbserver on the target is needed.
        self._gen_gdbserver_start_script()
23 | |||
24 | |||
class IdeVSCode(IdeBase):
    """Manage IDE configurations for VSCode

    Modified recipe mode:
    - cmake: use the cmake-preset generated by devtool ide-sdk
    - meson: meson is called via a wrapper script generated by devtool ide-sdk

    Shared sysroot mode:
    In shared sysroot mode, the cross tool-chain is exported to the user's global configuration.
    A workspace cannot be created because there is no recipe that defines how a workspace could
    be set up.
    - cmake: adds a cmake-kit to .local/share/CMakeTools/cmake-tools-kits.json
        The cmake-kit uses the environment script and the tool-chain file
        generated by meta-ide-support.
    - meson: Meson needs manual workspace configuration.
    """

    @classmethod
    def ide_plugin_priority(cls):
        """If --ide is not passed this is the default plugin"""
        if shutil.which('code'):
            return 100
        return 0

    def setup_shared_sysroots(self, shared_env):
        """Expose the toolchain of the shared sysroots SDK

        Adds (or updates in place) a cmake-kit entry in the user's global
        cmake-tools-kits.json so VSCode's CMake Tools extension can use the
        cross toolchain. Meson has no equivalent global configuration.
        """
        datadir = shared_env.ide_support.datadir
        deploy_dir_image = shared_env.ide_support.deploy_dir_image
        real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
        standalone_sysroot_native = shared_env.build_sysroots.standalone_sysroot_native
        vscode_ws_path = os.path.join(
            os.environ['HOME'], '.local', 'share', 'CMakeTools')
        cmake_kits_path = os.path.join(vscode_ws_path, 'cmake-tools-kits.json')
        oecmake_generator = "Ninja"
        env_script = os.path.join(
            deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)

        if not os.path.isdir(vscode_ws_path):
            os.makedirs(vscode_ws_path)
        cmake_kits_old = []
        if os.path.exists(cmake_kits_path):
            with open(cmake_kits_path, 'r', encoding='utf-8') as cmake_kits_file:
                cmake_kits_old = json.load(cmake_kits_file)
        cmake_kits = cmake_kits_old.copy()

        cmake_kit_new = {
            "name": "OE " + real_multimach_target_sys,
            "environmentSetupScript": env_script,
            "toolchainFile": standalone_sysroot_native + datadir + "/cmake/OEToolchainConfig.cmake",
            "preferredGenerator": {
                "name": oecmake_generator
            }
        }

        def merge_kit(cmake_kits, cmake_kit_new):
            # Replace an existing kit that uses the same environment script
            # in place; only append when no kit matches.
            i = 0
            while i < len(cmake_kits):
                if 'environmentSetupScript' in cmake_kits[i] and \
                        cmake_kits[i]['environmentSetupScript'] == cmake_kit_new['environmentSetupScript']:
                    cmake_kits[i] = cmake_kit_new
                    return
                i += 1
            cmake_kits.append(cmake_kit_new)
        merge_kit(cmake_kits, cmake_kit_new)

        # Only rewrite the file if the kit list actually changed.
        if cmake_kits != cmake_kits_old:
            logger.info("Updating: %s" % cmake_kits_path)
            with open(cmake_kits_path, 'w', encoding='utf-8') as cmake_kits_file:
                json.dump(cmake_kits, cmake_kits_file, indent=4)
        else:
            logger.info("Already up to date: %s" % cmake_kits_path)

        cmake_native = os.path.join(
            shared_env.build_sysroots.standalone_sysroot_native, 'usr', 'bin', 'cmake')
        if os.path.isfile(cmake_native):
            logger.info('cmake-kits call cmake by default. If the cmake provided by this SDK should be used, please add the following line to ".vscode/settings.json" file: "cmake.cmakePath": "%s"' % cmake_native)
        else:
            logger.error("Cannot find cmake native at: %s" % cmake_native)

    def dot_code_dir(self, modified_recipe):
        """Return the .vscode folder inside the recipe's source tree"""
        return os.path.join(modified_recipe.srctree, '.vscode')

    def __vscode_settings_meson(self, settings_dict, modified_recipe):
        """Add meson specific entries to settings.json (no-op otherwise)"""
        if modified_recipe.build_tool is not BuildTool.MESON:
            return
        settings_dict["mesonbuild.mesonPath"] = modified_recipe.meson_wrapper

        confopts = modified_recipe.mesonopts.split()
        confopts += modified_recipe.meson_cross_file.split()
        confopts += modified_recipe.extra_oemeson.split()
        settings_dict["mesonbuild.configureOptions"] = confopts
        settings_dict["mesonbuild.buildFolder"] = modified_recipe.b

    def __vscode_settings_cmake(self, settings_dict, modified_recipe):
        """Add cmake specific settings to settings.json.

        Note: most settings are passed to the cmake preset.
        """
        if modified_recipe.build_tool is not BuildTool.CMAKE:
            return
        settings_dict["cmake.configureOnOpen"] = True
        settings_dict["cmake.sourceDirectory"] = modified_recipe.real_srctree

    def vscode_settings(self, modified_recipe, image_recipe):
        """Generate .vscode/settings.json for the modified recipe"""
        files_excludes = {
            "**/.git/**": True,
            "**/oe-logs/**": True,
            "**/oe-workdir/**": True,
            "**/source-date-epoch/**": True
        }
        python_exclude = [
            "**/.git/**",
            "**/oe-logs/**",
            "**/oe-workdir/**",
            "**/source-date-epoch/**"
        ]
        # Sysroots (and the debug rootfs, when present) are build output:
        # mark them read-only so they are not edited by accident.
        files_readonly = {
            modified_recipe.recipe_sysroot + '/**': True,
            modified_recipe.recipe_sysroot_native + '/**': True,
        }
        if image_recipe.rootfs_dbg is not None:
            files_readonly[image_recipe.rootfs_dbg + '/**'] = True
        settings_dict = {
            "files.watcherExclude": files_excludes,
            "files.exclude": files_excludes,
            "files.readonlyInclude": files_readonly,
            "python.analysis.exclude": python_exclude
        }
        self.__vscode_settings_cmake(settings_dict, modified_recipe)
        self.__vscode_settings_meson(settings_dict, modified_recipe)

        settings_file = 'settings.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), settings_file, settings_dict)

    def __vscode_extensions_cmake(self, modified_recipe, recommendations):
        """Recommend cmake related extensions (no-op for other tools)"""
        if modified_recipe.build_tool is not BuildTool.CMAKE:
            return
        recommendations += [
            "ms-vscode.cmake-tools",
            "ms-vscode.cpptools",
            "ms-vscode.cpptools-extension-pack",
            "ms-vscode.cpptools-themes"
        ]

    def __vscode_extensions_meson(self, modified_recipe, recommendations):
        """Recommend meson related extensions (no-op for other tools)"""
        if modified_recipe.build_tool is not BuildTool.MESON:
            return
        recommendations += [
            'mesonbuild.mesonbuild',
            "ms-vscode.cpptools",
            "ms-vscode.cpptools-extension-pack",
            "ms-vscode.cpptools-themes"
        ]

    def vscode_extensions(self, modified_recipe):
        """Generate .vscode/extensions.json with recommended extensions"""
        recommendations = []
        self.__vscode_extensions_cmake(modified_recipe, recommendations)
        self.__vscode_extensions_meson(modified_recipe, recommendations)
        extensions_file = 'extensions.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), extensions_file, {"recommendations": recommendations})

    def vscode_c_cpp_properties(self, modified_recipe):
        """Generate .vscode/c_cpp_properties.json for C/C++ IntelliSense"""
        properties_dict = {
            "name": modified_recipe.recipe_id_pretty,
        }
        if modified_recipe.build_tool is BuildTool.CMAKE:
            properties_dict["configurationProvider"] = "ms-vscode.cmake-tools"
        elif modified_recipe.build_tool is BuildTool.MESON:
            properties_dict["configurationProvider"] = "mesonbuild.mesonbuild"
            properties_dict["compilerPath"] = os.path.join(modified_recipe.staging_bindir_toolchain, modified_recipe.cxx.split()[0])
        else:  # no C/C++ build
            return

        properties_dicts = {
            "configurations": [
                properties_dict
            ],
            "version": 4
        }
        prop_file = 'c_cpp_properties.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), prop_file, properties_dicts)

    def vscode_launch_bin_dbg(self, gdb_cross_config):
        """Build one cppdbg launch configuration for a debugged binary"""
        modified_recipe = gdb_cross_config.modified_recipe

        launch_config = {
            "name": gdb_cross_config.id_pretty,
            "type": "cppdbg",
            "request": "launch",
            # Debug the not-stripped binary from the recipe's install (D) folder.
            "program": os.path.join(modified_recipe.d, gdb_cross_config.binary.lstrip('/')),
            "stopAtEntry": True,
            "cwd": "${workspaceFolder}",
            "environment": [],
            "externalConsole": False,
            "MIMode": "gdb",
            "preLaunchTask": gdb_cross_config.id_pretty,
            "miDebuggerPath": modified_recipe.gdb_cross.gdb,
            "miDebuggerServerAddress": "%s:%d" % (modified_recipe.gdb_cross.host, gdb_cross_config.gdbserver_port)
        }

        # Search for header files in recipe-sysroot.
        src_file_map = {
            "/usr/include": os.path.join(modified_recipe.recipe_sysroot, "usr", "include")
        }
        # First of all search for not stripped binaries in the image folder.
        # These binaries are copied (and optionally stripped) by deploy-target
        setup_commands = [
            {
                "description": "sysroot",
                "text": "set sysroot " + modified_recipe.d
            }
        ]

        if gdb_cross_config.image_recipe.rootfs_dbg:
            launch_config['additionalSOLibSearchPath'] = modified_recipe.solib_search_path_str(
                gdb_cross_config.image_recipe)
            # First: Search for sources of this recipe in the workspace folder
            if modified_recipe.pn in modified_recipe.target_dbgsrc_dir:
                src_file_map[modified_recipe.target_dbgsrc_dir] = "${workspaceFolder}"
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must contain the recipe name PN.")
            # Second: Search for sources of other recipes in the rootfs-dbg
            if modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
                src_file_map["/usr/src/debug"] = os.path.join(
                    gdb_cross_config.image_recipe.rootfs_dbg, "usr", "src", "debug")
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
        else:
            logger.warning(
                "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")

        launch_config['sourceFileMap'] = src_file_map
        launch_config['setupCommands'] = setup_commands
        return launch_config

    def vscode_launch(self, modified_recipe):
        """GDB Launch configuration for binaries (elf files)"""

        configurations = []
        for gdb_cross_config in self.gdb_cross_configs:
            if gdb_cross_config.modified_recipe is modified_recipe:
                configurations.append(self.vscode_launch_bin_dbg(gdb_cross_config))
        launch_dict = {
            "version": "0.2.0",
            "configurations": configurations
        }
        launch_file = 'launch.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), launch_file, launch_dict)

    def vscode_tasks_cpp(self, args, modified_recipe):
        """Generate tasks.json for C/C++ recipes

        One task installs and deploys the recipe; one background task per
        debugged binary starts gdbserver on the target device.
        """
        run_install_deploy = modified_recipe.gen_install_deploy_script(args)
        install_task_name = "install && deploy-target %s" % modified_recipe.recipe_id_pretty
        tasks_dict = {
            "version": "2.0.0",
            "tasks": [
                {
                    "label": install_task_name,
                    "type": "shell",
                    "command": run_install_deploy,
                    "problemMatcher": []
                }
            ]
        }
        for gdb_cross_config in self.gdb_cross_configs:
            if gdb_cross_config.modified_recipe is not modified_recipe:
                continue
            tasks_dict['tasks'].append(
                {
                    "label": gdb_cross_config.id_pretty,
                    "type": "shell",
                    "isBackground": True,
                    "dependsOn": [
                        install_task_name
                    ],
                    "command": gdb_cross_config.gdbserver_script,
                    # Match-anything begins/ends patterns so VSCode considers
                    # the long-running gdbserver task "started" immediately.
                    "problemMatcher": [
                        {
                            "pattern": [
                                {
                                    "regexp": ".",
                                    "file": 1,
                                    "location": 2,
                                    "message": 3
                                }
                            ],
                            "background": {
                                "activeOnStart": True,
                                "beginsPattern": ".",
                                "endsPattern": ".",
                            }
                        }
                    ]
                })
        tasks_file = 'tasks.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)

    def vscode_tasks_fallback(self, args, modified_recipe):
        """Generate tasks.json for recipes not built with C/C++

        The tasks wrap 'devtool build' / 'devtool deploy-target' calls
        executed in a login shell with the build environment sourced.
        """
        oe_init_dir = modified_recipe.oe_init_dir
        oe_init = ". %s %s > /dev/null && " % (modified_recipe.oe_init_build_env, modified_recipe.topdir)
        dt_build = "devtool build "
        dt_build_label = dt_build + modified_recipe.recipe_id_pretty
        dt_build_cmd = dt_build + modified_recipe.bpn
        clean_opt = " --clean"
        dt_build_clean_label = dt_build + modified_recipe.recipe_id_pretty + clean_opt
        dt_build_clean_cmd = dt_build + modified_recipe.bpn + clean_opt
        dt_deploy = "devtool deploy-target "
        dt_deploy_label = dt_deploy + modified_recipe.recipe_id_pretty
        dt_deploy_cmd = dt_deploy + modified_recipe.bpn
        dt_build_deploy_label = "devtool build & deploy-target %s" % modified_recipe.recipe_id_pretty
        # Fixed: get_devtool_deploy_opts() returns None when args.target is
        # not set, which made ' '.join() raise TypeError; fall back to an
        # empty option list so tasks.json can still be generated.
        deploy_opts = ' '.join(get_devtool_deploy_opts(args) or [])
        tasks_dict = {
            "version": "2.0.0",
            "tasks": [
                {
                    "label": dt_build_label,
                    "type": "shell",
                    "command": "bash",
                    "linux": {
                        "options": {
                            "cwd": oe_init_dir
                        }
                    },
                    "args": [
                        "--login",
                        "-c",
                        "%s%s" % (oe_init, dt_build_cmd)
                    ],
                    "problemMatcher": []
                },
                {
                    "label": dt_deploy_label,
                    "type": "shell",
                    "command": "bash",
                    "linux": {
                        "options": {
                            "cwd": oe_init_dir
                        }
                    },
                    "args": [
                        "--login",
                        "-c",
                        "%s%s %s" % (
                            oe_init, dt_deploy_cmd, deploy_opts)
                    ],
                    "problemMatcher": []
                },
                {
                    "label": dt_build_deploy_label,
                    "dependsOrder": "sequence",
                    "dependsOn": [
                        dt_build_label,
                        dt_deploy_label
                    ],
                    "problemMatcher": [],
                    "group": {
                        "kind": "build",
                        "isDefault": True
                    }
                },
                {
                    "label": dt_build_clean_label,
                    "type": "shell",
                    "command": "bash",
                    "linux": {
                        "options": {
                            "cwd": oe_init_dir
                        }
                    },
                    "args": [
                        "--login",
                        "-c",
                        "%s%s" % (oe_init, dt_build_clean_cmd)
                    ],
                    "problemMatcher": []
                }
            ]
        }
        if modified_recipe.gdb_cross:
            for gdb_cross_config in self.gdb_cross_configs:
                if gdb_cross_config.modified_recipe is not modified_recipe:
                    continue
                tasks_dict['tasks'].append(
                    {
                        "label": gdb_cross_config.id_pretty,
                        "type": "shell",
                        "isBackground": True,
                        "dependsOn": [
                            dt_build_deploy_label
                        ],
                        "command": gdb_cross_config.gdbserver_script,
                        "problemMatcher": [
                            {
                                "pattern": [
                                    {
                                        "regexp": ".",
                                        "file": 1,
                                        "location": 2,
                                        "message": 3
                                    }
                                ],
                                "background": {
                                    "activeOnStart": True,
                                    "beginsPattern": ".",
                                    "endsPattern": ".",
                                }
                            }
                        ]
                    })
        tasks_file = 'tasks.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)

    def vscode_tasks(self, args, modified_recipe):
        """Dispatch to the C/C++ or the generic tasks.json generator"""
        if modified_recipe.build_tool.is_c_ccp:
            self.vscode_tasks_cpp(args, modified_recipe)
        else:
            self.vscode_tasks_fallback(args, modified_recipe)

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        """Create the .vscode configuration for a recipe in modified mode"""
        self.vscode_settings(modified_recipe, image_recipe)
        self.vscode_extensions(modified_recipe)
        self.vscode_c_cpp_properties(modified_recipe)
        # GDB launch/debug configs only make sense with a deploy target.
        if args.target:
            self.initialize_gdb_cross_configs(
                image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfigVSCode)
            self.vscode_launch(modified_recipe)
        self.vscode_tasks(args, modified_recipe)
459 | |||
460 | |||
def register_ide_plugin(ide_plugins):
    """Hook called by ide_sdk: register the VSCode plugin under 'code'."""
    ide_plugins.update({'code': IdeVSCode})
diff --git a/scripts/lib/devtool/ide_plugins/ide_none.py b/scripts/lib/devtool/ide_plugins/ide_none.py new file mode 100644 index 0000000000..f106c5a026 --- /dev/null +++ b/scripts/lib/devtool/ide_plugins/ide_none.py | |||
@@ -0,0 +1,53 @@ | |||
1 | # | ||
2 | # Copyright (C) 2023-2024 Siemens AG | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | """Devtool ide-sdk generic IDE plugin""" | ||
7 | |||
8 | import os | ||
9 | import logging | ||
10 | from devtool.ide_plugins import IdeBase, GdbCrossConfig | ||
11 | |||
12 | logger = logging.getLogger('devtool') | ||
13 | |||
14 | |||
class IdeNone(IdeBase):
    """Generate some generic helpers for other IDEs

    Modified recipe mode:
    Generate some helper scripts for remote debugging with GDB

    Shared sysroot mode:
    A wrapper for bitbake meta-ide-support and bitbake build-sysroots
    """

    def __init__(self):
        super().__init__()

    def setup_shared_sysroots(self, shared_env):
        """Tell the user which environment file to source for the SDK."""
        ide_support = shared_env.ide_support
        env_script = os.path.join(
            ide_support.deploy_dir_image,
            'environment-setup-' + ide_support.real_multimach_target_sys)
        logger.info(
            "To use this SDK please source this: %s" % env_script)

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        """generate some helper scripts and config files

        - Execute the do_install task
        - Execute devtool deploy-target
        - Generate a gdbinit file per executable
        - Generate the oe-scripts sym-link
        """
        script_path = modified_recipe.gen_install_deploy_script(args)
        logger.info("Created: %s" % script_path)

        self.initialize_gdb_cross_configs(image_recipe, modified_recipe)

        # NOTE: 'gen_oe_scrtips_sym_link' is the (misspelled) name of the
        # base-class helper; the call must stay in sync with IdeBase.
        IdeBase.gen_oe_scrtips_sym_link(modified_recipe)
50 | |||
51 | |||
def register_ide_plugin(ide_plugins):
    """Hook called by ide_sdk: register the generic plugin under 'none'."""
    ide_plugins.update({'none': IdeNone})
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py new file mode 100755 index 0000000000..931408fa74 --- /dev/null +++ b/scripts/lib/devtool/ide_sdk.py | |||
@@ -0,0 +1,1009 @@ | |||
1 | # Development tool - ide-sdk command plugin | ||
2 | # | ||
3 | # Copyright (C) 2023-2024 Siemens AG | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | # | ||
7 | """Devtool ide-sdk plugin""" | ||
8 | |||
9 | import json | ||
10 | import logging | ||
11 | import os | ||
12 | import re | ||
13 | import shutil | ||
14 | import stat | ||
15 | import subprocess | ||
16 | import sys | ||
17 | from argparse import RawTextHelpFormatter | ||
18 | from enum import Enum | ||
19 | |||
20 | import scriptutils | ||
21 | import bb | ||
22 | from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError, parse_recipe | ||
23 | from devtool.standard import get_real_srctree | ||
24 | from devtool.ide_plugins import BuildTool | ||
25 | |||
26 | |||
27 | logger = logging.getLogger('devtool') | ||
28 | |||
29 | # dict of classes derived from IdeBase | ||
30 | ide_plugins = {} | ||
31 | |||
32 | |||
class DevtoolIdeMode(Enum):
    """Operation modes offered by the ide-sdk plugin.

    Only two modes exist today; the enum deliberately leaves room for
    more advanced future modes, for example an automatic mode (modified
    if all recipes are modified, shared if none is) or a mixed mode
    (per-recipe choice between modified and shared).
    """

    modified = 'modified'
    shared = 'shared'
43 | |||
44 | |||
class TargetDevice:
    """SSH remote login parameters"""

    def __init__(self, args):
        # Extra ssh options (host-key handling, identity file).
        self.extraoptions = ''
        if args.no_host_check:
            self.extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
        self.ssh_sshexec = args.ssh_exec if args.ssh_exec else 'ssh'
        self.ssh_port = "-p %s" % args.port if args.port else ''
        if args.key:
            self.extraoptions += ' -i %s' % args.key

        self.target = args.target
        user_host = args.target.split('@')
        if len(user_host) == 2:
            self.login, self.host = user_host
        elif len(user_host) == 1:
            self.login = ""
            self.host = user_host[0]
        else:
            # NOTE(review): login/host remain unset on this path; callers
            # accessing them would fail later — confirm intended handling.
            logger.error("Invalid target argument: %s" % args.target)
71 | |||
72 | |||
class RecipeNative:
    """Base class for calling bitbake to provide a -native recipe"""

    def __init__(self, name, target_arch=None):
        self.name = name
        self.target_arch = target_arch
        # Task(s) bitbake must execute before this recipe's tools exist.
        self.bootstrap_tasks = ['%s:do_addto_recipe_sysroot' % self.name]
        # Filled in by (_)initialize() from the parsed recipe.
        self.staging_bindir_native = None
        self.target_sys = None
        self.__native_bin = None

    def _initialize(self, config, workspace, tinfoil):
        """Get the parsed recipe"""
        recipe_d = parse_recipe(
            config, tinfoil, self.name, appends=True, filter_workspace=False)
        if not recipe_d:
            raise DevtoolError("Parsing %s recipe failed" % self.name)
        self.staging_bindir_native = os.path.realpath(
            recipe_d.getVar('STAGING_BINDIR_NATIVE'))
        self.target_sys = recipe_d.getVar('TARGET_SYS')
        return recipe_d

    def initialize(self, config, workspace, tinfoil):
        """Basic initialization that can be overridden by a derived class"""
        self._initialize(config, workspace, tinfoil)

    @property
    def native_bin(self):
        """Absolute path of the native binary; must be set by a subclass."""
        if not self.__native_bin:
            raise DevtoolError("native binary name is not defined.")
        return self.__native_bin
104 | |||
105 | |||
class RecipeGdbCross(RecipeNative):
    """Provide gdb-cross on the host and locate gdbserver for the target device"""

    def __init__(self, args, target_arch, target_device):
        super().__init__('gdb-cross-' + target_arch, target_arch)
        self.target_device = target_device
        # Absolute path of the cross gdb client; resolved by initialize().
        self.gdb = None
        # Next free port for gdbserver instances on the target.
        self.gdbserver_port_next = int(args.gdbserver_port_start)
        self.config_db = {}

    def __find_gdbserver(self, config, tinfoil):
        """Absolute path of the gdbserver"""
        recipe_d_gdb = parse_recipe(
            config, tinfoil, 'gdb', appends=True, filter_workspace=False)
        if not recipe_d_gdb:
            raise DevtoolError("Parsing gdb recipe failed")
        return os.path.join(recipe_d_gdb.getVar('bindir'), 'gdbserver')

    def initialize(self, config, workspace, tinfoil):
        """Resolve the cross gdb client path and the target gdbserver path."""
        super()._initialize(config, workspace, tinfoil)
        self.gdb = os.path.join(
            self.staging_bindir_native, self.target_sys, self.target_sys + '-gdb')
        self.gdbserver_path = self.__find_gdbserver(config, tinfoil)

    @property
    def host(self):
        """SSH host of the target device."""
        return self.target_device.host
135 | |||
136 | |||
class RecipeImage:
    """Handle some image recipe related properties

    Most workflows require firmware that runs on the target device.
    This firmware must be consistent with the setup of the host system.
    In particular, the debug symbols must be compatible. For this, the
    rootfs must be created as part of the SDK.
    """

    def __init__(self, name):
        self.name = name
        self.combine_dbg_image = False
        self.gdbserver_missing = False
        self.rootfs = None
        self.__rootfs_dbg = None
        self.bootstrap_tasks = ['%s:do_build' % self.name]

    def initialize(self, config, tinfoil):
        """Resolve rootfs paths and debug capabilities from the parsed recipe."""
        image_d = parse_recipe(
            config, tinfoil, self.name, appends=True, filter_workspace=False)
        if not image_d:
            raise DevtoolError(
                "Parsing image recipe %s failed" % self.name)

        self.combine_dbg_image = bb.data.inherits_class(
            'image-combined-dbg', image_d)

        workdir = image_d.getVar('WORKDIR')
        self.rootfs = os.path.join(workdir, 'rootfs')
        # rootfs-dbg only exists when the image generates a debug filesystem.
        if image_d.getVar('IMAGE_GEN_DEBUGFS') == "1":
            self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg')

        has_gdbserver = 'gdbserver' in image_d.getVar('IMAGE_INSTALL')
        has_tools_debug = 'tools-debug' in image_d.getVar('IMAGE_FEATURES')
        self.gdbserver_missing = not has_gdbserver and not has_tools_debug

    @property
    def debug_support(self):
        """True when a debug rootfs with symbols is available on disk."""
        return bool(self.rootfs_dbg)

    @property
    def rootfs_dbg(self):
        """Path of rootfs-dbg if it exists on disk, otherwise None."""
        dbg_dir = self.__rootfs_dbg
        if dbg_dir and os.path.isdir(dbg_dir):
            return dbg_dir
        return None
181 | |||
182 | |||
class RecipeMetaIdeSupport:
    """For the shared sysroots mode meta-ide-support is needed

    For use cases where just a cross tool-chain is required but
    no recipe is used, devtool ide-sdk abstracts calling bitbake meta-ide-support
    and bitbake build-sysroots. This also allows to expose the cross-toolchains
    to IDEs. For example VSCode support different tool-chains with e.g. cmake-kits.
    """

    def __init__(self):
        self.bootstrap_tasks = ['meta-ide-support:do_build']
        # Resolved from the parsed recipe by initialize().
        self.topdir = None
        self.datadir = None
        self.deploy_dir_image = None
        self.build_sys = None
        # From toolchain-scripts
        self.real_multimach_target_sys = None

    def initialize(self, config, tinfoil):
        """Look up the bitbake variables needed for the environment script."""
        recipe_d = parse_recipe(
            config, tinfoil, 'meta-ide-support', appends=True, filter_workspace=False)
        if not recipe_d:
            raise DevtoolError("Parsing meta-ide-support recipe failed")

        self.topdir = recipe_d.getVar('TOPDIR')
        self.datadir = recipe_d.getVar('datadir')
        self.deploy_dir_image = recipe_d.getVar('DEPLOY_DIR_IMAGE')
        self.build_sys = recipe_d.getVar('BUILD_SYS')
        self.real_multimach_target_sys = recipe_d.getVar('REAL_MULTIMACH_TARGET_SYS')
214 | |||
215 | |||
class RecipeBuildSysroots:
    """For the shared sysroots mode build-sysroots is needed"""

    def __init__(self):
        # Resolved from the parsed recipe by initialize().
        self.standalone_sysroot = None
        self.standalone_sysroot_native = None
        self.bootstrap_tasks = [
            'build-sysroots:do_build_target_sysroot',
            'build-sysroots:do_build_native_sysroot'
        ]

    def initialize(self, config, tinfoil):
        """Look up the standalone sysroot locations from the recipe."""
        recipe_d = parse_recipe(
            config, tinfoil, 'build-sysroots', appends=True, filter_workspace=False)
        if not recipe_d:
            raise DevtoolError("Parsing build-sysroots recipe failed")
        self.standalone_sysroot = recipe_d.getVar('STANDALONE_SYSROOT')
        self.standalone_sysroot_native = recipe_d.getVar('STANDALONE_SYSROOT_NATIVE')
236 | |||
237 | |||
class SharedSysrootsEnv:
    """Handle the shared sysroots based workflow

    Support the workflow with just a tool-chain without a recipe.
    It's basically like:
      bitbake some-dependencies
      bitbake meta-ide-support
      bitbake build-sysroots
    Use the environment-* file found in the deploy folder
    """

    def __init__(self):
        self.ide_support = None
        self.build_sysroots = None

    def initialize(self, ide_support, build_sysroots):
        """Store the helper recipe objects this environment is built from."""
        self.ide_support = ide_support
        self.build_sysroots = build_sysroots

    def setup_ide(self, ide):
        """Let the IDE plugin configure itself from this environment."""
        ide.setup(self)
259 | |||
260 | |||
class RecipeNotModified:
    """Recipe added to the SDK's shared sysroots without being modified."""

    def __init__(self, name):
        self.name = name
        # Only populating the sysroot is needed for unmodified recipes.
        self.bootstrap_tasks = ['%s:do_populate_sysroot' % name]
267 | |||
268 | |||
class RecipeModified:
    """Handling of recipes in the workspace created by devtool modify"""
    OE_INIT_BUILD_ENV = 'oe-init-build-env'

    # Only plain [A-Za-z0-9_] names are safe to export into shell/IDE configs.
    VALID_BASH_ENV_NAME_CHARS = re.compile(r"^[a-zA-Z0-9_]*$")

    def __init__(self, name):
        """Create an empty wrapper; call initialize() to fill it from bitbake."""
        self.name = name
        # do_install provides the image (D) folder used for deployment
        self.bootstrap_tasks = [name + ':do_install']
        self.gdb_cross = None
        # workspace
        self.real_srctree = None
        self.srctree = None
        self.ide_sdk_dir = None
        self.ide_sdk_scripts_dir = None
        self.bbappend = None
        # recipe variables from d.getVar
        self.b = None
        self.base_libdir = None
        self.bblayers = None
        self.bpn = None
        self.d = None
        self.debug_build = None
        self.fakerootcmd = None
        self.fakerootenv = None
        self.libdir = None
        self.max_process = None
        self.package_arch = None
        self.package_debug_split_style = None
        self.path = None
        self.pn = None
        self.recipe_sysroot = None
        self.recipe_sysroot_native = None
        self.staging_incdir = None
        self.strip_cmd = None
        self.target_arch = None
        self.target_dbgsrc_dir = None
        self.topdir = None
        self.workdir = None
        self.recipe_id = None
        # replicate bitbake build environment
        self.exported_vars = None
        self.cmd_compile = None
        self.__oe_init_dir = None
        # main build tool used by this recipe
        self.build_tool = BuildTool.UNDEFINED
        # build_tool = cmake
        self.oecmake_generator = None
        self.cmake_cache_vars = None
        # build_tool = meson
        self.meson_buildtype = None
        self.meson_wrapper = None
        self.mesonopts = None
        self.extra_oemeson = None
        self.meson_cross_file = None

    def initialize(self, config, workspace, tinfoil):
        """Parse the recipe and capture everything needed to build without bitbake.

        Raises DevtoolError if the recipe cannot be parsed or is not a
        devtool-modify workspace recipe.
        """
        recipe_d = parse_recipe(
            config, tinfoil, self.name, appends=True, filter_workspace=False)
        if not recipe_d:
            raise DevtoolError("Parsing %s recipe failed" % self.name)

        # Verify this recipe is built as externalsrc setup by devtool modify
        workspacepn = check_workspace_recipe(
            workspace, self.name, bbclassextend=True)
        self.srctree = workspace[workspacepn]['srctree']
        # Need to grab this here in case the source is within a subdirectory
        self.real_srctree = get_real_srctree(
            self.srctree, recipe_d.getVar('S'), recipe_d.getVar('UNPACKDIR'))
        self.bbappend = workspace[workspacepn]['bbappend']

        # Start from a clean per-recipe ide-sdk folder
        self.ide_sdk_dir = os.path.join(
            config.workspace_path, 'ide-sdk', self.name)
        if os.path.exists(self.ide_sdk_dir):
            shutil.rmtree(self.ide_sdk_dir)
        self.ide_sdk_scripts_dir = os.path.join(self.ide_sdk_dir, 'scripts')

        self.b = recipe_d.getVar('B')
        self.base_libdir = recipe_d.getVar('base_libdir')
        self.bblayers = recipe_d.getVar('BBLAYERS').split()
        self.bpn = recipe_d.getVar('BPN')
        self.cxx = recipe_d.getVar('CXX')
        self.d = recipe_d.getVar('D')
        self.debug_build = recipe_d.getVar('DEBUG_BUILD')
        self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD')
        self.fakerootenv = recipe_d.getVar('FAKEROOTENV')
        self.libdir = recipe_d.getVar('libdir')
        self.max_process = int(recipe_d.getVar(
            "BB_NUMBER_THREADS") or os.cpu_count() or 1)
        self.package_arch = recipe_d.getVar('PACKAGE_ARCH')
        self.package_debug_split_style = recipe_d.getVar(
            'PACKAGE_DEBUG_SPLIT_STYLE')
        self.path = recipe_d.getVar('PATH')
        self.pn = recipe_d.getVar('PN')
        self.recipe_sysroot = os.path.realpath(
            recipe_d.getVar('RECIPE_SYSROOT'))
        self.recipe_sysroot_native = os.path.realpath(
            recipe_d.getVar('RECIPE_SYSROOT_NATIVE'))
        self.staging_bindir_toolchain = os.path.realpath(
            recipe_d.getVar('STAGING_BINDIR_TOOLCHAIN'))
        self.staging_incdir = os.path.realpath(
            recipe_d.getVar('STAGING_INCDIR'))
        self.strip_cmd = recipe_d.getVar('STRIP')
        self.target_arch = recipe_d.getVar('TARGET_ARCH')
        self.target_dbgsrc_dir = recipe_d.getVar('TARGET_DBGSRC_DIR')
        self.topdir = recipe_d.getVar('TOPDIR')
        self.workdir = os.path.realpath(recipe_d.getVar('WORKDIR'))

        self.__init_exported_variables(recipe_d)

        if bb.data.inherits_class('cmake', recipe_d):
            self.oecmake_generator = recipe_d.getVar('OECMAKE_GENERATOR')
            self.__init_cmake_preset_cache(recipe_d)
            self.build_tool = BuildTool.CMAKE
        elif bb.data.inherits_class('meson', recipe_d):
            self.meson_buildtype = recipe_d.getVar('MESON_BUILDTYPE')
            self.mesonopts = recipe_d.getVar('MESONOPTS')
            self.extra_oemeson = recipe_d.getVar('EXTRA_OEMESON')
            self.meson_cross_file = recipe_d.getVar('MESON_CROSS_FILE')
            self.build_tool = BuildTool.MESON

        # Recipe ID is the identifier for IDE config sections
        self.recipe_id = self.bpn + "-" + self.package_arch
        self.recipe_id_pretty = self.bpn + ": " + self.package_arch

    @staticmethod
    def is_valid_shell_variable(var):
        """Return True if var is a name that can safely be exported in a shell.

        Skip strange shell variables like the ones systemd passes around;
        they are not used in this context but break various tools.
        """
        if RecipeModified.VALID_BASH_ENV_NAME_CHARS.match(var):
            return True
        # Bug fix: this debug message used to be emitted on the *accept*
        # branch, wrongly claiming a kept variable was ignored.
        bb.debug(1, "ignoring variable: %s" % var)
        return False

    def solib_search_path(self, image):
        """Search for debug symbols in the rootfs and rootfs-dbg

        The debug symbols of shared libraries which are provided by other packages
        are grabbed from the -dbg packages in the rootfs-dbg.

        But most cross debugging tools like gdb, perf, and systemtap need to find
        executable/library first and through it debuglink note find corresponding
        symbols file. Therefore the library paths from the rootfs are added as well.

        Note: For the devtool modified recipe compiled from the IDE, the debug
        symbols are taken from the unstripped binaries in the image folder.
        Also, devtool deploy-target takes the files from the image folder.
        debug symbols in the image folder refer to the corresponding source files
        with absolute paths of the build machine. Debug symbols found in the
        rootfs-dbg are relocated and contain paths which refer to the source files
        installed on the target device e.g. /usr/src/...
        """
        base_libdir = self.base_libdir.lstrip('/')
        libdir = self.libdir.lstrip('/')
        so_paths = [
            # debug symbols for package_debug_split_style: debug-with-srcpkg or .debug
            os.path.join(image.rootfs_dbg, base_libdir, ".debug"),
            os.path.join(image.rootfs_dbg, libdir, ".debug"),
            # debug symbols for package_debug_split_style: debug-file-directory
            os.path.join(image.rootfs_dbg, "usr", "lib", "debug"),

            # The binaries are required as well, the debug packages are not enough
            # With image-combined-dbg.bbclass the binaries are copied into rootfs-dbg
            os.path.join(image.rootfs_dbg, base_libdir),
            os.path.join(image.rootfs_dbg, libdir),
            # Without image-combined-dbg.bbclass the binaries are only in rootfs.
            # Note: Stepping into source files located in rootfs-dbg does not
            # work without image-combined-dbg.bbclass yet.
            os.path.join(image.rootfs, base_libdir),
            os.path.join(image.rootfs, libdir)
        ]
        return so_paths

    def solib_search_path_str(self, image):
        """Return a : separated list of paths usable by GDB's set solib-search-path"""
        return ':'.join(self.solib_search_path(image))

    def __init_exported_variables(self, d):
        """Find all variables with export flag set.

        This allows to generate IDE configurations which compile with the same
        environment as bitbake does. That's at least a reasonable default behavior.
        """
        exported_vars = {}

        vars = (key for key in d.keys() if not key.startswith(
            "__") and not d.getVarFlag(key, "func", False))
        for var in sorted(vars):
            # NOTE(review): the generator above already filters out "func"
            # variables, so `func` should always be falsy below; the checks
            # are kept for robustness.
            func = d.getVarFlag(var, "func", False)
            if d.getVarFlag(var, 'python', False) and func:
                continue
            export = d.getVarFlag(var, "export", False)
            unexport = d.getVarFlag(var, "unexport", False)
            if not export and not unexport and not func:
                continue
            if unexport:
                continue

            val = d.getVar(var)
            if val is None:
                continue
            if set(var) & set("-.{}+"):
                logger.warning(
                    "Warning: Found invalid character in variable name %s", str(var))
                continue
            varExpanded = d.expand(var)
            val = str(val)

            if not RecipeModified.is_valid_shell_variable(varExpanded):
                continue

            if func:
                code_line = "line: {0}, file: {1}\n".format(
                    d.getVarFlag(var, "lineno", False),
                    d.getVarFlag(var, "filename", False))
                val = val.rstrip('\n')
                logger.warning("Warning: exported shell function %s() is not exported (%s)" %
                               (varExpanded, code_line))
                continue

            if export:
                exported_vars[varExpanded] = val.strip()
                continue

        self.exported_vars = exported_vars

    def __init_cmake_preset_cache(self, d):
        """Get the arguments passed to cmake

        Replicate the cmake configure arguments with all details to
        share on build folder between bitbake and SDK.
        """
        site_file = os.path.join(self.workdir, 'site-file.cmake')
        if os.path.exists(site_file):
            # use the module logger instead of a bare print for consistency
            logger.warning("site-file.cmake is not supported")

        cache_vars = {}
        oecmake_args = d.getVar('OECMAKE_ARGS').split()
        extra_oecmake = d.getVar('EXTRA_OECMAKE').split()
        for param in sorted(oecmake_args + extra_oecmake):
            d_pref = "-D"
            if param.startswith(d_pref):
                param = param[len(d_pref):]
            else:
                logger.error("expected a -D")
            # NOTE(review): a parameter without '=' would raise IndexError
            # below; bitbake's cmake arguments always contain '=' — confirm.
            param_s = param.split('=', 1)
            param_nt = param_s[0].split(':', 1)

            def handle_undefined_variable(var):
                # Unexpanded ${...} references cannot be resolved here
                if var.startswith('${') and var.endswith('}'):
                    return ''
                else:
                    return var
            # Example: FOO=ON
            if len(param_nt) == 1:
                cache_vars[param_s[0]] = handle_undefined_variable(param_s[1])
            # Example: FOO:PATH=/tmp
            elif len(param_nt) == 2:
                cache_vars[param_nt[0]] = {
                    "type": param_nt[1],
                    "value": handle_undefined_variable(param_s[1]),
                }
            else:
                logger.error("cannot parse %s" % param)
        self.cmake_cache_vars = cache_vars

    def cmake_preset(self):
        """Create a preset for cmake that mimics how bitbake calls cmake"""
        toolchain_file = os.path.join(self.workdir, 'toolchain.cmake')
        cmake_executable = os.path.join(
            self.recipe_sysroot_native, 'usr', 'bin', 'cmake')
        self.cmd_compile = cmake_executable + " --build --preset " + self.recipe_id

        preset_dict_configure = {
            "name": self.recipe_id,
            "displayName": self.recipe_id_pretty,
            "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
            "binaryDir": self.b,
            "generator": self.oecmake_generator,
            "toolchainFile": toolchain_file,
            "cacheVariables": self.cmake_cache_vars,
            "environment": self.exported_vars,
            "cmakeExecutable": cmake_executable
        }

        preset_dict_build = {
            "name": self.recipe_id,
            "displayName": self.recipe_id_pretty,
            "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
            "configurePreset": self.recipe_id,
            "inheritConfigureEnvironment": True
        }

        preset_dict_test = {
            "name": self.recipe_id,
            "displayName": self.recipe_id_pretty,
            "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
            "configurePreset": self.recipe_id,
            "inheritConfigureEnvironment": True
        }

        preset_dict = {
            "version": 3,  # cmake 3.21, backward compatible with kirkstone
            "configurePresets": [preset_dict_configure],
            "buildPresets": [preset_dict_build],
            "testPresets": [preset_dict_test]
        }

        # Finally write the json file
        json_file = 'CMakeUserPresets.json'
        json_path = os.path.join(self.real_srctree, json_file)
        logger.info("Updating CMake preset: %s (%s)" % (json_file, json_path))
        if not os.path.exists(self.real_srctree):
            os.makedirs(self.real_srctree)
        try:
            with open(json_path) as f:
                orig_dict = json.load(f)
        except json.decoder.JSONDecodeError:
            logger.info(
                "Decoding %s failed. Probably because of comments in the json file" % json_path)
            orig_dict = {}
        except FileNotFoundError:
            orig_dict = {}

        # Add or update the presets for the recipe and keep other presets
        for k, v in preset_dict.items():
            if isinstance(v, list):
                update_preset = v[0]
                preset_added = False
                if k in orig_dict:
                    for index, orig_preset in enumerate(orig_dict[k]):
                        if 'name' in orig_preset:
                            if orig_preset['name'] == update_preset['name']:
                                logger.debug("Updating preset: %s" %
                                             orig_preset['name'])
                                orig_dict[k][index] = update_preset
                                preset_added = True
                                break
                            else:
                                logger.debug("keeping preset: %s" %
                                             orig_preset['name'])
                        else:
                            logger.warning("preset without a name found")
                if not preset_added:
                    if not k in orig_dict:
                        orig_dict[k] = []
                    orig_dict[k].append(update_preset)
                    logger.debug("Added preset: %s" %
                                 update_preset['name'])
            else:
                orig_dict[k] = v

        with open(json_path, 'w') as f:
            json.dump(orig_dict, f, indent=4)

    def gen_meson_wrapper(self):
        """Generate a wrapper script to call meson with the cross environment"""
        bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
        meson_wrapper = os.path.join(self.ide_sdk_scripts_dir, 'meson')
        meson_real = os.path.join(
            self.recipe_sysroot_native, 'usr', 'bin', 'meson.real')
        with open(meson_wrapper, 'w') as mwrap:
            mwrap.write("#!/bin/sh" + os.linesep)
            for var, val in self.exported_vars.items():
                mwrap.write('export %s="%s"' % (var, val) + os.linesep)
            # meson reads the cross file; the host toolchain variables
            # exported above would confuse it
            mwrap.write("unset CC CXX CPP LD AR NM STRIP" + os.linesep)
            private_temp = os.path.join(self.b, "meson-private", "tmp")
            mwrap.write('mkdir -p "%s"' % private_temp + os.linesep)
            mwrap.write('export TMPDIR="%s"' % private_temp + os.linesep)
            mwrap.write('exec "%s" "$@"' % meson_real + os.linesep)
        st = os.stat(meson_wrapper)
        os.chmod(meson_wrapper, st.st_mode | stat.S_IEXEC)
        self.meson_wrapper = meson_wrapper
        self.cmd_compile = meson_wrapper + " compile -C " + self.b

    def which(self, executable):
        """Locate executable on the recipe's PATH; raise DevtoolError if absent."""
        bin_path = shutil.which(executable, path=self.path)
        if not bin_path:
            raise DevtoolError(
                'Cannot find %s. Probably the recipe %s is not built yet.' % (executable, self.bpn))
        return bin_path

    @staticmethod
    def is_elf_file(file_path):
        """Return True if the file starts with the ELF magic bytes."""
        with open(file_path, "rb") as f:
            data = f.read(4)
        if data == b'\x7fELF':
            return True
        return False

    def find_installed_binaries(self):
        """find all executable elf files in the image directory"""
        binaries = []
        d_len = len(self.d)
        re_so = re.compile(r'.*\.so[.0-9]*$')
        for root, _, files in os.walk(self.d, followlinks=False):
            for file in files:
                abs_name = os.path.join(root, file)
                # Bug fix: islink must be checked on the full path; the bare
                # basename was resolved relative to the CWD, so symlinks in
                # the image tree were never actually skipped.
                if os.path.islink(abs_name):
                    continue
                if re_so.match(file):
                    continue
                if os.access(abs_name, os.X_OK) and RecipeModified.is_elf_file(abs_name):
                    # store the path relative to the image folder
                    binaries.append(abs_name[d_len:])
        return sorted(binaries)

    def gen_deploy_target_script(self, args):
        """Generate a script which does what devtool deploy-target does

        This script is much quicker than devtool target-deploy. Because it
        does not need to start a bitbake server. All information from tinfoil
        is hard-coded in the generated script.
        """
        cmd_lines = ['#!%s' % str(sys.executable)]
        cmd_lines.append('import sys')
        cmd_lines.append('devtool_sys_path = %s' % str(sys.path))
        cmd_lines.append('devtool_sys_path.reverse()')
        cmd_lines.append('for p in devtool_sys_path:')
        cmd_lines.append('    if p not in sys.path:')
        cmd_lines.append('        sys.path.insert(0, p)')
        cmd_lines.append('from devtool.deploy import deploy_no_d')
        args_filter = ['debug', 'dry_run', 'key', 'no_check_space', 'no_host_check',
                       'no_preserve', 'port', 'show_status', 'ssh_exec', 'strip', 'target']
        filtered_args_dict = {key: value for key, value in vars(
            args).items() if key in args_filter}
        cmd_lines.append('filtered_args_dict = %s' % str(filtered_args_dict))
        cmd_lines.append('class Dict2Class(object):')
        cmd_lines.append('    def __init__(self, my_dict):')
        cmd_lines.append('        for key in my_dict:')
        cmd_lines.append('            setattr(self, key, my_dict[key])')
        cmd_lines.append('filtered_args = Dict2Class(filtered_args_dict)')
        cmd_lines.append(
            'setattr(filtered_args, "recipename", "%s")' % self.bpn)
        cmd_lines.append('deploy_no_d("%s", "%s", "%s", "%s", "%s", "%s", %d, "%s", "%s", filtered_args)' %
                         (self.d, self.workdir, self.path, self.strip_cmd,
                          self.libdir, self.base_libdir, self.max_process,
                          self.fakerootcmd, self.fakerootenv))
        return self.write_script(cmd_lines, 'deploy_target')

    def gen_install_deploy_script(self, args):
        """Generate a script which does install and deploy"""
        cmd_lines = ['#!/bin/bash']

        # . oe-init-build-env $BUILDDIR
        # Note: Sourcing scripts with arguments requires bash
        cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % (
            self.oe_init_dir, self.oe_init_dir))
        cmd_lines.append('. "%s" "%s" || { echo ". %s %s failed"; exit 1; }' % (
            self.oe_init_build_env, self.topdir, self.oe_init_build_env, self.topdir))

        # bitbake -c install
        cmd_lines.append(
            'bitbake %s -c install --force || { echo "bitbake %s -c install --force failed"; exit 1; }' % (self.bpn, self.bpn))

        # Self contained devtool deploy-target
        cmd_lines.append(self.gen_deploy_target_script(args))

        return self.write_script(cmd_lines, 'install_and_deploy')

    def write_script(self, cmd_lines, script_name):
        """Write cmd_lines to an executable script in the ide-sdk scripts dir."""
        bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
        script_name_arch = script_name + '_' + self.recipe_id
        script_file = os.path.join(self.ide_sdk_scripts_dir, script_name_arch)
        with open(script_file, 'w') as script_f:
            script_f.write(os.linesep.join(cmd_lines))
        st = os.stat(script_file)
        os.chmod(script_file, st.st_mode | stat.S_IEXEC)
        return script_file

    @property
    def oe_init_build_env(self):
        """Find the oe-init-build-env used for this setup"""
        oe_init_dir = self.oe_init_dir
        if oe_init_dir:
            return os.path.join(oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
        return None

    @property
    def oe_init_dir(self):
        """Find the directory where the oe-init-build-env is located

        Assumption: There might be a layer with higher priority than poky
        which provides to oe-init-build-env in the layer's toplevel folder.
        """
        if not self.__oe_init_dir:
            for layer in reversed(self.bblayers):
                result = subprocess.run(
                    ['git', 'rev-parse', '--show-toplevel'], cwd=layer, capture_output=True)
                if result.returncode == 0:
                    oe_init_dir = result.stdout.decode('utf-8').strip()
                    oe_init_path = os.path.join(
                        oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
                    if os.path.exists(oe_init_path):
                        logger.debug("Using %s from: %s" % (
                            RecipeModified.OE_INIT_BUILD_ENV, oe_init_path))
                        self.__oe_init_dir = oe_init_dir
                        break
            if not self.__oe_init_dir:
                logger.error("Cannot find the bitbake top level folder")
        return self.__oe_init_dir
773 | |||
def ide_setup(args, config, basepath, workspace):
    """Generate the IDE configuration for the workspace"""

    # Explicitly passing some special recipes does not make sense
    for recipe in args.recipenames:
        if recipe in ['meta-ide-support', 'build-sysroots']:
            raise DevtoolError("Invalid recipe: %s." % recipe)

    # Collect information about tasks which need to be bitbaked
    bootstrap_tasks = []
    bootstrap_tasks_late = []
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        # define mode depending on recipes which need to be processed
        recipes_image_names = []
        recipes_modified_names = []
        recipes_other_names = []
        for recipe in args.recipenames:
            try:
                # a workspace recipe is one previously handled by devtool modify
                check_workspace_recipe(
                    workspace, recipe, bbclassextend=True)
                recipes_modified_names.append(recipe)
            except DevtoolError:
                recipe_d = parse_recipe(
                    config, tinfoil, recipe, appends=True, filter_workspace=False)
                if not recipe_d:
                    raise DevtoolError("Parsing recipe %s failed" % recipe)
                if bb.data.inherits_class('image', recipe_d):
                    recipes_image_names.append(recipe)
                else:
                    recipes_other_names.append(recipe)

        # Validate the recipe mix against the selected mode
        invalid_params = False
        if args.mode == DevtoolIdeMode.shared:
            if len(recipes_modified_names):
                logger.error("In shared sysroots mode modified recipes %s cannot be handled." % str(
                    recipes_modified_names))
                invalid_params = True
        if args.mode == DevtoolIdeMode.modified:
            if len(recipes_other_names):
                logger.error("Only in shared sysroots mode not modified recipes %s can be handled." % str(
                    recipes_other_names))
                invalid_params = True
            if len(recipes_image_names) != 1:
                logger.error(
                    "One image recipe is required as the rootfs for the remote development.")
                invalid_params = True
            for modified_recipe_name in recipes_modified_names:
                if modified_recipe_name.startswith('nativesdk-') or modified_recipe_name.endswith('-native'):
                    logger.error(
                        "Only cross compiled recipes are supported. %s is not cross." % modified_recipe_name)
                    invalid_params = True

        if invalid_params:
            raise DevtoolError("Invalid parameters are passed.")

        # For the shared sysroots mode, add all dependencies of all the images to the sysroots
        # For the modified mode provide one rootfs and the corresponding debug symbols via rootfs-dbg
        recipes_images = []
        for recipes_image_name in recipes_image_names:
            logger.info("Using image: %s" % recipes_image_name)
            recipe_image = RecipeImage(recipes_image_name)
            recipe_image.initialize(config, tinfoil)
            bootstrap_tasks += recipe_image.bootstrap_tasks
            recipes_images.append(recipe_image)

        # Provide a Direct SDK with shared sysroots
        recipes_not_modified = []
        if args.mode == DevtoolIdeMode.shared:
            ide_support = RecipeMetaIdeSupport()
            ide_support.initialize(config, tinfoil)
            bootstrap_tasks += ide_support.bootstrap_tasks

            logger.info("Adding %s to the Direct SDK sysroots." %
                        str(recipes_other_names))
            for recipe_name in recipes_other_names:
                recipe_not_modified = RecipeNotModified(recipe_name)
                bootstrap_tasks += recipe_not_modified.bootstrap_tasks
                recipes_not_modified.append(recipe_not_modified)

            # build-sysroots must run after all sysroots are populated
            build_sysroots = RecipeBuildSysroots()
            build_sysroots.initialize(config, tinfoil)
            bootstrap_tasks_late += build_sysroots.bootstrap_tasks
            shared_env = SharedSysrootsEnv()
            shared_env.initialize(ide_support, build_sysroots)

        recipes_modified = []
        if args.mode == DevtoolIdeMode.modified:
            logger.info("Setting up workspaces for modified recipe: %s" %
                        str(recipes_modified_names))
            # one cross-gdb recipe per target architecture is sufficient
            gdbs_cross = {}
            for recipe_name in recipes_modified_names:
                recipe_modified = RecipeModified(recipe_name)
                recipe_modified.initialize(config, workspace, tinfoil)
                bootstrap_tasks += recipe_modified.bootstrap_tasks
                recipes_modified.append(recipe_modified)

                if recipe_modified.target_arch not in gdbs_cross:
                    target_device = TargetDevice(args)
                    gdb_cross = RecipeGdbCross(
                        args, recipe_modified.target_arch, target_device)
                    gdb_cross.initialize(config, workspace, tinfoil)
                    bootstrap_tasks += gdb_cross.bootstrap_tasks
                    gdbs_cross[recipe_modified.target_arch] = gdb_cross
                recipe_modified.gdb_cross = gdbs_cross[recipe_modified.target_arch]

    finally:
        tinfoil.shutdown()

    if not args.skip_bitbake:
        bb_cmd = 'bitbake '
        if args.bitbake_k:
            bb_cmd += "-k "
        bb_cmd_early = bb_cmd + ' '.join(bootstrap_tasks)
        exec_build_env_command(
            config.init_path, basepath, bb_cmd_early, watch=True)
        if bootstrap_tasks_late:
            bb_cmd_late = bb_cmd + ' '.join(bootstrap_tasks_late)
            exec_build_env_command(
                config.init_path, basepath, bb_cmd_late, watch=True)

    for recipe_image in recipes_images:
        if (recipe_image.gdbserver_missing):
            logger.warning(
                "gdbserver not installed in image %s. Remote debugging will not be available" % recipe_image)

        if recipe_image.combine_dbg_image is False:
            logger.warning(
                'IMAGE_CLASSES += "image-combined-dbg" is missing for image %s. Remote debugging will not find debug symbols from rootfs-dbg.' % recipe_image)

    # Instantiate the active IDE plugin
    ide = ide_plugins[args.ide]()
    if args.mode == DevtoolIdeMode.shared:
        ide.setup_shared_sysroots(shared_env)
    elif args.mode == DevtoolIdeMode.modified:
        # modified mode guarantees exactly one image recipe, so the loop
        # variable recipe_image from above refers to it
        for recipe_modified in recipes_modified:
            if recipe_modified.build_tool is BuildTool.CMAKE:
                recipe_modified.cmake_preset()
            if recipe_modified.build_tool is BuildTool.MESON:
                recipe_modified.gen_meson_wrapper()
            ide.setup_modified_recipe(
                args, recipe_image, recipe_modified)

            if recipe_modified.debug_build != '1':
                logger.warning(
                    'Recipe %s is compiled with release build configuration. '
                    'You might want to add DEBUG_BUILD = "1" to %s. '
                    'Note that devtool modify --debug-build can do this automatically.',
                    recipe_modified.name, recipe_modified.bbappend)
    else:
        raise DevtoolError("Must not end up here.")
925 | |||
926 | |||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""

    # The ide-sdk command bootstraps the SDK from the bitbake environment before the IDE
    # configuration is generated. In the case of the eSDK, the bootstrapping is performed
    # during the installation of the eSDK installer. Running the ide-sdk plugin from an
    # eSDK installer-based setup would require skipping the bootstrapping and probably
    # taking some other differences into account when generating the IDE configurations.
    # This would be possible. But it is not implemented.
    if context.fixed_setup:
        return

    global ide_plugins

    # Search for IDE plugins in all sub-folders named ide_plugins where devtool searches for plugins.
    pluginpaths = [os.path.join(path, 'ide_plugins')
                   for path in context.pluginpaths]
    ide_plugin_modules = []
    for pluginpath in pluginpaths:
        scriptutils.load_plugins(logger, ide_plugin_modules, pluginpath)

    for ide_plugin_module in ide_plugin_modules:
        if hasattr(ide_plugin_module, 'register_ide_plugin'):
            ide_plugin_module.register_ide_plugin(ide_plugins)
    # Sort plugins according to their priority. The first entry is the default IDE plugin.
    ide_plugins = dict(sorted(ide_plugins.items(),
                              key=lambda p: p[1].ide_plugin_priority(), reverse=True))

    parser_ide_sdk = subparsers.add_parser('ide-sdk', group='working', order=50, formatter_class=RawTextHelpFormatter,
                                           help='Setup the SDK and configure the IDE')
    parser_ide_sdk.add_argument(
        'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n'
        'Depending on the --mode parameter different types of SDKs and IDE configurations are generated.')
    parser_ide_sdk.add_argument(
        '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified,
        help='Different SDK types are supported:\n'
        '- "' + DevtoolIdeMode.modified.name + '" (default):\n'
        ' devtool modify creates a workspace to work on the source code of a recipe.\n'
        ' devtool ide-sdk builds the SDK and generates the IDE configuration(s) in the workspace directorie(s)\n'
        ' Usage example:\n'
        ' devtool modify cmake-example\n'
        ' devtool ide-sdk cmake-example core-image-minimal\n'
        ' Start the IDE in the workspace folder\n'
        ' At least one devtool modified recipe plus one image recipe are required:\n'
        ' The image recipe is used to generate the target image and the remote debug configuration.\n'
        '- "' + DevtoolIdeMode.shared.name + '":\n'
        ' Usage example:\n'
        ' devtool ide-sdk -m ' + DevtoolIdeMode.shared.name + ' recipe(s)\n'
        ' This command generates a cross-toolchain as well as the corresponding shared sysroot directories.\n'
        ' To use this tool-chain the environment-* file found in the deploy..image folder needs to be sourced into a shell.\n'
        ' In case of VSCode and cmake the tool-chain is also exposed as a cmake-kit')
    # the priority-sorted dict makes the first plugin the default
    default_ide = list(ide_plugins.keys())[0]
    parser_ide_sdk.add_argument(
        '-i', '--ide', choices=ide_plugins.keys(), default=default_ide,
        help='Setup the configuration for this IDE (default: %s)' % default_ide)
    parser_ide_sdk.add_argument(
        '-t', '--target', default='root@192.168.7.2',
        help='Live target machine running an ssh server: user@hostname.')
    parser_ide_sdk.add_argument(
        '-G', '--gdbserver-port-start', default="1234", help='port where gdbserver is listening.')
    parser_ide_sdk.add_argument(
        '-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
    parser_ide_sdk.add_argument(
        '-e', '--ssh-exec', help='Executable to use in place of ssh')
    parser_ide_sdk.add_argument(
        '-P', '--port', help='Specify ssh port to use for connection to the target')
    parser_ide_sdk.add_argument(
        '-I', '--key', help='Specify ssh private key for connection to the target')
    parser_ide_sdk.add_argument(
        '--skip-bitbake', help='Generate IDE configuration but skip calling bitbake to update the SDK', action='store_true')
    parser_ide_sdk.add_argument(
        '-k', '--bitbake-k', help='Pass -k parameter to bitbake', action='store_true')
    parser_ide_sdk.add_argument(
        '--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')
    # Bug fix: the help text said "undeployed" (copied from the undeploy
    # command); ide-sdk only deploys.
    parser_ide_sdk.add_argument(
        '-n', '--dry-run', help='List files to be deployed only', action='store_true')
    parser_ide_sdk.add_argument(
        '-s', '--show-status', help='Show progress/status output', action='store_true')
    parser_ide_sdk.add_argument(
        '-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
    parser_ide_sdk.add_argument(
        '--no-check-space', help='Do not check for available space before deploying', action='store_true')
    parser_ide_sdk.set_defaults(func=ide_setup)
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py index 95384c5333..1054960551 100644 --- a/scripts/lib/devtool/menuconfig.py +++ b/scripts/lib/devtool/menuconfig.py | |||
@@ -3,6 +3,8 @@ | |||
3 | # Copyright (C) 2018 Xilinx | 3 | # Copyright (C) 2018 Xilinx |
4 | # Written by: Chandana Kalluri <ckalluri@xilinx.com> | 4 | # Written by: Chandana Kalluri <ckalluri@xilinx.com> |
5 | # | 5 | # |
6 | # SPDX-License-Identifier: MIT | ||
7 | # | ||
6 | # This program is free software; you can redistribute it and/or modify | 8 | # This program is free software; you can redistribute it and/or modify |
7 | # it under the terms of the GNU General Public License version 2 as | 9 | # it under the terms of the GNU General Public License version 2 as |
8 | # published by the Free Software Foundation. | 10 | # published by the Free Software Foundation. |
@@ -21,9 +23,6 @@ | |||
21 | import os | 23 | import os |
22 | import bb | 24 | import bb |
23 | import logging | 25 | import logging |
24 | import argparse | ||
25 | import re | ||
26 | import glob | ||
27 | from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command | 26 | from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command |
28 | from devtool import check_workspace_recipe | 27 | from devtool import check_workspace_recipe |
29 | logger = logging.getLogger('devtool') | 28 | logger = logging.getLogger('devtool') |
@@ -32,7 +31,6 @@ def menuconfig(args, config, basepath, workspace): | |||
32 | """Entry point for the devtool 'menuconfig' subcommand""" | 31 | """Entry point for the devtool 'menuconfig' subcommand""" |
33 | 32 | ||
34 | rd = "" | 33 | rd = "" |
35 | kconfigpath = "" | ||
36 | pn_src = "" | 34 | pn_src = "" |
37 | localfilesdir = "" | 35 | localfilesdir = "" |
38 | workspace_dir = "" | 36 | workspace_dir = "" |
@@ -43,13 +41,12 @@ def menuconfig(args, config, basepath, workspace): | |||
43 | return 1 | 41 | return 1 |
44 | 42 | ||
45 | check_workspace_recipe(workspace, args.component) | 43 | check_workspace_recipe(workspace, args.component) |
46 | pn = rd.getVar('PN', True) | 44 | pn = rd.getVar('PN') |
47 | 45 | ||
48 | if not rd.getVarFlag('do_menuconfig','task'): | 46 | if not rd.getVarFlag('do_menuconfig','task'): |
49 | raise DevtoolError("This recipe does not support menuconfig option") | 47 | raise DevtoolError("This recipe does not support menuconfig option") |
50 | 48 | ||
51 | workspace_dir = os.path.join(config.workspace_path,'sources') | 49 | workspace_dir = os.path.join(config.workspace_path,'sources') |
52 | kconfigpath = rd.getVar('B') | ||
53 | pn_src = os.path.join(workspace_dir,pn) | 50 | pn_src = os.path.join(workspace_dir,pn) |
54 | 51 | ||
55 | # add check to see if oe_local_files exists or not | 52 | # add check to see if oe_local_files exists or not |
@@ -68,7 +65,7 @@ def menuconfig(args, config, basepath, workspace): | |||
68 | logger.info('Launching menuconfig') | 65 | logger.info('Launching menuconfig') |
69 | exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True) | 66 | exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True) |
70 | fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg') | 67 | fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg') |
71 | res = standard._create_kconfig_diff(pn_src,rd,fragment) | 68 | standard._create_kconfig_diff(pn_src,rd,fragment) |
72 | 69 | ||
73 | return 0 | 70 | return 0 |
74 | 71 | ||
diff --git a/scripts/lib/devtool/sdk.py b/scripts/lib/devtool/sdk.py index 3aa42a1466..9aefd7e354 100644 --- a/scripts/lib/devtool/sdk.py +++ b/scripts/lib/devtool/sdk.py | |||
@@ -207,7 +207,7 @@ def sdk_update(args, config, basepath, workspace): | |||
207 | if not sstate_mirrors: | 207 | if not sstate_mirrors: |
208 | with open(os.path.join(conf_dir, 'site.conf'), 'a') as f: | 208 | with open(os.path.join(conf_dir, 'site.conf'), 'a') as f: |
209 | f.write('SCONF_VERSION = "%s"\n' % site_conf_version) | 209 | f.write('SCONF_VERSION = "%s"\n' % site_conf_version) |
210 | f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver) | 210 | f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH"\n' % updateserver) |
211 | finally: | 211 | finally: |
212 | shutil.rmtree(tmpsdk_dir) | 212 | shutil.rmtree(tmpsdk_dir) |
213 | 213 | ||
@@ -300,7 +300,8 @@ def sdk_install(args, config, basepath, workspace): | |||
300 | return 2 | 300 | return 2 |
301 | 301 | ||
302 | try: | 302 | try: |
303 | exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots', watch=True) | 303 | exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_native_sysroot', watch=True) |
304 | exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_target_sysroot', watch=True) | ||
304 | except bb.process.ExecutionError as e: | 305 | except bb.process.ExecutionError as e: |
305 | raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e))) | 306 | raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e))) |
306 | 307 | ||
diff --git a/scripts/lib/devtool/search.py b/scripts/lib/devtool/search.py index d24040df37..70b81cac5e 100644 --- a/scripts/lib/devtool/search.py +++ b/scripts/lib/devtool/search.py | |||
@@ -62,10 +62,11 @@ def search(args, config, basepath, workspace): | |||
62 | with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f: | 62 | with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f: |
63 | for line in f: | 63 | for line in f: |
64 | if ': ' in line: | 64 | if ': ' in line: |
65 | splitline = line.split(':', 1) | 65 | splitline = line.split(': ', 1) |
66 | key = splitline[0] | 66 | key = splitline[0] |
67 | value = splitline[1].strip() | 67 | value = splitline[1].strip() |
68 | if key in ['PKG_%s' % pkg, 'DESCRIPTION', 'FILES_INFO'] or key.startswith('FILERPROVIDES_'): | 68 | key = key.replace(":" + pkg, "") |
69 | if key in ['PKG', 'DESCRIPTION', 'FILES_INFO', 'FILERPROVIDES']: | ||
69 | if keyword_rc.search(value): | 70 | if keyword_rc.search(value): |
70 | match = True | 71 | match = True |
71 | break | 72 | break |
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py index 7b62b7e7b8..1fd5947c41 100644 --- a/scripts/lib/devtool/standard.py +++ b/scripts/lib/devtool/standard.py | |||
@@ -18,11 +18,13 @@ import argparse_oe | |||
18 | import scriptutils | 18 | import scriptutils |
19 | import errno | 19 | import errno |
20 | import glob | 20 | import glob |
21 | import filecmp | ||
22 | from collections import OrderedDict | 21 | from collections import OrderedDict |
22 | |||
23 | from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError | 23 | from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError |
24 | from devtool import parse_recipe | 24 | from devtool import parse_recipe |
25 | 25 | ||
26 | import bb.utils | ||
27 | |||
26 | logger = logging.getLogger('devtool') | 28 | logger = logging.getLogger('devtool') |
27 | 29 | ||
28 | override_branch_prefix = 'devtool-override-' | 30 | override_branch_prefix = 'devtool-override-' |
@@ -30,7 +32,8 @@ override_branch_prefix = 'devtool-override-' | |||
30 | 32 | ||
31 | def add(args, config, basepath, workspace): | 33 | def add(args, config, basepath, workspace): |
32 | """Entry point for the devtool 'add' subcommand""" | 34 | """Entry point for the devtool 'add' subcommand""" |
33 | import bb | 35 | import bb.data |
36 | import bb.process | ||
34 | import oe.recipeutils | 37 | import oe.recipeutils |
35 | 38 | ||
36 | if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri: | 39 | if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri: |
@@ -147,6 +150,8 @@ def add(args, config, basepath, workspace): | |||
147 | extracmdopts += ' -a' | 150 | extracmdopts += ' -a' |
148 | if args.npm_dev: | 151 | if args.npm_dev: |
149 | extracmdopts += ' --npm-dev' | 152 | extracmdopts += ' --npm-dev' |
153 | if args.no_pypi: | ||
154 | extracmdopts += ' --no-pypi' | ||
150 | if args.mirrors: | 155 | if args.mirrors: |
151 | extracmdopts += ' --mirrors' | 156 | extracmdopts += ' --mirrors' |
152 | if args.srcrev: | 157 | if args.srcrev: |
@@ -204,7 +209,7 @@ def add(args, config, basepath, workspace): | |||
204 | for fn in os.listdir(tempdir): | 209 | for fn in os.listdir(tempdir): |
205 | shutil.move(os.path.join(tempdir, fn), recipedir) | 210 | shutil.move(os.path.join(tempdir, fn), recipedir) |
206 | else: | 211 | else: |
207 | raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout)) | 212 | raise DevtoolError(f'Failed to create a recipe file for source {source}') |
208 | attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile)) | 213 | attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile)) |
209 | if os.path.exists(attic_recipe): | 214 | if os.path.exists(attic_recipe): |
210 | logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe) | 215 | logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe) |
@@ -234,10 +239,14 @@ def add(args, config, basepath, workspace): | |||
234 | if args.fetchuri and not args.no_git: | 239 | if args.fetchuri and not args.no_git: |
235 | setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data) | 240 | setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data) |
236 | 241 | ||
237 | initial_rev = None | 242 | initial_rev = {} |
238 | if os.path.exists(os.path.join(srctree, '.git')): | 243 | if os.path.exists(os.path.join(srctree, '.git')): |
239 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) | 244 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) |
240 | initial_rev = stdout.rstrip() | 245 | initial_rev["."] = stdout.rstrip() |
246 | (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse HEAD` $PWD\'', cwd=srctree) | ||
247 | for line in stdout.splitlines(): | ||
248 | (rev, submodule) = line.split() | ||
249 | initial_rev[os.path.relpath(submodule, srctree)] = rev | ||
241 | 250 | ||
242 | if args.src_subdir: | 251 | if args.src_subdir: |
243 | srctree = os.path.join(srctree, args.src_subdir) | 252 | srctree = os.path.join(srctree, args.src_subdir) |
@@ -251,16 +260,17 @@ def add(args, config, basepath, workspace): | |||
251 | if b_is_s: | 260 | if b_is_s: |
252 | f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree) | 261 | f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree) |
253 | if initial_rev: | 262 | if initial_rev: |
254 | f.write('\n# initial_rev: %s\n' % initial_rev) | 263 | for key, value in initial_rev.items(): |
264 | f.write('\n# initial_rev %s: %s\n' % (key, value)) | ||
255 | 265 | ||
256 | if args.binary: | 266 | if args.binary: |
257 | f.write('do_install_append() {\n') | 267 | f.write('do_install:append() {\n') |
258 | f.write(' rm -rf ${D}/.git\n') | 268 | f.write(' rm -rf ${D}/.git\n') |
259 | f.write(' rm -f ${D}/singletask.lock\n') | 269 | f.write(' rm -f ${D}/singletask.lock\n') |
260 | f.write('}\n') | 270 | f.write('}\n') |
261 | 271 | ||
262 | if bb.data.inherits_class('npm', rd): | 272 | if bb.data.inherits_class('npm', rd): |
263 | f.write('python do_configure_append() {\n') | 273 | f.write('python do_configure:append() {\n') |
264 | f.write(' pkgdir = d.getVar("NPM_PACKAGE")\n') | 274 | f.write(' pkgdir = d.getVar("NPM_PACKAGE")\n') |
265 | f.write(' lockfile = os.path.join(pkgdir, "singletask.lock")\n') | 275 | f.write(' lockfile = os.path.join(pkgdir, "singletask.lock")\n') |
266 | f.write(' bb.utils.remove(lockfile)\n') | 276 | f.write(' bb.utils.remove(lockfile)\n') |
@@ -298,6 +308,7 @@ def add(args, config, basepath, workspace): | |||
298 | 308 | ||
299 | def _check_compatible_recipe(pn, d): | 309 | def _check_compatible_recipe(pn, d): |
300 | """Check if the recipe is supported by devtool""" | 310 | """Check if the recipe is supported by devtool""" |
311 | import bb.data | ||
301 | if pn == 'perf': | 312 | if pn == 'perf': |
302 | raise DevtoolError("The perf recipe does not actually check out " | 313 | raise DevtoolError("The perf recipe does not actually check out " |
303 | "source and thus cannot be supported by this tool", | 314 | "source and thus cannot be supported by this tool", |
@@ -318,10 +329,6 @@ def _check_compatible_recipe(pn, d): | |||
318 | raise DevtoolError("The %s recipe is a packagegroup, and therefore is " | 329 | raise DevtoolError("The %s recipe is a packagegroup, and therefore is " |
319 | "not supported by this tool" % pn, 4) | 330 | "not supported by this tool" % pn, 4) |
320 | 331 | ||
321 | if bb.data.inherits_class('meta', d): | ||
322 | raise DevtoolError("The %s recipe is a meta-recipe, and therefore is " | ||
323 | "not supported by this tool" % pn, 4) | ||
324 | |||
325 | if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'): | 332 | if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'): |
326 | # Not an incompatibility error per se, so we don't pass the error code | 333 | # Not an incompatibility error per se, so we don't pass the error code |
327 | raise DevtoolError("externalsrc is currently enabled for the %s " | 334 | raise DevtoolError("externalsrc is currently enabled for the %s " |
@@ -357,7 +364,7 @@ def _move_file(src, dst, dry_run_outdir=None, base_outdir=None): | |||
357 | bb.utils.mkdirhier(dst_d) | 364 | bb.utils.mkdirhier(dst_d) |
358 | shutil.move(src, dst) | 365 | shutil.move(src, dst) |
359 | 366 | ||
360 | def _copy_file(src, dst, dry_run_outdir=None): | 367 | def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None): |
361 | """Copy a file. Creates all the directory components of destination path.""" | 368 | """Copy a file. Creates all the directory components of destination path.""" |
362 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' | 369 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' |
363 | logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix)) | 370 | logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix)) |
@@ -371,7 +378,7 @@ def _copy_file(src, dst, dry_run_outdir=None): | |||
371 | 378 | ||
372 | def _git_ls_tree(repodir, treeish='HEAD', recursive=False): | 379 | def _git_ls_tree(repodir, treeish='HEAD', recursive=False): |
373 | """List contents of a git treeish""" | 380 | """List contents of a git treeish""" |
374 | import bb | 381 | import bb.process |
375 | cmd = ['git', 'ls-tree', '-z', treeish] | 382 | cmd = ['git', 'ls-tree', '-z', treeish] |
376 | if recursive: | 383 | if recursive: |
377 | cmd.append('-r') | 384 | cmd.append('-r') |
@@ -384,6 +391,19 @@ def _git_ls_tree(repodir, treeish='HEAD', recursive=False): | |||
384 | ret[split[3]] = split[0:3] | 391 | ret[split[3]] = split[0:3] |
385 | return ret | 392 | return ret |
386 | 393 | ||
394 | def _git_modified(repodir): | ||
395 | """List the difference between HEAD and the index""" | ||
396 | import bb.process | ||
397 | cmd = ['git', 'status', '--porcelain'] | ||
398 | out, _ = bb.process.run(cmd, cwd=repodir) | ||
399 | ret = [] | ||
400 | if out: | ||
401 | for line in out.split("\n"): | ||
402 | if line and not line.startswith('??'): | ||
403 | ret.append(line[3:]) | ||
404 | return ret | ||
405 | |||
406 | |||
387 | def _git_exclude_path(srctree, path): | 407 | def _git_exclude_path(srctree, path): |
388 | """Return pathspec (list of paths) that excludes certain path""" | 408 | """Return pathspec (list of paths) that excludes certain path""" |
389 | # NOTE: "Filtering out" files/paths in this way is not entirely reliable - | 409 | # NOTE: "Filtering out" files/paths in this way is not entirely reliable - |
@@ -411,8 +431,6 @@ def _ls_tree(directory): | |||
411 | 431 | ||
412 | def extract(args, config, basepath, workspace): | 432 | def extract(args, config, basepath, workspace): |
413 | """Entry point for the devtool 'extract' subcommand""" | 433 | """Entry point for the devtool 'extract' subcommand""" |
414 | import bb | ||
415 | |||
416 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) | 434 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) |
417 | if not tinfoil: | 435 | if not tinfoil: |
418 | # Error already shown | 436 | # Error already shown |
@@ -435,8 +453,6 @@ def extract(args, config, basepath, workspace): | |||
435 | 453 | ||
436 | def sync(args, config, basepath, workspace): | 454 | def sync(args, config, basepath, workspace): |
437 | """Entry point for the devtool 'sync' subcommand""" | 455 | """Entry point for the devtool 'sync' subcommand""" |
438 | import bb | ||
439 | |||
440 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) | 456 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) |
441 | if not tinfoil: | 457 | if not tinfoil: |
442 | # Error already shown | 458 | # Error already shown |
@@ -457,41 +473,11 @@ def sync(args, config, basepath, workspace): | |||
457 | finally: | 473 | finally: |
458 | tinfoil.shutdown() | 474 | tinfoil.shutdown() |
459 | 475 | ||
460 | def symlink_oelocal_files_srctree(rd,srctree): | ||
461 | import oe.patch | ||
462 | if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')): | ||
463 | # If recipe extracts to ${WORKDIR}, symlink the files into the srctree | ||
464 | # (otherwise the recipe won't build as expected) | ||
465 | local_files_dir = os.path.join(srctree, 'oe-local-files') | ||
466 | addfiles = [] | ||
467 | for root, _, files in os.walk(local_files_dir): | ||
468 | relpth = os.path.relpath(root, local_files_dir) | ||
469 | if relpth != '.': | ||
470 | bb.utils.mkdirhier(os.path.join(srctree, relpth)) | ||
471 | for fn in files: | ||
472 | if fn == '.gitignore': | ||
473 | continue | ||
474 | destpth = os.path.join(srctree, relpth, fn) | ||
475 | if os.path.exists(destpth): | ||
476 | os.unlink(destpth) | ||
477 | if relpth != '.': | ||
478 | back_relpth = os.path.relpath(local_files_dir, root) | ||
479 | os.symlink('%s/oe-local-files/%s/%s' % (back_relpth, relpth, fn), destpth) | ||
480 | else: | ||
481 | os.symlink('oe-local-files/%s' % fn, destpth) | ||
482 | addfiles.append(os.path.join(relpth, fn)) | ||
483 | if addfiles: | ||
484 | bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree) | ||
485 | useroptions = [] | ||
486 | oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) | ||
487 | bb.process.run('git %s commit -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree) | ||
488 | |||
489 | |||
490 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): | 476 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): |
491 | """Extract sources of a recipe""" | 477 | """Extract sources of a recipe""" |
492 | import oe.recipeutils | ||
493 | import oe.patch | ||
494 | import oe.path | 478 | import oe.path |
479 | import bb.data | ||
480 | import bb.process | ||
495 | 481 | ||
496 | pn = d.getVar('PN') | 482 | pn = d.getVar('PN') |
497 | 483 | ||
@@ -523,8 +509,10 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
523 | history = d.varhistory.variable('SRC_URI') | 509 | history = d.varhistory.variable('SRC_URI') |
524 | for event in history: | 510 | for event in history: |
525 | if not 'flag' in event: | 511 | if not 'flag' in event: |
526 | if event['op'].startswith(('_append[', '_prepend[')): | 512 | if event['op'].startswith((':append[', ':prepend[')): |
527 | extra_overrides.append(event['op'].split('[')[1].split(']')[0]) | 513 | override = event['op'].split('[')[1].split(']')[0] |
514 | if not override.startswith('pn-'): | ||
515 | extra_overrides.append(override) | ||
528 | # We want to remove duplicate overrides. If a recipe had multiple | 516 | # We want to remove duplicate overrides. If a recipe had multiple |
529 | # SRC_URI_override += values it would cause mulitple instances of | 517 | # SRC_URI_override += values it would cause mulitple instances of |
530 | # overrides. This doesn't play nicely with things like creating a | 518 | # overrides. This doesn't play nicely with things like creating a |
@@ -554,6 +542,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
554 | tempbasedir = d.getVar('WORKDIR') | 542 | tempbasedir = d.getVar('WORKDIR') |
555 | bb.utils.mkdirhier(tempbasedir) | 543 | bb.utils.mkdirhier(tempbasedir) |
556 | tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir) | 544 | tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir) |
545 | appendbackup = None | ||
557 | try: | 546 | try: |
558 | tinfoil.logger.setLevel(logging.WARNING) | 547 | tinfoil.logger.setLevel(logging.WARNING) |
559 | 548 | ||
@@ -564,11 +553,13 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
564 | appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak') | 553 | appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak') |
565 | shutil.copyfile(appendfile, appendbackup) | 554 | shutil.copyfile(appendfile, appendbackup) |
566 | else: | 555 | else: |
567 | appendbackup = None | ||
568 | bb.utils.mkdirhier(os.path.dirname(appendfile)) | 556 | bb.utils.mkdirhier(os.path.dirname(appendfile)) |
569 | logger.debug('writing append file %s' % appendfile) | 557 | logger.debug('writing append file %s' % appendfile) |
570 | with open(appendfile, 'a') as f: | 558 | with open(appendfile, 'a') as f: |
571 | f.write('###--- _extract_source\n') | 559 | f.write('###--- _extract_source\n') |
560 | f.write('deltask do_recipe_qa\n') | ||
561 | f.write('deltask do_recipe_qa_setscene\n') | ||
562 | f.write('ERROR_QA:remove = "patch-fuzz"\n') | ||
572 | f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir) | 563 | f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir) |
573 | f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch) | 564 | f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch) |
574 | if not is_kernel_yocto: | 565 | if not is_kernel_yocto: |
@@ -586,6 +577,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
586 | preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps') | 577 | preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps') |
587 | with open(preservestampfile, 'w') as f: | 578 | with open(preservestampfile, 'w') as f: |
588 | f.write(d.getVar('STAMP')) | 579 | f.write(d.getVar('STAMP')) |
580 | tinfoil.modified_files() | ||
589 | try: | 581 | try: |
590 | if is_kernel_yocto: | 582 | if is_kernel_yocto: |
591 | # We need to generate the kernel config | 583 | # We need to generate the kernel config |
@@ -633,7 +625,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
633 | srcsubdir = f.read() | 625 | srcsubdir = f.read() |
634 | except FileNotFoundError as e: | 626 | except FileNotFoundError as e: |
635 | raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e)) | 627 | raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e)) |
636 | srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir')) | 628 | srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir', os.path.relpath(d.getVar('UNPACKDIR'), d.getVar('WORKDIR')))) |
637 | 629 | ||
638 | # Check if work-shared is empty, if yes | 630 | # Check if work-shared is empty, if yes |
639 | # find source and copy to work-shared | 631 | # find source and copy to work-shared |
@@ -648,39 +640,26 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
648 | 640 | ||
649 | if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer): | 641 | if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer): |
650 | shutil.rmtree(workshareddir) | 642 | shutil.rmtree(workshareddir) |
651 | oe.path.copyhardlinktree(srcsubdir,workshareddir) | 643 | oe.path.copyhardlinktree(srcsubdir, workshareddir) |
652 | elif not os.path.exists(workshareddir): | 644 | elif not os.path.exists(workshareddir): |
653 | oe.path.copyhardlinktree(srcsubdir,workshareddir) | 645 | oe.path.copyhardlinktree(srcsubdir, workshareddir) |
654 | |||
655 | tempdir_localdir = os.path.join(tempdir, 'oe-local-files') | ||
656 | srctree_localdir = os.path.join(srctree, 'oe-local-files') | ||
657 | 646 | ||
658 | if sync: | 647 | if sync: |
659 | bb.process.run('git fetch file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree) | 648 | try: |
660 | 649 | logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch)) | |
661 | # Move oe-local-files directory to srctree | 650 | bb.process.run('git branch -f ' + devbranch + '.bak', cwd=srctree) |
662 | # As the oe-local-files is not part of the constructed git tree, | 651 | |
663 | # remove them directly during the synchrounizating might surprise | 652 | # Use git fetch to update the source with the current recipe |
664 | # the users. Instead, we move it to oe-local-files.bak and remind | 653 | # To be able to update the currently checked out branch with |
665 | # user in the log message. | 654 | # possibly new history (no fast-forward) git needs to be told |
666 | if os.path.exists(srctree_localdir + '.bak'): | 655 | # that's ok |
667 | shutil.rmtree(srctree_localdir, srctree_localdir + '.bak') | 656 | logger.info('Syncing source files including patches to git branch: %s' % devbranch) |
668 | 657 | bb.process.run('git fetch --update-head-ok --force file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree) | |
669 | if os.path.exists(srctree_localdir): | 658 | except bb.process.ExecutionError as e: |
670 | logger.info('Backing up current local file directory %s' % srctree_localdir) | 659 | raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e)) |
671 | shutil.move(srctree_localdir, srctree_localdir + '.bak') | ||
672 | |||
673 | if os.path.exists(tempdir_localdir): | ||
674 | logger.info('Syncing local source files to srctree...') | ||
675 | shutil.copytree(tempdir_localdir, srctree_localdir) | ||
676 | else: | ||
677 | # Move oe-local-files directory to srctree | ||
678 | if os.path.exists(tempdir_localdir): | ||
679 | logger.info('Adding local source files to srctree...') | ||
680 | shutil.move(tempdir_localdir, srcsubdir) | ||
681 | 660 | ||
661 | else: | ||
682 | shutil.move(srcsubdir, srctree) | 662 | shutil.move(srcsubdir, srctree) |
683 | symlink_oelocal_files_srctree(d,srctree) | ||
684 | 663 | ||
685 | if is_kernel_yocto: | 664 | if is_kernel_yocto: |
686 | logger.info('Copying kernel config to srctree') | 665 | logger.info('Copying kernel config to srctree') |
@@ -699,8 +678,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
699 | 678 | ||
700 | def _add_md5(config, recipename, filename): | 679 | def _add_md5(config, recipename, filename): |
701 | """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace""" | 680 | """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace""" |
702 | import bb.utils | ||
703 | |||
704 | def addfile(fn): | 681 | def addfile(fn): |
705 | md5 = bb.utils.md5_file(fn) | 682 | md5 = bb.utils.md5_file(fn) |
706 | with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f: | 683 | with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f: |
@@ -719,7 +696,6 @@ def _add_md5(config, recipename, filename): | |||
719 | def _check_preserve(config, recipename): | 696 | def _check_preserve(config, recipename): |
720 | """Check if a file was manually changed and needs to be saved in 'attic' | 697 | """Check if a file was manually changed and needs to be saved in 'attic' |
721 | directory""" | 698 | directory""" |
722 | import bb.utils | ||
723 | origfile = os.path.join(config.workspace_path, '.devtool_md5') | 699 | origfile = os.path.join(config.workspace_path, '.devtool_md5') |
724 | newfile = os.path.join(config.workspace_path, '.devtool_md5_new') | 700 | newfile = os.path.join(config.workspace_path, '.devtool_md5_new') |
725 | preservepath = os.path.join(config.workspace_path, 'attic', recipename) | 701 | preservepath = os.path.join(config.workspace_path, 'attic', recipename) |
@@ -746,30 +722,40 @@ def _check_preserve(config, recipename): | |||
746 | os.remove(removefile) | 722 | os.remove(removefile) |
747 | else: | 723 | else: |
748 | tf.write(line) | 724 | tf.write(line) |
749 | os.rename(newfile, origfile) | 725 | bb.utils.rename(newfile, origfile) |
750 | 726 | ||
751 | def get_staging_kver(srcdir): | 727 | def get_staging_kver(srcdir): |
752 | # Kernel version from work-shared | 728 | # Kernel version from work-shared |
753 | kerver = [] | 729 | import itertools |
754 | staging_kerVer="" | 730 | try: |
755 | if os.path.exists(srcdir) and os.listdir(srcdir): | 731 | with open(os.path.join(srcdir, "Makefile")) as f: |
756 | with open(os.path.join(srcdir,"Makefile")) as f: | 732 | # Take VERSION, PATCHLEVEL, SUBLEVEL from lines 1, 2, 3 |
757 | version = [next(f) for x in range(5)][1:4] | 733 | return ".".join(line.rstrip().split('= ')[1] for line in itertools.islice(f, 1, 4)) |
758 | for word in version: | 734 | except FileNotFoundError: |
759 | kerver.append(word.split('= ')[1].split('\n')[0]) | 735 | return "" |
760 | staging_kerVer = ".".join(kerver) | ||
761 | return staging_kerVer | ||
762 | 736 | ||
763 | def get_staging_kbranch(srcdir): | 737 | def get_staging_kbranch(srcdir): |
738 | import bb.process | ||
764 | staging_kbranch = "" | 739 | staging_kbranch = "" |
765 | if os.path.exists(srcdir) and os.listdir(srcdir): | 740 | if os.path.exists(srcdir) and os.listdir(srcdir): |
766 | (branch, _) = bb.process.run('git branch | grep \* | cut -d \' \' -f2', cwd=srcdir) | 741 | (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir) |
767 | staging_kbranch = "".join(branch.split('\n')[0]) | 742 | staging_kbranch = "".join(branch.split('\n')[0]) |
768 | return staging_kbranch | 743 | return staging_kbranch |
769 | 744 | ||
745 | def get_real_srctree(srctree, s, unpackdir): | ||
746 | # Check that recipe isn't using a shared workdir | ||
747 | s = os.path.abspath(s) | ||
748 | unpackdir = os.path.abspath(unpackdir) | ||
749 | if s.startswith(unpackdir) and s != unpackdir and os.path.dirname(s) != unpackdir: | ||
750 | # Handle if S is set to a subdirectory of the source | ||
751 | srcsubdir = os.path.relpath(s, unpackdir).split(os.sep, 1)[1] | ||
752 | srctree = os.path.join(srctree, srcsubdir) | ||
753 | return srctree | ||
754 | |||
770 | def modify(args, config, basepath, workspace): | 755 | def modify(args, config, basepath, workspace): |
771 | """Entry point for the devtool 'modify' subcommand""" | 756 | """Entry point for the devtool 'modify' subcommand""" |
772 | import bb | 757 | import bb.data |
758 | import bb.process | ||
773 | import oe.recipeutils | 759 | import oe.recipeutils |
774 | import oe.patch | 760 | import oe.patch |
775 | import oe.path | 761 | import oe.path |
@@ -811,8 +797,8 @@ def modify(args, config, basepath, workspace): | |||
811 | 797 | ||
812 | _check_compatible_recipe(pn, rd) | 798 | _check_compatible_recipe(pn, rd) |
813 | 799 | ||
814 | initial_rev = None | 800 | initial_revs = {} |
815 | commits = [] | 801 | commits = {} |
816 | check_commits = False | 802 | check_commits = False |
817 | 803 | ||
818 | if bb.data.inherits_class('kernel-yocto', rd): | 804 | if bb.data.inherits_class('kernel-yocto', rd): |
@@ -824,36 +810,22 @@ def modify(args, config, basepath, workspace): | |||
824 | staging_kerVer = get_staging_kver(srcdir) | 810 | staging_kerVer = get_staging_kver(srcdir) |
825 | staging_kbranch = get_staging_kbranch(srcdir) | 811 | staging_kbranch = get_staging_kbranch(srcdir) |
826 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): | 812 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): |
827 | oe.path.copyhardlinktree(srcdir,srctree) | 813 | oe.path.copyhardlinktree(srcdir, srctree) |
828 | workdir = rd.getVar('WORKDIR') | 814 | unpackdir = rd.getVar('UNPACKDIR') |
829 | srcsubdir = rd.getVar('S') | 815 | srcsubdir = rd.getVar('S') |
830 | localfilesdir = os.path.join(srctree,'oe-local-files') | ||
831 | # Move local source files into separate subdir | ||
832 | recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)] | ||
833 | local_files = oe.recipeutils.get_recipe_local_files(rd) | ||
834 | 816 | ||
835 | for key in local_files.copy(): | 817 | # Add locally copied files to gitignore as we add back to the metadata directly |
836 | if key.endswith('scc'): | 818 | local_files = oe.recipeutils.get_recipe_local_files(rd) |
837 | sccfile = open(local_files[key], 'r') | ||
838 | for l in sccfile: | ||
839 | line = l.split() | ||
840 | if line and line[0] in ('kconf', 'patch'): | ||
841 | cfg = os.path.join(os.path.dirname(local_files[key]), line[-1]) | ||
842 | if not cfg in local_files.values(): | ||
843 | local_files[line[-1]] = cfg | ||
844 | shutil.copy2(cfg, workdir) | ||
845 | sccfile.close() | ||
846 | |||
847 | # Ignore local files with subdir={BP} | ||
848 | srcabspath = os.path.abspath(srcsubdir) | 819 | srcabspath = os.path.abspath(srcsubdir) |
849 | local_files = [fname for fname in local_files if os.path.exists(os.path.join(workdir, fname)) and (srcabspath == workdir or not os.path.join(workdir, fname).startswith(srcabspath + os.sep))] | 820 | local_files = [fname for fname in local_files if |
821 | os.path.exists(os.path.join(unpackdir, fname)) and | ||
822 | srcabspath == unpackdir] | ||
850 | if local_files: | 823 | if local_files: |
851 | for fname in local_files: | 824 | with open(os.path.join(srctree, '.gitignore'), 'a+') as f: |
852 | _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname)) | 825 | f.write('# Ignore local files, by default. Remove following lines' |
853 | with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f: | 826 | 'if you want to commit the directory to Git\n') |
854 | f.write('# Ignore local files, by default. Remove this file ''if you want to commit the directory to Git\n*\n') | 827 | for fname in local_files: |
855 | 828 | f.write('%s\n' % fname) | |
856 | symlink_oelocal_files_srctree(rd,srctree) | ||
857 | 829 | ||
858 | task = 'do_configure' | 830 | task = 'do_configure' |
859 | res = tinfoil.build_targets(pn, task, handle_events=True) | 831 | res = tinfoil.build_targets(pn, task, handle_events=True) |
@@ -861,22 +833,33 @@ def modify(args, config, basepath, workspace): | |||
861 | # Copy .config to workspace | 833 | # Copy .config to workspace |
862 | kconfpath = rd.getVar('B') | 834 | kconfpath = rd.getVar('B') |
863 | logger.info('Copying kernel config to workspace') | 835 | logger.info('Copying kernel config to workspace') |
864 | shutil.copy2(os.path.join(kconfpath, '.config'),srctree) | 836 | shutil.copy2(os.path.join(kconfpath, '.config'), srctree) |
865 | 837 | ||
866 | # Set this to true, we still need to get initial_rev | 838 | # Set this to true, we still need to get initial_rev |
867 | # by parsing the git repo | 839 | # by parsing the git repo |
868 | args.no_extract = True | 840 | args.no_extract = True |
869 | 841 | ||
870 | if not args.no_extract: | 842 | if not args.no_extract: |
871 | initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) | 843 | initial_revs["."], _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) |
872 | if not initial_rev: | 844 | if not initial_revs["."]: |
873 | return 1 | 845 | return 1 |
874 | logger.info('Source tree extracted to %s' % srctree) | 846 | logger.info('Source tree extracted to %s' % srctree) |
847 | |||
875 | if os.path.exists(os.path.join(srctree, '.git')): | 848 | if os.path.exists(os.path.join(srctree, '.git')): |
876 | # Get list of commits since this revision | 849 | # Get list of commits since this revision |
877 | (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=srctree) | 850 | (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree) |
878 | commits = stdout.split() | 851 | commits["."] = stdout.split() |
879 | check_commits = True | 852 | check_commits = True |
853 | try: | ||
854 | (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree) | ||
855 | except bb.process.ExecutionError: | ||
856 | stdout = "" | ||
857 | for line in stdout.splitlines(): | ||
858 | (rev, submodule_path) = line.split() | ||
859 | submodule = os.path.relpath(submodule_path, srctree) | ||
860 | initial_revs[submodule] = rev | ||
861 | (stdout, _) = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=submodule_path) | ||
862 | commits[submodule] = stdout.split() | ||
880 | else: | 863 | else: |
881 | if os.path.exists(os.path.join(srctree, '.git')): | 864 | if os.path.exists(os.path.join(srctree, '.git')): |
882 | # Check if it's a tree previously extracted by us. This is done | 865 | # Check if it's a tree previously extracted by us. This is done |
@@ -893,11 +876,11 @@ def modify(args, config, basepath, workspace): | |||
893 | for line in stdout.splitlines(): | 876 | for line in stdout.splitlines(): |
894 | if line.startswith('*'): | 877 | if line.startswith('*'): |
895 | (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree) | 878 | (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree) |
896 | initial_rev = stdout.rstrip() | 879 | initial_revs["."] = stdout.rstrip() |
897 | if not initial_rev: | 880 | if "." not in initial_revs: |
898 | # Otherwise, just grab the head revision | 881 | # Otherwise, just grab the head revision |
899 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) | 882 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) |
900 | initial_rev = stdout.rstrip() | 883 | initial_revs["."] = stdout.rstrip() |
901 | 884 | ||
902 | branch_patches = {} | 885 | branch_patches = {} |
903 | if check_commits: | 886 | if check_commits: |
@@ -914,67 +897,81 @@ def modify(args, config, basepath, workspace): | |||
914 | seen_patches = [] | 897 | seen_patches = [] |
915 | for branch in branches: | 898 | for branch in branches: |
916 | branch_patches[branch] = [] | 899 | branch_patches[branch] = [] |
917 | (stdout, _) = bb.process.run('git log devtool-base..%s' % branch, cwd=srctree) | 900 | (stdout, _) = bb.process.run('git rev-list devtool-base..%s' % branch, cwd=srctree) |
918 | for line in stdout.splitlines(): | 901 | for sha1 in stdout.splitlines(): |
919 | line = line.strip() | 902 | notes = oe.patch.GitApplyTree.getNotes(srctree, sha1.strip()) |
920 | if line.startswith(oe.patch.GitApplyTree.patch_line_prefix): | 903 | origpatch = notes.get(oe.patch.GitApplyTree.original_patch) |
921 | origpatch = line[len(oe.patch.GitApplyTree.patch_line_prefix):].split(':', 1)[-1].strip() | 904 | if origpatch and origpatch not in seen_patches: |
922 | if not origpatch in seen_patches: | 905 | seen_patches.append(origpatch) |
923 | seen_patches.append(origpatch) | 906 | branch_patches[branch].append(origpatch) |
924 | branch_patches[branch].append(origpatch) | ||
925 | 907 | ||
926 | # Need to grab this here in case the source is within a subdirectory | 908 | # Need to grab this here in case the source is within a subdirectory |
927 | srctreebase = srctree | 909 | srctreebase = srctree |
928 | 910 | srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR')) | |
929 | # Check that recipe isn't using a shared workdir | ||
930 | s = os.path.abspath(rd.getVar('S')) | ||
931 | workdir = os.path.abspath(rd.getVar('WORKDIR')) | ||
932 | if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir: | ||
933 | # Handle if S is set to a subdirectory of the source | ||
934 | srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1] | ||
935 | srctree = os.path.join(srctree, srcsubdir) | ||
936 | 911 | ||
937 | bb.utils.mkdirhier(os.path.dirname(appendfile)) | 912 | bb.utils.mkdirhier(os.path.dirname(appendfile)) |
938 | with open(appendfile, 'w') as f: | 913 | with open(appendfile, 'w') as f: |
939 | f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n') | 914 | # if not present, add type=git-dependency to the secondary sources |
915 | # (non local files) so they can be extracted correctly when building a recipe after | ||
916 | # doing a devtool modify on it | ||
917 | src_uri = rd.getVar('SRC_URI').split() | ||
918 | src_uri_append = [] | ||
919 | src_uri_remove = [] | ||
920 | |||
921 | # Assume first entry is main source extracted in ${S} so skip it | ||
922 | src_uri = src_uri[1::] | ||
923 | |||
924 | # Add "type=git-dependency" to all non local sources | ||
925 | for url in src_uri: | ||
926 | if not url.startswith('file://') and not 'type=' in url: | ||
927 | src_uri_remove.append(url) | ||
928 | src_uri_append.append('%s;type=git-dependency' % url) | ||
929 | |||
930 | if src_uri_remove: | ||
931 | f.write('SRC_URI:remove = "%s"\n' % ' '.join(src_uri_remove)) | ||
932 | f.write('SRC_URI:append = " %s"\n\n' % ' '.join(src_uri_append)) | ||
933 | |||
934 | f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n') | ||
940 | # Local files can be modified/tracked in separate subdir under srctree | 935 | # Local files can be modified/tracked in separate subdir under srctree |
941 | # Mostly useful for packages with S != WORKDIR | 936 | # Mostly useful for packages with S != WORKDIR |
942 | f.write('FILESPATH_prepend := "%s:"\n' % | 937 | f.write('FILESPATH:prepend := "%s:"\n' % |
943 | os.path.join(srctreebase, 'oe-local-files')) | 938 | os.path.join(srctreebase, 'oe-local-files')) |
944 | f.write('# srctreebase: %s\n' % srctreebase) | 939 | f.write('# srctreebase: %s\n' % srctreebase) |
945 | 940 | ||
946 | f.write('\ninherit externalsrc\n') | 941 | f.write('\ninherit externalsrc\n') |
947 | f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n') | 942 | f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n') |
948 | f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree)) | 943 | f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree)) |
949 | 944 | ||
950 | b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd) | 945 | b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd) |
951 | if b_is_s: | 946 | if b_is_s: |
952 | f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree)) | 947 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) |
953 | 948 | ||
954 | if bb.data.inherits_class('kernel', rd): | 949 | if bb.data.inherits_class('kernel', rd): |
955 | f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout ' | 950 | f.write('\ndo_kernel_configme:prepend() {\n' |
956 | 'do_fetch do_unpack do_kernel_configme do_kernel_configcheck"\n') | 951 | ' if [ -e ${S}/.config ]; then\n' |
957 | f.write('\ndo_patch[noexec] = "1"\n') | 952 | ' mv ${S}/.config ${S}/.config.old\n' |
958 | f.write('\ndo_configure_append() {\n' | 953 | ' fi\n' |
959 | ' cp ${B}/.config ${S}/.config.baseline\n' | ||
960 | ' ln -sfT ${B}/.config ${S}/.config.new\n' | ||
961 | '}\n') | 954 | '}\n') |
962 | if rd.getVarFlag('do_menuconfig','task'): | 955 | if rd.getVarFlag('do_menuconfig', 'task'): |
963 | f.write('\ndo_configure_append() {\n' | 956 | f.write('\ndo_configure:append() {\n' |
964 | ' if [ ! ${DEVTOOL_DISABLE_MENUCONFIG} ]; then\n' | 957 | ' if [ ${@oe.types.boolean(d.getVar("KCONFIG_CONFIG_ENABLE_MENUCONFIG"))} = True ]; then\n' |
965 | ' cp ${B}/.config ${S}/.config.baseline\n' | 958 | ' cp ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.baseline\n' |
966 | ' ln -sfT ${B}/.config ${S}/.config.new\n' | 959 | ' ln -sfT ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.new\n' |
967 | ' fi\n' | 960 | ' fi\n' |
968 | '}\n') | 961 | '}\n') |
969 | if initial_rev: | 962 | if initial_revs: |
970 | f.write('\n# initial_rev: %s\n' % initial_rev) | 963 | for name, rev in initial_revs.items(): |
971 | for commit in commits: | 964 | f.write('\n# initial_rev %s: %s\n' % (name, rev)) |
972 | f.write('# commit: %s\n' % commit) | 965 | if name in commits: |
966 | for commit in commits[name]: | ||
967 | f.write('# commit %s: %s\n' % (name, commit)) | ||
973 | if branch_patches: | 968 | if branch_patches: |
974 | for branch in branch_patches: | 969 | for branch in branch_patches: |
975 | if branch == args.branch: | 970 | if branch == args.branch: |
976 | continue | 971 | continue |
977 | f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch]))) | 972 | f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch]))) |
973 | if args.debug_build: | ||
974 | f.write('\nDEBUG_BUILD = "1"\n') | ||
978 | 975 | ||
979 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) | 976 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) |
980 | 977 | ||
@@ -1019,6 +1016,7 @@ def rename(args, config, basepath, workspace): | |||
1019 | origfnver = '' | 1016 | origfnver = '' |
1020 | 1017 | ||
1021 | recipefilemd5 = None | 1018 | recipefilemd5 = None |
1019 | newrecipefilemd5 = None | ||
1022 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) | 1020 | tinfoil = setup_tinfoil(basepath=basepath, tracking=True) |
1023 | try: | 1021 | try: |
1024 | rd = parse_recipe(config, tinfoil, args.recipename, True) | 1022 | rd = parse_recipe(config, tinfoil, args.recipename, True) |
@@ -1089,13 +1087,14 @@ def rename(args, config, basepath, workspace): | |||
1089 | 1087 | ||
1090 | # Rename bbappend | 1088 | # Rename bbappend |
1091 | logger.info('Renaming %s to %s' % (append, newappend)) | 1089 | logger.info('Renaming %s to %s' % (append, newappend)) |
1092 | os.rename(append, newappend) | 1090 | bb.utils.rename(append, newappend) |
1093 | # Rename recipe file | 1091 | # Rename recipe file |
1094 | logger.info('Renaming %s to %s' % (recipefile, newfile)) | 1092 | logger.info('Renaming %s to %s' % (recipefile, newfile)) |
1095 | os.rename(recipefile, newfile) | 1093 | bb.utils.rename(recipefile, newfile) |
1096 | 1094 | ||
1097 | # Rename source tree if it's the default path | 1095 | # Rename source tree if it's the default path |
1098 | appendmd5 = None | 1096 | appendmd5 = None |
1097 | newappendmd5 = None | ||
1099 | if not args.no_srctree: | 1098 | if not args.no_srctree: |
1100 | srctree = workspace[args.recipename]['srctree'] | 1099 | srctree = workspace[args.recipename]['srctree'] |
1101 | if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename): | 1100 | if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename): |
@@ -1184,7 +1183,7 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre | |||
1184 | """Get initial and update rev of a recipe. These are the start point of the | 1183 | """Get initial and update rev of a recipe. These are the start point of the |
1185 | whole patchset and start point for the patches to be re-generated/updated. | 1184 | whole patchset and start point for the patches to be re-generated/updated. |
1186 | """ | 1185 | """ |
1187 | import bb | 1186 | import bb.process |
1188 | 1187 | ||
1189 | # Get current branch | 1188 | # Get current branch |
1190 | stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', | 1189 | stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', |
@@ -1192,44 +1191,56 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre | |||
1192 | branchname = stdout.rstrip() | 1191 | branchname = stdout.rstrip() |
1193 | 1192 | ||
1194 | # Parse initial rev from recipe if not specified | 1193 | # Parse initial rev from recipe if not specified |
1195 | commits = [] | 1194 | commits = {} |
1196 | patches = [] | 1195 | patches = [] |
1196 | initial_revs = {} | ||
1197 | with open(recipe_path, 'r') as f: | 1197 | with open(recipe_path, 'r') as f: |
1198 | for line in f: | 1198 | for line in f: |
1199 | if line.startswith('# initial_rev:'): | 1199 | pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$' |
1200 | if not initial_rev: | 1200 | match = re.search(pattern, line) |
1201 | initial_rev = line.split(':')[-1].strip() | 1201 | if match: |
1202 | elif line.startswith('# commit:') and not force_patch_refresh: | 1202 | name = match.group(1) |
1203 | commits.append(line.split(':')[-1].strip()) | 1203 | rev = match.group(2) |
1204 | elif line.startswith('# patches_%s:' % branchname): | 1204 | if line.startswith('# initial_rev'): |
1205 | patches = line.split(':')[-1].strip().split(',') | 1205 | if not (name == "." and initial_rev): |
1206 | 1206 | initial_revs[name] = rev | |
1207 | update_rev = initial_rev | 1207 | elif line.startswith('# commit') and not force_patch_refresh: |
1208 | changed_revs = None | 1208 | if name not in commits: |
1209 | if initial_rev: | 1209 | commits[name] = [rev] |
1210 | else: | ||
1211 | commits[name].append(rev) | ||
1212 | elif line.startswith('# patches_%s:' % branchname): | ||
1213 | patches = line.split(':')[-1].strip().split(',') | ||
1214 | |||
1215 | update_revs = dict(initial_revs) | ||
1216 | changed_revs = {} | ||
1217 | for name, rev in initial_revs.items(): | ||
1210 | # Find first actually changed revision | 1218 | # Find first actually changed revision |
1211 | stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' % | 1219 | stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' % |
1212 | initial_rev, cwd=srctree) | 1220 | rev, cwd=os.path.join(srctree, name)) |
1213 | newcommits = stdout.split() | 1221 | newcommits = stdout.split() |
1214 | for i in range(min(len(commits), len(newcommits))): | 1222 | if name in commits: |
1215 | if newcommits[i] == commits[i]: | 1223 | for i in range(min(len(commits[name]), len(newcommits))): |
1216 | update_rev = commits[i] | 1224 | if newcommits[i] == commits[name][i]: |
1225 | update_revs[name] = commits[name][i] | ||
1217 | 1226 | ||
1218 | try: | 1227 | try: |
1219 | stdout, _ = bb.process.run('git cherry devtool-patched', | 1228 | stdout, _ = bb.process.run('git cherry devtool-patched', |
1220 | cwd=srctree) | 1229 | cwd=os.path.join(srctree, name)) |
1221 | except bb.process.ExecutionError as err: | 1230 | except bb.process.ExecutionError as err: |
1222 | stdout = None | 1231 | stdout = None |
1223 | 1232 | ||
1224 | if stdout is not None and not force_patch_refresh: | 1233 | if stdout is not None and not force_patch_refresh: |
1225 | changed_revs = [] | ||
1226 | for line in stdout.splitlines(): | 1234 | for line in stdout.splitlines(): |
1227 | if line.startswith('+ '): | 1235 | if line.startswith('+ '): |
1228 | rev = line.split()[1] | 1236 | rev = line.split()[1] |
1229 | if rev in newcommits: | 1237 | if rev in newcommits: |
1230 | changed_revs.append(rev) | 1238 | if name not in changed_revs: |
1239 | changed_revs[name] = [rev] | ||
1240 | else: | ||
1241 | changed_revs[name].append(rev) | ||
1231 | 1242 | ||
1232 | return initial_rev, update_rev, changed_revs, patches | 1243 | return initial_revs, update_revs, changed_revs, patches |
1233 | 1244 | ||
1234 | def _remove_file_entries(srcuri, filelist): | 1245 | def _remove_file_entries(srcuri, filelist): |
1235 | """Remove file:// entries from SRC_URI""" | 1246 | """Remove file:// entries from SRC_URI""" |
@@ -1284,17 +1295,21 @@ def _remove_source_files(append, files, destpath, no_report_remove=False, dry_ru | |||
1284 | raise | 1295 | raise |
1285 | 1296 | ||
1286 | 1297 | ||
1287 | def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None): | 1298 | def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None): |
1288 | """Export patches from srctree to given location. | 1299 | """Export patches from srctree to given location. |
1289 | Returns three-tuple of dicts: | 1300 | Returns three-tuple of dicts: |
1290 | 1. updated - patches that already exist in SRCURI | 1301 | 1. updated - patches that already exist in SRCURI |
1291 | 2. added - new patches that don't exist in SRCURI | 1302 | 2. added - new patches that don't exist in SRCURI |
1292 | 3 removed - patches that exist in SRCURI but not in exported patches | 1303 | 3 removed - patches that exist in SRCURI but not in exported patches |
1293 | In each dict the key is the 'basepath' of the URI and value is the | 1304 | In each dict the key is the 'basepath' of the URI and value is: |
1294 | absolute path to the existing file in recipe space (if any). | 1305 | - for updated and added dicts, a dict with 2 optionnal keys: |
1306 | - 'path': the absolute path to the existing file in recipe space (if any) | ||
1307 | - 'patchdir': the directory in wich the patch should be applied (if any) | ||
1308 | - for removed dict, the absolute path to the existing file in recipe space | ||
1295 | """ | 1309 | """ |
1296 | import oe.recipeutils | 1310 | import oe.recipeutils |
1297 | from oe.patch import GitApplyTree | 1311 | from oe.patch import GitApplyTree |
1312 | import bb.process | ||
1298 | updated = OrderedDict() | 1313 | updated = OrderedDict() |
1299 | added = OrderedDict() | 1314 | added = OrderedDict() |
1300 | seqpatch_re = re.compile('^([0-9]{4}-)?(.+)') | 1315 | seqpatch_re = re.compile('^([0-9]{4}-)?(.+)') |
@@ -1305,59 +1320,67 @@ def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None): | |||
1305 | 1320 | ||
1306 | # Generate patches from Git, exclude local files directory | 1321 | # Generate patches from Git, exclude local files directory |
1307 | patch_pathspec = _git_exclude_path(srctree, 'oe-local-files') | 1322 | patch_pathspec = _git_exclude_path(srctree, 'oe-local-files') |
1308 | GitApplyTree.extractPatches(srctree, start_rev, destdir, patch_pathspec) | 1323 | GitApplyTree.extractPatches(srctree, start_revs, destdir, patch_pathspec) |
1309 | 1324 | for dirpath, dirnames, filenames in os.walk(destdir): | |
1310 | new_patches = sorted(os.listdir(destdir)) | 1325 | new_patches = filenames |
1311 | for new_patch in new_patches: | 1326 | reldirpath = os.path.relpath(dirpath, destdir) |
1312 | # Strip numbering from patch names. If it's a git sequence named patch, | 1327 | for new_patch in new_patches: |
1313 | # the numbers might not match up since we are starting from a different | 1328 | # Strip numbering from patch names. If it's a git sequence named patch, |
1314 | # revision This does assume that people are using unique shortlog | 1329 | # the numbers might not match up since we are starting from a different |
1315 | # values, but they ought to be anyway... | 1330 | # revision This does assume that people are using unique shortlog |
1316 | new_basename = seqpatch_re.match(new_patch).group(2) | 1331 | # values, but they ought to be anyway... |
1317 | match_name = None | 1332 | new_basename = seqpatch_re.match(new_patch).group(2) |
1318 | for old_patch in existing_patches: | 1333 | match_name = None |
1319 | old_basename = seqpatch_re.match(old_patch).group(2) | 1334 | old_patch = None |
1320 | old_basename_splitext = os.path.splitext(old_basename) | 1335 | for old_patch in existing_patches: |
1321 | if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename: | 1336 | old_basename = seqpatch_re.match(old_patch).group(2) |
1322 | old_patch_noext = os.path.splitext(old_patch)[0] | 1337 | old_basename_splitext = os.path.splitext(old_basename) |
1323 | match_name = old_patch_noext | 1338 | if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename: |
1324 | break | 1339 | old_patch_noext = os.path.splitext(old_patch)[0] |
1325 | elif new_basename == old_basename: | 1340 | match_name = old_patch_noext |
1326 | match_name = old_patch | 1341 | break |
1327 | break | 1342 | elif new_basename == old_basename: |
1328 | if match_name: | 1343 | match_name = old_patch |
1329 | # Rename patch files | 1344 | break |
1330 | if new_patch != match_name: | 1345 | if match_name: |
1331 | os.rename(os.path.join(destdir, new_patch), | 1346 | # Rename patch files |
1332 | os.path.join(destdir, match_name)) | 1347 | if new_patch != match_name: |
1333 | # Need to pop it off the list now before checking changed_revs | 1348 | bb.utils.rename(os.path.join(destdir, new_patch), |
1334 | oldpath = existing_patches.pop(old_patch) | 1349 | os.path.join(destdir, match_name)) |
1335 | if changed_revs is not None: | 1350 | # Need to pop it off the list now before checking changed_revs |
1336 | # Avoid updating patches that have not actually changed | 1351 | oldpath = existing_patches.pop(old_patch) |
1337 | with open(os.path.join(destdir, match_name), 'r') as f: | 1352 | if changed_revs is not None and dirpath in changed_revs: |
1338 | firstlineitems = f.readline().split() | 1353 | # Avoid updating patches that have not actually changed |
1339 | # Looking for "From <hash>" line | 1354 | with open(os.path.join(dirpath, match_name), 'r') as f: |
1340 | if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40: | 1355 | firstlineitems = f.readline().split() |
1341 | if not firstlineitems[1] in changed_revs: | 1356 | # Looking for "From <hash>" line |
1342 | continue | 1357 | if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40: |
1343 | # Recompress if necessary | 1358 | if not firstlineitems[1] in changed_revs[dirpath]: |
1344 | if oldpath.endswith(('.gz', '.Z')): | 1359 | continue |
1345 | bb.process.run(['gzip', match_name], cwd=destdir) | 1360 | # Recompress if necessary |
1346 | if oldpath.endswith('.gz'): | 1361 | if oldpath.endswith(('.gz', '.Z')): |
1347 | match_name += '.gz' | 1362 | bb.process.run(['gzip', match_name], cwd=destdir) |
1348 | else: | 1363 | if oldpath.endswith('.gz'): |
1349 | match_name += '.Z' | 1364 | match_name += '.gz' |
1350 | elif oldpath.endswith('.bz2'): | 1365 | else: |
1351 | bb.process.run(['bzip2', match_name], cwd=destdir) | 1366 | match_name += '.Z' |
1352 | match_name += '.bz2' | 1367 | elif oldpath.endswith('.bz2'): |
1353 | updated[match_name] = oldpath | 1368 | bb.process.run(['bzip2', match_name], cwd=destdir) |
1354 | else: | 1369 | match_name += '.bz2' |
1355 | added[new_patch] = None | 1370 | updated[match_name] = {'path' : oldpath} |
1371 | if reldirpath != ".": | ||
1372 | updated[match_name]['patchdir'] = reldirpath | ||
1373 | else: | ||
1374 | added[new_patch] = {} | ||
1375 | if reldirpath != ".": | ||
1376 | added[new_patch]['patchdir'] = reldirpath | ||
1377 | |||
1356 | return (updated, added, existing_patches) | 1378 | return (updated, added, existing_patches) |
1357 | 1379 | ||
1358 | 1380 | ||
1359 | def _create_kconfig_diff(srctree, rd, outfile): | 1381 | def _create_kconfig_diff(srctree, rd, outfile): |
1360 | """Create a kconfig fragment""" | 1382 | """Create a kconfig fragment""" |
1383 | import bb.process | ||
1361 | # Only update config fragment if both config files exist | 1384 | # Only update config fragment if both config files exist |
1362 | orig_config = os.path.join(srctree, '.config.baseline') | 1385 | orig_config = os.path.join(srctree, '.config.baseline') |
1363 | new_config = os.path.join(srctree, '.config.new') | 1386 | new_config = os.path.join(srctree, '.config.new') |
@@ -1389,38 +1412,59 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1389 | 1. updated - files that already exist in SRCURI | 1412 | 1. updated - files that already exist in SRCURI |
1390 | 2. added - new files that don't exist in SRCURI | 1413 | 2. added - new files that don't exist in SRCURI |
1391 | 3 removed - files that exist in SRCURI but not in exported files | 1414 | 3 removed - files that exist in SRCURI but not in exported files |
1392 | In each dict the key is the 'basepath' of the URI and value is the | 1415 | In each dict the key is the 'basepath' of the URI and value is: |
1393 | absolute path to the existing file in recipe space (if any). | 1416 | - for updated and added dicts, a dict with 1 optionnal key: |
1417 | - 'path': the absolute path to the existing file in recipe space (if any) | ||
1418 | - for removed dict, the absolute path to the existing file in recipe space | ||
1394 | """ | 1419 | """ |
1395 | import oe.recipeutils | 1420 | import oe.recipeutils |
1421 | import bb.data | ||
1422 | import bb.process | ||
1396 | 1423 | ||
1397 | # Find out local files (SRC_URI files that exist in the "recipe space"). | 1424 | # Find out local files (SRC_URI files that exist in the "recipe space"). |
1398 | # Local files that reside in srctree are not included in patch generation. | 1425 | # Local files that reside in srctree are not included in patch generation. |
1399 | # Instead they are directly copied over the original source files (in | 1426 | # Instead they are directly copied over the original source files (in |
1400 | # recipe space). | 1427 | # recipe space). |
1401 | existing_files = oe.recipeutils.get_recipe_local_files(rd) | 1428 | existing_files = oe.recipeutils.get_recipe_local_files(rd) |
1429 | |||
1402 | new_set = None | 1430 | new_set = None |
1403 | updated = OrderedDict() | 1431 | updated = OrderedDict() |
1404 | added = OrderedDict() | 1432 | added = OrderedDict() |
1405 | removed = OrderedDict() | 1433 | removed = OrderedDict() |
1406 | local_files_dir = os.path.join(srctreebase, 'oe-local-files') | 1434 | |
1407 | git_files = _git_ls_tree(srctree) | 1435 | # Get current branch and return early with empty lists |
1408 | if 'oe-local-files' in git_files: | 1436 | # if on one of the override branches |
1409 | # If tracked by Git, take the files from srctree HEAD. First get | 1437 | # (local files are provided only for the main branch and processing |
1410 | # the tree object of the directory | 1438 | # them against lists from recipe overrides will result in mismatches |
1411 | tmp_index = os.path.join(srctree, '.git', 'index.tmp.devtool') | 1439 | # and broken modifications to recipes). |
1412 | tree = git_files['oe-local-files'][2] | 1440 | stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', |
1413 | bb.process.run(['git', 'checkout', tree, '--', '.'], cwd=srctree, | 1441 | cwd=srctree) |
1414 | env=dict(os.environ, GIT_WORK_TREE=destdir, | 1442 | branchname = stdout.rstrip() |
1415 | GIT_INDEX_FILE=tmp_index)) | 1443 | if branchname.startswith(override_branch_prefix): |
1416 | new_set = list(_git_ls_tree(srctree, tree, True).keys()) | 1444 | return (updated, added, removed) |
1417 | elif os.path.isdir(local_files_dir): | 1445 | |
1418 | # If not tracked by Git, just copy from working copy | 1446 | files = _git_modified(srctree) |
1419 | new_set = _ls_tree(local_files_dir) | 1447 | #if not files: |
1420 | bb.process.run(['cp', '-ax', | 1448 | # files = _ls_tree(srctree) |
1421 | os.path.join(local_files_dir, '.'), destdir]) | 1449 | for f in files: |
1422 | else: | 1450 | fullfile = os.path.join(srctree, f) |
1423 | new_set = [] | 1451 | if os.path.exists(os.path.join(fullfile, ".git")): |
1452 | # submodules handled elsewhere | ||
1453 | continue | ||
1454 | if f not in existing_files: | ||
1455 | added[f] = {} | ||
1456 | if os.path.isdir(os.path.join(srctree, f)): | ||
1457 | shutil.copytree(fullfile, os.path.join(destdir, f)) | ||
1458 | else: | ||
1459 | shutil.copy2(fullfile, os.path.join(destdir, f)) | ||
1460 | elif not os.path.exists(fullfile): | ||
1461 | removed[f] = existing_files[f] | ||
1462 | elif f in existing_files: | ||
1463 | updated[f] = {'path' : existing_files[f]} | ||
1464 | if os.path.isdir(os.path.join(srctree, f)): | ||
1465 | shutil.copytree(fullfile, os.path.join(destdir, f)) | ||
1466 | else: | ||
1467 | shutil.copy2(fullfile, os.path.join(destdir, f)) | ||
1424 | 1468 | ||
1425 | # Special handling for kernel config | 1469 | # Special handling for kernel config |
1426 | if bb.data.inherits_class('kernel-yocto', rd): | 1470 | if bb.data.inherits_class('kernel-yocto', rd): |
@@ -1428,17 +1472,14 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1428 | fragment_path = os.path.join(destdir, fragment_fn) | 1472 | fragment_path = os.path.join(destdir, fragment_fn) |
1429 | if _create_kconfig_diff(srctree, rd, fragment_path): | 1473 | if _create_kconfig_diff(srctree, rd, fragment_path): |
1430 | if os.path.exists(fragment_path): | 1474 | if os.path.exists(fragment_path): |
1431 | if fragment_fn not in new_set: | 1475 | if fragment_fn in removed: |
1432 | new_set.append(fragment_fn) | 1476 | del removed[fragment_fn] |
1433 | # Copy fragment to local-files | 1477 | if fragment_fn not in updated and fragment_fn not in added: |
1434 | if os.path.isdir(local_files_dir): | 1478 | added[fragment_fn] = {} |
1435 | shutil.copy2(fragment_path, local_files_dir) | ||
1436 | else: | 1479 | else: |
1437 | if fragment_fn in new_set: | 1480 | if fragment_fn in updated: |
1438 | new_set.remove(fragment_fn) | 1481 | removed[fragment_fn] = updated[fragment_fn] |
1439 | # Remove fragment from local-files | 1482 | del updated[fragment_fn] |
1440 | if os.path.exists(os.path.join(local_files_dir, fragment_fn)): | ||
1441 | os.unlink(os.path.join(local_files_dir, fragment_fn)) | ||
1442 | 1483 | ||
1443 | # Special handling for cml1, ccmake, etc bbclasses that generated | 1484 | # Special handling for cml1, ccmake, etc bbclasses that generated |
1444 | # configuration fragment files that are consumed as source files | 1485 | # configuration fragment files that are consumed as source files |
@@ -1446,42 +1487,13 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1446 | if bb.data.inherits_class(frag_class, rd): | 1487 | if bb.data.inherits_class(frag_class, rd): |
1447 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) | 1488 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) |
1448 | if os.path.exists(srcpath): | 1489 | if os.path.exists(srcpath): |
1449 | if frag_name not in new_set: | 1490 | if frag_name in removed: |
1450 | new_set.append(frag_name) | 1491 | del removed[frag_name] |
1492 | if frag_name not in updated: | ||
1493 | added[frag_name] = {} | ||
1451 | # copy fragment into destdir | 1494 | # copy fragment into destdir |
1452 | shutil.copy2(srcpath, destdir) | 1495 | shutil.copy2(srcpath, destdir) |
1453 | # copy fragment into local files if exists | 1496 | |
1454 | if os.path.isdir(local_files_dir): | ||
1455 | shutil.copy2(srcpath, local_files_dir) | ||
1456 | |||
1457 | if new_set is not None: | ||
1458 | for fname in new_set: | ||
1459 | if fname in existing_files: | ||
1460 | origpath = existing_files.pop(fname) | ||
1461 | workpath = os.path.join(local_files_dir, fname) | ||
1462 | if not filecmp.cmp(origpath, workpath): | ||
1463 | updated[fname] = origpath | ||
1464 | elif fname != '.gitignore': | ||
1465 | added[fname] = None | ||
1466 | |||
1467 | workdir = rd.getVar('WORKDIR') | ||
1468 | s = rd.getVar('S') | ||
1469 | if not s.endswith(os.sep): | ||
1470 | s += os.sep | ||
1471 | |||
1472 | if workdir != s: | ||
1473 | # Handle files where subdir= was specified | ||
1474 | for fname in list(existing_files.keys()): | ||
1475 | # FIXME handle both subdir starting with BP and not? | ||
1476 | fworkpath = os.path.join(workdir, fname) | ||
1477 | if fworkpath.startswith(s): | ||
1478 | fpath = os.path.join(srctree, os.path.relpath(fworkpath, s)) | ||
1479 | if os.path.exists(fpath): | ||
1480 | origpath = existing_files.pop(fname) | ||
1481 | if not filecmp.cmp(origpath, fpath): | ||
1482 | updated[fpath] = origpath | ||
1483 | |||
1484 | removed = existing_files | ||
1485 | return (updated, added, removed) | 1497 | return (updated, added, removed) |
1486 | 1498 | ||
1487 | 1499 | ||
@@ -1499,7 +1511,7 @@ def _determine_files_dir(rd): | |||
1499 | 1511 | ||
1500 | def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None): | 1512 | def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None): |
1501 | """Implement the 'srcrev' mode of update-recipe""" | 1513 | """Implement the 'srcrev' mode of update-recipe""" |
1502 | import bb | 1514 | import bb.process |
1503 | import oe.recipeutils | 1515 | import oe.recipeutils |
1504 | 1516 | ||
1505 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' | 1517 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' |
@@ -1508,6 +1520,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1508 | recipedir = os.path.basename(recipefile) | 1520 | recipedir = os.path.basename(recipefile) |
1509 | logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix)) | 1521 | logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix)) |
1510 | 1522 | ||
1523 | # Get original SRCREV | ||
1524 | old_srcrev = rd.getVar('SRCREV') or '' | ||
1525 | if old_srcrev == "INVALID": | ||
1526 | raise DevtoolError('Update mode srcrev is only valid for recipe fetched from an SCM repository') | ||
1527 | old_srcrev = {'.': old_srcrev} | ||
1528 | |||
1511 | # Get HEAD revision | 1529 | # Get HEAD revision |
1512 | try: | 1530 | try: |
1513 | stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree) | 1531 | stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree) |
@@ -1531,16 +1549,16 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1531 | local_files_dir = tempfile.mkdtemp(dir=tempdir) | 1549 | local_files_dir = tempfile.mkdtemp(dir=tempdir) |
1532 | srctreebase = workspace[recipename]['srctreebase'] | 1550 | srctreebase = workspace[recipename]['srctreebase'] |
1533 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) | 1551 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) |
1552 | removedentries = {} | ||
1534 | if not no_remove: | 1553 | if not no_remove: |
1535 | # Find list of existing patches in recipe file | 1554 | # Find list of existing patches in recipe file |
1536 | patches_dir = tempfile.mkdtemp(dir=tempdir) | 1555 | patches_dir = tempfile.mkdtemp(dir=tempdir) |
1537 | old_srcrev = rd.getVar('SRCREV') or '' | ||
1538 | upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev, | 1556 | upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev, |
1539 | patches_dir) | 1557 | patches_dir) |
1540 | logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p))) | 1558 | logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p))) |
1541 | 1559 | ||
1542 | # Remove deleted local files and "overlapping" patches | 1560 | # Remove deleted local files and "overlapping" patches |
1543 | remove_files = list(del_f.values()) + list(upd_p.values()) + list(del_p.values()) | 1561 | remove_files = list(del_f.values()) + [value["path"] for value in upd_p.values() if "path" in value] + [value["path"] for value in del_p.values() if "path" in value] |
1544 | if remove_files: | 1562 | if remove_files: |
1545 | removedentries = _remove_file_entries(srcuri, remove_files)[0] | 1563 | removedentries = _remove_file_entries(srcuri, remove_files)[0] |
1546 | update_srcuri = True | 1564 | update_srcuri = True |
@@ -1554,14 +1572,14 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1554 | patchfields['SRC_URI'] = '\\\n '.join(srcuri) | 1572 | patchfields['SRC_URI'] = '\\\n '.join(srcuri) |
1555 | if dry_run_outdir: | 1573 | if dry_run_outdir: |
1556 | logger.info('Creating bbappend (dry-run)') | 1574 | logger.info('Creating bbappend (dry-run)') |
1557 | else: | 1575 | appendfile, destpath = oe.recipeutils.bbappend_recipe( |
1558 | appendfile, destpath = oe.recipeutils.bbappend_recipe( | 1576 | rd, appendlayerdir, files, wildcardver=wildcard_version, |
1559 | rd, appendlayerdir, files, wildcardver=wildcard_version, | 1577 | extralines=patchfields, removevalues=removevalues, |
1560 | extralines=patchfields, removevalues=removevalues, | 1578 | redirect_output=dry_run_outdir) |
1561 | redirect_output=dry_run_outdir) | ||
1562 | else: | 1579 | else: |
1563 | files_dir = _determine_files_dir(rd) | 1580 | files_dir = _determine_files_dir(rd) |
1564 | for basepath, path in upd_f.items(): | 1581 | for basepath, param in upd_f.items(): |
1582 | path = param['path'] | ||
1565 | logger.info('Updating file %s%s' % (basepath, dry_run_suffix)) | 1583 | logger.info('Updating file %s%s' % (basepath, dry_run_suffix)) |
1566 | if os.path.isabs(basepath): | 1584 | if os.path.isabs(basepath): |
1567 | # Original file (probably with subdir pointing inside source tree) | 1585 | # Original file (probably with subdir pointing inside source tree) |
@@ -1571,7 +1589,8 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1571 | _move_file(os.path.join(local_files_dir, basepath), path, | 1589 | _move_file(os.path.join(local_files_dir, basepath), path, |
1572 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1590 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1573 | update_srcuri= True | 1591 | update_srcuri= True |
1574 | for basepath, path in new_f.items(): | 1592 | for basepath, param in new_f.items(): |
1593 | path = param['path'] | ||
1575 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) | 1594 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) |
1576 | _move_file(os.path.join(local_files_dir, basepath), | 1595 | _move_file(os.path.join(local_files_dir, basepath), |
1577 | os.path.join(files_dir, basepath), | 1596 | os.path.join(files_dir, basepath), |
@@ -1594,7 +1613,6 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1594 | 1613 | ||
1595 | def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False): | 1614 | def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False): |
1596 | """Implement the 'patch' mode of update-recipe""" | 1615 | """Implement the 'patch' mode of update-recipe""" |
1597 | import bb | ||
1598 | import oe.recipeutils | 1616 | import oe.recipeutils |
1599 | 1617 | ||
1600 | recipefile = rd.getVar('FILE') | 1618 | recipefile = rd.getVar('FILE') |
@@ -1603,9 +1621,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1603 | if not os.path.exists(append): | 1621 | if not os.path.exists(append): |
1604 | raise DevtoolError('unable to find workspace bbappend for recipe %s' % | 1622 | raise DevtoolError('unable to find workspace bbappend for recipe %s' % |
1605 | recipename) | 1623 | recipename) |
1624 | srctreebase = workspace[recipename]['srctreebase'] | ||
1625 | relpatchdir = os.path.relpath(srctreebase, srctree) | ||
1626 | if relpatchdir == '.': | ||
1627 | patchdir_params = {} | ||
1628 | else: | ||
1629 | patchdir_params = {'patchdir': relpatchdir} | ||
1630 | |||
1631 | def srcuri_entry(basepath, patchdir_params): | ||
1632 | if patchdir_params: | ||
1633 | paramstr = ';' + ';'.join('%s=%s' % (k,v) for k,v in patchdir_params.items()) | ||
1634 | else: | ||
1635 | paramstr = '' | ||
1636 | return 'file://%s%s' % (basepath, paramstr) | ||
1606 | 1637 | ||
1607 | initial_rev, update_rev, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh) | 1638 | initial_revs, update_revs, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh) |
1608 | if not initial_rev: | 1639 | if not initial_revs: |
1609 | raise DevtoolError('Unable to find initial revision - please specify ' | 1640 | raise DevtoolError('Unable to find initial revision - please specify ' |
1610 | 'it with --initial-rev') | 1641 | 'it with --initial-rev') |
1611 | 1642 | ||
@@ -1619,61 +1650,69 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1619 | tempdir = tempfile.mkdtemp(prefix='devtool') | 1650 | tempdir = tempfile.mkdtemp(prefix='devtool') |
1620 | try: | 1651 | try: |
1621 | local_files_dir = tempfile.mkdtemp(dir=tempdir) | 1652 | local_files_dir = tempfile.mkdtemp(dir=tempdir) |
1622 | if filter_patches: | 1653 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) |
1623 | upd_f = {} | ||
1624 | new_f = {} | ||
1625 | del_f = {} | ||
1626 | else: | ||
1627 | srctreebase = workspace[recipename]['srctreebase'] | ||
1628 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) | ||
1629 | |||
1630 | remove_files = [] | ||
1631 | if not no_remove: | ||
1632 | # Get all patches from source tree and check if any should be removed | ||
1633 | all_patches_dir = tempfile.mkdtemp(dir=tempdir) | ||
1634 | _, _, del_p = _export_patches(srctree, rd, initial_rev, | ||
1635 | all_patches_dir) | ||
1636 | # Remove deleted local files and patches | ||
1637 | remove_files = list(del_f.values()) + list(del_p.values()) | ||
1638 | 1654 | ||
1639 | # Get updated patches from source tree | 1655 | # Get updated patches from source tree |
1640 | patches_dir = tempfile.mkdtemp(dir=tempdir) | 1656 | patches_dir = tempfile.mkdtemp(dir=tempdir) |
1641 | upd_p, new_p, _ = _export_patches(srctree, rd, update_rev, | 1657 | upd_p, new_p, _ = _export_patches(srctree, rd, update_revs, |
1642 | patches_dir, changed_revs) | 1658 | patches_dir, changed_revs) |
1659 | # Get all patches from source tree and check if any should be removed | ||
1660 | all_patches_dir = tempfile.mkdtemp(dir=tempdir) | ||
1661 | _, _, del_p = _export_patches(srctree, rd, initial_revs, | ||
1662 | all_patches_dir) | ||
1643 | logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p))) | 1663 | logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p))) |
1644 | if filter_patches: | 1664 | if filter_patches: |
1645 | new_p = OrderedDict() | 1665 | new_p = OrderedDict() |
1646 | upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches) | 1666 | upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches) |
1647 | remove_files = [f for f in remove_files if f in filter_patches] | 1667 | del_p = OrderedDict((k,v) for k,v in del_p.items() if k in filter_patches) |
1668 | remove_files = [] | ||
1669 | if not no_remove: | ||
1670 | # Remove deleted local files and patches | ||
1671 | remove_files = list(del_f.values()) + list(del_p.values()) | ||
1648 | updatefiles = False | 1672 | updatefiles = False |
1649 | updaterecipe = False | 1673 | updaterecipe = False |
1650 | destpath = None | 1674 | destpath = None |
1651 | srcuri = (rd.getVar('SRC_URI', False) or '').split() | 1675 | srcuri = (rd.getVar('SRC_URI', False) or '').split() |
1676 | |||
1652 | if appendlayerdir: | 1677 | if appendlayerdir: |
1653 | files = OrderedDict((os.path.join(local_files_dir, key), val) for | 1678 | files = OrderedDict((os.path.join(local_files_dir, key), val) for |
1654 | key, val in list(upd_f.items()) + list(new_f.items())) | 1679 | key, val in list(upd_f.items()) + list(new_f.items())) |
1655 | files.update(OrderedDict((os.path.join(patches_dir, key), val) for | 1680 | files.update(OrderedDict((os.path.join(patches_dir, key), val) for |
1656 | key, val in list(upd_p.items()) + list(new_p.items()))) | 1681 | key, val in list(upd_p.items()) + list(new_p.items()))) |
1682 | |||
1683 | params = [] | ||
1684 | for file, param in files.items(): | ||
1685 | patchdir_param = dict(patchdir_params) | ||
1686 | patchdir = param.get('patchdir', ".") | ||
1687 | if patchdir != "." : | ||
1688 | if patchdir_param: | ||
1689 | patchdir_param['patchdir'] += patchdir | ||
1690 | else: | ||
1691 | patchdir_param['patchdir'] = patchdir | ||
1692 | params.append(patchdir_param) | ||
1693 | |||
1657 | if files or remove_files: | 1694 | if files or remove_files: |
1658 | removevalues = None | 1695 | removevalues = None |
1659 | if remove_files: | 1696 | if remove_files: |
1660 | removedentries, remaining = _remove_file_entries( | 1697 | removedentries, remaining = _remove_file_entries( |
1661 | srcuri, remove_files) | 1698 | srcuri, remove_files) |
1662 | if removedentries or remaining: | 1699 | if removedentries or remaining: |
1663 | remaining = ['file://' + os.path.basename(item) for | 1700 | remaining = [srcuri_entry(os.path.basename(item), patchdir_params) for |
1664 | item in remaining] | 1701 | item in remaining] |
1665 | removevalues = {'SRC_URI': removedentries + remaining} | 1702 | removevalues = {'SRC_URI': removedentries + remaining} |
1666 | appendfile, destpath = oe.recipeutils.bbappend_recipe( | 1703 | appendfile, destpath = oe.recipeutils.bbappend_recipe( |
1667 | rd, appendlayerdir, files, | 1704 | rd, appendlayerdir, files, |
1668 | wildcardver=wildcard_version, | 1705 | wildcardver=wildcard_version, |
1669 | removevalues=removevalues, | 1706 | removevalues=removevalues, |
1670 | redirect_output=dry_run_outdir) | 1707 | redirect_output=dry_run_outdir, |
1708 | params=params) | ||
1671 | else: | 1709 | else: |
1672 | logger.info('No patches or local source files needed updating') | 1710 | logger.info('No patches or local source files needed updating') |
1673 | else: | 1711 | else: |
1674 | # Update existing files | 1712 | # Update existing files |
1675 | files_dir = _determine_files_dir(rd) | 1713 | files_dir = _determine_files_dir(rd) |
1676 | for basepath, path in upd_f.items(): | 1714 | for basepath, param in upd_f.items(): |
1715 | path = param['path'] | ||
1677 | logger.info('Updating file %s' % basepath) | 1716 | logger.info('Updating file %s' % basepath) |
1678 | if os.path.isabs(basepath): | 1717 | if os.path.isabs(basepath): |
1679 | # Original file (probably with subdir pointing inside source tree) | 1718 | # Original file (probably with subdir pointing inside source tree) |
@@ -1684,14 +1723,23 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1684 | _move_file(os.path.join(local_files_dir, basepath), path, | 1723 | _move_file(os.path.join(local_files_dir, basepath), path, |
1685 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1724 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1686 | updatefiles = True | 1725 | updatefiles = True |
1687 | for basepath, path in upd_p.items(): | 1726 | for basepath, param in upd_p.items(): |
1688 | patchfn = os.path.join(patches_dir, basepath) | 1727 | path = param['path'] |
1728 | patchdir = param.get('patchdir', ".") | ||
1729 | patchdir_param = {} | ||
1730 | if patchdir != "." : | ||
1731 | patchdir_param = dict(patchdir_params) | ||
1732 | if patchdir_param: | ||
1733 | patchdir_param['patchdir'] += patchdir | ||
1734 | else: | ||
1735 | patchdir_param['patchdir'] = patchdir | ||
1736 | patchfn = os.path.join(patches_dir, patchdir, basepath) | ||
1689 | if os.path.dirname(path) + '/' == dl_dir: | 1737 | if os.path.dirname(path) + '/' == dl_dir: |
1690 | # This is a a downloaded patch file - we now need to | 1738 | # This is a a downloaded patch file - we now need to |
1691 | # replace the entry in SRC_URI with our local version | 1739 | # replace the entry in SRC_URI with our local version |
1692 | logger.info('Replacing remote patch %s with updated local version' % basepath) | 1740 | logger.info('Replacing remote patch %s with updated local version' % basepath) |
1693 | path = os.path.join(files_dir, basepath) | 1741 | path = os.path.join(files_dir, basepath) |
1694 | _replace_srcuri_entry(srcuri, basepath, 'file://%s' % basepath) | 1742 | _replace_srcuri_entry(srcuri, basepath, srcuri_entry(basepath, patchdir_param)) |
1695 | updaterecipe = True | 1743 | updaterecipe = True |
1696 | else: | 1744 | else: |
1697 | logger.info('Updating patch %s%s' % (basepath, dry_run_suffix)) | 1745 | logger.info('Updating patch %s%s' % (basepath, dry_run_suffix)) |
@@ -1699,21 +1747,29 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1699 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1747 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1700 | updatefiles = True | 1748 | updatefiles = True |
1701 | # Add any new files | 1749 | # Add any new files |
1702 | for basepath, path in new_f.items(): | 1750 | for basepath, param in new_f.items(): |
1703 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) | 1751 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) |
1704 | _move_file(os.path.join(local_files_dir, basepath), | 1752 | _move_file(os.path.join(local_files_dir, basepath), |
1705 | os.path.join(files_dir, basepath), | 1753 | os.path.join(files_dir, basepath), |
1706 | dry_run_outdir=dry_run_outdir, | 1754 | dry_run_outdir=dry_run_outdir, |
1707 | base_outdir=recipedir) | 1755 | base_outdir=recipedir) |
1708 | srcuri.append('file://%s' % basepath) | 1756 | srcuri.append(srcuri_entry(basepath, patchdir_params)) |
1709 | updaterecipe = True | 1757 | updaterecipe = True |
1710 | for basepath, path in new_p.items(): | 1758 | for basepath, param in new_p.items(): |
1759 | patchdir = param.get('patchdir', ".") | ||
1711 | logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix)) | 1760 | logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix)) |
1712 | _move_file(os.path.join(patches_dir, basepath), | 1761 | _move_file(os.path.join(patches_dir, patchdir, basepath), |
1713 | os.path.join(files_dir, basepath), | 1762 | os.path.join(files_dir, basepath), |
1714 | dry_run_outdir=dry_run_outdir, | 1763 | dry_run_outdir=dry_run_outdir, |
1715 | base_outdir=recipedir) | 1764 | base_outdir=recipedir) |
1716 | srcuri.append('file://%s' % basepath) | 1765 | params = dict(patchdir_params) |
1766 | if patchdir != "." : | ||
1767 | if params: | ||
1768 | params['patchdir'] += patchdir | ||
1769 | else: | ||
1770 | params['patchdir'] = patchdir | ||
1771 | |||
1772 | srcuri.append(srcuri_entry(basepath, params)) | ||
1717 | updaterecipe = True | 1773 | updaterecipe = True |
1718 | # Update recipe, if needed | 1774 | # Update recipe, if needed |
1719 | if _remove_file_entries(srcuri, remove_files)[0]: | 1775 | if _remove_file_entries(srcuri, remove_files)[0]: |
@@ -1736,6 +1792,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1736 | 1792 | ||
1737 | def _guess_recipe_update_mode(srctree, rdata): | 1793 | def _guess_recipe_update_mode(srctree, rdata): |
1738 | """Guess the recipe update mode to use""" | 1794 | """Guess the recipe update mode to use""" |
1795 | import bb.process | ||
1739 | src_uri = (rdata.getVar('SRC_URI') or '').split() | 1796 | src_uri = (rdata.getVar('SRC_URI') or '').split() |
1740 | git_uris = [uri for uri in src_uri if uri.startswith('git://')] | 1797 | git_uris = [uri for uri in src_uri if uri.startswith('git://')] |
1741 | if not git_uris: | 1798 | if not git_uris: |
@@ -1757,6 +1814,8 @@ def _guess_recipe_update_mode(srctree, rdata): | |||
1757 | return 'patch' | 1814 | return 'patch' |
1758 | 1815 | ||
1759 | def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False): | 1816 | def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False): |
1817 | import bb.data | ||
1818 | import bb.process | ||
1760 | srctree = workspace[recipename]['srctree'] | 1819 | srctree = workspace[recipename]['srctree'] |
1761 | if mode == 'auto': | 1820 | if mode == 'auto': |
1762 | mode = _guess_recipe_update_mode(srctree, rd) | 1821 | mode = _guess_recipe_update_mode(srctree, rd) |
@@ -1770,6 +1829,8 @@ def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_ver | |||
1770 | for line in stdout.splitlines(): | 1829 | for line in stdout.splitlines(): |
1771 | branchname = line[2:] | 1830 | branchname = line[2:] |
1772 | if line.startswith('* '): | 1831 | if line.startswith('* '): |
1832 | if 'HEAD' in line: | ||
1833 | raise DevtoolError('Detached HEAD - please check out a branch, e.g., "devtool"') | ||
1773 | startbranch = branchname | 1834 | startbranch = branchname |
1774 | if branchname.startswith(override_branch_prefix): | 1835 | if branchname.startswith(override_branch_prefix): |
1775 | override_branches.append(branchname) | 1836 | override_branches.append(branchname) |
@@ -1877,6 +1938,7 @@ def status(args, config, basepath, workspace): | |||
1877 | 1938 | ||
1878 | def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | 1939 | def _reset(recipes, no_clean, remove_work, config, basepath, workspace): |
1879 | """Reset one or more recipes""" | 1940 | """Reset one or more recipes""" |
1941 | import bb.process | ||
1880 | import oe.path | 1942 | import oe.path |
1881 | 1943 | ||
1882 | def clean_preferred_provider(pn, layerconf_path): | 1944 | def clean_preferred_provider(pn, layerconf_path): |
@@ -1889,7 +1951,7 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
1889 | lines = f.readlines() | 1951 | lines = f.readlines() |
1890 | with open(new_layerconf_file, 'a') as nf: | 1952 | with open(new_layerconf_file, 'a') as nf: |
1891 | for line in lines: | 1953 | for line in lines: |
1892 | pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + pn + r'"$' | 1954 | pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$' |
1893 | if not re.match(pprovider_exp, line): | 1955 | if not re.match(pprovider_exp, line): |
1894 | nf.write(line) | 1956 | nf.write(line) |
1895 | else: | 1957 | else: |
@@ -1959,9 +2021,19 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
1959 | shutil.rmtree(srctreebase) | 2021 | shutil.rmtree(srctreebase) |
1960 | else: | 2022 | else: |
1961 | # We don't want to risk wiping out any work in progress | 2023 | # We don't want to risk wiping out any work in progress |
1962 | logger.info('Leaving source tree %s as-is; if you no ' | 2024 | if srctreebase.startswith(os.path.join(config.workspace_path, 'sources')): |
1963 | 'longer need it then please delete it manually' | 2025 | from datetime import datetime |
1964 | % srctreebase) | 2026 | preservesrc = os.path.join(config.workspace_path, 'attic', 'sources', "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S"))) |
2027 | logger.info('Preserving source tree in %s\nIf you no ' | ||
2028 | 'longer need it then please delete it manually.\n' | ||
2029 | 'It is also possible to reuse it via devtool source tree argument.' | ||
2030 | % preservesrc) | ||
2031 | bb.utils.mkdirhier(os.path.dirname(preservesrc)) | ||
2032 | shutil.move(srctreebase, preservesrc) | ||
2033 | else: | ||
2034 | logger.info('Leaving source tree %s as-is; if you no ' | ||
2035 | 'longer need it then please delete it manually' | ||
2036 | % srctreebase) | ||
1965 | else: | 2037 | else: |
1966 | # This is unlikely, but if it's empty we can just remove it | 2038 | # This is unlikely, but if it's empty we can just remove it |
1967 | os.rmdir(srctreebase) | 2039 | os.rmdir(srctreebase) |
@@ -1970,8 +2042,6 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
1970 | 2042 | ||
1971 | def reset(args, config, basepath, workspace): | 2043 | def reset(args, config, basepath, workspace): |
1972 | """Entry point for the devtool 'reset' subcommand""" | 2044 | """Entry point for the devtool 'reset' subcommand""" |
1973 | import bb | ||
1974 | import shutil | ||
1975 | 2045 | ||
1976 | recipes = "" | 2046 | recipes = "" |
1977 | 2047 | ||
@@ -2221,6 +2291,7 @@ def register_commands(subparsers, context): | |||
2221 | group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true") | 2291 | group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true") |
2222 | parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI') | 2292 | parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI') |
2223 | parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true") | 2293 | parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true") |
2294 | parser_add.add_argument('--no-pypi', help='Do not inherit pypi class', action="store_true") | ||
2224 | parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)') | 2295 | parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)') |
2225 | parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true") | 2296 | parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true") |
2226 | group = parser_add.add_mutually_exclusive_group() | 2297 | group = parser_add.add_mutually_exclusive_group() |
@@ -2249,6 +2320,7 @@ def register_commands(subparsers, context): | |||
2249 | parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")') | 2320 | parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")') |
2250 | parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations') | 2321 | parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations') |
2251 | parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true") | 2322 | parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true") |
2323 | parser_modify.add_argument('--debug-build', action="store_true", help='Add DEBUG_BUILD = "1" to the modified recipe') | ||
2252 | parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup) | 2324 | parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup) |
2253 | 2325 | ||
2254 | parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe', | 2326 | parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe', |
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py index 5a057e95f5..d9aca6e2db 100644 --- a/scripts/lib/devtool/upgrade.py +++ b/scripts/lib/devtool/upgrade.py | |||
@@ -32,9 +32,11 @@ def _run(cmd, cwd=''): | |||
32 | 32 | ||
33 | def _get_srctree(tmpdir): | 33 | def _get_srctree(tmpdir): |
34 | srctree = tmpdir | 34 | srctree = tmpdir |
35 | dirs = scriptutils.filter_src_subdirs(tmpdir) | 35 | dirs = os.listdir(tmpdir) |
36 | if len(dirs) == 1: | 36 | if len(dirs) == 1: |
37 | srctree = os.path.join(tmpdir, dirs[0]) | 37 | srctree = os.path.join(tmpdir, dirs[0]) |
38 | else: | ||
39 | raise DevtoolError("Cannot determine where the source tree is after unpacking in {}: {}".format(tmpdir,dirs)) | ||
38 | return srctree | 40 | return srctree |
39 | 41 | ||
40 | def _copy_source_code(orig, dest): | 42 | def _copy_source_code(orig, dest): |
@@ -71,23 +73,24 @@ def _rename_recipe_dirs(oldpv, newpv, path): | |||
71 | if oldfile.find(oldpv) != -1: | 73 | if oldfile.find(oldpv) != -1: |
72 | newfile = oldfile.replace(oldpv, newpv) | 74 | newfile = oldfile.replace(oldpv, newpv) |
73 | if oldfile != newfile: | 75 | if oldfile != newfile: |
74 | os.rename(os.path.join(path, oldfile), os.path.join(path, newfile)) | 76 | bb.utils.rename(os.path.join(path, oldfile), |
77 | os.path.join(path, newfile)) | ||
75 | 78 | ||
76 | def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path): | 79 | def _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path): |
77 | oldrecipe = os.path.basename(oldrecipe) | 80 | oldrecipe = os.path.basename(oldrecipe) |
78 | if oldrecipe.endswith('_%s.bb' % oldpv): | 81 | if oldrecipe.endswith('_%s.bb' % oldpv): |
79 | newrecipe = '%s_%s.bb' % (bpn, newpv) | 82 | newrecipe = '%s_%s.bb' % (pn, newpv) |
80 | if oldrecipe != newrecipe: | 83 | if oldrecipe != newrecipe: |
81 | shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe)) | 84 | shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe)) |
82 | else: | 85 | else: |
83 | newrecipe = oldrecipe | 86 | newrecipe = oldrecipe |
84 | return os.path.join(path, newrecipe) | 87 | return os.path.join(path, newrecipe) |
85 | 88 | ||
86 | def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path): | 89 | def _rename_recipe_files(oldrecipe, pn, oldpv, newpv, path): |
87 | _rename_recipe_dirs(oldpv, newpv, path) | 90 | _rename_recipe_dirs(oldpv, newpv, path) |
88 | return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path) | 91 | return _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path) |
89 | 92 | ||
90 | def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d): | 93 | def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d): |
91 | """Writes an append file""" | 94 | """Writes an append file""" |
92 | if not os.path.exists(rc): | 95 | if not os.path.exists(rc): |
93 | raise DevtoolError("bbappend not created because %s does not exist" % rc) | 96 | raise DevtoolError("bbappend not created because %s does not exist" % rc) |
@@ -102,36 +105,38 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d) | |||
102 | pn = d.getVar('PN') | 105 | pn = d.getVar('PN') |
103 | af = os.path.join(appendpath, '%s.bbappend' % brf) | 106 | af = os.path.join(appendpath, '%s.bbappend' % brf) |
104 | with open(af, 'w') as f: | 107 | with open(af, 'w') as f: |
105 | f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n\n') | 108 | f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n') |
109 | # Local files can be modified/tracked in separate subdir under srctree | ||
110 | # Mostly useful for packages with S != WORKDIR | ||
111 | f.write('FILESPATH:prepend := "%s:"\n' % | ||
112 | os.path.join(srctreebase, 'oe-local-files')) | ||
113 | f.write('# srctreebase: %s\n' % srctreebase) | ||
106 | f.write('inherit externalsrc\n') | 114 | f.write('inherit externalsrc\n') |
107 | f.write(('# NOTE: We use pn- overrides here to avoid affecting' | 115 | f.write(('# NOTE: We use pn- overrides here to avoid affecting' |
108 | 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n')) | 116 | 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n')) |
109 | f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree)) | 117 | f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree)) |
110 | b_is_s = use_external_build(same_dir, no_same_dir, d) | 118 | b_is_s = use_external_build(same_dir, no_same_dir, d) |
111 | if b_is_s: | 119 | if b_is_s: |
112 | f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree)) | 120 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) |
113 | f.write('\n') | 121 | f.write('\n') |
114 | if rev: | 122 | if revs: |
115 | f.write('# initial_rev: %s\n' % rev) | 123 | for name, rev in revs.items(): |
124 | f.write('# initial_rev %s: %s\n' % (name, rev)) | ||
116 | if copied: | 125 | if copied: |
117 | f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE'))) | 126 | f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE'))) |
118 | f.write('# original_files: %s\n' % ' '.join(copied)) | 127 | f.write('# original_files: %s\n' % ' '.join(copied)) |
119 | return af | 128 | return af |
120 | 129 | ||
121 | def _cleanup_on_error(rf, srctree): | 130 | def _cleanup_on_error(rd, srctree): |
122 | rfp = os.path.split(rf)[0] # recipe folder | 131 | if os.path.exists(rd): |
123 | rfpp = os.path.split(rfp)[0] # recipes folder | 132 | shutil.rmtree(rd) |
124 | if os.path.exists(rfp): | ||
125 | shutil.rmtree(rfp) | ||
126 | if not len(os.listdir(rfpp)): | ||
127 | os.rmdir(rfpp) | ||
128 | srctree = os.path.abspath(srctree) | 133 | srctree = os.path.abspath(srctree) |
129 | if os.path.exists(srctree): | 134 | if os.path.exists(srctree): |
130 | shutil.rmtree(srctree) | 135 | shutil.rmtree(srctree) |
131 | 136 | ||
132 | def _upgrade_error(e, rf, srctree, keep_failure=False, extramsg=None): | 137 | def _upgrade_error(e, rd, srctree, keep_failure=False, extramsg=None): |
133 | if rf and not keep_failure: | 138 | if not keep_failure: |
134 | _cleanup_on_error(rf, srctree) | 139 | _cleanup_on_error(rd, srctree) |
135 | logger.error(e) | 140 | logger.error(e) |
136 | if extramsg: | 141 | if extramsg: |
137 | logger.error(extramsg) | 142 | logger.error(extramsg) |
@@ -164,6 +169,7 @@ def _get_uri(rd): | |||
164 | 169 | ||
165 | def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd): | 170 | def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd): |
166 | """Extract sources of a recipe with a new version""" | 171 | """Extract sources of a recipe with a new version""" |
172 | import oe.patch | ||
167 | 173 | ||
168 | def __run(cmd): | 174 | def __run(cmd): |
169 | """Simple wrapper which calls _run with srctree as cwd""" | 175 | """Simple wrapper which calls _run with srctree as cwd""" |
@@ -178,12 +184,16 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
178 | uri, rev = _get_uri(crd) | 184 | uri, rev = _get_uri(crd) |
179 | if srcrev: | 185 | if srcrev: |
180 | rev = srcrev | 186 | rev = srcrev |
187 | paths = [srctree] | ||
181 | if uri.startswith('git://') or uri.startswith('gitsm://'): | 188 | if uri.startswith('git://') or uri.startswith('gitsm://'): |
182 | __run('git fetch') | 189 | __run('git fetch') |
183 | __run('git checkout %s' % rev) | 190 | __run('git checkout %s' % rev) |
184 | __run('git tag -f devtool-base-new') | 191 | __run('git tag -f --no-sign devtool-base-new') |
185 | md5 = None | 192 | __run('git submodule update --recursive') |
186 | sha256 = None | 193 | __run('git submodule foreach \'git tag -f --no-sign devtool-base-new\'') |
194 | (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'') | ||
195 | paths += [os.path.join(srctree, p) for p in stdout.splitlines()] | ||
196 | checksums = {} | ||
187 | _, _, _, _, _, params = bb.fetch2.decodeurl(uri) | 197 | _, _, _, _, _, params = bb.fetch2.decodeurl(uri) |
188 | srcsubdir_rel = params.get('destsuffix', 'git') | 198 | srcsubdir_rel = params.get('destsuffix', 'git') |
189 | if not srcbranch: | 199 | if not srcbranch: |
@@ -191,14 +201,15 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
191 | get_branch = [x.strip() for x in check_branch.splitlines()] | 201 | get_branch = [x.strip() for x in check_branch.splitlines()] |
192 | # Remove HEAD reference point and drop remote prefix | 202 | # Remove HEAD reference point and drop remote prefix |
193 | get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] | 203 | get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] |
194 | if 'master' in get_branch: | 204 | if len(get_branch) == 1: |
195 | # If it is master, we do not need to append 'branch=master' as this is default. | 205 | # If srcrev is on only ONE branch, then use that branch |
196 | # Even with the case where get_branch has multiple objects, if 'master' is one | ||
197 | # of them, we should default take from 'master' | ||
198 | srcbranch = '' | ||
199 | elif len(get_branch) == 1: | ||
200 | # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch' | ||
201 | srcbranch = get_branch[0] | 206 | srcbranch = get_branch[0] |
207 | elif 'main' in get_branch: | ||
208 | # If srcrev is on multiple branches, then choose 'main' if it is one of them | ||
209 | srcbranch = 'main' | ||
210 | elif 'master' in get_branch: | ||
211 | # Otherwise choose 'master' if it is one of the branches | ||
212 | srcbranch = 'master' | ||
202 | else: | 213 | else: |
203 | # If get_branch contains more than one objects, then display error and exit. | 214 | # If get_branch contains more than one objects, then display error and exit. |
204 | mbrch = '\n ' + '\n '.join(get_branch) | 215 | mbrch = '\n ' + '\n '.join(get_branch) |
@@ -215,9 +226,6 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
215 | if ftmpdir and keep_temp: | 226 | if ftmpdir and keep_temp: |
216 | logger.info('Fetch temp directory is %s' % ftmpdir) | 227 | logger.info('Fetch temp directory is %s' % ftmpdir) |
217 | 228 | ||
218 | md5 = checksums['md5sum'] | ||
219 | sha256 = checksums['sha256sum'] | ||
220 | |||
221 | tmpsrctree = _get_srctree(tmpdir) | 229 | tmpsrctree = _get_srctree(tmpdir) |
222 | srctree = os.path.abspath(srctree) | 230 | srctree = os.path.abspath(srctree) |
223 | srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir) | 231 | srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir) |
@@ -249,32 +257,52 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
249 | useroptions = [] | 257 | useroptions = [] |
250 | oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) | 258 | oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) |
251 | __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv)) | 259 | __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv)) |
252 | __run('git tag -f devtool-base-%s' % newpv) | 260 | __run('git tag -f --no-sign devtool-base-%s' % newpv) |
253 | 261 | ||
254 | (stdout, _) = __run('git rev-parse HEAD') | 262 | revs = {} |
255 | rev = stdout.rstrip() | 263 | for path in paths: |
264 | (stdout, _) = _run('git rev-parse HEAD', cwd=path) | ||
265 | revs[os.path.relpath(path, srctree)] = stdout.rstrip() | ||
256 | 266 | ||
257 | if no_patch: | 267 | if no_patch: |
258 | patches = oe.recipeutils.get_recipe_patches(crd) | 268 | patches = oe.recipeutils.get_recipe_patches(crd) |
259 | if patches: | 269 | if patches: |
260 | logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches])) | 270 | logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches])) |
261 | else: | 271 | else: |
262 | __run('git checkout devtool-patched -b %s' % branch) | 272 | for path in paths: |
263 | skiptag = False | 273 | _run('git checkout devtool-patched -b %s' % branch, cwd=path) |
264 | try: | 274 | (stdout, _) = _run('git branch --list devtool-override-*', cwd=path) |
265 | __run('git rebase %s' % rev) | 275 | branches_to_rebase = [branch] + stdout.split() |
266 | except bb.process.ExecutionError as e: | 276 | target_branch = revs[os.path.relpath(path, srctree)] |
267 | skiptag = True | 277 | |
268 | if 'conflict' in e.stdout: | 278 | # There is a bug (or feature?) in git rebase where if a commit with |
269 | logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip())) | 279 | # a note is fully rebased away by being part of an old commit, the |
270 | else: | 280 | # note is still attached to the old commit. Avoid this by making |
271 | logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout)) | 281 | # sure all old devtool related commits have a note attached to them |
272 | if not skiptag: | 282 | # (this assumes git config notes.rewriteMode is set to ignore). |
273 | if uri.startswith('git://') or uri.startswith('gitsm://'): | 283 | (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch) |
274 | suffix = 'new' | 284 | for rev in stdout.splitlines(): |
275 | else: | 285 | if not oe.patch.GitApplyTree.getNotes(path, rev): |
276 | suffix = newpv | 286 | oe.patch.GitApplyTree.addNote(path, rev, "dummy") |
277 | __run('git tag -f devtool-patched-%s' % suffix) | 287 | |
288 | for b in branches_to_rebase: | ||
289 | logger.info("Rebasing {} onto {}".format(b, target_branch)) | ||
290 | _run('git checkout %s' % b, cwd=path) | ||
291 | try: | ||
292 | _run('git rebase %s' % target_branch, cwd=path) | ||
293 | except bb.process.ExecutionError as e: | ||
294 | if 'conflict' in e.stdout: | ||
295 | logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip())) | ||
296 | _run('git rebase --abort', cwd=path) | ||
297 | else: | ||
298 | logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout)) | ||
299 | |||
300 | # Remove any dummy notes added above. | ||
301 | (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch) | ||
302 | for rev in stdout.splitlines(): | ||
303 | oe.patch.GitApplyTree.removeNote(path, rev, "dummy") | ||
304 | |||
305 | _run('git checkout %s' % branch, cwd=path) | ||
278 | 306 | ||
279 | if tmpsrctree: | 307 | if tmpsrctree: |
280 | if keep_temp: | 308 | if keep_temp: |
@@ -284,7 +312,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee | |||
284 | if tmpdir != tmpsrctree: | 312 | if tmpdir != tmpsrctree: |
285 | shutil.rmtree(tmpdir) | 313 | shutil.rmtree(tmpdir) |
286 | 314 | ||
287 | return (rev, md5, sha256, srcbranch, srcsubdir_rel) | 315 | return (revs, checksums, srcbranch, srcsubdir_rel) |
288 | 316 | ||
289 | def _add_license_diff_to_recipe(path, diff): | 317 | def _add_license_diff_to_recipe(path, diff): |
290 | notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'. | 318 | notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'. |
@@ -305,22 +333,22 @@ def _add_license_diff_to_recipe(path, diff): | |||
305 | f.write("\n#\n\n".encode()) | 333 | f.write("\n#\n\n".encode()) |
306 | f.write(orig_content) | 334 | f.write(orig_content) |
307 | 335 | ||
308 | def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): | 336 | def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure): |
309 | """Creates the new recipe under workspace""" | 337 | """Creates the new recipe under workspace""" |
310 | 338 | ||
311 | bpn = rd.getVar('BPN') | 339 | pn = rd.getVar('PN') |
312 | path = os.path.join(workspace, 'recipes', bpn) | 340 | path = os.path.join(workspace, 'recipes', pn) |
313 | bb.utils.mkdirhier(path) | 341 | bb.utils.mkdirhier(path) |
314 | copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True) | 342 | copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True) |
315 | if not copied: | 343 | if not copied: |
316 | raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn) | 344 | raise DevtoolError('Internal error - no files were copied for recipe %s' % pn) |
317 | logger.debug('Copied %s to %s' % (copied, path)) | 345 | logger.debug('Copied %s to %s' % (copied, path)) |
318 | 346 | ||
319 | oldpv = rd.getVar('PV') | 347 | oldpv = rd.getVar('PV') |
320 | if not newpv: | 348 | if not newpv: |
321 | newpv = oldpv | 349 | newpv = oldpv |
322 | origpath = rd.getVar('FILE') | 350 | origpath = rd.getVar('FILE') |
323 | fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path) | 351 | fullpath = _rename_recipe_files(origpath, pn, oldpv, newpv, path) |
324 | logger.debug('Upgraded %s => %s' % (origpath, fullpath)) | 352 | logger.debug('Upgraded %s => %s' % (origpath, fullpath)) |
325 | 353 | ||
326 | newvalues = {} | 354 | newvalues = {} |
@@ -336,7 +364,10 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src | |||
336 | replacing = True | 364 | replacing = True |
337 | new_src_uri = [] | 365 | new_src_uri = [] |
338 | for entry in src_uri: | 366 | for entry in src_uri: |
339 | scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry) | 367 | try: |
368 | scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry) | ||
369 | except bb.fetch2.MalformedUrl as e: | ||
370 | raise DevtoolError("Could not decode SRC_URI: {}".format(e)) | ||
340 | if replacing and scheme in ['git', 'gitsm']: | 371 | if replacing and scheme in ['git', 'gitsm']: |
341 | branch = params.get('branch', 'master') | 372 | branch = params.get('branch', 'master') |
342 | if rd.expand(branch) != srcbranch: | 373 | if rd.expand(branch) != srcbranch: |
@@ -374,30 +405,39 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src | |||
374 | addnames.append(params['name']) | 405 | addnames.append(params['name']) |
375 | # Find what's been set in the original recipe | 406 | # Find what's been set in the original recipe |
376 | oldnames = [] | 407 | oldnames = [] |
408 | oldsums = [] | ||
377 | noname = False | 409 | noname = False |
378 | for varflag in rd.getVarFlags('SRC_URI'): | 410 | for varflag in rd.getVarFlags('SRC_URI'): |
379 | if varflag.endswith(('.md5sum', '.sha256sum')): | 411 | for checksum in checksums: |
380 | name = varflag.rsplit('.', 1)[0] | 412 | if varflag.endswith('.' + checksum): |
381 | if name not in oldnames: | 413 | name = varflag.rsplit('.', 1)[0] |
382 | oldnames.append(name) | 414 | if name not in oldnames: |
383 | elif varflag in ['md5sum', 'sha256sum']: | 415 | oldnames.append(name) |
384 | noname = True | 416 | oldsums.append(checksum) |
417 | elif varflag == checksum: | ||
418 | noname = True | ||
419 | oldsums.append(checksum) | ||
385 | # Even if SRC_URI has named entries it doesn't have to actually use the name | 420 | # Even if SRC_URI has named entries it doesn't have to actually use the name |
386 | if noname and addnames and addnames[0] not in oldnames: | 421 | if noname and addnames and addnames[0] not in oldnames: |
387 | addnames = [] | 422 | addnames = [] |
388 | # Drop any old names (the name actually might include ${PV}) | 423 | # Drop any old names (the name actually might include ${PV}) |
389 | for name in oldnames: | 424 | for name in oldnames: |
390 | if name not in newnames: | 425 | if name not in newnames: |
391 | newvalues['SRC_URI[%s.md5sum]' % name] = None | 426 | for checksum in oldsums: |
392 | newvalues['SRC_URI[%s.sha256sum]' % name] = None | 427 | newvalues['SRC_URI[%s.%s]' % (name, checksum)] = None |
393 | 428 | ||
394 | if sha256: | 429 | nameprefix = '%s.' % addnames[0] if addnames else '' |
395 | if addnames: | 430 | |
396 | nameprefix = '%s.' % addnames[0] | 431 | # md5sum is deprecated, remove any traces of it. If it was the only old |
397 | else: | 432 | # checksum, then replace it with the default checksums. |
398 | nameprefix = '' | 433 | if 'md5sum' in oldsums: |
399 | newvalues['SRC_URI[%smd5sum]' % nameprefix] = None | 434 | newvalues['SRC_URI[%smd5sum]' % nameprefix] = None |
400 | newvalues['SRC_URI[%ssha256sum]' % nameprefix] = sha256 | 435 | oldsums.remove('md5sum') |
436 | if not oldsums: | ||
437 | oldsums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST] | ||
438 | |||
439 | for checksum in oldsums: | ||
440 | newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum] | ||
401 | 441 | ||
402 | if srcsubdir_new != srcsubdir_old: | 442 | if srcsubdir_new != srcsubdir_old: |
403 | s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR')) | 443 | s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR')) |
@@ -422,10 +462,11 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src | |||
422 | newvalues["LIC_FILES_CHKSUM"] = newlicchksum | 462 | newvalues["LIC_FILES_CHKSUM"] = newlicchksum |
423 | _add_license_diff_to_recipe(fullpath, license_diff) | 463 | _add_license_diff_to_recipe(fullpath, license_diff) |
424 | 464 | ||
465 | tinfoil.modified_files() | ||
425 | try: | 466 | try: |
426 | rd = tinfoil.parse_recipe_file(fullpath, False) | 467 | rd = tinfoil.parse_recipe_file(fullpath, False) |
427 | except bb.tinfoil.TinfoilCommandFailed as e: | 468 | except bb.tinfoil.TinfoilCommandFailed as e: |
428 | _upgrade_error(e, fullpath, srctree, keep_failure, 'Parsing of upgraded recipe failed') | 469 | _upgrade_error(e, os.path.dirname(fullpath), srctree, keep_failure, 'Parsing of upgraded recipe failed') |
429 | oe.recipeutils.patch_recipe(rd, fullpath, newvalues) | 470 | oe.recipeutils.patch_recipe(rd, fullpath, newvalues) |
430 | 471 | ||
431 | return fullpath, copied | 472 | return fullpath, copied |
@@ -434,7 +475,7 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src | |||
434 | def _check_git_config(): | 475 | def _check_git_config(): |
435 | def getconfig(name): | 476 | def getconfig(name): |
436 | try: | 477 | try: |
437 | value = bb.process.run('git config --global %s' % name)[0].strip() | 478 | value = bb.process.run('git config %s' % name)[0].strip() |
438 | except bb.process.ExecutionError as e: | 479 | except bb.process.ExecutionError as e: |
439 | if e.exitcode == 1: | 480 | if e.exitcode == 1: |
440 | value = None | 481 | value = None |
@@ -494,6 +535,15 @@ def _generate_license_diff(old_licenses, new_licenses): | |||
494 | diff = diff + line | 535 | diff = diff + line |
495 | return diff | 536 | return diff |
496 | 537 | ||
538 | def _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil): | ||
539 | tasks = [] | ||
540 | for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split(): | ||
541 | logger.info('Running extra recipe upgrade task: %s' % task) | ||
542 | res = tinfoil.build_targets(pn, task, handle_events=True) | ||
543 | |||
544 | if not res: | ||
545 | raise DevtoolError('Running extra recipe upgrade task %s for %s failed' % (task, pn)) | ||
546 | |||
497 | def upgrade(args, config, basepath, workspace): | 547 | def upgrade(args, config, basepath, workspace): |
498 | """Entry point for the devtool 'upgrade' subcommand""" | 548 | """Entry point for the devtool 'upgrade' subcommand""" |
499 | 549 | ||
@@ -521,6 +571,8 @@ def upgrade(args, config, basepath, workspace): | |||
521 | else: | 571 | else: |
522 | srctree = standard.get_default_srctree(config, pn) | 572 | srctree = standard.get_default_srctree(config, pn) |
523 | 573 | ||
574 | srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR')) | ||
575 | |||
524 | # try to automatically discover latest version and revision if not provided on command line | 576 | # try to automatically discover latest version and revision if not provided on command line |
525 | if not args.version and not args.srcrev: | 577 | if not args.version and not args.srcrev: |
526 | version_info = oe.recipeutils.get_recipe_upstream_version(rd) | 578 | version_info = oe.recipeutils.get_recipe_upstream_version(rd) |
@@ -550,30 +602,34 @@ def upgrade(args, config, basepath, workspace): | |||
550 | try: | 602 | try: |
551 | logger.info('Extracting current version source...') | 603 | logger.info('Extracting current version source...') |
552 | rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) | 604 | rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) |
553 | old_licenses = _extract_licenses(srctree, (rd.getVar('LIC_FILES_CHKSUM') or "")) | 605 | old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or "")) |
554 | logger.info('Extracting upgraded version source...') | 606 | logger.info('Extracting upgraded version source...') |
555 | rev2, md5, sha256, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch, | 607 | rev2, checksums, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch, |
556 | args.srcrev, args.srcbranch, args.branch, args.keep_temp, | 608 | args.srcrev, args.srcbranch, args.branch, args.keep_temp, |
557 | tinfoil, rd) | 609 | tinfoil, rd) |
558 | new_licenses = _extract_licenses(srctree, (rd.getVar('LIC_FILES_CHKSUM') or "")) | 610 | new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or "")) |
559 | license_diff = _generate_license_diff(old_licenses, new_licenses) | 611 | license_diff = _generate_license_diff(old_licenses, new_licenses) |
560 | rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) | 612 | rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure) |
561 | except bb.process.CmdError as e: | 613 | except (bb.process.CmdError, DevtoolError) as e: |
562 | _upgrade_error(e, rf, srctree, args.keep_failure) | 614 | recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('PN')) |
563 | except DevtoolError as e: | 615 | _upgrade_error(e, recipedir, srctree, args.keep_failure) |
564 | _upgrade_error(e, rf, srctree, args.keep_failure) | ||
565 | standard._add_md5(config, pn, os.path.dirname(rf)) | 616 | standard._add_md5(config, pn, os.path.dirname(rf)) |
566 | 617 | ||
567 | af = _write_append(rf, srctree, args.same_dir, args.no_same_dir, rev2, | 618 | af = _write_append(rf, srctree, srctree_s, args.same_dir, args.no_same_dir, rev2, |
568 | copied, config.workspace_path, rd) | 619 | copied, config.workspace_path, rd) |
569 | standard._add_md5(config, pn, af) | 620 | standard._add_md5(config, pn, af) |
570 | 621 | ||
622 | _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil) | ||
623 | |||
571 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) | 624 | update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn]) |
572 | 625 | ||
573 | logger.info('Upgraded source extracted to %s' % srctree) | 626 | logger.info('Upgraded source extracted to %s' % srctree) |
574 | logger.info('New recipe is %s' % rf) | 627 | logger.info('New recipe is %s' % rf) |
575 | if license_diff: | 628 | if license_diff: |
576 | logger.info('License checksums have been updated in the new recipe; please refer to it for the difference between the old and the new license texts.') | 629 | logger.info('License checksums have been updated in the new recipe; please refer to it for the difference between the old and the new license texts.') |
630 | preferred_version = rd.getVar('PREFERRED_VERSION_%s' % rd.getVar('PN')) | ||
631 | if preferred_version: | ||
632 | logger.warning('Version is pinned to %s via PREFERRED_VERSION; it may need adjustment to match the new version before any further steps are taken' % preferred_version) | ||
577 | finally: | 633 | finally: |
578 | tinfoil.shutdown() | 634 | tinfoil.shutdown() |
579 | return 0 | 635 | return 0 |
@@ -599,18 +655,28 @@ def latest_version(args, config, basepath, workspace): | |||
599 | return 0 | 655 | return 0 |
600 | 656 | ||
601 | def check_upgrade_status(args, config, basepath, workspace): | 657 | def check_upgrade_status(args, config, basepath, workspace): |
658 | def _print_status(recipe): | ||
659 | print("{:25} {:15} {:15} {} {} {}".format( recipe['pn'], | ||
660 | recipe['cur_ver'], | ||
661 | recipe['status'] if recipe['status'] != 'UPDATE' else (recipe['next_ver'] if not recipe['next_ver'].endswith("new-commits-available") else "new commits"), | ||
662 | recipe['maintainer'], | ||
663 | recipe['revision'] if recipe['revision'] != 'N/A' else "", | ||
664 | "cannot be updated due to: %s" %(recipe['no_upgrade_reason']) if recipe['no_upgrade_reason'] else "")) | ||
602 | if not args.recipe: | 665 | if not args.recipe: |
603 | logger.info("Checking the upstream status for all recipes may take a few minutes") | 666 | logger.info("Checking the upstream status for all recipes may take a few minutes") |
604 | results = oe.recipeutils.get_recipe_upgrade_status(args.recipe) | 667 | results = oe.recipeutils.get_recipe_upgrade_status(args.recipe) |
605 | for result in results: | 668 | for recipegroup in results: |
606 | # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason | 669 | upgrades = [r for r in recipegroup if r['status'] != 'MATCH'] |
607 | if args.all or result[1] != 'MATCH': | 670 | currents = [r for r in recipegroup if r['status'] == 'MATCH'] |
608 | logger.info("{:25} {:15} {:15} {} {} {}".format( result[0], | 671 | if len(upgrades) > 1: |
609 | result[2], | 672 | print("These recipes need to be upgraded together {") |
610 | result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"), | 673 | for r in sorted(upgrades, key=lambda r:r['pn']): |
611 | result[4], | 674 | _print_status(r) |
612 | result[5] if result[5] != 'N/A' else "", | 675 | if len(upgrades) > 1: |
613 | "cannot be updated due to: %s" %(result[6]) if result[6] else "")) | 676 | print("}") |
677 | for r in currents: | ||
678 | if args.all: | ||
679 | _print_status(r) | ||
614 | 680 | ||
615 | def register_commands(subparsers, context): | 681 | def register_commands(subparsers, context): |
616 | """Register devtool subcommands from this plugin""" | 682 | """Register devtool subcommands from this plugin""" |
diff --git a/scripts/lib/devtool/utilcmds.py b/scripts/lib/devtool/utilcmds.py index 964817766b..bf39f71b11 100644 --- a/scripts/lib/devtool/utilcmds.py +++ b/scripts/lib/devtool/utilcmds.py | |||
@@ -64,7 +64,7 @@ def configure_help(args, config, basepath, workspace): | |||
64 | b = rd.getVar('B') | 64 | b = rd.getVar('B') |
65 | s = rd.getVar('S') | 65 | s = rd.getVar('S') |
66 | configurescript = os.path.join(s, 'configure') | 66 | configurescript = os.path.join(s, 'configure') |
67 | confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or []) | 67 | confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (bb.build.listtasks(rd)) |
68 | configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '') | 68 | configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '') |
69 | extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '') | 69 | extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '') |
70 | extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '') | 70 | extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '') |