summaryrefslogtreecommitdiffstats
path: root/scripts/lib/devtool
diff options
context:
space:
mode:
Diffstat (limited to 'scripts/lib/devtool')
-rw-r--r--scripts/lib/devtool/__init__.py404
-rw-r--r--scripts/lib/devtool/build.py92
-rw-r--r--scripts/lib/devtool/build_image.py164
-rw-r--r--scripts/lib/devtool/build_sdk.py55
-rw-r--r--scripts/lib/devtool/deploy.py378
-rw-r--r--scripts/lib/devtool/export.py109
-rw-r--r--scripts/lib/devtool/ide_plugins/__init__.py282
-rw-r--r--scripts/lib/devtool/ide_plugins/ide_code.py463
-rw-r--r--scripts/lib/devtool/ide_plugins/ide_none.py53
-rwxr-xr-xscripts/lib/devtool/ide_sdk.py1070
-rw-r--r--scripts/lib/devtool/import.py134
-rw-r--r--scripts/lib/devtool/menuconfig.py81
-rw-r--r--scripts/lib/devtool/package.py50
-rw-r--r--scripts/lib/devtool/runqemu.py64
-rw-r--r--scripts/lib/devtool/sdk.py330
-rw-r--r--scripts/lib/devtool/search.py109
-rw-r--r--scripts/lib/devtool/standard.py2395
-rw-r--r--scripts/lib/devtool/upgrade.py693
-rw-r--r--scripts/lib/devtool/utilcmds.py242
19 files changed, 0 insertions, 7168 deletions
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
deleted file mode 100644
index 6133c1c5b4..0000000000
--- a/scripts/lib/devtool/__init__.py
+++ /dev/null
@@ -1,404 +0,0 @@
1#!/usr/bin/env python3
2
3# Development tool - utility functions for plugins
4#
5# Copyright (C) 2014 Intel Corporation
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9"""Devtool plugins module"""
10
11import os
12import sys
13import subprocess
14import logging
15import re
16import codecs
17
18logger = logging.getLogger('devtool')
19
class DevtoolError(Exception):
    """Error raised by devtool plugins; carries a process exit status."""

    def __init__(self, message, exitcode=1):
        # Store the message on the base Exception and remember the exit
        # status the top-level handler should use when terminating.
        super().__init__(message)
        self.exitcode = exitcode
25
26
def exec_build_env_command(init_path, builddir, cmd, watch=False, **options):
    """Run a program in bitbake build context"""
    import bb
    options.setdefault('cwd', builddir)
    if init_path:
        # As the OE init script makes use of BASH_SOURCE to determine OEROOT,
        # and can't determine it when running under dash, we need to set
        # the executable to bash to correctly set things up
        options.setdefault('executable', 'bash')
        logger.debug('Executing command: "%s" using init path %s' % (cmd, init_path))
        init_prefix = '. %s %s > /dev/null && ' % (init_path, builddir)
    else:
        logger.debug('Executing command "%s"' % cmd)
        init_prefix = ''
    if not watch:
        return bb.process.run(init_prefix + cmd, **options)
    if sys.stdout.isatty():
        # Fool bitbake into thinking it's outputting to a terminal (because it is, indirectly)
        cmd = 'script -e -q -c "%s" /dev/null' % cmd
    return exec_watch(init_prefix + cmd, **options)
50
def exec_watch(cmd, **options):
    """Run a program with stdout (and stderr) echoed live to sys.stdout.

    Output is decoded incrementally as UTF-8 so it appears as soon as the
    child produces it. Returns a (output, None) tuple mirroring
    bb.process.run; raises bb.process.ExecutionError on non-zero exit.
    """
    import bb
    # Idiom fixes vs original: 'x not in options' and 'is not None'
    if isinstance(cmd, str) and 'shell' not in options:
        options['shell'] = True

    process = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **options
    )

    # Incremental decoder handles multi-byte characters split across reads
    reader = codecs.getreader('utf-8')(process.stdout)
    buf = ''
    while True:
        out = reader.read(1, 1)
        if out:
            sys.stdout.write(out)
            sys.stdout.flush()
            buf += out
        elif out == '' and process.poll() is not None:
            # EOF reached and the process has terminated
            break

    if process.returncode != 0:
        raise bb.process.ExecutionError(cmd, process.returncode, buf, None)

    return buf, None
76
def exec_fakeroot(d, cmd, **kwargs):
    """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions"""
    # Grab the pseudo command and environment from the datastore
    fakerootcmd = d.getVar('FAKEROOTCMD')
    fakerootenv = d.getVar('FAKEROOTENV')
    # BUGFIX: kwargs must be expanded (**kwargs) - passing the dict as a
    # fourth positional argument raised TypeError since exec_fakeroot_no_d
    # only accepts three positional parameters. Also propagate the return
    # code so callers can check for failure.
    return exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs)
83
def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs):
    """Run *cmd* under pseudo given the pseudo command and environment directly.

    Returns the subprocess return code, or 2 if the pseudo executable is
    missing.
    """
    if not os.path.exists(fakerootcmd):
        # BUGFIX: the message contained a %s placeholder but the value was
        # never supplied; pass it as a lazy logging argument.
        logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built', fakerootcmd)
        return 2
    # Set up the appropriate environment: inherit ours, overlaid with the
    # VAR=value pairs from FAKEROOTENV
    newenv = dict(os.environ)
    for varvalue in fakerootenv.split():
        if '=' in varvalue:
            splitval = varvalue.split('=', 1)
            newenv[splitval[0]] = splitval[1]
    return subprocess.call("%s %s" % (fakerootcmd, cmd), env=newenv, **kwargs)
95
def setup_tinfoil(config_only=False, basepath=None, tracking=False):
    """Initialize tinfoil api from bitbake

    Arguments:
    config_only -- if True, only parse configuration (faster), not recipes
    basepath    -- directory to chdir into while locating/starting bitbake
    tracking    -- enable variable history tracking in the tinfoil instance

    Returns a prepared bb.tinfoil.Tinfoil instance; the caller is
    responsible for calling shutdown() on it.
    """
    import scriptpath
    # Remember the current directory so it can be restored afterwards;
    # bitbake needs to be located/started relative to the build directory
    orig_cwd = os.path.abspath(os.curdir)
    try:
        if basepath:
            os.chdir(basepath)
        bitbakepath = scriptpath.add_bitbake_lib_path()
        if not bitbakepath:
            logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
            sys.exit(1)

        # Import deferred until bitbake's lib dir is on sys.path
        import bb.tinfoil
        tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
        try:
            # Mirror devtool's effective log level into tinfoil's logger
            tinfoil.logger.setLevel(logger.getEffectiveLevel())
            tinfoil.prepare(config_only)
        except bb.tinfoil.TinfoilUIException:
            # UI failed to start - shut the cooker down before reporting
            tinfoil.shutdown()
            raise DevtoolError('Failed to start bitbake environment')
        except:
            # Any other failure: make sure the cooker process is stopped
            # before re-raising so we don't leak it
            tinfoil.shutdown()
            raise
    finally:
        # Always restore the original working directory
        os.chdir(orig_cwd)
    return tinfoil
122
def parse_recipe(config, tinfoil, pn, appends, filter_workspace=True):
    """Parse the specified recipe and return its datastore (None on error).

    Arguments:
    config           -- devtool configuration object (provides workspace_path)
    tinfoil          -- prepared tinfoil instance
    pn               -- recipe name to parse
    appends          -- whether to apply bbappend files
    filter_workspace -- if True, ignore appends from the workspace itself
    """
    # BUGFIX: the except clause below references bb.providers, but 'bb' was
    # never bound in this module's namespace (other functions only import it
    # function-locally), so a NoProvider error would surface as NameError.
    import bb.providers
    try:
        recipefile = tinfoil.get_recipe_file(pn)
    except bb.providers.NoProvider as e:
        logger.error(str(e))
        return None
    if appends:
        append_files = tinfoil.get_file_appends(recipefile)
        if filter_workspace:
            # Filter out appends from the workspace
            append_files = [path for path in append_files if
                            not path.startswith(config.workspace_path)]
    else:
        append_files = None
    try:
        rd = tinfoil.parse_recipe_file(recipefile, appends, append_files)
    except Exception as e:
        logger.error(str(e))
        return None
    return rd
144
def check_workspace_recipe(workspace, pn, checksrc=True, bbclassextend=False):
    """
    Check that a recipe is in the workspace and (optionally) that source
    is present.
    """
    # Resolve pn to the workspace entry, optionally matching BBCLASSEXTEND
    # variants (e.g. <pn>-native) back to their base recipe.
    workspacepn = None
    for recipe, value in workspace.items():
        if recipe == pn:
            workspacepn = pn
            break
        if bbclassextend:
            recipefile = value['recipefile']
            if recipefile and pn in get_bbclassextend_targets(recipefile, recipe):
                workspacepn = recipe
                break
    if workspacepn is None:
        raise DevtoolError("No recipe named '%s' in your workspace" % pn)

    if checksrc:
        srctree = workspace[workspacepn]['srctree']
        if not os.path.exists(srctree):
            raise DevtoolError("Source tree %s for recipe %s does not exist" % (srctree, workspacepn))
        if not os.listdir(srctree):
            raise DevtoolError("Source tree %s for recipe %s is empty" % (srctree, workspacepn))

    return workspacepn
174
def use_external_build(same_dir, no_same_dir, d):
    """
    Determine if we should use B!=S (separate build and source directories) or not

    Arguments:
    same_dir    -- True if the user forced B == S (--same-dir)
    no_same_dir -- True if the user forced B != S (--no-same-dir)
    d           -- recipe datastore

    Returns True if the source tree should also be used as the build
    directory.
    """
    # BUGFIX: bb.data is used below but 'bb' was never bound in this
    # module's namespace; import it explicitly to avoid a NameError.
    import bb.data
    b_is_s = True
    if no_same_dir:
        logger.info('Using separate build directory since --no-same-dir specified')
        b_is_s = False
    elif same_dir:
        logger.info('Using source tree as build directory since --same-dir specified')
    elif bb.data.inherits_class('autotools-brokensep', d):
        logger.info('Using source tree as build directory since recipe inherits autotools-brokensep')
    elif os.path.abspath(d.getVar('B')) == os.path.abspath(d.getVar('S')):
        logger.info('Using source tree as build directory since that would be the default for this recipe')
    else:
        b_is_s = False
    return b_is_s
192
def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
    """
    Set up the git repository for the source tree

    Arguments:
    repodir   -- directory containing the extracted source
    version   -- upstream version string (used in the initial commit message)
    devbranch -- development branch name to create and check out
    basetag   -- tag applied to the initial commit (default 'devtool-base')
    d         -- optional datastore used to pick up git author/committer
                 options
    """
    import bb.process
    import oe.patch
    # Initialise a repository and make the initial commit if the source
    # tree isn't already under git control
    if not os.path.exists(os.path.join(repodir, '.git')):
        bb.process.run('git init', cwd=repodir)
        bb.process.run('git config --local gc.autodetach 0', cwd=repodir)
        bb.process.run('git add -f -A .', cwd=repodir)
        commit_cmd = ['git']
        oe.patch.GitApplyTree.gitCommandUserOptions(commit_cmd, d=d)
        commit_cmd += ['commit', '-q']
        stdout, _ = bb.process.run('git status --porcelain', cwd=repodir)
        if not stdout:
            # Nothing staged - allow an empty commit so the branch exists
            commit_cmd.append('--allow-empty')
            commitmsg = "Initial empty commit with no upstream sources"
        elif version:
            commitmsg = "Initial commit from upstream at version %s" % version
        else:
            commitmsg = "Initial commit from upstream"
        commit_cmd += ['-m', commitmsg]
        bb.process.run(commit_cmd, cwd=repodir)

    # Ensure singletask.lock (as used by externalsrc.bbclass) is ignored by git
    gitinfodir = os.path.join(repodir, '.git', 'info')
    try:
        os.mkdir(gitinfodir)
    except FileExistsError:
        pass
    excludes = []
    excludefile = os.path.join(gitinfodir, 'exclude')
    try:
        with open(excludefile, 'r') as f:
            excludes = f.readlines()
    except FileNotFoundError:
        pass
    if 'singletask.lock\n' not in excludes:
        excludes.append('singletask.lock\n')
    with open(excludefile, 'w') as f:
        for line in excludes:
            f.write(line)

    bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
    bb.process.run('git tag -f %s' % basetag, cwd=repodir)

    # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now,
    # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe
    stdout, _ = bb.process.run("git status --porcelain", cwd=repodir)
    found = False
    for line in stdout.splitlines():
        # Untracked directories show up with a trailing slash
        if line.endswith("/"):
            new_dir = line.split()[1]
            for root, dirs, files in os.walk(os.path.join(repodir, new_dir)):
                if ".git" in dirs + files:
                    # Nested repository: register it as a submodule of its
                    # parent directory using its first remote's URL
                    (stdout, _) = bb.process.run('git remote', cwd=root)
                    remote = stdout.splitlines()[0]
                    (stdout, _) = bb.process.run('git remote get-url %s' % remote, cwd=root)
                    remote_url = stdout.splitlines()[0]
                    # NOTE(review): this logger.error call looks like leftover
                    # debug output rather than a real error - confirm intent
                    logger.error(os.path.relpath(os.path.join(root, ".."), root))
                    bb.process.run('git submodule add %s %s' % (remote_url, os.path.relpath(root, os.path.join(root, ".."))), cwd=os.path.join(root, ".."))
                    found = True
                if found:
                    # Commit the .gitmodules change as an "ignored" commit so
                    # it is excluded from generated patches
                    oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d)
                    found = False
    if os.path.exists(os.path.join(repodir, '.gitmodules')):
        # Tag the base revision inside every submodule too
        bb.process.run('git submodule foreach --recursive "git tag -f %s"' % basetag, cwd=repodir)
260
def recipe_to_append(recipefile, config, wildcard=False):
    """
    Convert a recipe file to a bbappend file path within the workspace.
    NOTE: if the bbappend already exists, you should be using
    workspace[args.recipename]['bbappend'] instead of calling this
    function.
    """
    base = os.path.splitext(os.path.basename(recipefile))[0]
    if wildcard:
        # Replace the version suffix (everything after the first '_')
        # with '%' so the append matches any version
        base = re.sub(r'_.*', '_%', base)
    return os.path.join(config.workspace_path, 'appends', base + '.bbappend')
274
def get_bbclassextend_targets(recipefile, pn):
    """
    Cheap function to get BBCLASSEXTEND and then convert that to the
    list of targets that would result.
    """
    import bb.utils

    # Capture the raw BBCLASSEXTEND value without fully parsing the recipe
    values = {}
    def capture_varfunc(varname, origvalue, op, newlines):
        values[varname] = origvalue
        return origvalue, None, 0, True
    with open(recipefile, 'r') as f:
        bb.utils.edit_metadata(f, ['BBCLASSEXTEND'], capture_varfunc)

    # nativesdk is prefixed to the recipe name; the other variants are
    # suffixed
    targets = []
    for variant in values.get('BBCLASSEXTEND', '').split():
        if variant == 'nativesdk':
            targets.append('%s-%s' % (variant, pn))
        elif variant in ('native', 'cross', 'crosssdk'):
            targets.append('%s-%s' % (pn, variant))
    return targets
298
def replace_from_file(path, old, new):
    """Replace occurrences of *old* with *new* in the file at *path*.

    The substitution is applied line by line and the result written back
    with a single trailing newline. Does nothing if *old* is None or the
    file does not exist.
    """
    # In case old is None, return immediately
    if old is None:
        return
    try:
        with open(path) as f:
            rdata = f.read()
    except FileNotFoundError:
        # BUGFIX: the original caught IOError and compared e.errno against
        # errno.ENOENT, but the errno module was never imported in this
        # file, so a missing file raised NameError instead of returning
        # quietly. FileNotFoundError expresses the intent directly; other
        # I/O errors still propagate.
        return

    new_contents = [line.replace(old, new) for line in rdata.splitlines()]
    with open(path, "w") as f:
        f.write("\n".join(new_contents).rstrip() + "\n")
335
336
def update_unlockedsigs(basepath, workspace, fixed_setup, extra=None):
    """ This function will make unlocked-sigs.inc match the recipes in the
    workspace plus any extras we want unlocked.

    Arguments:
    basepath    -- base directory containing the conf/ directory
    workspace   -- workspace dict (keys are recipe names to unlock)
    fixed_setup -- True when running within the extensible SDK
    extra       -- optional list of additional recipe names to unlock
    """

    if not fixed_setup:
        # Only need to write this out within the eSDK
        return

    # BUGFIX: bb.utils is used below but 'bb' was never bound in this
    # module's namespace; import it here (after the early return so callers
    # outside the eSDK don't need bitbake importable).
    import bb.utils

    if not extra:
        extra = []

    confdir = os.path.join(basepath, 'conf')
    unlockedsigs = os.path.join(confdir, 'unlocked-sigs.inc')

    # Get current unlocked list if any
    values = {}
    def get_unlockedsigs_varfunc(varname, origvalue, op, newlines):
        values[varname] = origvalue
        return origvalue, None, 0, True
    if os.path.exists(unlockedsigs):
        with open(unlockedsigs, 'r') as f:
            bb.utils.edit_metadata(f, ['SIGGEN_UNLOCKED_RECIPES'], get_unlockedsigs_varfunc)
    unlocked = sorted(values.get('SIGGEN_UNLOCKED_RECIPES', []))

    # If the new list is different to the current list, write it out
    newunlocked = sorted(list(workspace.keys()) + extra)
    if unlocked != newunlocked:
        bb.utils.mkdirhier(confdir)
        with open(unlockedsigs, 'w') as f:
            f.write("# DO NOT MODIFY! YOUR CHANGES WILL BE LOST.\n" +
                    "# This layer was created by the OpenEmbedded devtool" +
                    " utility in order to\n" +
                    "# contain recipes that are unlocked.\n")

            f.write('SIGGEN_UNLOCKED_RECIPES += "\\\n')
            for pn in newunlocked:
                f.write(' ' + pn)
            f.write('"')
375
def check_prerelease_version(ver, operation):
    """Warn the user if *ver* looks like a pre-release version string.

    Arguments:
    ver       -- version string being set
    operation -- name of the devtool operation in progress (used in the
                 warning text, e.g. 'upgrade')
    """
    if 'pre' in ver or 'rc' in ver:
        # BUGFIX: fixed the typo 'recommmended' in the user-visible message
        logger.warning('Version "%s" looks like a pre-release version. '
                       'If that is the case, in order to ensure that the '
                       'version doesn\'t appear to go backwards when you '
                       'later upgrade to the final release version, it is '
                       'recommended that instead you use '
                       '<current version>+<pre-release version> e.g. if '
                       'upgrading from 1.9 to 2.0-rc2 use "1.9+2.0-rc2". '
                       'If you prefer not to reset and re-try, you can change '
                       'the version after %s succeeds using "devtool rename" '
                       'with -V/--version.' % (ver, operation))
388
def check_git_repo_dirty(repodir):
    """Check if a git repository is clean or not.

    Returns the output of 'git status --porcelain'; an empty string means
    the repository is clean.
    """
    # BUGFIX: bb.process is used below but 'bb' was never bound in this
    # module's namespace; import it explicitly to avoid a NameError.
    import bb.process
    stdout, _ = bb.process.run('git status --porcelain', cwd=repodir)
    return stdout
393
def check_git_repo_op(srctree, ignoredirs=None):
    """Check if a git repository is in the middle of a rebase or am/apply.

    Raises DevtoolError if so. Repositories whose toplevel directory is
    listed in *ignoredirs* are skipped.
    """
    # BUGFIX: bb.process is used below but 'bb' was never bound in this
    # module's namespace; import it explicitly to avoid a NameError.
    import bb.process
    stdout, _ = bb.process.run('git rev-parse --show-toplevel', cwd=srctree)
    topleveldir = stdout.strip()
    if ignoredirs and topleveldir in ignoredirs:
        return
    gitdir = os.path.join(topleveldir, '.git')
    if os.path.exists(os.path.join(gitdir, 'rebase-merge')):
        raise DevtoolError("Source tree %s appears to be in the middle of a rebase - please resolve this first" % srctree)
    if os.path.exists(os.path.join(gitdir, 'rebase-apply')):
        raise DevtoolError("Source tree %s appears to be in the middle of 'git am' or 'git apply' - please resolve this first" % srctree)
diff --git a/scripts/lib/devtool/build.py b/scripts/lib/devtool/build.py
deleted file mode 100644
index 935ffab46c..0000000000
--- a/scripts/lib/devtool/build.py
+++ /dev/null
@@ -1,92 +0,0 @@
1# Development tool - build command plugin
2#
3# Copyright (C) 2014-2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool build plugin"""
8
9import os
10import bb
11import logging
12import argparse
13import tempfile
14from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
15from devtool import parse_recipe
16
17logger = logging.getLogger('devtool')
18
19
def _set_file_values(fn, values):
    """Set or append variable assignments in the file *fn*.

    *values* maps variable names to their new values; variables already
    present in the file are edited in place, missing ones are appended.
    Returns True if the file was modified.
    """
    remaining = list(values.keys())

    def varfunc(varname, origvalue, op, newlines):
        newvalue = values.get(varname, origvalue)
        # BUGFIX: a variable may occur more than once in the file, in which
        # case edit_metadata calls varfunc repeatedly for it; guard the
        # remove so the second occurrence doesn't raise ValueError.
        if varname in remaining:
            remaining.remove(varname)
        return (newvalue, '=', 0, True)

    with open(fn, 'r') as f:
        (updated, newlines) = bb.utils.edit_metadata(f, values, varfunc)

    # Append any variables that weren't already present in the file
    for item in remaining:
        updated = True
        newlines.append('%s = "%s"' % (item, values[item]))

    if updated:
        with open(fn, 'w') as f:
            f.writelines(newlines)
    return updated
39
40def _get_build_tasks(config):
41 tasks = config.get('Build', 'build_task', 'populate_sysroot,packagedata').split(',')
42 return ['do_%s' % task.strip() for task in tasks]
43
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand

    Builds the recipe named by args.recipename with bitbake, or cleans it
    (-c), optionally disabling make parallelism (-s). Returns 0 on
    success, or bitbake's exit code on failure.
    """
    # Resolve the workspace recipe name; bbclassextend=True allows variant
    # targets such as <pn>-native to map back to their base recipe
    workspacepn = check_workspace_recipe(workspace, args.recipename, bbclassextend=True)
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            return 1
        # Does this recipe define a do_deploy task?
        deploytask = 'do_deploy' in rd.getVar('__BBTASKS')
    finally:
        tinfoil.shutdown()

    if args.clean:
        # use clean instead of cleansstate to avoid messing things up in eSDK
        build_tasks = ['do_clean']
    else:
        build_tasks = _get_build_tasks(config)
        if deploytask:
            build_tasks.append('do_deploy')

    bbappend = workspace[workspacepn]['bbappend']
    if args.disable_parallel_make:
        logger.info("Disabling 'make' parallelism")
        # Temporarily force PARALLEL_MAKE to empty via the workspace bbappend
        _set_file_values(bbappend, {'PARALLEL_MAKE': ''})
    try:
        bbargs = []
        for task in build_tasks:
            # Native recipes have no packaging tasks
            if args.recipename.endswith('-native') and 'package' in task:
                continue
            bbargs.append('%s:%s' % (args.recipename, task))
        exec_build_env_command(config.init_path, basepath, 'bitbake %s' % ' '.join(bbargs), watch=True)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode
    finally:
        if args.disable_parallel_make:
            # Restore PARALLEL_MAKE in the bbappend
            _set_file_values(bbappend, {'PARALLEL_MAKE': None})

    return 0
83
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    # Describe how far the build goes, based on the configured build tasks
    tasks_desc = ', '.join(_get_build_tasks(context.config))
    parser_build = subparsers.add_parser(
        'build',
        help='Build a recipe',
        description='Builds the specified recipe using bitbake (up to and including %s)' % tasks_desc,
        group='working', order=50)
    parser_build.add_argument('recipename', help='Recipe to build')
    parser_build.add_argument('-s', '--disable-parallel-make', action="store_true",
                              help='Disable make parallelism')
    parser_build.add_argument('-c', '--clean', action='store_true',
                              help='clean up recipe building results')
    parser_build.set_defaults(func=build)
diff --git a/scripts/lib/devtool/build_image.py b/scripts/lib/devtool/build_image.py
deleted file mode 100644
index 980f90ddd6..0000000000
--- a/scripts/lib/devtool/build_image.py
+++ /dev/null
@@ -1,164 +0,0 @@
1# Development tool - build-image plugin
2#
3# Copyright (C) 2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8"""Devtool plugin containing the build-image subcommand."""
9
10import os
11import errno
12import logging
13
14from bb.process import ExecutionError
15from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
16
17logger = logging.getLogger('devtool')
18
class TargetNotImageError(Exception):
    """Raised when the requested build target is not an image recipe."""
21
def _get_packages(tinfoil, workspace, config):
    """Get list of packages from recipes in the workspace.

    Only target (non-native/cross) recipes that produce a package with the
    same name as the recipe are included.
    """
    result = []
    for recipe in workspace:
        data = parse_recipe(config, tinfoil, recipe, True)
        # Robustness fix: parse_recipe returns None on a parse failure;
        # skip such recipes instead of crashing with AttributeError.
        if not data:
            logger.warning("Skipping recipe %s as it could not be parsed", recipe)
            continue
        if 'class-target' in data.getVar('OVERRIDES').split(':'):
            if recipe in data.getVar('PACKAGES').split():
                result.append(recipe)
            else:
                logger.warning("Skipping recipe %s as it doesn't produce a "
                               "package with the same name", recipe)
    return result
34
def build_image(args, config, basepath, workspace):
    """Entry point for the devtool 'build-image' subcommand."""

    image = args.imagename
    auto_image = False
    if not image:
        # No image given on the command line - fall back to the first
        # configured SDK target, if any
        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
        if sdk_targets:
            image = sdk_targets[0]
            auto_image = True
    if not image:
        raise DevtoolError('Unable to determine image to build, please specify one')

    add_packages = args.add_packages.split(',') if args.add_packages else None
    try:
        result, outputdir = build_image_task(config, basepath, workspace, image, add_packages)
    except TargetNotImageError:
        if auto_image:
            raise DevtoolError('Unable to determine image to build, please specify one')
        raise DevtoolError('Specified recipe %s is not an image recipe' % image)

    if result == 0:
        logger.info('Successfully built %s. You can find output files in %s'
                    % (image, outputdir))
    return result
64
def build_image_task(config, basepath, workspace, image, add_packages=None, task=None, extra_append=None):
    """Build an image (or an image-related task such as populate_sdk),
    temporarily extending it with packages from the workspace via a
    generated bbappend.

    Arguments:
    config       -- devtool configuration object
    basepath     -- devtool base path
    workspace    -- workspace dict
    image        -- image recipe name to build
    add_packages -- explicit list of packages to add instead of deriving
                    them from the workspace
    task         -- optional specific task to run (e.g. 'populate_sdk_ext')
    extra_append -- optional extra lines to write into the bbappend

    Returns a (result, outputdir) tuple; result is 0 on success.
    Raises TargetNotImageError if the recipe is not an image recipe.
    """
    # remove <image>.bbappend to make sure setup_tinfoil doesn't
    # break because of it
    target_basename = config.get('SDK', 'target_basename', '')
    if target_basename:
        appendfile = os.path.join(config.workspace_path, 'appends',
                                  '%s.bbappend' % target_basename)
        try:
            os.unlink(appendfile)
        except OSError as exc:
            # Only ignore "file doesn't exist"
            if exc.errno != errno.ENOENT:
                raise

    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, image, True)
        if not rd:
            # Error already shown
            return (1, None)
        # NOTE(review): 'bb' (bb.data / bb.utils below) does not appear to be
        # bound at module level in this file - confirm bb is importable here
        if not bb.data.inherits_class('image', rd):
            raise TargetNotImageError()

        # Get the actual filename used and strip the .bb and full path
        target_basename = rd.getVar('FILE')
        target_basename = os.path.splitext(os.path.basename(target_basename))[0]
        config.set('SDK', 'target_basename', target_basename)
        config.write()

        appendfile = os.path.join(config.workspace_path, 'appends',
                                  '%s.bbappend' % target_basename)

        outputdir = None
        try:
            # Work out which packages to add to the image, if any
            if workspace or add_packages:
                if add_packages:
                    packages = add_packages
                else:
                    packages = _get_packages(tinfoil, workspace, config)
            else:
                packages = None
            if not task:
                if not packages and not add_packages and workspace:
                    logger.warning('No recipes in workspace, building image %s unmodified', image)
                elif not packages:
                    logger.warning('No packages to add, building image %s unmodified', image)

            # Write a temporary bbappend adding the packages and/or any
            # extra lines requested by the caller
            if packages or extra_append:
                bb.utils.mkdirhier(os.path.dirname(appendfile))
                with open(appendfile, 'w') as afile:
                    if packages:
                        # include packages from workspace recipes into the image
                        afile.write('IMAGE_INSTALL:append = " %s"\n' % ' '.join(packages))
                        if not task:
                            logger.info('Building image %s with the following '
                                        'additional packages: %s', image, ' '.join(packages))
                    if extra_append:
                        for line in extra_append:
                            afile.write('%s\n' % line)

            # SDK tasks deploy to SDK_DEPLOY; everything else to DEPLOY_DIR_IMAGE
            if task in ['populate_sdk', 'populate_sdk_ext']:
                outputdir = rd.getVar('SDK_DEPLOY')
            else:
                outputdir = rd.getVar('DEPLOY_DIR_IMAGE')

            # Shut tinfoil down before invoking bitbake (only one server can
            # run at a time); clear the local so the outer finally doesn't
            # shut it down twice
            tmp_tinfoil = tinfoil
            tinfoil = None
            tmp_tinfoil.shutdown()

            options = ''
            if task:
                options += '-c %s' % task

            # run bitbake to build image (or specified task)
            try:
                exec_build_env_command(config.init_path, basepath,
                                       'bitbake %s %s' % (options, image), watch=True)
            except ExecutionError as err:
                return (err.exitcode, None)
            finally:
                # Always remove the temporary bbappend again
                if os.path.isfile(appendfile):
                    os.unlink(appendfile)
        finally:
            if tinfoil:
                tinfoil.shutdown()
    return (0, outputdir)
150
151
def register_commands(subparsers, context):
    """Register devtool subcommands from the build-image plugin"""
    parser = subparsers.add_parser(
        'build-image',
        help='Build image including workspace recipe packages',
        description='Builds an image, extending it to include '
                    'packages from recipes in the workspace',
        group='testbuild', order=-10)
    parser.add_argument('imagename', nargs='?', help='Image recipe to build')
    parser.add_argument('-p', '--add-packages', metavar='PACKAGES',
                        help='Instead of adding packages for the '
                             'entire workspace, specify packages to be added to the image '
                             '(separate multiple packages by commas)')
    parser.set_defaults(func=build_image)
diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py
deleted file mode 100644
index 1cd4831d2b..0000000000
--- a/scripts/lib/devtool/build_sdk.py
+++ /dev/null
@@ -1,55 +0,0 @@
1# Development tool - build-sdk command plugin
2#
3# Copyright (C) 2015-2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import subprocess
10import logging
11import glob
12import shutil
13import errno
14import sys
15import tempfile
16from devtool import DevtoolError
17from devtool import build_image
18
19logger = logging.getLogger('devtool')
20
21
def build_sdk(args, config, basepath, workspace):
    """Entry point for the devtool build-sdk command"""

    # The derivative SDK is built for the first configured SDK target
    sdk_targets = config.get('SDK', 'sdk_targets', '').split()
    if not sdk_targets:
        raise DevtoolError('Unable to determine image to build SDK for')
    image = sdk_targets[0]

    # Mark the result as a derivative SDK
    extra_append = ['SDK_DERIVATIVE = "1"']
    try:
        result, outputdir = build_image.build_image_task(
            config, basepath, workspace, image,
            task='populate_sdk_ext', extra_append=extra_append)
    except build_image.TargetNotImageError:
        raise DevtoolError('Unable to determine image to build SDK for')

    if result == 0:
        logger.info('Successfully built SDK. You can find output files in %s'
                    % outputdir)
    return result
46
47
def register_commands(subparsers, context):
    """Register devtool subcommands"""
    # build-sdk only makes sense when running inside the extensible SDK
    if not context.fixed_setup:
        return
    parser_build_sdk = subparsers.add_parser(
        'build-sdk',
        help='Build a derivative SDK of this one',
        description='Builds an extensible SDK based upon this one and the items in your workspace',
        group='advanced')
    parser_build_sdk.set_defaults(func=build_sdk)
diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py
deleted file mode 100644
index b5ca8f2c2f..0000000000
--- a/scripts/lib/devtool/deploy.py
+++ /dev/null
@@ -1,378 +0,0 @@
1# Development tool - deploy/undeploy command plugin
2#
3# Copyright (C) 2014-2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool plugin containing the deploy subcommands"""
8
9import logging
10import os
11import shutil
12import subprocess
13import tempfile
14
15import bb.utils
16import argparse_oe
17import oe.types
18
19from devtool import exec_fakeroot_no_d, setup_tinfoil, check_workspace_recipe, DevtoolError
20
21logger = logging.getLogger('devtool')
22
23deploylist_path = '/.devtool'
24
def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=False, nopreserve=False, nocheckspace=False):
    """
    Prepare a shell script for running on the target to
    deploy/undeploy files. We have to be careful what we put in this
    script - only commands that are likely to be available on the
    target are suitable (the target might be constrained, e.g. using
    busybox rather than bash with coreutils).

    Script positional arguments (as invoked over ssh):
        $1 -- recipe name (also the manifest base name)
        $2 -- destination directory (deploy only)
        $3 -- path to the file list written by deploy_no_d (deploy only)

    Arguments:
        deploy -- True to generate a deploy script, False for undeploy
        verbose -- tee the extracted file list to stdout as well
        dryrun -- only print what would be removed, change nothing
        undeployall -- loop over every recorded manifest on the target
        nopreserve -- do not save pre-existing files before overwriting
        nocheckspace -- skip the free-space check before deploying
    """
    lines = []
    lines.append('#!/bin/sh')
    lines.append('set -e')
    if undeployall:
        # Yes, I know this is crude - but it does work
        lines.append('for entry in %s/*.list; do' % deploylist_path)
        lines.append('[ ! -f $entry ] && exit')
        # Re-set $1 from the manifest file name so the body below works
        # unchanged for each recorded recipe
        lines.append('set `basename $entry | sed "s/.list//"`')
    if dryrun:
        if not deploy:
            lines.append('echo "Previously deployed files for $1:"')
    lines.append('manifest="%s/$1.list"' % deploylist_path)
    lines.append('preservedir="%s/$1.preserve"' % deploylist_path)
    lines.append('if [ -f $manifest ] ; then')
    # Read manifest in reverse and delete files / remove empty dirs
    lines.append('    sed \'1!G;h;$!d\' $manifest | while read file')
    lines.append('    do')
    if dryrun:
        lines.append('        if [ ! -d $file ] ; then')
        lines.append('            echo $file')
        lines.append('        fi')
    else:
        lines.append('        if [ -d $file ] ; then')
        # Avoid deleting a preserved directory in case it has special perms
        lines.append('            if [ ! -d $preservedir/$file ] ; then')
        lines.append('                rmdir $file > /dev/null 2>&1 || true')
        lines.append('            fi')
        lines.append('        else')
        lines.append('            rm -f $file')
        lines.append('        fi')
    lines.append('    done')
    if not dryrun:
        lines.append('    rm $manifest')
    if not deploy and not dryrun:
        # May as well remove all traces
        lines.append('    rmdir `dirname $manifest` > /dev/null 2>&1 || true')
    lines.append('fi')

    if deploy:
        if not nocheckspace:
            # Check for available space
            # FIXME This doesn't take into account files spread across multiple
            # partitions, but doing that is non-trivial
            # Find the part of the destination path that exists
            lines.append('checkpath="$2"')
            lines.append('while [ "$checkpath" != "/" ] && [ ! -e $checkpath ]')
            lines.append('do')
            lines.append('    checkpath=`dirname "$checkpath"`')
            lines.append('done')
            lines.append(r'freespace=$(df -P $checkpath | sed -nre "s/^(\S+\s+){3}([0-9]+).*/\2/p")')
            # First line of the file is the total space
            lines.append('total=`head -n1 $3`')
            lines.append('if [ $total -gt $freespace ] ; then')
            lines.append('    echo "ERROR: insufficient space on target (available ${freespace}, needed ${total})"')
            lines.append('    exit 1')
            lines.append('fi')
        if not nopreserve:
            # Preserve any files that exist. Note that this will add to the
            # preserved list with successive deployments if the list of files
            # deployed changes, but because we've deleted any previously
            # deployed files at this point it will never preserve anything
            # that was deployed, only files that existed prior to any deploying
            # (which makes the most sense)
            lines.append('cat $3 | sed "1d" | while read file fsize')
            lines.append('do')
            lines.append('    if [ -e $file ] ; then')
            lines.append('    dest="$preservedir/$file"')
            lines.append('    mkdir -p `dirname $dest`')
            lines.append('    mv $file $dest')
            lines.append('    fi')
            lines.append('done')
        lines.append('rm $3')
        lines.append('mkdir -p `dirname $manifest`')
        lines.append('mkdir -p $2')
        # The tar stream is piped in over ssh by the caller; the extracted
        # file names become the new manifest
        if verbose:
            lines.append('    tar xv -C $2 -f - | tee $manifest')
        else:
            lines.append('    tar xv -C $2 -f - > $manifest')
        # Rewrite "./" prefixes to absolute target paths in the manifest
        lines.append('sed -i "s!^./!$2!" $manifest')
    elif not dryrun:
        # Put any preserved files back
        lines.append('if [ -d $preservedir ] ; then')
        lines.append('    cd $preservedir')
        # find from busybox might not have -exec, so we don't use that
        lines.append('    find . -type f | while read file')
        lines.append('    do')
        lines.append('        mv $file /$file')
        lines.append('    done')
        lines.append('    cd /')
        lines.append('    rm -rf $preservedir')
        lines.append('fi')

    if undeployall:
        if not dryrun:
            lines.append('echo "NOTE: Successfully undeployed $1"')
        lines.append('done')

    # Delete the script itself
    lines.append('rm $0')
    lines.append('')

    return '\n'.join(lines)
135
def deploy(args, config, basepath, workspace):
    """Entry point for the devtool 'deploy' subcommand.

    Parses the recipe via tinfoil, captures the datastore values needed
    for deployment, then delegates the actual work to deploy_no_d().
    """
    import oe.utils

    check_workspace_recipe(workspace, args.recipename, checksrc=False)

    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        try:
            recipe_data = tinfoil.parse_recipe(args.recipename)
        except Exception as e:
            raise DevtoolError('Exception parsing recipe %s: %s' %
                               (args.recipename, e))
        # Grab everything we need out of the datastore before shutting
        # tinfoil down, so deploy_no_d() can run without a datastore.
        srcdir = recipe_data.getVar('D')
        workdir = recipe_data.getVar('WORKDIR')
        path = recipe_data.getVar('PATH')
        strip_cmd = recipe_data.getVar('STRIP')
        libdir = recipe_data.getVar('libdir')
        base_libdir = recipe_data.getVar('base_libdir')
        max_process = oe.utils.get_bb_number_threads(recipe_data)
        fakerootcmd = recipe_data.getVar('FAKEROOTCMD')
        fakerootenv = recipe_data.getVar('FAKEROOTENV')
    finally:
        tinfoil.shutdown()

    return deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir,
                       max_process, fakerootcmd, fakerootenv, args)
163
def deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args):
    """Deploy a recipe's installed output to a live target over ssh.

    Works from pre-extracted datastore values so no bitbake datastore is
    needed at this point (hence the _no_d suffix).

    Arguments:
        srcdir -- the recipe's ${D} (do_install output)
        workdir -- the recipe's ${WORKDIR} (used for the stripped copy)
        path -- ${PATH} value to append so strip tools can be found
        strip_cmd -- ${STRIP} command
        libdir, base_libdir -- library directories passed to strip_execs
        max_process -- parallelism for stripping
        fakerootcmd, fakerootenv -- pseudo command/environment
        args -- parsed deploy-target command-line arguments

    Returns 0 on success; raises DevtoolError on failure.
    """
    import math
    import oe.package

    # args.target is user@hostname[:destdir]
    try:
        host, destdir = args.target.split(':')
    except ValueError:
        destdir = '/'
    else:
        args.target = host
    if not destdir.endswith('/'):
        destdir += '/'

    recipe_outdir = srcdir
    if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
        raise DevtoolError('No files to deploy - have you built the %s '
                           'recipe? If so, the install step has not installed '
                           'any files.' % args.recipename)

    if args.strip and not args.dry_run:
        # Fakeroot copy to new destination so we never strip ${D} in place
        srcdir = recipe_outdir
        recipe_outdir = os.path.join(workdir, 'devtool-deploy-target-stripped')
        if os.path.isdir(recipe_outdir):
            exec_fakeroot_no_d(fakerootcmd, fakerootenv, "rm -rf %s" % recipe_outdir, shell=True)
        exec_fakeroot_no_d(fakerootcmd, fakerootenv, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
        os.environ['PATH'] = ':'.join([os.environ['PATH'], path or ''])
        oe.package.strip_execs(args.recipename, recipe_outdir, strip_cmd, libdir, base_libdir, max_process)

    filelist = []
    inodes = set()
    ftotalsize = 0
    for root, _, files in os.walk(recipe_outdir):
        for fn in files:
            fstat = os.lstat(os.path.join(root, fn))
            # Get the size in kiB (since we'll be comparing it to the output of du -k)
            # MUST use lstat() here not stat() or getfilesize() since we don't want to
            # dereference symlinks
            if fstat.st_ino in inodes:
                # Additional hardlink to a file already counted: costs nothing
                fsize = 0
            else:
                fsize = int(math.ceil(float(fstat.st_size)/1024))
                inodes.add(fstat.st_ino)
            ftotalsize += fsize
            # The path as it would appear on the target
            fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
            filelist.append((fpath, fsize))

    if args.dry_run:
        print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
        for item, _ in filelist:
            print('  %s' % item)
        return 0

    extraoptions = ''
    if args.no_host_check:
        extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
    if not args.show_status:
        extraoptions += ' -q'

    scp_sshexec = ''
    ssh_sshexec = 'ssh'
    if args.ssh_exec:
        scp_sshexec = "-S %s" % args.ssh_exec
        ssh_sshexec = args.ssh_exec
    scp_port = ''
    ssh_port = ''
    if args.port:
        scp_port = "-P %s" % args.port
        ssh_port = "-p %s" % args.port

    if args.key:
        extraoptions += ' -i %s' % args.key

    # In order to delete previously deployed files and have the manifest file on
    # the target, we write out a shell script and then copy it to the target
    # so we can then run it (piping tar output to it).
    # (We cannot use scp here, because it doesn't preserve symlinks.)
    tmpdir = tempfile.mkdtemp(prefix='devtool')
    try:
        tmpscript = '/tmp/devtool_deploy.sh'
        tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
        shellscript = _prepare_remote_script(deploy=True,
                                             verbose=args.show_status,
                                             nopreserve=args.no_preserve,
                                             nocheckspace=args.no_check_space)
        # Write out the script to a file
        with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
            f.write(shellscript)
        # Write out the file list (first line is the total size in kiB)
        with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
            f.write('%d\n' % ftotalsize)
            for fpath, fsize in filelist:
                f.write('%s %d\n' % (fpath, fsize))
        # Copy them to the target
        ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
        if ret != 0:
            raise DevtoolError('Failed to copy script to %s - rerun with -s to '
                               'get a complete error message' % args.target)
    finally:
        shutil.rmtree(tmpdir)

    # Now run the script, piping the tarred-up output files into it
    ret = exec_fakeroot_no_d(fakerootcmd, fakerootenv, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
    if ret != 0:
        raise DevtoolError('Deploy failed - rerun with -s to get a complete '
                           'error message')

    logger.info('Successfully deployed %s' % recipe_outdir)

    # NOTE: the original code rebuilt a files_list of deployed paths here and
    # then discarded it (dead code left over from a removed manifest writer);
    # it has been removed.
    return 0
281
def undeploy(args, config, basepath, workspace):
    """Entry point for the devtool 'undeploy' subcommand.

    Generates the remote undeploy script, copies it to the target and
    runs it there to remove previously deployed files.
    """
    # Exactly one of a recipe name or -a/--all must be supplied
    if args.all and args.recipename:
        raise argparse_oe.ArgumentUsageError('Cannot specify -a/--all with a recipe name', 'undeploy-target')
    elif not args.recipename and not args.all:
        raise argparse_oe.ArgumentUsageError('If you don\'t specify a recipe, you must specify -a/--all', 'undeploy-target')

    extraoptions = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' if args.no_host_check else ''
    if not args.show_status:
        extraoptions += ' -q'

    scp_sshexec = "-S %s" % args.ssh_exec if args.ssh_exec else ''
    ssh_sshexec = args.ssh_exec if args.ssh_exec else 'ssh'
    scp_port = "-P %s" % args.port if args.port else ''
    ssh_port = "-p %s" % args.port if args.port else ''

    # Drop any :destdir suffix - only the host part matters here
    args.target = args.target.split(':')[0]

    tmpdir = tempfile.mkdtemp(prefix='devtool')
    try:
        tmpscript = '/tmp/devtool_undeploy.sh'
        shellscript = _prepare_remote_script(deploy=False, dryrun=args.dry_run, undeployall=args.all)
        # Write out the script to a file
        with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
            f.write(shellscript)
        # Copy it to the target
        scp_cmd = "scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript))
        if subprocess.call(scp_cmd, shell=True) != 0:
            raise DevtoolError('Failed to copy script to %s - rerun with -s to '
                               'get a complete error message' % args.target)
    finally:
        shutil.rmtree(tmpdir)

    # Now run the script
    ssh_cmd = '%s %s %s %s \'sh %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename)
    if subprocess.call(ssh_cmd, shell=True) != 0:
        raise DevtoolError('Undeploy failed - rerun with -s to get a complete '
                           'error message')

    if not args.all and not args.dry_run:
        logger.info('Successfully undeployed %s' % args.recipename)
    return 0
332
333
def register_commands(subparsers, context):
    """Register devtool subcommands from the deploy plugin"""

    # deploy-target
    deploy_parser = subparsers.add_parser('deploy-target',
                                          help='Deploy recipe output files to live target machine',
                                          description='Deploys a recipe\'s build output (i.e. the output of the do_install task) to a live target machine over ssh. By default, any existing files will be preserved instead of being overwritten and will be restored if you run devtool undeploy-target. Note: this only deploys the recipe itself and not any runtime dependencies, so it is assumed that those have been installed on the target beforehand.',
                                          group='testbuild')
    deploy_parser.add_argument('recipename', help='Recipe to deploy')
    deploy_parser.add_argument('target', help='Live target machine running an ssh server: user@hostname[:destdir]')
    deploy_parser.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
    deploy_parser.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
    deploy_parser.add_argument('-n', '--dry-run', help='List files to be deployed only', action='store_true')
    deploy_parser.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
    deploy_parser.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true')
    deploy_parser.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
    deploy_parser.add_argument('-P', '--port', help='Specify port to use for connection to the target')
    deploy_parser.add_argument('-I', '--key',
                               help='Specify ssh private key for connection to the target')

    # -S/--strip and --no-strip share the 'strip' destination; the default
    # comes from the [Deploy] section of the devtool configuration
    strip_group = deploy_parser.add_mutually_exclusive_group(required=False)
    strip_group.add_argument('-S', '--strip',
                             help='Strip executables prior to deploying (default: %(default)s). '
                                  'The default value of this option can be controlled by setting the strip option in the [Deploy] section to True or False.',
                             default=oe.types.boolean(context.config.get('Deploy', 'strip', default='0')),
                             action='store_true')
    strip_group.add_argument('--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')

    deploy_parser.set_defaults(func=deploy)

    # undeploy-target
    undeploy_parser = subparsers.add_parser('undeploy-target',
                                            help='Undeploy recipe output files in live target machine',
                                            description='Un-deploys recipe output files previously deployed to a live target machine by devtool deploy-target.',
                                            group='testbuild')
    undeploy_parser.add_argument('recipename', help='Recipe to undeploy (if not using -a/--all)', nargs='?')
    undeploy_parser.add_argument('target', help='Live target machine running an ssh server: user@hostname')
    undeploy_parser.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
    undeploy_parser.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
    undeploy_parser.add_argument('-a', '--all', help='Undeploy all recipes deployed on the target', action='store_true')
    undeploy_parser.add_argument('-n', '--dry-run', help='List files to be undeployed only', action='store_true')
    undeploy_parser.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
    undeploy_parser.add_argument('-P', '--port', help='Specify port to use for connection to the target')
    undeploy_parser.add_argument('-I', '--key',
                                 help='Specify ssh private key for connection to the target')

    undeploy_parser.set_defaults(func=undeploy)
diff --git a/scripts/lib/devtool/export.py b/scripts/lib/devtool/export.py
deleted file mode 100644
index 01174edae5..0000000000
--- a/scripts/lib/devtool/export.py
+++ /dev/null
@@ -1,109 +0,0 @@
1# Development tool - export command plugin
2#
3# Copyright (C) 2014-2017 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool export plugin"""
8
9import os
10import argparse
11import tarfile
12import logging
13import datetime
14import json
15
16logger = logging.getLogger('devtool')
17
18# output files
19default_arcname_prefix = "workspace-export"
20metadata = '.export_metadata'
21
def export(args, config, basepath, workspace):
    """Entry point for the devtool 'export' subcommand.

    Archives the workspace (or the subset selected with --include /
    --exclude) into a gzipped tar file, together with a metadata file
    describing the workspace so it can be imported later.

    Returns 0 on success (or nothing to do), 1 on user error.
    """

    def add_metadata(tar):
        """Archive the workspace object"""
        # finally store the workspace metadata
        with open(metadata, 'w') as fd:
            fd.write(json.dumps((config.workspace_path, workspace)))
        tar.add(metadata)
        os.unlink(metadata)

    def add_recipe(tar, recipe, data):
        """Archive recipe with proper arcname"""
        # Create a map of name/arcnames
        arcnames = []
        for key, name in data.items():
            if name:
                if key == 'srctree':
                    # all sources, no matter where they are located, go into the sources directory
                    arcname = 'sources/%s' % recipe
                else:
                    arcname = name.replace(config.workspace_path, '')
                arcnames.append((name, arcname))

        for name, arcname in arcnames:
            tar.add(name, arcname=arcname)

    # Make sure workspace is non-empty and possible listed include/excluded recipes are in workspace
    if not workspace:
        logger.info('Workspace contains no recipes, nothing to export')
        return 0
    for param, recipes in {'include': args.include, 'exclude': args.exclude}.items():
        for recipe in recipes:
            if recipe not in workspace:
                logger.error('Recipe (%s) on %s argument not in the current workspace' % (recipe, param))
                return 1

    name = args.file

    default_name = "%s-%s.tar.gz" % (default_arcname_prefix, datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
    if not name:
        name = default_name
    else:
        # if name is a directory, append the default name
        if os.path.isdir(name):
            name = os.path.join(name, default_name)

    if os.path.exists(name) and not args.overwrite:
        # Bugfix: the original message lacked the '% name' substitution, so
        # the literal '%s' was printed instead of the archive name
        logger.error('Tar archive %s exists. Use --overwrite/-o to overwrite it' % name)
        return 1

    # if all workspace is excluded, quit
    if not len(set(workspace.keys()).difference(set(args.exclude))):
        logger.warning('All recipes in workspace excluded, nothing to export')
        return 0

    exported = []
    with tarfile.open(name, 'w:gz') as tar:
        if args.include:
            for recipe in args.include:
                add_recipe(tar, recipe, workspace[recipe])
                exported.append(recipe)
        else:
            for recipe, data in workspace.items():
                if recipe not in args.exclude:
                    add_recipe(tar, recipe, data)
                    exported.append(recipe)

        add_metadata(tar)

    logger.info('Tar archive created at %s with the following recipes: %s' % (name, ', '.join(exported)))
    return 0
96
def register_commands(subparsers, context):
    """Register devtool export subcommands"""
    parser = subparsers.add_parser('export',
                                   help='Export workspace into a tar archive',
                                   description='Export one or more recipes from current workspace into a tar archive',
                                   group='advanced')

    parser.add_argument('--file', '-f', help='Output archive file name')
    parser.add_argument('--overwrite', '-o', action="store_true", help='Overwrite previous export tar archive')
    # --include and --exclude are mutually exclusive selections
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--include', '-i', nargs='+', default=[], help='Include recipes into the tar archive')
    # Bugfix: help text said 'into' for the exclude option; recipes are
    # excluded *from* the archive
    group.add_argument('--exclude', '-e', nargs='+', default=[], help='Exclude recipes from the tar archive')
    parser.set_defaults(func=export)
diff --git a/scripts/lib/devtool/ide_plugins/__init__.py b/scripts/lib/devtool/ide_plugins/__init__.py
deleted file mode 100644
index 19c2f61c5f..0000000000
--- a/scripts/lib/devtool/ide_plugins/__init__.py
+++ /dev/null
@@ -1,282 +0,0 @@
1#
2# Copyright (C) 2023-2024 Siemens AG
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Devtool ide-sdk IDE plugin interface definition and helper functions"""
7
8import errno
9import json
10import logging
11import os
12import stat
13from enum import Enum, auto
14from devtool import DevtoolError
15from bb.utils import mkdirhier
16
17logger = logging.getLogger('devtool')
18
19
class BuildTool(Enum):
    """Build system driving the recipe under development."""
    UNDEFINED = auto()
    CMAKE = auto()
    MESON = auto()

    @property
    def is_c_ccp(self):
        """True for build tools that compile C/C++ (CMake and Meson)."""
        return self in (BuildTool.CMAKE, BuildTool.MESON)
32
33
class GdbCrossConfig:
    """Base class defining the GDB configuration generator interface

    Generate a GDB configuration for a binary on the target device.
    Only one instance per binary is allowed. This allows to assign unique port
    numbers for all gdbserver instances.
    """
    # Class-wide counter: each instance claims the next free gdbserver port
    _gdbserver_port_next = 1234
    # Class-wide registry of binaries already configured (duplicate guard)
    _binaries = []

    def __init__(self, image_recipe, modified_recipe, binary, gdbserver_multi=True):
        # image_recipe/modified_recipe are recipe wrapper objects from
        # ide_sdk (provide gdb_cross, ide_sdk_scripts_dir, rootfs_dbg, ...)
        # -- TODO confirm exact types against the ide_sdk caller
        self.image_recipe = image_recipe
        self.modified_recipe = modified_recipe
        self.gdb_cross = modified_recipe.gdb_cross
        self.binary = binary
        if binary in GdbCrossConfig._binaries:
            raise DevtoolError(
                "gdbserver config for binary %s is already generated" % binary)
        GdbCrossConfig._binaries.append(binary)
        self.script_dir = modified_recipe.ide_sdk_scripts_dir
        self.gdbinit_dir = os.path.join(self.script_dir, 'gdbinit')
        # multi: gdbserver survives across debug sessions; once: it exits
        # together with the debugged process
        self.gdbserver_multi = gdbserver_multi
        # e.g. /usr/bin/foo -> usr-bin-foo (safe for use in file names)
        self.binary_pretty = self.binary.replace(os.sep, '-').lstrip('-')
        self.gdbserver_port = GdbCrossConfig._gdbserver_port_next
        GdbCrossConfig._gdbserver_port_next += 1
        self.id_pretty = "%d_%s" % (self.gdbserver_port, self.binary_pretty)
        # gdbserver start script
        gdbserver_script_file = 'gdbserver_' + self.id_pretty
        if self.gdbserver_multi:
            gdbserver_script_file += "_m"
        self.gdbserver_script = os.path.join(
            self.script_dir, gdbserver_script_file)
        # gdbinit file
        self.gdbinit = os.path.join(
            self.gdbinit_dir, 'gdbinit_' + self.id_pretty)
        # gdb start script
        self.gdb_script = os.path.join(
            self.script_dir, 'gdb_' + self.id_pretty)

    def _gen_gdbserver_start_script(self):
        """Generate a shell command starting the gdbserver on the remote device via ssh

        GDB supports two modes:
        multi: gdbserver remains running over several debug sessions
        once: gdbserver terminates after the debugged process terminates
        """
        cmd_lines = ['#!/bin/sh']
        if self.gdbserver_multi:
            # '\\$' renders as '\$' in the script so the variable is expanded
            # by the remote 'sh -c', not by the local shell
            temp_dir = "TEMP_DIR=/tmp/gdbserver_%s; " % self.id_pretty
            gdbserver_cmd_start = temp_dir
            gdbserver_cmd_start += "test -f \\$TEMP_DIR/pid && exit 0; "
            gdbserver_cmd_start += "mkdir -p \\$TEMP_DIR; "
            gdbserver_cmd_start += "%s --multi :%s > \\$TEMP_DIR/log 2>&1 & " % (
                self.gdb_cross.gdbserver_path, self.gdbserver_port)
            gdbserver_cmd_start += "echo \\$! > \\$TEMP_DIR/pid;"

            gdbserver_cmd_stop = temp_dir
            gdbserver_cmd_stop += "test -f \\$TEMP_DIR/pid && kill \\$(cat \\$TEMP_DIR/pid); "
            gdbserver_cmd_stop += "rm -rf \\$TEMP_DIR; "

            # Script takes an optional 'stop' argument to kill the server
            gdbserver_cmd_l = []
            gdbserver_cmd_l.append('if [ "$1" = "stop" ]; then')
            gdbserver_cmd_l.append('   shift')
            gdbserver_cmd_l.append("   %s %s %s %s 'sh -c \"%s\"'" % (
                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_stop))
            gdbserver_cmd_l.append('else')
            gdbserver_cmd_l.append("   %s %s %s %s 'sh -c \"%s\"'" % (
                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start))
            gdbserver_cmd_l.append('fi')
            gdbserver_cmd = os.linesep.join(gdbserver_cmd_l)
        else:
            gdbserver_cmd_start = "%s --once :%s %s" % (
                self.gdb_cross.gdbserver_path, self.gdbserver_port, self.binary)
            gdbserver_cmd = "%s %s %s %s 'sh -c \"%s\"'" % (
                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start)
        cmd_lines.append(gdbserver_cmd)
        GdbCrossConfig.write_file(self.gdbserver_script, cmd_lines, True)

    def _gen_gdbinit_config(self):
        """Generate a gdbinit file for this binary and the corresponding gdbserver configuration"""
        gdbinit_lines = ['# This file is generated by devtool ide-sdk']
        if self.gdbserver_multi:
            target_help = '# gdbserver --multi :%d' % self.gdbserver_port
            remote_cmd = 'target extended-remote'
        else:
            target_help = '# gdbserver :%d %s' % (
                self.gdbserver_port, self.binary)
            remote_cmd = 'target remote'
        # Usage instructions embedded as comments in the generated gdbinit
        gdbinit_lines.append('# On the remote target:')
        gdbinit_lines.append(target_help)
        gdbinit_lines.append('# On the build machine:')
        gdbinit_lines.append('# cd ' + self.modified_recipe.real_srctree)
        gdbinit_lines.append(
            '# ' + self.gdb_cross.gdb + ' -ix ' + self.gdbinit)

        gdbinit_lines.append('set sysroot ' + self.modified_recipe.d)
        gdbinit_lines.append('set substitute-path "/usr/include" "' +
                             os.path.join(self.modified_recipe.recipe_sysroot, 'usr', 'include') + '"')
        # Disable debuginfod for now, the IDE configuration uses rootfs-dbg from the image workdir.
        gdbinit_lines.append('set debuginfod enabled off')
        if self.image_recipe.rootfs_dbg:
            gdbinit_lines.append(
                'set solib-search-path "' + self.modified_recipe.solib_search_path_str(self.image_recipe) + '"')
            # First: Search for sources of this recipe in the workspace folder
            if self.modified_recipe.pn in self.modified_recipe.target_dbgsrc_dir:
                gdbinit_lines.append('set substitute-path "%s" "%s"' %
                                     (self.modified_recipe.target_dbgsrc_dir, self.modified_recipe.real_srctree))
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must contain the recipe name PN.")
            # Second: Search for sources of other recipes in the rootfs-dbg
            if self.modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
                gdbinit_lines.append('set substitute-path "/usr/src/debug" "%s"' % os.path.join(
                    self.image_recipe.rootfs_dbg, "usr", "src", "debug"))
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
        else:
            logger.warning(
                "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
        gdbinit_lines.append(
            '%s %s:%d' % (remote_cmd, self.gdb_cross.host, self.gdbserver_port))
        gdbinit_lines.append('set remote exec-file ' + self.binary)
        gdbinit_lines.append(
            'run ' + os.path.join(self.modified_recipe.d, self.binary))

        GdbCrossConfig.write_file(self.gdbinit, gdbinit_lines)

    def _gen_gdb_start_script(self):
        """Generate a script starting GDB with the corresponding gdbinit configuration."""
        cmd_lines = ['#!/bin/sh']
        cmd_lines.append('cd ' + self.modified_recipe.real_srctree)
        cmd_lines.append(self.gdb_cross.gdb + ' -ix ' +
                         self.gdbinit + ' "$@"')
        GdbCrossConfig.write_file(self.gdb_script, cmd_lines, True)

    def initialize(self):
        """Generate all three artifacts: gdbserver script, gdbinit, gdb script"""
        self._gen_gdbserver_start_script()
        self._gen_gdbinit_config()
        self._gen_gdb_start_script()

    @staticmethod
    def write_file(script_file, cmd_lines, executable=False):
        """Write cmd_lines to script_file, optionally marking it executable"""
        script_dir = os.path.dirname(script_file)
        mkdirhier(script_dir)
        with open(script_file, 'w') as script_f:
            script_f.write(os.linesep.join(cmd_lines))
            script_f.write(os.linesep)
        if executable:
            st = os.stat(script_file)
            os.chmod(script_file, st.st_mode | stat.S_IEXEC)
        logger.info("Created: %s" % script_file)
186
187
class IdeBase:
    """Base class defining the interface for IDE plugins

    Concrete plugins override setup_shared_sysroots() and/or
    setup_modified_recipe(); the base implementations only warn that the
    mode is unsupported.
    """

    def __init__(self):
        self.ide_name = 'undefined'
        self.gdb_cross_configs = []

    @classmethod
    def ide_plugin_priority(cls):
        """Used to find the default ide handler if --ide is not passed"""
        return 10

    def setup_shared_sysroots(self, shared_env):
        # logger.warn() is a deprecated alias of warning(); use the real name
        logger.warning("Shared sysroot mode is not supported for IDE %s" %
                       self.ide_name)

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        logger.warning("Modified recipe mode is not supported for IDE %s" %
                       self.ide_name)

    def initialize_gdb_cross_configs(self, image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfig):
        """Create and initialize one GDB config per installed binary of the recipe"""
        binaries = modified_recipe.find_installed_binaries()
        for binary in binaries:
            gdb_cross_config = gdb_cross_config_class(
                image_recipe, modified_recipe, binary)
            gdb_cross_config.initialize()
            self.gdb_cross_configs.append(gdb_cross_config)

    @staticmethod
    def gen_oe_scrtips_sym_link(modified_recipe):
        # create a sym-link from sources to the scripts directory
        # (name kept as-is for API compatibility despite the typo)
        if os.path.isdir(modified_recipe.ide_sdk_scripts_dir):
            IdeBase.symlink_force(modified_recipe.ide_sdk_scripts_dir,
                                  os.path.join(modified_recipe.real_srctree, 'oe-scripts'))

    @staticmethod
    def update_json_file(json_dir, json_file, update_dict):
        """Update a json file

        Merges update_dict into the file's existing JSON object using
        dict.update semantics. A file that cannot be parsed (e.g. JSON
        with comments) or does not exist is treated as empty.
        """
        json_path = os.path.join(json_dir, json_file)
        logger.info("Updating IDE config file: %s (%s)" %
                    (json_file, json_path))
        if not os.path.exists(json_dir):
            os.makedirs(json_dir)
        try:
            with open(json_path) as f:
                orig_dict = json.load(f)
        except json.decoder.JSONDecodeError:
            logger.info(
                "Decoding %s failed. Probably because of comments in the json file" % json_path)
            orig_dict = {}
        except FileNotFoundError:
            orig_dict = {}
        orig_dict.update(update_dict)
        with open(json_path, 'w') as f:
            json.dump(orig_dict, f, indent=4)

    @staticmethod
    def symlink_force(tgt, dst):
        """Create symlink dst -> tgt, replacing an existing symlink that points elsewhere"""
        try:
            os.symlink(tgt, dst)
        except OSError as err:
            if err.errno == errno.EEXIST:
                # Only re-create the link if it points somewhere else
                if os.readlink(dst) != tgt:
                    os.remove(dst)
                    os.symlink(tgt, dst)
            else:
                raise err
259
260
def get_devtool_deploy_opts(args):
    """Filter args for devtool deploy-target args

    Translates ide-sdk command-line arguments into the corresponding
    'devtool deploy-target' option list.

    Returns None if no deploy target was given, otherwise a list
    starting with the target followed by the relevant option flags.
    """
    if not args.target:
        return None
    devtool_deploy_opts = [args.target]
    if args.no_host_check:
        devtool_deploy_opts += ["-c"]
    if args.show_status:
        devtool_deploy_opts += ["-s"]
    if args.no_preserve:
        devtool_deploy_opts += ["-p"]
    if args.no_check_space:
        devtool_deploy_opts += ["--no-check-space"]
    if args.ssh_exec:
        # Bugfix: was 'args.ssh.exec', which raised AttributeError whenever
        # --ssh-exec was used (argparse stores the value as 'ssh_exec')
        devtool_deploy_opts += ["-e", args.ssh_exec]
    if args.port:
        devtool_deploy_opts += ["-P", args.port]
    if args.key:
        devtool_deploy_opts += ["-I", args.key]
    if args.strip is False:
        devtool_deploy_opts += ["--no-strip"]
    return devtool_deploy_opts
diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py
deleted file mode 100644
index a62b93224e..0000000000
--- a/scripts/lib/devtool/ide_plugins/ide_code.py
+++ /dev/null
@@ -1,463 +0,0 @@
1#
2# Copyright (C) 2023-2024 Siemens AG
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Devtool ide-sdk IDE plugin for VSCode and VSCodium"""
7
8import json
9import logging
10import os
11import shutil
12from devtool.ide_plugins import BuildTool, IdeBase, GdbCrossConfig, get_devtool_deploy_opts
13
14logger = logging.getLogger('devtool')
15
16
class GdbCrossConfigVSCode(GdbCrossConfig):
    """GDB cross-debugging configuration specialized for VSCode.

    Only the gdbserver start script is generated here; gdb itself is
    launched and driven by VSCode's cppdbg debug adapter.
    """

    def __init__(self, image_recipe, modified_recipe, binary):
        # Final False flag presumably disables the generic gdb helper script
        # generation of the base class — NOTE(review): confirm against
        # GdbCrossConfig.__init__ in ide_plugins/__init__.py.
        super().__init__(image_recipe, modified_recipe, binary, False)

    def initialize(self):
        # Only the script starting gdbserver on the target is needed for VSCode
        self._gen_gdbserver_start_script()
23
24
class IdeVSCode(IdeBase):
    """Manage IDE configurations for VSCode

    Modified recipe mode:
    - cmake: use the cmake-preset generated by devtool ide-sdk
    - meson: meson is called via a wrapper script generated by devtool ide-sdk

    Shared sysroot mode:
    In shared sysroot mode, the cross tool-chain is exported to the user's global configuration.
    A workspace cannot be created because there is no recipe that defines how a workspace could
    be set up.
    - cmake: adds a cmake-kit to .local/share/CMakeTools/cmake-tools-kits.json
      The cmake-kit uses the environment script and the tool-chain file
      generated by meta-ide-support.
    - meson: Meson needs manual workspace configuration.
    """

    @classmethod
    def ide_plugin_priority(cls):
        """If --ide is not passed this is the default plugin"""
        # Prefer this plugin whenever the 'code' executable is on PATH
        if shutil.which('code'):
            return 100
        return 0

    def setup_shared_sysroots(self, shared_env):
        """Expose the toolchain of the shared sysroots SDK"""
        datadir = shared_env.ide_support.datadir
        deploy_dir_image = shared_env.ide_support.deploy_dir_image
        real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
        standalone_sysroot_native = shared_env.build_sysroots.standalone_sysroot_native
        # The cmake-tools extension keeps its kits in the user's home directory
        vscode_ws_path = os.path.join(
            os.environ['HOME'], '.local', 'share', 'CMakeTools')
        cmake_kits_path = os.path.join(vscode_ws_path, 'cmake-tools-kits.json')
        oecmake_generator = "Ninja"
        env_script = os.path.join(
            deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)

        if not os.path.isdir(vscode_ws_path):
            os.makedirs(vscode_ws_path)
        cmake_kits_old = []
        if os.path.exists(cmake_kits_path):
            with open(cmake_kits_path, 'r', encoding='utf-8') as cmake_kits_file:
                cmake_kits_old = json.load(cmake_kits_file)
        cmake_kits = cmake_kits_old.copy()

        cmake_kit_new = {
            "name": "OE " + real_multimach_target_sys,
            "environmentSetupScript": env_script,
            "toolchainFile": standalone_sysroot_native + datadir + "/cmake/OEToolchainConfig.cmake",
            "preferredGenerator": {
                "name": oecmake_generator
            }
        }

        def merge_kit(cmake_kits, cmake_kit_new):
            # Replace an existing kit with the same environment script in
            # place; otherwise append the new kit at the end.
            i = 0
            while i < len(cmake_kits):
                if 'environmentSetupScript' in cmake_kits[i] and \
                        cmake_kits[i]['environmentSetupScript'] == cmake_kit_new['environmentSetupScript']:
                    cmake_kits[i] = cmake_kit_new
                    return
                i += 1
            cmake_kits.append(cmake_kit_new)
        merge_kit(cmake_kits, cmake_kit_new)

        # Only rewrite the kits file if something actually changed
        if cmake_kits != cmake_kits_old:
            logger.info("Updating: %s" % cmake_kits_path)
            with open(cmake_kits_path, 'w', encoding='utf-8') as cmake_kits_file:
                json.dump(cmake_kits, cmake_kits_file, indent=4)
        else:
            logger.info("Already up to date: %s" % cmake_kits_path)

        cmake_native = os.path.join(
            shared_env.build_sysroots.standalone_sysroot_native, 'usr', 'bin', 'cmake')
        if os.path.isfile(cmake_native):
            logger.info('cmake-kits call cmake by default. If the cmake provided by this SDK should be used, please add the following line to ".vscode/settings.json" file: "cmake.cmakePath": "%s"' % cmake_native)
        else:
            logger.error("Cannot find cmake native at: %s" % cmake_native)

    def dot_code_dir(self, modified_recipe):
        """Path of the .vscode folder inside the recipe's source tree."""
        return os.path.join(modified_recipe.srctree, '.vscode')

    def __vscode_settings_meson(self, settings_dict, modified_recipe):
        """Add meson specific entries to settings.json (no-op otherwise)."""
        if modified_recipe.build_tool is not BuildTool.MESON:
            return
        settings_dict["mesonbuild.mesonPath"] = modified_recipe.meson_wrapper

        confopts = modified_recipe.mesonopts.split()
        confopts += modified_recipe.meson_cross_file.split()
        confopts += modified_recipe.extra_oemeson.split()
        settings_dict["mesonbuild.configureOptions"] = confopts
        settings_dict["mesonbuild.buildFolder"] = modified_recipe.b

    def __vscode_settings_cmake(self, settings_dict, modified_recipe):
        """Add cmake specific settings to settings.json.

        Note: most settings are passed to the cmake preset.
        """
        if modified_recipe.build_tool is not BuildTool.CMAKE:
            return
        settings_dict["cmake.configureOnOpen"] = True
        settings_dict["cmake.sourceDirectory"] = modified_recipe.real_srctree

    def vscode_settings(self, modified_recipe, image_recipe):
        """Generate .vscode/settings.json for the modified recipe."""
        # Keep the file watcher and search out of generated/OE folders
        files_excludes = {
            "**/.git/**": True,
            "**/oe-logs/**": True,
            "**/oe-workdir/**": True,
            "**/source-date-epoch/**": True
        }
        python_exclude = [
            "**/.git/**",
            "**/oe-logs/**",
            "**/oe-workdir/**",
            "**/source-date-epoch/**"
        ]
        # Sysroots (and the debug rootfs, if present) must not be edited
        files_readonly = {
            modified_recipe.recipe_sysroot + '/**': True,
            modified_recipe.recipe_sysroot_native + '/**': True,
        }
        if image_recipe.rootfs_dbg is not None:
            files_readonly[image_recipe.rootfs_dbg + '/**'] = True
        settings_dict = {
            "files.watcherExclude": files_excludes,
            "files.exclude": files_excludes,
            "files.readonlyInclude": files_readonly,
            "python.analysis.exclude": python_exclude
        }
        self.__vscode_settings_cmake(settings_dict, modified_recipe)
        self.__vscode_settings_meson(settings_dict, modified_recipe)

        settings_file = 'settings.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), settings_file, settings_dict)

    def __vscode_extensions_cmake(self, modified_recipe, recommendations):
        """Recommend the cmake + C/C++ extensions (no-op otherwise)."""
        if modified_recipe.build_tool is not BuildTool.CMAKE:
            return
        recommendations += [
            "twxs.cmake",
            "ms-vscode.cmake-tools",
            "ms-vscode.cpptools",
            "ms-vscode.cpptools-extension-pack",
            "ms-vscode.cpptools-themes"
        ]

    def __vscode_extensions_meson(self, modified_recipe, recommendations):
        """Recommend the meson + C/C++ extensions (no-op otherwise)."""
        if modified_recipe.build_tool is not BuildTool.MESON:
            return
        recommendations += [
            'mesonbuild.mesonbuild',
            "ms-vscode.cpptools",
            "ms-vscode.cpptools-extension-pack",
            "ms-vscode.cpptools-themes"
        ]

    def vscode_extensions(self, modified_recipe):
        """Generate .vscode/extensions.json with extension recommendations."""
        recommendations = []
        self.__vscode_extensions_cmake(modified_recipe, recommendations)
        self.__vscode_extensions_meson(modified_recipe, recommendations)
        extensions_file = 'extensions.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), extensions_file, {"recommendations": recommendations})

    def vscode_c_cpp_properties(self, modified_recipe):
        """Generate .vscode/c_cpp_properties.json for C/C++ IntelliSense."""
        properties_dict = {
            "name": modified_recipe.recipe_id_pretty,
        }
        if modified_recipe.build_tool is BuildTool.CMAKE:
            properties_dict["configurationProvider"] = "ms-vscode.cmake-tools"
        elif modified_recipe.build_tool is BuildTool.MESON:
            properties_dict["configurationProvider"] = "mesonbuild.mesonbuild"
            properties_dict["compilerPath"] = os.path.join(modified_recipe.staging_bindir_toolchain, modified_recipe.cxx.split()[0])
        else:  # no C/C++ build
            return

        properties_dicts = {
            "configurations": [
                properties_dict
            ],
            "version": 4
        }
        prop_file = 'c_cpp_properties.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), prop_file, properties_dicts)

    def vscode_launch_bin_dbg(self, gdb_cross_config):
        """Build one cppdbg launch configuration for a debuggable binary."""
        modified_recipe = gdb_cross_config.modified_recipe

        launch_config = {
            "name": gdb_cross_config.id_pretty,
            "type": "cppdbg",
            "request": "launch",
            "program": os.path.join(modified_recipe.d, gdb_cross_config.binary.lstrip('/')),
            "stopAtEntry": True,
            "cwd": "${workspaceFolder}",
            "environment": [],
            "externalConsole": False,
            "MIMode": "gdb",
            "preLaunchTask": gdb_cross_config.id_pretty,
            "miDebuggerPath": modified_recipe.gdb_cross.gdb,
            "miDebuggerServerAddress": "%s:%d" % (modified_recipe.gdb_cross.host, gdb_cross_config.gdbserver_port)
        }

        # Search for header files in recipe-sysroot.
        src_file_map = {
            "/usr/include": os.path.join(modified_recipe.recipe_sysroot, "usr", "include")
        }
        # First of all search for not stripped binaries in the image folder.
        # These binaries are copied (and optionally stripped) by deploy-target
        setup_commands = [
            {
                "description": "sysroot",
                "text": "set sysroot " + modified_recipe.d
            }
        ]

        if gdb_cross_config.image_recipe.rootfs_dbg:
            launch_config['additionalSOLibSearchPath'] = modified_recipe.solib_search_path_str(
                gdb_cross_config.image_recipe)
            # First: Search for sources of this recipe in the workspace folder
            if modified_recipe.pn in modified_recipe.target_dbgsrc_dir:
                src_file_map[modified_recipe.target_dbgsrc_dir] = "${workspaceFolder}"
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must contain the recipe name PN.")
            # Second: Search for sources of other recipes in the rootfs-dbg
            if modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
                src_file_map["/usr/src/debug"] = os.path.join(
                    gdb_cross_config.image_recipe.rootfs_dbg, "usr", "src", "debug")
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
        else:
            logger.warning(
                "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")

        launch_config['sourceFileMap'] = src_file_map
        launch_config['setupCommands'] = setup_commands
        return launch_config

    def vscode_launch(self, modified_recipe):
        """GDB Launch configuration for binaries (elf files)"""

        configurations = []
        for gdb_cross_config in self.gdb_cross_configs:
            if gdb_cross_config.modified_recipe is modified_recipe:
                configurations.append(self.vscode_launch_bin_dbg(gdb_cross_config))
        launch_dict = {
            "version": "0.2.0",
            "configurations": configurations
        }
        launch_file = 'launch.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), launch_file, launch_dict)

    def vscode_tasks_cpp(self, args, modified_recipe):
        """Generate .vscode/tasks.json for recipes with a C/C++ build tool."""
        run_install_deploy = modified_recipe.gen_install_deploy_script(args)
        install_task_name = "install && deploy-target %s" % modified_recipe.recipe_id_pretty
        tasks_dict = {
            "version": "2.0.0",
            "tasks": [
                {
                    "label": install_task_name,
                    "type": "shell",
                    "command": run_install_deploy,
                    "problemMatcher": []
                }
            ]
        }
        # One background gdbserver task per debuggable binary
        for gdb_cross_config in self.gdb_cross_configs:
            if gdb_cross_config.modified_recipe is not modified_recipe:
                continue
            tasks_dict['tasks'].append(
                {
                    "label": gdb_cross_config.id_pretty,
                    "type": "shell",
                    "isBackground": True,
                    "dependsOn": [
                        install_task_name
                    ],
                    "command": gdb_cross_config.gdbserver_script,
                    "problemMatcher": [
                        {
                            "pattern": [
                                {
                                    "regexp": ".",
                                    "file": 1,
                                    "location": 2,
                                    "message": 3
                                }
                            ],
                            "background": {
                                "activeOnStart": True,
                                "beginsPattern": ".",
                                "endsPattern": ".",
                            }
                        }
                    ]
                })
        tasks_file = 'tasks.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)

    def vscode_tasks_fallback(self, args, modified_recipe):
        """Generate tasks.json driving devtool build/deploy-target via bash.

        Used when the recipe's build tool is not natively supported: the
        tasks source the build environment and shell out to devtool.
        """
        oe_init_dir = modified_recipe.oe_init_dir
        oe_init = ". %s %s > /dev/null && " % (modified_recipe.oe_init_build_env, modified_recipe.topdir)
        dt_build = "devtool build "
        dt_build_label = dt_build + modified_recipe.recipe_id_pretty
        dt_build_cmd = dt_build + modified_recipe.bpn
        clean_opt = " --clean"
        dt_build_clean_label = dt_build + modified_recipe.recipe_id_pretty + clean_opt
        dt_build_clean_cmd = dt_build + modified_recipe.bpn + clean_opt
        dt_deploy = "devtool deploy-target "
        dt_deploy_label = dt_deploy + modified_recipe.recipe_id_pretty
        dt_deploy_cmd = dt_deploy + modified_recipe.bpn
        dt_build_deploy_label = "devtool build & deploy-target %s" % modified_recipe.recipe_id_pretty
        deploy_opts = ' '.join(get_devtool_deploy_opts(args))
        tasks_dict = {
            "version": "2.0.0",
            "tasks": [
                {
                    "label": dt_build_label,
                    "type": "shell",
                    "command": "bash",
                    "linux": {
                        "options": {
                            "cwd": oe_init_dir
                        }
                    },
                    "args": [
                        "--login",
                        "-c",
                        "%s%s" % (oe_init, dt_build_cmd)
                    ],
                    "problemMatcher": []
                },
                {
                    "label": dt_deploy_label,
                    "type": "shell",
                    "command": "bash",
                    "linux": {
                        "options": {
                            "cwd": oe_init_dir
                        }
                    },
                    "args": [
                        "--login",
                        "-c",
                        "%s%s %s" % (
                            oe_init, dt_deploy_cmd, deploy_opts)
                    ],
                    "problemMatcher": []
                },
                {
                    "label": dt_build_deploy_label,
                    "dependsOrder": "sequence",
                    "dependsOn": [
                        dt_build_label,
                        dt_deploy_label
                    ],
                    "problemMatcher": [],
                    "group": {
                        "kind": "build",
                        "isDefault": True
                    }
                },
                {
                    "label": dt_build_clean_label,
                    "type": "shell",
                    "command": "bash",
                    "linux": {
                        "options": {
                            "cwd": oe_init_dir
                        }
                    },
                    "args": [
                        "--login",
                        "-c",
                        "%s%s" % (oe_init, dt_build_clean_cmd)
                    ],
                    "problemMatcher": []
                }
            ]
        }
        # Background gdbserver tasks only make sense if a cross gdb exists
        if modified_recipe.gdb_cross:
            for gdb_cross_config in self.gdb_cross_configs:
                if gdb_cross_config.modified_recipe is not modified_recipe:
                    continue
                tasks_dict['tasks'].append(
                    {
                        "label": gdb_cross_config.id_pretty,
                        "type": "shell",
                        "isBackground": True,
                        "dependsOn": [
                            dt_build_deploy_label
                        ],
                        "command": gdb_cross_config.gdbserver_script,
                        "problemMatcher": [
                            {
                                "pattern": [
                                    {
                                        "regexp": ".",
                                        "file": 1,
                                        "location": 2,
                                        "message": 3
                                    }
                                ],
                                "background": {
                                    "activeOnStart": True,
                                    "beginsPattern": ".",
                                    "endsPattern": ".",
                                }
                            }
                        ]
                    })
        tasks_file = 'tasks.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)

    def vscode_tasks(self, args, modified_recipe):
        """Dispatch tasks.json generation based on the recipe's build tool."""
        if modified_recipe.build_tool.is_c_ccp:
            self.vscode_tasks_cpp(args, modified_recipe)
        else:
            self.vscode_tasks_fallback(args, modified_recipe)

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        """Generate the full .vscode configuration for a modified recipe."""
        self.vscode_settings(modified_recipe, image_recipe)
        self.vscode_extensions(modified_recipe)
        self.vscode_c_cpp_properties(modified_recipe)
        # GDB launch/tasks need a deploy target to talk to
        if args.target:
            self.initialize_gdb_cross_configs(
                image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfigVSCode)
            self.vscode_launch(modified_recipe)
        self.vscode_tasks(args, modified_recipe)
460
461
def register_ide_plugin(ide_plugins):
    """Register the VSCode IDE plugin under the plugin name 'code'."""
    ide_plugins.update({'code': IdeVSCode})
diff --git a/scripts/lib/devtool/ide_plugins/ide_none.py b/scripts/lib/devtool/ide_plugins/ide_none.py
deleted file mode 100644
index f106c5a026..0000000000
--- a/scripts/lib/devtool/ide_plugins/ide_none.py
+++ /dev/null
@@ -1,53 +0,0 @@
1#
2# Copyright (C) 2023-2024 Siemens AG
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Devtool ide-sdk generic IDE plugin"""
7
8import os
9import logging
10from devtool.ide_plugins import IdeBase, GdbCrossConfig
11
12logger = logging.getLogger('devtool')
13
14
class IdeNone(IdeBase):
    """Generate some generic helpers for other IDEs

    Modified recipe mode:
    Generate some helper scripts for remote debugging with GDB

    Shared sysroot mode:
    A wrapper for bitbake meta-ide-support and bitbake build-sysroots
    """

    def __init__(self):
        super().__init__()

    def setup_shared_sysroots(self, shared_env):
        """Tell the user which environment script to source."""
        real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
        deploy_dir_image = shared_env.ide_support.deploy_dir_image
        env_script = os.path.join(
            deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)
        logger.info(
            "To use this SDK please source this: %s" % env_script)

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        """generate some helper scripts and config files

        - Execute the do_install task
        - Execute devtool deploy-target
        - Generate a gdbinit file per executable
        - Generate the oe-scripts sym-link
        """
        script_path = modified_recipe.gen_install_deploy_script(args)
        logger.info("Created: %s" % script_path)

        self.initialize_gdb_cross_configs(image_recipe, modified_recipe)

        # NOTE: the method name carries a typo ("scrtips") as declared on
        # IdeBase; it must stay as-is here to match that declaration.
        IdeBase.gen_oe_scrtips_sym_link(modified_recipe)
50
51
def register_ide_plugin(ide_plugins):
    """Register the generic fallback IDE plugin under the name 'none'."""
    ide_plugins.update({'none': IdeNone})
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py
deleted file mode 100755
index 65873b088d..0000000000
--- a/scripts/lib/devtool/ide_sdk.py
+++ /dev/null
@@ -1,1070 +0,0 @@
1# Development tool - ide-sdk command plugin
2#
3# Copyright (C) 2023-2024 Siemens AG
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool ide-sdk plugin"""
8
9import json
10import logging
11import os
12import re
13import shutil
14import stat
15import subprocess
16import sys
17from argparse import RawTextHelpFormatter
18from enum import Enum
19
20import scriptutils
21import bb
22from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError, parse_recipe
23from devtool.standard import get_real_srctree
24from devtool.ide_plugins import BuildTool
25
26
# Shared devtool logger, same name as used by the other devtool plugins
logger = logging.getLogger('devtool')

# dict of classes derived from IdeBase, filled by the register_ide_plugin
# hook of each IDE plugin module
ide_plugins = {}
31
32
class DevtoolIdeMode(Enum):
    """Operating modes supported by the ide-sdk plugin.

    More advanced modes might be added in the future. Some ideas:
    - auto: modified if all recipes are modified, shared if none of the recipes is modified.
    - mixed: modified mode for modified recipes, shared mode for all other recipes.
    """

    modified = 'modified'
    shared = 'shared'
43
44
class TargetDevice:
    """SSH remote login parameters"""

    def __init__(self, args):
        """Derive the ssh command fragments from the parsed CLI arguments."""
        extra = ''
        if args.no_host_check:
            # Skip known_hosts handling for throw-away development targets
            extra += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
        if args.key:
            extra += ' -i %s' % args.key
        self.extraoptions = extra

        self.ssh_sshexec = args.ssh_exec if args.ssh_exec else 'ssh'
        self.ssh_port = "-p %s" % args.port if args.port else ''

        # Split an optional user name off a "user@host" target
        self.target = args.target
        parts = args.target.split('@')
        if len(parts) == 2:
            self.login, self.host = parts
        elif len(parts) == 1:
            self.login = ""
            self.host = parts[0]
        else:
            logger.error("Invalid target argument: %s" % args.target)
71
72
class RecipeNative:
    """Base class for calling bitbake to provide a -native recipe"""

    def __init__(self, name, target_arch=None):
        # Name of the -native recipe, e.g. 'gdb-cross-x86_64'
        self.name = name
        self.target_arch = target_arch
        # Task bitbake must run before the native tool is usable
        self.bootstrap_tasks = [self.name + ':do_addto_recipe_sysroot']
        # Filled in by _initialize() from the parsed recipe
        self.staging_bindir_native = None
        self.target_sys = None
        # Absolute path of the native binary; assigned by derived classes
        self.__native_bin = None

    def _initialize(self, config, workspace, tinfoil):
        """Get the parsed recipe"""
        recipe_d = parse_recipe(
            config, tinfoil, self.name, appends=True, filter_workspace=False)
        if not recipe_d:
            raise DevtoolError("Parsing %s recipe failed" % self.name)
        self.staging_bindir_native = os.path.realpath(
            recipe_d.getVar('STAGING_BINDIR_NATIVE'))
        self.target_sys = recipe_d.getVar('TARGET_SYS')
        # Returned so derived classes can read additional variables
        return recipe_d

    def initialize(self, config, workspace, tinfoil):
        """Basic initialization that can be overridden by a derived class"""
        self._initialize(config, workspace, tinfoil)

    @property
    def native_bin(self):
        # Fail loudly if a derived class never assigned the binary path
        if not self.__native_bin:
            raise DevtoolError("native binary name is not defined.")
        return self.__native_bin
104
105
class RecipeGdbCross(RecipeNative):
    """Handle gdb-cross on the host and the gdbserver on the target device"""

    def __init__(self, args, target_arch, target_device):
        super().__init__('gdb-cross-' + target_arch, target_arch)
        self.target_device = target_device
        # Absolute path of the cross gdb binary; set by initialize()
        self.gdb = None
        # Next free TCP port for a gdbserver instance on the target
        self.gdbserver_port_next = int(args.gdbserver_port_start)
        self.config_db = {}

    def __find_gdbserver(self, config, tinfoil):
        """Absolute path of the gdbserver"""
        recipe_d_gdb = parse_recipe(
            config, tinfoil, 'gdb', appends=True, filter_workspace=False)
        if not recipe_d_gdb:
            raise DevtoolError("Parsing gdb recipe failed")
        # gdbserver runs on the target; derive its install path from bindir
        return os.path.join(recipe_d_gdb.getVar('bindir'), 'gdbserver')

    def initialize(self, config, workspace, tinfoil):
        """Resolve the cross gdb and target gdbserver paths."""
        super()._initialize(config, workspace, tinfoil)
        gdb_bin = self.target_sys + '-gdb'
        gdb_path = os.path.join(
            self.staging_bindir_native, self.target_sys, gdb_bin)
        self.gdb = gdb_path
        self.gdbserver_path = self.__find_gdbserver(config, tinfoil)

    @property
    def host(self):
        # Convenience shortcut to the ssh host of the target device
        return self.target_device.host
135
136
class RecipeImage:
    """Handle some image recipe related properties

    Most workflows require firmware that runs on the target device.
    This firmware must be consistent with the setup of the host system.
    In particular, the debug symbols must be compatible. For this, the
    rootfs must be created as part of the SDK.
    """

    def __init__(self, name):
        # True when the image inherits image-combined-dbg
        self.combine_dbg_image = False
        # True when gdbserver is not listed in IMAGE_INSTALL
        self.gdbserver_missing = False
        self.name = name
        self.rootfs = None
        self.__rootfs_dbg = None
        self.bootstrap_tasks = [self.name + ':do_build']

    def initialize(self, config, tinfoil):
        """Parse the image recipe and capture its rootfs related variables."""
        image_d = parse_recipe(
            config, tinfoil, self.name, appends=True, filter_workspace=False)
        if not image_d:
            raise DevtoolError(
                "Parsing image recipe %s failed" % self.name)

        self.combine_dbg_image = bb.data.inherits_class(
            'image-combined-dbg', image_d)

        workdir = image_d.getVar('WORKDIR')
        self.rootfs = os.path.join(workdir, 'rootfs')
        # rootfs-dbg only exists when IMAGE_GEN_DEBUGFS is enabled
        if image_d.getVar('IMAGE_GEN_DEBUGFS') == "1":
            self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg')

        self.gdbserver_missing = 'gdbserver' not in image_d.getVar(
            'IMAGE_INSTALL')

    @property
    def debug_support(self):
        # Debugging requires the debug rootfs with the symbol files
        return bool(self.rootfs_dbg)

    @property
    def rootfs_dbg(self):
        # Only expose the path when the folder has actually been generated
        if self.__rootfs_dbg and os.path.isdir(self.__rootfs_dbg):
            return self.__rootfs_dbg
        return None
181
182
class RecipeMetaIdeSupport:
    """For the shared sysroots mode meta-ide-support is needed

    For use cases where just a cross tool-chain is required but
    no recipe is used, devtool ide-sdk abstracts calling bitbake meta-ide-support
    and bitbake build-sysroots. This also allows to expose the cross-toolchains
    to IDEs. For example VSCode support different tool-chains with e.g. cmake-kits.
    """

    def __init__(self):
        self.bootstrap_tasks = ['meta-ide-support:do_build']
        # Filled in by initialize() from the parsed recipe
        self.topdir = None
        self.datadir = None
        self.deploy_dir_image = None
        self.build_sys = None
        # From toolchain-scripts
        self.real_multimach_target_sys = None

    def initialize(self, config, tinfoil):
        """Parse meta-ide-support and capture the SDK related variables."""
        meta_ide_support_d = parse_recipe(
            config, tinfoil, 'meta-ide-support', appends=True, filter_workspace=False)
        if not meta_ide_support_d:
            raise DevtoolError("Parsing meta-ide-support recipe failed")

        self.topdir = meta_ide_support_d.getVar('TOPDIR')
        self.datadir = meta_ide_support_d.getVar('datadir')
        self.deploy_dir_image = meta_ide_support_d.getVar(
            'DEPLOY_DIR_IMAGE')
        self.build_sys = meta_ide_support_d.getVar('BUILD_SYS')
        self.real_multimach_target_sys = meta_ide_support_d.getVar(
            'REAL_MULTIMACH_TARGET_SYS')
214
215
class RecipeBuildSysroots:
    """For the shared sysroots mode build-sysroots is needed"""

    def __init__(self):
        # Filled in by initialize() from the parsed recipe
        self.standalone_sysroot = None
        self.standalone_sysroot_native = None
        # Both the target and the native sysroot must be populated
        self.bootstrap_tasks = [
            'build-sysroots:do_build_target_sysroot',
            'build-sysroots:do_build_native_sysroot'
        ]

    def initialize(self, config, tinfoil):
        """Parse build-sysroots and capture the standalone sysroot paths."""
        build_sysroots_d = parse_recipe(
            config, tinfoil, 'build-sysroots', appends=True, filter_workspace=False)
        if not build_sysroots_d:
            raise DevtoolError("Parsing build-sysroots recipe failed")
        self.standalone_sysroot = build_sysroots_d.getVar(
            'STANDALONE_SYSROOT')
        self.standalone_sysroot_native = build_sysroots_d.getVar(
            'STANDALONE_SYSROOT_NATIVE')
236
237
class SharedSysrootsEnv:
    """Container for the shared-sysroots (tool-chain only) workflow.

    Supports the workflow with just a tool-chain without a recipe.
    It is basically equivalent to:
        bitbake some-dependencies
        bitbake meta-ide-support
        bitbake build-sysroots
    followed by sourcing the environment-* file from the deploy folder.
    """

    def __init__(self):
        # Both attributes are attached later by initialize()
        self.ide_support = None
        self.build_sysroots = None

    def initialize(self, ide_support, build_sysroots):
        """Attach the parsed meta-ide-support and build-sysroots handles."""
        self.ide_support = ide_support
        self.build_sysroots = build_sysroots

    def setup_ide(self, ide):
        """Hand this environment over to the IDE plugin."""
        ide.setup(self)
259
260
class RecipeNotModified:
    """Handling of recipes added to the Direct DSK shared sysroots."""

    def __init__(self, name):
        # Only the sysroot population task is needed for these recipes
        self.name = name
        self.bootstrap_tasks = [name + ':do_populate_sysroot']
267
268
269class RecipeModified:
270 """Handling of recipes in the workspace created by devtool modify"""
271 OE_INIT_BUILD_ENV = 'oe-init-build-env'
272
273 VALID_BASH_ENV_NAME_CHARS = re.compile(r"^[a-zA-Z0-9_]*$")
274
    def __init__(self, name):
        """Initialize all attributes; most are filled in by initialize()."""
        self.name = name
        self.bootstrap_tasks = [name + ':do_install']
        self.gdb_cross = None
        # workspace
        self.real_srctree = None
        self.srctree = None
        self.ide_sdk_dir = None
        self.ide_sdk_scripts_dir = None
        self.bbappend = None
        # recipe variables from d.getVar
        self.b = None
        self.base_libdir = None
        self.bblayers = None
        self.bpn = None
        self.d = None
        self.fakerootcmd = None
        self.fakerootenv = None
        self.libdir = None
        self.max_process = None
        self.package_arch = None
        self.package_debug_split_style = None
        self.path = None
        self.pn = None
        self.recipe_sysroot = None
        self.recipe_sysroot_native = None
        self.staging_incdir = None
        self.strip_cmd = None
        self.target_arch = None
        self.target_dbgsrc_dir = None
        self.topdir = None
        self.workdir = None
        self.recipe_id = None
        # replicate bitbake build environment
        self.exported_vars = None
        self.cmd_compile = None
        self.__oe_init_dir = None
        # main build tool used by this recipe
        self.build_tool = BuildTool.UNDEFINED
        # build_tool = cmake
        self.oecmake_generator = None
        self.cmake_cache_vars = None
        # build_tool = meson
        self.meson_buildtype = None
        self.meson_wrapper = None
        self.mesonopts = None
        self.extra_oemeson = None
        self.meson_cross_file = None
323
    def initialize(self, config, workspace, tinfoil):
        """Parse the modified recipe and capture the variables needed later.

        Raises DevtoolError when the recipe cannot be parsed or is not a
        devtool-modify workspace recipe.
        """
        recipe_d = parse_recipe(
            config, tinfoil, self.name, appends=True, filter_workspace=False)
        if not recipe_d:
            raise DevtoolError("Parsing %s recipe failed" % self.name)

        # Verify this recipe is built as externalsrc setup by devtool modify
        workspacepn = check_workspace_recipe(
            workspace, self.name, bbclassextend=True)
        self.srctree = workspace[workspacepn]['srctree']
        # Need to grab this here in case the source is within a subdirectory
        self.real_srctree = get_real_srctree(
            self.srctree, recipe_d.getVar('S'), recipe_d.getVar('WORKDIR'))
        self.bbappend = workspace[workspacepn]['bbappend']

        # Start from a clean ide-sdk folder for this recipe
        self.ide_sdk_dir = os.path.join(
            config.workspace_path, 'ide-sdk', self.name)
        if os.path.exists(self.ide_sdk_dir):
            shutil.rmtree(self.ide_sdk_dir)
        self.ide_sdk_scripts_dir = os.path.join(self.ide_sdk_dir, 'scripts')

        # Capture the bitbake variables this plugin needs later on
        self.b = recipe_d.getVar('B')
        self.base_libdir = recipe_d.getVar('base_libdir')
        self.bblayers = recipe_d.getVar('BBLAYERS').split()
        self.bpn = recipe_d.getVar('BPN')
        self.cxx = recipe_d.getVar('CXX')
        self.d = recipe_d.getVar('D')
        self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD')
        self.fakerootenv = recipe_d.getVar('FAKEROOTENV')
        self.libdir = recipe_d.getVar('libdir')
        self.max_process = int(recipe_d.getVar(
            "BB_NUMBER_THREADS") or os.cpu_count() or 1)
        self.package_arch = recipe_d.getVar('PACKAGE_ARCH')
        self.package_debug_split_style = recipe_d.getVar(
            'PACKAGE_DEBUG_SPLIT_STYLE')
        self.path = recipe_d.getVar('PATH')
        self.pn = recipe_d.getVar('PN')
        self.recipe_sysroot = os.path.realpath(
            recipe_d.getVar('RECIPE_SYSROOT'))
        self.recipe_sysroot_native = os.path.realpath(
            recipe_d.getVar('RECIPE_SYSROOT_NATIVE'))
        self.staging_bindir_toolchain = os.path.realpath(
            recipe_d.getVar('STAGING_BINDIR_TOOLCHAIN'))
        self.staging_incdir = os.path.realpath(
            recipe_d.getVar('STAGING_INCDIR'))
        self.strip_cmd = recipe_d.getVar('STRIP')
        self.target_arch = recipe_d.getVar('TARGET_ARCH')
        self.target_dbgsrc_dir = recipe_d.getVar('TARGET_DBGSRC_DIR')
        self.topdir = recipe_d.getVar('TOPDIR')
        self.workdir = os.path.realpath(recipe_d.getVar('WORKDIR'))

        self.__init_exported_variables(recipe_d)

        # Detect the recipe's build tool from the inherited bbclasses
        if bb.data.inherits_class('cmake', recipe_d):
            self.oecmake_generator = recipe_d.getVar('OECMAKE_GENERATOR')
            self.__init_cmake_preset_cache(recipe_d)
            self.build_tool = BuildTool.CMAKE
        elif bb.data.inherits_class('meson', recipe_d):
            self.meson_buildtype = recipe_d.getVar('MESON_BUILDTYPE')
            self.mesonopts = recipe_d.getVar('MESONOPTS')
            self.extra_oemeson = recipe_d.getVar('EXTRA_OEMESON')
            self.meson_cross_file = recipe_d.getVar('MESON_CROSS_FILE')
            self.build_tool = BuildTool.MESON

        # Recipe ID is the identifier for IDE config sections
        self.recipe_id = self.bpn + "-" + self.package_arch
        self.recipe_id_pretty = self.bpn + ": " + self.package_arch
391
392 def append_to_bbappend(self, append_text):
393 with open(self.bbappend, 'a') as bbap:
394 bbap.write(append_text)
395
396 def remove_from_bbappend(self, append_text):
397 with open(self.bbappend, 'r') as bbap:
398 text = bbap.read()
399 new_text = text.replace(append_text, '')
400 with open(self.bbappend, 'w') as bbap:
401 bbap.write(new_text)
402
    @staticmethod
    def is_valid_shell_variable(var):
        """Skip strange shell variables like systemd

        prevent from strange bugs because of strange variables which
        are not used in this context but break various tools.

        Returns True when var consists only of [a-zA-Z0-9_] characters
        (including the empty string), False otherwise.
        NOTE(review): the "ignoring variable" debug message fires on the
        *accepted* branch, which reads inverted — confirm the intended
        log semantics before changing it.
        """
        if RecipeModified.VALID_BASH_ENV_NAME_CHARS.match(var):
            bb.debug(1, "ignoring variable: %s" % var)
            return True
        return False
414
415 def debug_build_config(self, args):
416 """Explicitely set for example CMAKE_BUILD_TYPE to Debug if not defined otherwise"""
417 if self.build_tool is BuildTool.CMAKE:
418 append_text = os.linesep + \
419 'OECMAKE_ARGS:append = " -DCMAKE_BUILD_TYPE:STRING=Debug"' + os.linesep
420 if args.debug_build_config and not 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars:
421 self.cmake_cache_vars['CMAKE_BUILD_TYPE'] = {
422 "type": "STRING",
423 "value": "Debug",
424 }
425 self.append_to_bbappend(append_text)
426 elif 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars:
427 del self.cmake_cache_vars['CMAKE_BUILD_TYPE']
428 self.remove_from_bbappend(append_text)
429 elif self.build_tool is BuildTool.MESON:
430 append_text = os.linesep + 'MESON_BUILDTYPE = "debug"' + os.linesep
431 if args.debug_build_config and self.meson_buildtype != "debug":
432 self.mesonopts.replace(
433 '--buildtype ' + self.meson_buildtype, '--buildtype debug')
434 self.append_to_bbappend(append_text)
435 elif self.meson_buildtype == "debug":
436 self.mesonopts.replace(
437 '--buildtype debug', '--buildtype plain')
438 self.remove_from_bbappend(append_text)
439 elif args.debug_build_config:
440 logger.warn(
441 "--debug-build-config is not implemented for this build tool yet.")
442
443 def solib_search_path(self, image):
444 """Search for debug symbols in the rootfs and rootfs-dbg
445
446 The debug symbols of shared libraries which are provided by other packages
447 are grabbed from the -dbg packages in the rootfs-dbg.
448
449 But most cross debugging tools like gdb, perf, and systemtap need to find
450 executable/library first and through it debuglink note find corresponding
451 symbols file. Therefore the library paths from the rootfs are added as well.
452
453 Note: For the devtool modified recipe compiled from the IDE, the debug
454 symbols are taken from the unstripped binaries in the image folder.
455 Also, devtool deploy-target takes the files from the image folder.
456 debug symbols in the image folder refer to the corresponding source files
457 with absolute paths of the build machine. Debug symbols found in the
458 rootfs-dbg are relocated and contain paths which refer to the source files
459 installed on the target device e.g. /usr/src/...
460 """
461 base_libdir = self.base_libdir.lstrip('/')
462 libdir = self.libdir.lstrip('/')
463 so_paths = [
464 # debug symbols for package_debug_split_style: debug-with-srcpkg or .debug
465 os.path.join(image.rootfs_dbg, base_libdir, ".debug"),
466 os.path.join(image.rootfs_dbg, libdir, ".debug"),
467 # debug symbols for package_debug_split_style: debug-file-directory
468 os.path.join(image.rootfs_dbg, "usr", "lib", "debug"),
469
470 # The binaries are required as well, the debug packages are not enough
471 # With image-combined-dbg.bbclass the binaries are copied into rootfs-dbg
472 os.path.join(image.rootfs_dbg, base_libdir),
473 os.path.join(image.rootfs_dbg, libdir),
474 # Without image-combined-dbg.bbclass the binaries are only in rootfs.
475 # Note: Stepping into source files located in rootfs-dbg does not
476 # work without image-combined-dbg.bbclass yet.
477 os.path.join(image.rootfs, base_libdir),
478 os.path.join(image.rootfs, libdir)
479 ]
480 return so_paths
481
482 def solib_search_path_str(self, image):
483 """Return a : separated list of paths usable by GDB's set solib-search-path"""
484 return ':'.join(self.solib_search_path(image))
485
    def __init_exported_variables(self, d):
        """Find all variables with export flag set.

        This allows to generate IDE configurations which compile with the same
        environment as bitbake does. That's at least a reasonable default behavior.

        Stores the result in self.exported_vars (dict: expanded name -> value).
        """
        exported_vars = {}

        # Pre-filter: skip bitbake-internal (__*) variables and anything
        # flagged as a function.
        vars = (key for key in d.keys() if not key.startswith(
            "__") and not d.getVarFlag(key, "func", False))
        for var in vars:
            # NOTE(review): the generator above already excluded func-flagged
            # keys, so func is expected to always be falsy from here on and the
            # "exported shell function" branch below looks unreachable — confirm.
            func = d.getVarFlag(var, "func", False)
            if d.getVarFlag(var, 'python', False) and func:
                continue
            export = d.getVarFlag(var, "export", False)
            unexport = d.getVarFlag(var, "unexport", False)
            # Only variables explicitly marked for export are of interest
            if not export and not unexport and not func:
                continue
            if unexport:
                continue

            val = d.getVar(var)
            if val is None:
                continue
            # These characters are valid in bitbake variable names (overrides
            # etc.) but not in shell environment variable names
            if set(var) & set("-.{}+"):
                logger.warn(
                    "Warning: Found invalid character in variable name %s", str(var))
                continue
            varExpanded = d.expand(var)
            val = str(val)

            # Skip names that would break shell-based tools
            if not RecipeModified.is_valid_shell_variable(varExpanded):
                continue

            if func:
                code_line = "line: {0}, file: {1}\n".format(
                    d.getVarFlag(var, "lineno", False),
                    d.getVarFlag(var, "filename", False))
                val = val.rstrip('\n')
                logger.warn("Warning: exported shell function %s() is not exported (%s)" %
                            (varExpanded, code_line))
                continue

            if export:
                exported_vars[varExpanded] = val.strip()
                continue

        self.exported_vars = exported_vars
534
535 def __init_cmake_preset_cache(self, d):
536 """Get the arguments passed to cmake
537
538 Replicate the cmake configure arguments with all details to
539 share on build folder between bitbake and SDK.
540 """
541 site_file = os.path.join(self.workdir, 'site-file.cmake')
542 if os.path.exists(site_file):
543 print("Warning: site-file.cmake is not supported")
544
545 cache_vars = {}
546 oecmake_args = d.getVar('OECMAKE_ARGS').split()
547 extra_oecmake = d.getVar('EXTRA_OECMAKE').split()
548 for param in oecmake_args + extra_oecmake:
549 d_pref = "-D"
550 if param.startswith(d_pref):
551 param = param[len(d_pref):]
552 else:
553 print("Error: expected a -D")
554 param_s = param.split('=', 1)
555 param_nt = param_s[0].split(':', 1)
556
557 def handle_undefined_variable(var):
558 if var.startswith('${') and var.endswith('}'):
559 return ''
560 else:
561 return var
562 # Example: FOO=ON
563 if len(param_nt) == 1:
564 cache_vars[param_s[0]] = handle_undefined_variable(param_s[1])
565 # Example: FOO:PATH=/tmp
566 elif len(param_nt) == 2:
567 cache_vars[param_nt[0]] = {
568 "type": param_nt[1],
569 "value": handle_undefined_variable(param_s[1]),
570 }
571 else:
572 print("Error: cannot parse %s" % param)
573 self.cmake_cache_vars = cache_vars
574
    def cmake_preset(self):
        """Create a preset for cmake that mimics how bitbake calls cmake

        Writes (or merges into) CMakeUserPresets.json in the source tree so
        the IDE configures, builds and tests with bitbake's settings.
        """
        toolchain_file = os.path.join(self.workdir, 'toolchain.cmake')
        cmake_executable = os.path.join(
            self.recipe_sysroot_native, 'usr', 'bin', 'cmake')
        self.cmd_compile = cmake_executable + " --build --preset " + self.recipe_id

        # Configure preset: generator, toolchain, cache entries and exported
        # environment as collected from the bitbake datastore
        preset_dict_configure = {
            "name": self.recipe_id,
            "displayName": self.recipe_id_pretty,
            "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
            "binaryDir": self.b,
            "generator": self.oecmake_generator,
            "toolchainFile": toolchain_file,
            "cacheVariables": self.cmake_cache_vars,
            "environment": self.exported_vars,
            "cmakeExecutable": cmake_executable
        }

        # Build/test presets just reference the configure preset and inherit
        # its environment
        preset_dict_build = {
            "name": self.recipe_id,
            "displayName": self.recipe_id_pretty,
            "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
            "configurePreset": self.recipe_id,
            "inheritConfigureEnvironment": True
        }

        preset_dict_test = {
            "name": self.recipe_id,
            "displayName": self.recipe_id_pretty,
            "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
            "configurePreset": self.recipe_id,
            "inheritConfigureEnvironment": True
        }

        preset_dict = {
            "version": 3,  # cmake 3.21, backward compatible with kirkstone
            "configurePresets": [preset_dict_configure],
            "buildPresets": [preset_dict_build],
            "testPresets": [preset_dict_test]
        }

        # Finally write the json file
        json_file = 'CMakeUserPresets.json'
        json_path = os.path.join(self.real_srctree, json_file)
        logger.info("Updating CMake preset: %s (%s)" % (json_file, json_path))
        if not os.path.exists(self.real_srctree):
            os.makedirs(self.real_srctree)
        try:
            with open(json_path) as f:
                orig_dict = json.load(f)
        except json.decoder.JSONDecodeError:
            # Hand-written preset files may contain comments which the strict
            # json parser rejects; start from scratch in that case
            logger.info(
                "Decoding %s failed. Probably because of comments in the json file" % json_path)
            orig_dict = {}
        except FileNotFoundError:
            orig_dict = {}

        # Add or update the presets for the recipe and keep other presets
        for k, v in preset_dict.items():
            if isinstance(v, list):
                # Each of our lists carries exactly one preset for this recipe
                update_preset = v[0]
                preset_added = False
                if k in orig_dict:
                    for index, orig_preset in enumerate(orig_dict[k]):
                        if 'name' in orig_preset:
                            if orig_preset['name'] == update_preset['name']:
                                logger.debug("Updating preset: %s" %
                                             orig_preset['name'])
                                orig_dict[k][index] = update_preset
                                preset_added = True
                                break
                            else:
                                logger.debug("keeping preset: %s" %
                                             orig_preset['name'])
                        else:
                            logger.warn("preset without a name found")
                if not preset_added:
                    if not k in orig_dict:
                        orig_dict[k] = []
                    orig_dict[k].append(update_preset)
                    logger.debug("Added preset: %s" %
                                 update_preset['name'])
            else:
                # Scalar entries (e.g. "version") are simply overwritten
                orig_dict[k] = v

        with open(json_path, 'w') as f:
            json.dump(orig_dict, f, indent=4)
663
664 def gen_meson_wrapper(self):
665 """Generate a wrapper script to call meson with the cross environment"""
666 bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
667 meson_wrapper = os.path.join(self.ide_sdk_scripts_dir, 'meson')
668 meson_real = os.path.join(
669 self.recipe_sysroot_native, 'usr', 'bin', 'meson.real')
670 with open(meson_wrapper, 'w') as mwrap:
671 mwrap.write("#!/bin/sh" + os.linesep)
672 for var, val in self.exported_vars.items():
673 mwrap.write('export %s="%s"' % (var, val) + os.linesep)
674 mwrap.write("unset CC CXX CPP LD AR NM STRIP" + os.linesep)
675 private_temp = os.path.join(self.b, "meson-private", "tmp")
676 mwrap.write('mkdir -p "%s"' % private_temp + os.linesep)
677 mwrap.write('export TMPDIR="%s"' % private_temp + os.linesep)
678 mwrap.write('exec "%s" "$@"' % meson_real + os.linesep)
679 st = os.stat(meson_wrapper)
680 os.chmod(meson_wrapper, st.st_mode | stat.S_IEXEC)
681 self.meson_wrapper = meson_wrapper
682 self.cmd_compile = meson_wrapper + " compile -C " + self.b
683
684 def which(self, executable):
685 bin_path = shutil.which(executable, path=self.path)
686 if not bin_path:
687 raise DevtoolError(
688 'Cannot find %s. Probably the recipe %s is not built yet.' % (executable, self.bpn))
689 return bin_path
690
691 @staticmethod
692 def is_elf_file(file_path):
693 with open(file_path, "rb") as f:
694 data = f.read(4)
695 if data == b'\x7fELF':
696 return True
697 return False
698
699 def find_installed_binaries(self):
700 """find all executable elf files in the image directory"""
701 binaries = []
702 d_len = len(self.d)
703 re_so = re.compile(r'.*\.so[.0-9]*$')
704 for root, _, files in os.walk(self.d, followlinks=False):
705 for file in files:
706 if os.path.islink(file):
707 continue
708 if re_so.match(file):
709 continue
710 abs_name = os.path.join(root, file)
711 if os.access(abs_name, os.X_OK) and RecipeModified.is_elf_file(abs_name):
712 binaries.append(abs_name[d_len:])
713 return sorted(binaries)
714
715 def gen_delete_package_dirs(self):
716 """delete folders of package tasks
717
718 This is a workaround for and issue with recipes having their sources
719 downloaded as file://
720 This likely breaks pseudo like:
721 path mismatch [3 links]: ino 79147802 db
722 .../build/tmp/.../cmake-example/1.0/package/usr/src/debug/
723 cmake-example/1.0-r0/oe-local-files/cpp-example-lib.cpp
724 .../build/workspace/sources/cmake-example/oe-local-files/cpp-example-lib.cpp
725 Since the files are anyway outdated lets deleted them (also from pseudo's db) to workaround this issue.
726 """
727 cmd_lines = ['#!/bin/sh']
728
729 # Set up the appropriate environment
730 newenv = dict(os.environ)
731 for varvalue in self.fakerootenv.split():
732 if '=' in varvalue:
733 splitval = varvalue.split('=', 1)
734 newenv[splitval[0]] = splitval[1]
735
736 # Replicate the environment variables from bitbake
737 for var, val in newenv.items():
738 if not RecipeModified.is_valid_shell_variable(var):
739 continue
740 cmd_lines.append('%s="%s"' % (var, val))
741 cmd_lines.append('export %s' % var)
742
743 # Delete the folders
744 pkg_dirs = ' '.join([os.path.join(self.workdir, d) for d in [
745 "package", "packages-split", "pkgdata", "sstate-install-package", "debugsources.list", "*.spec"]])
746 cmd = "%s rm -rf %s" % (self.fakerootcmd, pkg_dirs)
747 cmd_lines.append('%s || { "%s failed"; exit 1; }' % (cmd, cmd))
748
749 return self.write_script(cmd_lines, 'delete_package_dirs')
750
    def gen_deploy_target_script(self, args):
        """Generate a script which does what devtool deploy-target does

        This script is much quicker than devtool target-deploy. Because it
        does not need to start a bitbake server. All information from tinfoil
        is hard-coded in the generated script.

        Returns the path of the generated script.
        """
        # The generated script runs with devtool's own Python interpreter and
        # replicates its module search path so devtool.deploy is importable
        cmd_lines = ['#!%s' % str(sys.executable)]
        cmd_lines.append('import sys')
        cmd_lines.append('devtool_sys_path = %s' % str(sys.path))
        cmd_lines.append('devtool_sys_path.reverse()')
        cmd_lines.append('for p in devtool_sys_path:')
        cmd_lines.append('    if p not in sys.path:')
        cmd_lines.append('        sys.path.insert(0, p)')
        cmd_lines.append('from devtool.deploy import deploy_no_d')
        # Only the options understood by deploy_no_d are baked into the script
        args_filter = ['debug', 'dry_run', 'key', 'no_check_space', 'no_host_check',
                       'no_preserve', 'port', 'show_status', 'ssh_exec', 'strip', 'target']
        filtered_args_dict = {key: value for key, value in vars(
            args).items() if key in args_filter}
        cmd_lines.append('filtered_args_dict = %s' % str(filtered_args_dict))
        # Minimal stand-in for the argparse namespace object expected by deploy_no_d
        cmd_lines.append('class Dict2Class(object):')
        cmd_lines.append('    def __init__(self, my_dict):')
        cmd_lines.append('        for key in my_dict:')
        cmd_lines.append('            setattr(self, key, my_dict[key])')
        cmd_lines.append('filtered_args = Dict2Class(filtered_args_dict)')
        cmd_lines.append(
            'setattr(filtered_args, "recipename", "%s")' % self.bpn)
        # All tinfoil-derived values are hard-coded as literals here
        cmd_lines.append('deploy_no_d("%s", "%s", "%s", "%s", "%s", "%s", %d, "%s", "%s", filtered_args)' %
                         (self.d, self.workdir, self.path, self.strip_cmd,
                          self.libdir, self.base_libdir, self.max_process,
                          self.fakerootcmd, self.fakerootenv))
        return self.write_script(cmd_lines, 'deploy_target')
783
784 def gen_install_deploy_script(self, args):
785 """Generate a script which does install and deploy"""
786 cmd_lines = ['#!/bin/bash']
787
788 cmd_lines.append(self.gen_delete_package_dirs())
789
790 # . oe-init-build-env $BUILDDIR
791 # Note: Sourcing scripts with arguments requires bash
792 cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % (
793 self.oe_init_dir, self.oe_init_dir))
794 cmd_lines.append('. "%s" "%s" || { echo ". %s %s failed"; exit 1; }' % (
795 self.oe_init_build_env, self.topdir, self.oe_init_build_env, self.topdir))
796
797 # bitbake -c install
798 cmd_lines.append(
799 'bitbake %s -c install --force || { echo "bitbake %s -c install --force failed"; exit 1; }' % (self.bpn, self.bpn))
800
801 # Self contained devtool deploy-target
802 cmd_lines.append(self.gen_deploy_target_script(args))
803
804 return self.write_script(cmd_lines, 'install_and_deploy')
805
806 def write_script(self, cmd_lines, script_name):
807 bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
808 script_name_arch = script_name + '_' + self.recipe_id
809 script_file = os.path.join(self.ide_sdk_scripts_dir, script_name_arch)
810 with open(script_file, 'w') as script_f:
811 script_f.write(os.linesep.join(cmd_lines))
812 st = os.stat(script_file)
813 os.chmod(script_file, st.st_mode | stat.S_IEXEC)
814 return script_file
815
816 @property
817 def oe_init_build_env(self):
818 """Find the oe-init-build-env used for this setup"""
819 oe_init_dir = self.oe_init_dir
820 if oe_init_dir:
821 return os.path.join(oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
822 return None
823
    @property
    def oe_init_dir(self):
        """Find the directory where the oe-init-build-env is located

        Assumption: There might be a layer with higher priority than poky
        which provides to oe-init-build-env in the layer's toplevel folder.

        The result is cached in self.__oe_init_dir after the first lookup.
        """
        if not self.__oe_init_dir:
            # Probe layers in reverse order so higher-priority candidates win
            # (assumes self.bblayers lists lower-priority layers first — TODO confirm)
            for layer in reversed(self.bblayers):
                # Ask git for the layer's repository top level; layers that are
                # not git checkouts simply fail and are skipped
                result = subprocess.run(
                    ['git', 'rev-parse', '--show-toplevel'], cwd=layer, capture_output=True)
                if result.returncode == 0:
                    oe_init_dir = result.stdout.decode('utf-8').strip()
                    oe_init_path = os.path.join(
                        oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
                    if os.path.exists(oe_init_path):
                        logger.debug("Using %s from: %s" % (
                            RecipeModified.OE_INIT_BUILD_ENV, oe_init_path))
                        self.__oe_init_dir = oe_init_dir
                        break
            if not self.__oe_init_dir:
                logger.error("Cannot find the bitbake top level folder")
        return self.__oe_init_dir
847
848
def ide_setup(args, config, basepath, workspace):
    """Generate the IDE configuration for the workspace

    Entry point for 'devtool ide-sdk'. Classifies the requested recipes
    (modified / image / other), bitbakes the required bootstrap tasks and
    hands the collected information to the selected IDE plugin.
    """

    # Explicitly passing some special recipes does not make sense
    for recipe in args.recipenames:
        if recipe in ['meta-ide-support', 'build-sysroots']:
            raise DevtoolError("Invalid recipe: %s." % recipe)

    # Collect information about tasks which need to be bitbaked
    bootstrap_tasks = []
    bootstrap_tasks_late = []
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        # define mode depending on recipes which need to be processed
        recipes_image_names = []
        recipes_modified_names = []
        recipes_other_names = []
        for recipe in args.recipenames:
            try:
                # Raises DevtoolError if the recipe is not in the workspace
                check_workspace_recipe(
                    workspace, recipe, bbclassextend=True)
                recipes_modified_names.append(recipe)
            except DevtoolError:
                recipe_d = parse_recipe(
                    config, tinfoil, recipe, appends=True, filter_workspace=False)
                if not recipe_d:
                    raise DevtoolError("Parsing recipe %s failed" % recipe)
                if bb.data.inherits_class('image', recipe_d):
                    recipes_image_names.append(recipe)
                else:
                    recipes_other_names.append(recipe)

        # Validate the combination of recipes against the selected mode
        invalid_params = False
        if args.mode == DevtoolIdeMode.shared:
            if len(recipes_modified_names):
                logger.error("In shared sysroots mode modified recipes %s cannot be handled." % str(
                    recipes_modified_names))
                invalid_params = True
        if args.mode == DevtoolIdeMode.modified:
            if len(recipes_other_names):
                logger.error("Only in shared sysroots mode not modified recipes %s can be handled." % str(
                    recipes_other_names))
                invalid_params = True
            if len(recipes_image_names) != 1:
                logger.error(
                    "One image recipe is required as the rootfs for the remote development.")
                invalid_params = True
            for modified_recipe_name in recipes_modified_names:
                if modified_recipe_name.startswith('nativesdk-') or modified_recipe_name.endswith('-native'):
                    # NOTE(review): "are support" typo in this user-visible
                    # message (runtime string, deliberately left unchanged here)
                    logger.error(
                        "Only cross compiled recipes are support. %s is not cross." % modified_recipe_name)
                    invalid_params = True

        if invalid_params:
            raise DevtoolError("Invalid parameters are passed.")

        # For the shared sysroots mode, add all dependencies of all the images to the sysroots
        # For the modified mode provide one rootfs and the corresponding debug symbols via rootfs-dbg
        recipes_images = []
        for recipes_image_name in recipes_image_names:
            logger.info("Using image: %s" % recipes_image_name)
            recipe_image = RecipeImage(recipes_image_name)
            recipe_image.initialize(config, tinfoil)
            bootstrap_tasks += recipe_image.bootstrap_tasks
            recipes_images.append(recipe_image)

        # Provide a Direct SDK with shared sysroots
        recipes_not_modified = []
        if args.mode == DevtoolIdeMode.shared:
            ide_support = RecipeMetaIdeSupport()
            ide_support.initialize(config, tinfoil)
            bootstrap_tasks += ide_support.bootstrap_tasks

            logger.info("Adding %s to the Direct SDK sysroots." %
                        str(recipes_other_names))
            for recipe_name in recipes_other_names:
                recipe_not_modified = RecipeNotModified(recipe_name)
                bootstrap_tasks += recipe_not_modified.bootstrap_tasks
                recipes_not_modified.append(recipe_not_modified)

            # build-sysroots must run after everything else, hence "late"
            build_sysroots = RecipeBuildSysroots()
            build_sysroots.initialize(config, tinfoil)
            bootstrap_tasks_late += build_sysroots.bootstrap_tasks
            shared_env = SharedSysrootsEnv()
            shared_env.initialize(ide_support, build_sysroots)

        recipes_modified = []
        if args.mode == DevtoolIdeMode.modified:
            logger.info("Setting up workspaces for modified recipe: %s" %
                        str(recipes_modified_names))
            gdbs_cross = {}
            for recipe_name in recipes_modified_names:
                recipe_modified = RecipeModified(recipe_name)
                recipe_modified.initialize(config, workspace, tinfoil)
                bootstrap_tasks += recipe_modified.bootstrap_tasks
                recipes_modified.append(recipe_modified)

                # One gdb-cross per target architecture is sufficient
                if recipe_modified.target_arch not in gdbs_cross:
                    target_device = TargetDevice(args)
                    gdb_cross = RecipeGdbCross(
                        args, recipe_modified.target_arch, target_device)
                    gdb_cross.initialize(config, workspace, tinfoil)
                    bootstrap_tasks += gdb_cross.bootstrap_tasks
                    gdbs_cross[recipe_modified.target_arch] = gdb_cross
                recipe_modified.gdb_cross = gdbs_cross[recipe_modified.target_arch]

    finally:
        tinfoil.shutdown()

    if not args.skip_bitbake:
        bb_cmd = 'bitbake '
        if args.bitbake_k:
            bb_cmd += "-k "
        bb_cmd_early = bb_cmd + ' '.join(bootstrap_tasks)
        exec_build_env_command(
            config.init_path, basepath, bb_cmd_early, watch=True)
        if bootstrap_tasks_late:
            bb_cmd_late = bb_cmd + ' '.join(bootstrap_tasks_late)
            exec_build_env_command(
                config.init_path, basepath, bb_cmd_late, watch=True)

    # Warn about image features remote debugging depends on
    for recipe_image in recipes_images:
        if (recipe_image.gdbserver_missing):
            logger.warning(
                "gdbserver not installed in image %s. Remote debugging will not be available" % recipe_image)

        if recipe_image.combine_dbg_image is False:
            logger.warning(
                'IMAGE_CLASSES += "image-combined-dbg" is missing for image %s. Remote debugging will not find debug symbols from rootfs-dbg.' % recipe_image)

    # Instantiate the active IDE plugin
    ide = ide_plugins[args.ide]()
    if args.mode == DevtoolIdeMode.shared:
        ide.setup_shared_sysroots(shared_env)
    elif args.mode == DevtoolIdeMode.modified:
        for recipe_modified in recipes_modified:
            if recipe_modified.build_tool is BuildTool.CMAKE:
                recipe_modified.cmake_preset()
            if recipe_modified.build_tool is BuildTool.MESON:
                recipe_modified.gen_meson_wrapper()
            # NOTE(review): recipe_image is the variable left over from the
            # image loop above; modified mode enforces exactly one image so
            # this is well-defined — revisit if that constraint changes.
            ide.setup_modified_recipe(
                args, recipe_image, recipe_modified)
    else:
        raise DevtoolError("Must not end up here.")
993
994
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin

    Discovers the available IDE plugins, sorts them by priority and registers
    the 'ide-sdk' subcommand with all its options.
    """

    global ide_plugins

    # Search for IDE plugins in all sub-folders named ide_plugins where devtool searches for plugins.
    pluginpaths = [os.path.join(path, 'ide_plugins')
                   for path in context.pluginpaths]
    ide_plugin_modules = []
    for pluginpath in pluginpaths:
        scriptutils.load_plugins(logger, ide_plugin_modules, pluginpath)

    for ide_plugin_module in ide_plugin_modules:
        if hasattr(ide_plugin_module, 'register_ide_plugin'):
            ide_plugin_module.register_ide_plugin(ide_plugins)
    # Sort plugins according to their priority. The first entry is the default IDE plugin.
    ide_plugins = dict(sorted(ide_plugins.items(),
                              key=lambda p: p[1].ide_plugin_priority(), reverse=True))

    parser_ide_sdk = subparsers.add_parser('ide-sdk', group='working', order=50, formatter_class=RawTextHelpFormatter,
                                           help='Setup the SDK and configure the IDE')
    # NOTE(review): "paramter" typo in the user-visible help string below
    # (runtime string, deliberately left unchanged in this documentation pass)
    parser_ide_sdk.add_argument(
        'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n'
        'Depending on the --mode paramter different types of SDKs and IDE configurations are generated.')
    parser_ide_sdk.add_argument(
        '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified,
        help='Different SDK types are supported:\n'
        '- "' + DevtoolIdeMode.modified.name + '" (default):\n'
        ' devtool modify creates a workspace to work on the source code of a recipe.\n'
        ' devtool ide-sdk builds the SDK and generates the IDE configuration(s) in the workspace directorie(s)\n'
        ' Usage example:\n'
        ' devtool modify cmake-example\n'
        ' devtool ide-sdk cmake-example core-image-minimal\n'
        ' Start the IDE in the workspace folder\n'
        ' At least one devtool modified recipe plus one image recipe are required:\n'
        ' The image recipe is used to generate the target image and the remote debug configuration.\n'
        '- "' + DevtoolIdeMode.shared.name + '":\n'
        ' Usage example:\n'
        ' devtool ide-sdk -m ' + DevtoolIdeMode.shared.name + ' recipe(s)\n'
        ' This command generates a cross-toolchain as well as the corresponding shared sysroot directories.\n'
        ' To use this tool-chain the environment-* file found in the deploy..image folder needs to be sourced into a shell.\n'
        ' In case of VSCode and cmake the tool-chain is also exposed as a cmake-kit')
    # The highest-priority plugin (first after sorting above) is the default
    default_ide = list(ide_plugins.keys())[0]
    parser_ide_sdk.add_argument(
        '-i', '--ide', choices=ide_plugins.keys(), default=default_ide,
        help='Setup the configuration for this IDE (default: %s)' % default_ide)
    parser_ide_sdk.add_argument(
        '-t', '--target', default='root@192.168.7.2',
        help='Live target machine running an ssh server: user@hostname.')
    parser_ide_sdk.add_argument(
        '-G', '--gdbserver-port-start', default="1234", help='port where gdbserver is listening.')
    parser_ide_sdk.add_argument(
        '-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
    parser_ide_sdk.add_argument(
        '-e', '--ssh-exec', help='Executable to use in place of ssh')
    parser_ide_sdk.add_argument(
        '-P', '--port', help='Specify ssh port to use for connection to the target')
    parser_ide_sdk.add_argument(
        '-I', '--key', help='Specify ssh private key for connection to the target')
    parser_ide_sdk.add_argument(
        '--skip-bitbake', help='Generate IDE configuration but skip calling bitbake to update the SDK', action='store_true')
    parser_ide_sdk.add_argument(
        '-k', '--bitbake-k', help='Pass -k parameter to bitbake', action='store_true')
    parser_ide_sdk.add_argument(
        '--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')
    parser_ide_sdk.add_argument(
        '-n', '--dry-run', help='List files to be undeployed only', action='store_true')
    parser_ide_sdk.add_argument(
        '-s', '--show-status', help='Show progress/status output', action='store_true')
    parser_ide_sdk.add_argument(
        '-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
    parser_ide_sdk.add_argument(
        '--no-check-space', help='Do not check for available space before deploying', action='store_true')
    parser_ide_sdk.add_argument(
        '--debug-build-config', help='Use debug build flags, for example set CMAKE_BUILD_TYPE=Debug', action='store_true')
    parser_ide_sdk.set_defaults(func=ide_setup)
diff --git a/scripts/lib/devtool/import.py b/scripts/lib/devtool/import.py
deleted file mode 100644
index 6829851669..0000000000
--- a/scripts/lib/devtool/import.py
+++ /dev/null
@@ -1,134 +0,0 @@
1# Development tool - import command plugin
2#
3# Copyright (C) 2014-2017 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool import plugin"""
8
9import os
10import tarfile
11import logging
12import collections
13import json
14import fnmatch
15
16from devtool import standard, setup_tinfoil, replace_from_file, DevtoolError
17from devtool import export
18
19logger = logging.getLogger('devtool')
20
def devimport(args, config, basepath, workspace):
    """Entry point for the devtool 'import' subcommand

    Extracts an archive created by "devtool export" into the workspace,
    skipping appends whose base recipe is not present in the current
    metadata, and fixes up workspace/source-tree paths afterwards.
    Returns 0 on completion.
    """

    def get_pn(name):
        """ Returns the filename of a workspace recipe/append"""
        metadata = name.split('/')[-1]
        fn, _ = os.path.splitext(metadata)
        return fn

    if not os.path.exists(args.file):
        raise DevtoolError('Tar archive %s does not exist. Export your workspace using "devtool export"' % args.file)

    with tarfile.open(args.file) as tar:
        # Get exported metadata
        export_workspace_path = export_workspace = None
        try:
            metadata = tar.getmember(export.metadata)
        except KeyError as ke:
            raise DevtoolError('The export metadata file created by "devtool export" was not found. "devtool import" can only be used to import tar archives created by "devtool export".')

        # The metadata file records the exporting workspace path plus
        # per-recipe data (e.g. srctree); it is removed again after reading
        tar.extract(metadata)
        with open(metadata.name) as fdm:
            export_workspace_path, export_workspace = json.load(fdm)
        os.unlink(metadata.name)

        members = tar.getmembers()

        # Get appends and recipes from the exported archive, these
        # will be needed to find out those appends without corresponding
        # recipe pair
        append_fns, recipe_fns = set(), set()
        for member in members:
            if member.name.startswith('appends'):
                append_fns.add(get_pn(member.name))
            elif member.name.startswith('recipes'):
                recipe_fns.add(get_pn(member.name))

        # Setup tinfoil, get required data and shutdown
        tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
        try:
            current_fns = [os.path.basename(recipe[0]) for recipe in tinfoil.cooker.recipecaches[''].pkg_fn.items()]
        finally:
            tinfoil.shutdown()

        # Find those appends that do not have recipes in current metadata
        non_importables = []
        for fn in append_fns - recipe_fns:
            # Check on current metadata (covering those layers indicated in bblayers.conf)
            for current_fn in current_fns:
                # '%' in an append name is a version wildcard; strip it for matching
                if fnmatch.fnmatch(current_fn, '*' + fn.replace('%', '') + '*'):
                    break
            else:
                non_importables.append(fn)
                # NOTE(review): "bbapppend" typo in this user-visible message
                # (runtime string, deliberately left unchanged here)
                logger.warning('No recipe to append %s.bbapppend, skipping' % fn)

        # Extract
        imported = []
        for member in members:
            if member.name == export.metadata:
                continue

            for nonimp in non_importables:
                pn = nonimp.split('_')[0]
                # do not extract data from non-importable recipes or metadata
                if member.name.startswith('appends/%s' % nonimp) or \
                        member.name.startswith('recipes/%s' % nonimp) or \
                        member.name.startswith('sources/%s' % pn):
                    break
            else:
                path = os.path.join(config.workspace_path, member.name)
                if os.path.exists(path):
                    # by default, no file overwrite is done unless -o is given by the user
                    if args.overwrite:
                        try:
                            tar.extract(member, path=config.workspace_path)
                        except PermissionError as pe:
                            logger.warning(pe)
                    else:
                        logger.warning('File already present. Use --overwrite/-o to overwrite it: %s' % member.name)
                        continue
                else:
                    tar.extract(member, path=config.workspace_path)

                # Update EXTERNALSRC and the devtool md5 file
                if member.name.startswith('appends'):
                    if export_workspace_path:
                        # appends created by 'devtool modify' just need to update the workspace
                        replace_from_file(path, export_workspace_path, config.workspace_path)

                    # appends created by 'devtool add' need replacement of exported source tree
                    pn = get_pn(member.name).split('_')[0]
                    exported_srctree = export_workspace[pn]['srctree']
                    if exported_srctree:
                        replace_from_file(path, exported_srctree, os.path.join(config.workspace_path, 'sources', pn))

                    standard._add_md5(config, pn, path)
                    imported.append(pn)

    if imported:
        logger.info('Imported recipes into workspace %s: %s' % (config.workspace_path, ', '.join(imported)))
    else:
        logger.warning('No recipes imported into the workspace')

    return 0
125
def register_commands(subparsers, context):
    """Register devtool import subcommands"""
    parser = subparsers.add_parser(
        'import',
        help='Import exported tar archive into workspace',
        description='Import tar archive previously created by "devtool export" into workspace',
        group='advanced')
    parser.add_argument('file', metavar='FILE',
                        help='Name of the tar archive to import')
    parser.add_argument('--overwrite', '-o', action="store_true",
                        help='Overwrite files when extracting')
    parser.set_defaults(func=devimport)
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py
deleted file mode 100644
index 18daef30c3..0000000000
--- a/scripts/lib/devtool/menuconfig.py
+++ /dev/null
@@ -1,81 +0,0 @@
1# OpenEmbedded Development tool - menuconfig command plugin
2#
3# Copyright (C) 2018 Xilinx
4# Written by: Chandana Kalluri <ckalluri@xilinx.com>
5#
6# SPDX-License-Identifier: MIT
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21"""Devtool menuconfig plugin"""
22
23import os
24import bb
25import logging
26import argparse
27import re
28import glob
29from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command
30from devtool import check_workspace_recipe
31logger = logging.getLogger('devtool')
32
def menuconfig(args, config, basepath, workspace):
    """Entry point for the devtool 'menuconfig' subcommand.

    Runs 'bitbake -c menuconfig' for a workspace recipe and ensures the
    recipe's oe-local-files directory exists so the resulting config
    fragment (devtool-fragment.cfg) has somewhere to live.

    Returns 0 on success, 1 if the recipe could not be parsed.
    Raises DevtoolError if the recipe has no do_menuconfig task.
    """
    # NOTE: dropped dead "" pre-initializations (rd/kconfigpath/pn_src/...)
    # and the unused kconfigpath = rd.getVar('B') lookup from the original.
    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.component, appends=True, filter_workspace=False)
        if not rd:
            return 1

        check_workspace_recipe(workspace, args.component)
        pn = rd.getVar('PN')

        if not rd.getVarFlag('do_menuconfig', 'task'):
            raise DevtoolError("This recipe does not support menuconfig option")

        pn_src = os.path.join(config.workspace_path, 'sources', pn)

        # add check to see if oe_local_files exists or not
        localfilesdir = os.path.join(pn_src, 'oe-local-files')
        if not os.path.exists(localfilesdir):
            bb.utils.mkdirhier(localfilesdir)
            # Add gitignore to ensure source tree is clean
            gitignorefile = os.path.join(localfilesdir, '.gitignore')
            with open(gitignorefile, 'w') as f:
                f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n')
                f.write('*\n')

    finally:
        tinfoil.shutdown()

    logger.info('Launching menuconfig')
    exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True)
    # Diff the (possibly changed) .config against the baseline into a fragment
    fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg')
    standard._create_kconfig_diff(pn_src, rd, fragment)

    return 0
76
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin."""
    parser_menuconfig = subparsers.add_parser('menuconfig',help='Alter build-time configuration for a recipe', description='Launches the make menuconfig command (for recipes where do_menuconfig is available), allowing users to make changes to the build-time configuration. Creates a config fragment corresponding to changes made.', group='advanced')
    # Fix: help text previously read 'compenent'
    parser_menuconfig.add_argument('component', help='component to alter config')
    parser_menuconfig.set_defaults(func=menuconfig,fixed_setup=context.fixed_setup)
diff --git a/scripts/lib/devtool/package.py b/scripts/lib/devtool/package.py
deleted file mode 100644
index c2367342c3..0000000000
--- a/scripts/lib/devtool/package.py
+++ /dev/null
@@ -1,50 +0,0 @@
1# Development tool - package command plugin
2#
3# Copyright (C) 2014-2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool plugin containing the package subcommands"""
8
9import os
10import subprocess
11import logging
12from bb.process import ExecutionError
13from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
14
15logger = logging.getLogger('devtool')
16
def package(args, config, basepath, workspace):
    """Entry point for the devtool 'package' subcommand.

    Runs the configured packaging task (default package_write_<pkgtype>)
    for a workspace recipe and reports where the packages were deployed.
    Returns 0 on success, or bitbake's exit code on failure.
    """
    check_workspace_recipe(workspace, args.recipename)

    tinfoil = setup_tinfoil(basepath=basepath, config_only=True)
    try:
        # Devtool config takes precedence over the bitbake variable
        image_pkgtype = config.get('Package', 'image_pkgtype', '')
        if not image_pkgtype:
            image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE')

        deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper())
    finally:
        tinfoil.shutdown()

    package_task = config.get('Package', 'package_task', 'package_write_%s' % image_pkgtype)
    try:
        exec_build_env_command(config.init_path, basepath, 'bitbake -c %s %s' % (package_task, args.recipename), watch=True)
    except ExecutionError as e:
        # Fix: this module only does 'from bb.process import ExecutionError',
        # so the previous 'except bb.process.ExecutionError' would have raised
        # NameError ('bb' is never bound here) instead of catching the failure.
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode

    logger.info('Your packages are in %s' % deploy_dir_pkg)

    return 0
41
def register_commands(subparsers, context):
    """Register devtool subcommands from the package plugin."""
    # Packaging individual recipes is only offered inside the extensible SDK
    if not context.fixed_setup:
        return
    parser_package = subparsers.add_parser(
        'package',
        help='Build packages for a recipe',
        description='Builds packages for a recipe\'s output files',
        group='testbuild', order=-5)
    parser_package.add_argument('recipename', help='Recipe to package')
    parser_package.set_defaults(func=package)
diff --git a/scripts/lib/devtool/runqemu.py b/scripts/lib/devtool/runqemu.py
deleted file mode 100644
index ead978aabc..0000000000
--- a/scripts/lib/devtool/runqemu.py
+++ /dev/null
@@ -1,64 +0,0 @@
1# Development tool - runqemu command plugin
2#
3# Copyright (C) 2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8"""Devtool runqemu plugin"""
9
10import os
11import bb
12import logging
13import argparse
14import glob
15from devtool import exec_build_env_command, setup_tinfoil, DevtoolError
16
17logger = logging.getLogger('devtool')
18
def runqemu(args, config, basepath, workspace):
    """Entry point for the devtool 'runqemu' subcommand"""

    # Pull the values we need out of the bitbake configuration first
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        machine = tinfoil.config_data.getVar('MACHINE')
        staging_dir = tinfoil.config_data.getVar('STAGING_DIR')
        build_arch = tinfoil.config_data.getVar('BUILD_ARCH')
        bindir_rel = tinfoil.config_data.getVar('bindir_native').lstrip(os.path.sep)
        bindir_native = os.path.join(staging_dir, build_arch, bindir_rel)
    finally:
        tinfoil.shutdown()

    if not glob.glob(os.path.join(bindir_native, 'qemu-system-*')):
        raise DevtoolError('QEMU is not available within this SDK')

    # Fall back to the first configured SDK target if no image was given
    imagename = args.imagename
    if not imagename:
        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
        if sdk_targets:
            imagename = sdk_targets[0]
    if not imagename:
        raise DevtoolError('Unable to determine image name to run, please specify one')

    try:
        # FIXME runqemu assumes that if OECORE_NATIVE_SYSROOT is set then it shouldn't
        # run bitbake to find out the values of various environment variables, which
        # isn't the case for the extensible SDK. Work around it for now.
        newenv = dict(os.environ)
        newenv.pop('OECORE_NATIVE_SYSROOT', '')
        exec_build_env_command(config.init_path, basepath, 'runqemu %s %s %s' % (machine, imagename, " ".join(args.args)), watch=True, env=newenv)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode

    return 0
54
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin."""
    # runqemu is only available from within the extensible SDK
    if not context.fixed_setup:
        return
    parser_runqemu = subparsers.add_parser(
        'runqemu',
        help='Run QEMU on the specified image',
        description='Runs QEMU to boot the specified image',
        group='testbuild', order=-20)
    parser_runqemu.add_argument('imagename', help='Name of built image to boot within QEMU', nargs='?')
    parser_runqemu.add_argument('args', help='Any remaining arguments are passed to the runqemu script (pass --help after imagename to see what these are)',
                                nargs=argparse.REMAINDER)
    parser_runqemu.set_defaults(func=runqemu)
diff --git a/scripts/lib/devtool/sdk.py b/scripts/lib/devtool/sdk.py
deleted file mode 100644
index 9aefd7e354..0000000000
--- a/scripts/lib/devtool/sdk.py
+++ /dev/null
@@ -1,330 +0,0 @@
1# Development tool - sdk-update command plugin
2#
3# Copyright (C) 2015-2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import subprocess
10import logging
11import glob
12import shutil
13import errno
14import sys
15import tempfile
16import re
17from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
18
19logger = logging.getLogger('devtool')
20
def parse_locked_sigs(sigfile_path):
    """Parse a locked-sigs.inc file into a <pn:task> -> <hash> dictionary.

    Each data line looks like '    <pn>:<task>:<hash> \\'; lines without a
    colon (variable assignment, closing quote) are skipped.
    """
    sig_dict = {}
    with open(sigfile_path) as f:
        # Iterate the file lazily rather than readlines()-ing it into memory
        for line in f:
            if ':' in line:
                # rpartition so pn names containing ':' in the task key survive
                taskkey, _, hashval = line.rpartition(':')
                sig_dict[taskkey.strip()] = hashval.split()[0]
    return sig_dict
31
def generate_update_dict(sigfile_new, sigfile_old):
    """Return the <pn:task>:<hash> entries that need updating.

    An entry needs updating when it is absent from the old locked-sigs
    file or its locked hash has changed.
    """
    sigdict_new = parse_locked_sigs(sigfile_new)
    sigdict_old = parse_locked_sigs(sigfile_old)
    return {taskkey: hashval
            for taskkey, hashval in sigdict_new.items()
            if taskkey not in sigdict_old or sigdict_old[taskkey] != hashval}
45
def get_sstate_objects(update_dict, sstate_dir):
    """Return a list containing sstate objects which are to be installed."""
    found = []
    for taskkey, hashval in update_dict.items():
        # Archives may live directly under the two-char hash prefix dir, or
        # one level deeper (per-arch subdirectory) - check both layouts
        patterns = (
            sstate_dir + '/' + hashval[:2] + '/*' + hashval + '*.tgz',
            sstate_dir + '/*/' + hashval[:2] + '/*' + hashval + '*.tgz',
        )
        matches = set()
        for pattern in patterns:
            matches.update(glob.glob(pattern))
        matches = list(matches)
        if len(matches) == 1:
            found.extend(matches)
        elif len(matches) > 1:
            logger.error("More than one matching sstate object found for %s" % hashval)

    return found
63
def mkdir(d):
    """Create directory d (and any missing parents), tolerating it existing.

    Replaces the manual try/except-EEXIST dance with the stdlib idiom.
    Unlike the old code this still raises if d exists but is not a
    directory, which was previously swallowed silently.
    """
    os.makedirs(d, exist_ok=True)
70
def install_sstate_objects(sstate_objects, src_sdk, dest_sdk):
    """Install sstate objects into destination SDK.

    Copies each object to the equivalent path under dest_sdk, creating
    directories as needed. Raises DevtoolError if dest_sdk has no
    sstate-cache directory.
    """
    sstate_dir = os.path.join(dest_sdk, 'sstate-cache')
    if not os.path.exists(sstate_dir):
        # Fix: a bare 'raise' outside an except block raises
        # RuntimeError('No active exception to re-raise') - raise a
        # meaningful devtool error instead (message unchanged)
        raise DevtoolError("Missing sstate-cache directory in %s, it might not be an extensible SDK." % dest_sdk)
    for sb in sstate_objects:
        dst = sb.replace(src_sdk, dest_sdk)
        destdir = os.path.dirname(dst)
        mkdir(destdir)
        logger.debug("Copying %s to %s" % (sb, dst))
        shutil.copy(sb, dst)
83
def check_manifest(fn, basepath):
    """Compare manifest fn against the files under basepath.

    Returns the list of relative paths whose current sha256 differs from
    the checksum recorded in the manifest.
    """
    import bb.utils
    changedfiles = []
    with open(fn, 'r') as f:
        for line in f:
            fields = line.split()
            if len(fields) < 2:
                continue
            chksum, fpath = fields[0], fields[1]
            curr_chksum = bb.utils.sha256_file(os.path.join(basepath, fpath))
            if chksum != curr_chksum:
                # chksum comes from the (new) manifest, curr_chksum is the
                # local file's current state
                logger.debug('File %s changed: old csum = %s, new = %s' % (os.path.join(basepath, fpath), curr_chksum, chksum))
                changedfiles.append(fpath)
    return changedfiles
98
def sdk_update(args, config, basepath, workspace):
    """Entry point for devtool sdk-update command.

    Fetches the server's sdk-conf-manifest, updates changed conf files and
    the layers git checkout, refreshes uninative tarballs if their checksums
    changed, then (unless --skip-prepare) re-prepares the build system via
    setscene. Returns 0 on success / already up-to-date, non-zero otherwise.
    """
    updateserver = args.updateserver
    if not updateserver:
        updateserver = config.get('SDK', 'updateserver', '')
    logger.debug("updateserver: %s" % updateserver)

    # Make sure we are using sdk-update from within SDK
    logger.debug("basepath = %s" % basepath)
    old_locked_sig_file_path = os.path.join(basepath, 'conf/locked-sigs.inc')
    if not os.path.exists(old_locked_sig_file_path):
        logger.error("Not using devtool's sdk-update command from within an extensible SDK. Please specify correct basepath via --basepath option")
        return -1
    else:
        logger.debug("Found conf/locked-sigs.inc in %s" % basepath)

    if '://' not in updateserver:
        logger.error("Update server must be a URL")
        return -1

    layers_dir = os.path.join(basepath, 'layers')
    conf_dir = os.path.join(basepath, 'conf')

    # Grab variable values
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR')
        sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS')
        site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION')
    finally:
        tinfoil.shutdown()

    tmpsdk_dir = tempfile.mkdtemp()
    try:
        os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
        # Fetch manifest from server
        tmpmanifest = os.path.join(tmpsdk_dir, 'conf', 'sdk-conf-manifest')
        ret = subprocess.call("wget -q -O %s %s/conf/sdk-conf-manifest" % (tmpmanifest, updateserver), shell=True)
        if ret != 0:
            # Fix: message previously read 'dowload'
            logger.error("Cannot download files from %s" % updateserver)
            return ret
        changedfiles = check_manifest(tmpmanifest, basepath)
        if not changedfiles:
            logger.info("Already up-to-date")
            return 0
        # Update metadata
        logger.debug("Updating metadata via git ...")
        # Check for the status before doing a fetch and reset
        if os.path.exists(os.path.join(basepath, 'layers/.git')):
            out = subprocess.check_output("git status --porcelain", shell=True, cwd=layers_dir)
            if not out:
                ret = subprocess.call("git fetch --all; git reset --hard @{u}", shell=True, cwd=layers_dir)
            else:
                logger.error("Failed to update metadata as there have been changes made to it. Aborting.")
                logger.error("Changed files:\n%s" % out)
                return -1
        else:
            ret = -1
        if ret != 0:
            # No usable local checkout - clone fresh from the server instead
            ret = subprocess.call("git clone %s/layers/.git" % updateserver, shell=True, cwd=tmpsdk_dir)
            if ret != 0:
                logger.error("Updating metadata via git failed")
                return ret
        logger.debug("Updating conf files ...")
        for changedfile in changedfiles:
            ret = subprocess.call("wget -q -O %s %s/%s" % (changedfile, updateserver, changedfile), shell=True, cwd=tmpsdk_dir)
            if ret != 0:
                logger.error("Updating %s failed" % changedfile)
                return ret

        # Check if UNINATIVE_CHECKSUM changed
        uninative = False
        if 'conf/local.conf' in changedfiles:
            def read_uninative_checksums(fn):
                # Returns [(buildarch, checksum), ...] from UNINATIVE_CHECKSUM lines
                chksumitems = []
                with open(fn, 'r') as f:
                    for line in f:
                        if line.startswith('UNINATIVE_CHECKSUM'):
                            splitline = re.split(r'[\[\]"\']', line)
                            if len(splitline) > 3:
                                chksumitems.append((splitline[1], splitline[3]))
                return chksumitems

            oldsums = read_uninative_checksums(os.path.join(basepath, 'conf/local.conf'))
            newsums = read_uninative_checksums(os.path.join(tmpsdk_dir, 'conf/local.conf'))
            if oldsums != newsums:
                uninative = True
                for buildarch, chksum in newsums:
                    uninative_file = os.path.join('downloads', 'uninative', chksum, '%s-nativesdk-libc.tar.bz2' % buildarch)
                    mkdir(os.path.join(tmpsdk_dir, os.path.dirname(uninative_file)))
                    ret = subprocess.call("wget -q -O %s %s/%s" % (uninative_file, updateserver, uninative_file), shell=True, cwd=tmpsdk_dir)

        # Ok, all is well at this point - move everything over
        tmplayers_dir = os.path.join(tmpsdk_dir, 'layers')
        if os.path.exists(tmplayers_dir):
            shutil.rmtree(layers_dir)
            shutil.move(tmplayers_dir, layers_dir)
        for changedfile in changedfiles:
            destfile = os.path.join(basepath, changedfile)
            os.remove(destfile)
            shutil.move(os.path.join(tmpsdk_dir, changedfile), destfile)
        os.remove(os.path.join(conf_dir, 'sdk-conf-manifest'))
        shutil.move(tmpmanifest, conf_dir)
        if uninative:
            shutil.rmtree(os.path.join(basepath, 'downloads', 'uninative'))
            shutil.move(os.path.join(tmpsdk_dir, 'downloads', 'uninative'), os.path.join(basepath, 'downloads'))

        if not sstate_mirrors:
            with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
                f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
                f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH"\n' % updateserver)
    finally:
        shutil.rmtree(tmpsdk_dir)

    if not args.skip_prepare:
        # Find all potentially updateable tasks
        sdk_update_targets = []
        tasks = ['do_populate_sysroot', 'do_packagedata']
        for root, _, files in os.walk(stamps_dir):
            for fn in files:
                if not '.sigdata.' in fn:
                    for task in tasks:
                        if '.%s.' % task in fn or '.%s_setscene.' % task in fn:
                            sdk_update_targets.append('%s:%s' % (os.path.basename(root), task))
        # Run bitbake command for the whole SDK
        logger.info("Preparing build system... (This may take some time.)")
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake --setscene-only %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            output, _ = exec_build_env_command(config.init_path, basepath, 'bitbake -n %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            runlines = []
            for line in output.splitlines():
                if 'Running task ' in line:
                    runlines.append(line)
            if runlines:
                logger.error('Unexecuted tasks found in preparation log:\n %s' % '\n '.join(runlines))
                return -1
        except bb.process.ExecutionError as e:
            logger.error('Preparation failed:\n%s' % e.stdout)
            return -1
    return 0
240
def sdk_install(args, config, basepath, workspace):
    """Entry point for the devtool sdk-install command.

    Installs development artifacts (sysroot/packagedata) for the named
    recipes into the SDK, preferring sstate unless --allow-build is given,
    then rebuilds the shared sysroots. Raises DevtoolError on failure.
    """

    import oe.recipeutils
    import bb.process

    for recipe in args.recipename:
        if recipe in workspace:
            raise DevtoolError('recipe %s is a recipe in your workspace' % recipe)

    tasks = ['do_populate_sysroot', 'do_packagedata']
    stampprefixes = {}
    def checkstamp(recipe):
        # A recipe counts as installed if a current (non-sigdata) stamp
        # exists for its first task, whether built or restored from sstate
        stampprefix = stampprefixes[recipe]
        for stamp in glob.glob(stampprefix + '*'):
            if '.sigdata.' not in stamp and stamp.startswith((stampprefix + '.', stampprefix + '_setscene.')):
                return True
        return False

    install_recipes = []
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        for recipe in args.recipename:
            rd = parse_recipe(config, tinfoil, recipe, True)
            if not rd:
                return 1
            stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP'), tasks[0])
            if checkstamp(recipe):
                logger.info('%s is already installed' % recipe)
            else:
                install_recipes.append(recipe)
    finally:
        tinfoil.shutdown()

    if install_recipes:
        logger.info('Installing %s...' % ', '.join(install_recipes))
        install_tasks = []
        for recipe in install_recipes:
            for task in tasks:
                # -native recipes have no packagedata
                if recipe.endswith('-native') and 'package' in task:
                    continue
                install_tasks.append('%s:%s' % (recipe, task))
        options = ''
        if not args.allow_build:
            options += ' --setscene-only'
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake %s %s' % (options, ' '.join(install_tasks)), watch=True)
        except bb.process.ExecutionError as e:
            # Fix: previously blamed whichever loop variable 'recipe'
            # happened to be last; name everything we tried to install
            raise DevtoolError('Failed to install %s:\n%s' % (', '.join(install_recipes), str(e)))
        for recipe in install_recipes:
            if checkstamp(recipe):
                logger.info('Successfully installed %s' % recipe)
            else:
                # (removed unreachable 'failed = True' / 'return 2' dead
                # code that followed this raise in the original)
                raise DevtoolError('Failed to install %s - unavailable' % recipe)

    try:
        exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_native_sysroot', watch=True)
        exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_target_sysroot', watch=True)
    except bb.process.ExecutionError as e:
        raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e)))
307
308
def register_commands(subparsers, context):
    """Register devtool subcommands from the sdk plugin."""
    # Both commands only exist within the extensible SDK
    if not context.fixed_setup:
        return

    parser_sdk = subparsers.add_parser(
        'sdk-update',
        help='Update SDK components',
        description='Updates installed SDK components from a remote server',
        group='sdk')
    default_server = context.config.get('SDK', 'updateserver', '')
    # The server argument is optional only when a configured default exists
    if default_server:
        parser_sdk.add_argument('updateserver', help='The update server to fetch latest SDK components from (default %s)' % default_server, nargs='?')
    else:
        parser_sdk.add_argument('updateserver', help='The update server to fetch latest SDK components from')
    parser_sdk.add_argument('--skip-prepare', action="store_true", help='Skip re-preparing the build system after updating (for debugging only)')
    parser_sdk.set_defaults(func=sdk_update)

    parser_sdk_install = subparsers.add_parser(
        'sdk-install',
        help='Install additional SDK components',
        description='Installs additional recipe development files into the SDK. (You can use "devtool search" to find available recipes.)',
        group='sdk')
    parser_sdk_install.add_argument('recipename', help='Name of the recipe to install the development artifacts for', nargs='+')
    parser_sdk_install.add_argument('-s', '--allow-build', help='Allow building requested item(s) from source', action='store_true')
    parser_sdk_install.set_defaults(func=sdk_install)
diff --git a/scripts/lib/devtool/search.py b/scripts/lib/devtool/search.py
deleted file mode 100644
index 70b81cac5e..0000000000
--- a/scripts/lib/devtool/search.py
+++ /dev/null
@@ -1,109 +0,0 @@
1# Development tool - search command plugin
2#
3# Copyright (C) 2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8"""Devtool search plugin"""
9
10import os
11import bb
12import logging
13import argparse
14import re
15from devtool import setup_tinfoil, parse_recipe, DevtoolError
16
17logger = logging.getLogger('devtool')
18
def search(args, config, basepath, workspace):
    """Entry point for the devtool 'search' subcommand"""

    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
        defsummary = tinfoil.config_data.getVar('SUMMARY', False) or ''

        # User keyword is treated as a regular expression
        keyword_rc = re.compile(args.keyword)

        def print_match(pn):
            # Print "<pn>  <summary>", suppressing the default SUMMARY value
            rd = parse_recipe(config, tinfoil, pn, True)
            if not rd:
                return
            summary = rd.getVar('SUMMARY')
            if summary == rd.expand(defsummary):
                summary = ''
            print("%s %s" % (pn.ljust(20), summary))


        # Phase 1: match against pkgdata (recipe file name, package names,
        # and selected per-package fields) when it is available
        matches = []
        if os.path.exists(pkgdata_dir):
            for fn in os.listdir(pkgdata_dir):
                pfn = os.path.join(pkgdata_dir, fn)
                if not os.path.isfile(pfn):
                    continue

                packages = []
                match = False
                if keyword_rc.search(fn):
                    match = True

                if not match:
                    # Collect this recipe's package list from its pkgdata file
                    with open(pfn, 'r') as f:
                        for line in f:
                            if line.startswith('PACKAGES:'):
                                packages = line.split(':', 1)[1].strip().split()

                    for pkg in packages:
                        if keyword_rc.search(pkg):
                            match = True
                            break
                        # Only consult runtime pkgdata for packages actually produced
                        if os.path.exists(os.path.join(pkgdata_dir, 'runtime', pkg + '.packaged')):
                            with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f:
                                for line in f:
                                    if ': ' in line:
                                        splitline = line.split(': ', 1)
                                        key = splitline[0]
                                        value = splitline[1].strip()
                                        key = key.replace(":" + pkg, "")
                                        if key in ['PKG', 'DESCRIPTION', 'FILES_INFO', 'FILERPROVIDES']:
                                            if keyword_rc.search(value):
                                                match = True
                                                break
                if match:
                    print_match(fn)
                    matches.append(fn)
        else:
            logger.warning('Package data is not available, results may be limited')

        # Phase 2: match against parsed recipes (name, PROVIDES, RPROVIDES),
        # skipping anything already reported from pkgdata
        for recipe in tinfoil.all_recipes():
            if args.fixed_setup and 'nativesdk' in recipe.inherits():
                continue

            match = False
            if keyword_rc.search(recipe.pn):
                match = True
            else:
                for prov in recipe.provides:
                    if keyword_rc.search(prov):
                        match = True
                        break
                if not match:
                    for rprov in recipe.rprovides:
                        if keyword_rc.search(rprov):
                            match = True
                            break
            if match and not recipe.pn in matches:
                print_match(recipe.pn)
    finally:
        tinfoil.shutdown()

    return 0
102
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin."""
    parser_search = subparsers.add_parser(
        'search',
        help='Search available recipes',
        description='Searches for available recipes. Matches on recipe name, package name, description and installed files, and prints the recipe name and summary on match.',
        group='info')
    parser_search.add_argument('keyword', help='Keyword to search for (regular expression syntax allowed, use quotes to avoid shell expansion)')
    # search needs no workspace; fixed_setup filters nativesdk recipes in the SDK
    parser_search.set_defaults(func=search, no_workspace=True, fixed_setup=context.fixed_setup)
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
deleted file mode 100644
index 1d0fe13788..0000000000
--- a/scripts/lib/devtool/standard.py
+++ /dev/null
@@ -1,2395 +0,0 @@
1# Development tool - standard commands plugin
2#
3# Copyright (C) 2014-2017 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool standard plugins"""
8
9import os
10import sys
11import re
12import shutil
13import subprocess
14import tempfile
15import logging
16import argparse
17import argparse_oe
18import scriptutils
19import errno
20import glob
21import filecmp
22from collections import OrderedDict
23from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError
24from devtool import parse_recipe
25
26logger = logging.getLogger('devtool')
27
28override_branch_prefix = 'devtool-override-'
29
30
31def add(args, config, basepath, workspace):
32 """Entry point for the devtool 'add' subcommand"""
33 import bb
34 import oe.recipeutils
35
36 if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri:
37 raise argparse_oe.ArgumentUsageError('At least one of recipename, srctree, fetchuri or -f/--fetch must be specified', 'add')
38
39 # These are positional arguments, but because we're nice, allow
40 # specifying e.g. source tree without name, or fetch URI without name or
41 # source tree (if we can detect that that is what the user meant)
42 if scriptutils.is_src_url(args.recipename):
43 if not args.fetchuri:
44 if args.fetch:
45 raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
46 args.fetchuri = args.recipename
47 args.recipename = ''
48 elif scriptutils.is_src_url(args.srctree):
49 if not args.fetchuri:
50 if args.fetch:
51 raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
52 args.fetchuri = args.srctree
53 args.srctree = ''
54 elif args.recipename and not args.srctree:
55 if os.sep in args.recipename:
56 args.srctree = args.recipename
57 args.recipename = None
58 elif os.path.isdir(args.recipename):
59 logger.warning('Ambiguous argument "%s" - assuming you mean it to be the recipe name' % args.recipename)
60
61 if not args.fetchuri:
62 if args.srcrev:
63 raise DevtoolError('The -S/--srcrev option is only valid when fetching from an SCM repository')
64 if args.srcbranch:
65 raise DevtoolError('The -B/--srcbranch option is only valid when fetching from an SCM repository')
66
67 if args.srctree and os.path.isfile(args.srctree):
68 args.fetchuri = 'file://' + os.path.abspath(args.srctree)
69 args.srctree = ''
70
71 if args.fetch:
72 if args.fetchuri:
73 raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
74 else:
75 logger.warning('-f/--fetch option is deprecated - you can now simply specify the URL to fetch as a positional argument instead')
76 args.fetchuri = args.fetch
77
78 if args.recipename:
79 if args.recipename in workspace:
80 raise DevtoolError("recipe %s is already in your workspace" %
81 args.recipename)
82 reason = oe.recipeutils.validate_pn(args.recipename)
83 if reason:
84 raise DevtoolError(reason)
85
86 if args.srctree:
87 srctree = os.path.abspath(args.srctree)
88 srctreeparent = None
89 tmpsrcdir = None
90 else:
91 srctree = None
92 srctreeparent = get_default_srctree(config)
93 bb.utils.mkdirhier(srctreeparent)
94 tmpsrcdir = tempfile.mkdtemp(prefix='devtoolsrc', dir=srctreeparent)
95
96 if srctree and os.path.exists(srctree):
97 if args.fetchuri:
98 if not os.path.isdir(srctree):
99 raise DevtoolError("Cannot fetch into source tree path %s as "
100 "it exists and is not a directory" %
101 srctree)
102 elif os.listdir(srctree):
103 raise DevtoolError("Cannot fetch into source tree path %s as "
104 "it already exists and is non-empty" %
105 srctree)
106 elif not args.fetchuri:
107 if args.srctree:
108 raise DevtoolError("Specified source tree %s could not be found" %
109 args.srctree)
110 elif srctree:
111 raise DevtoolError("No source tree exists at default path %s - "
112 "either create and populate this directory, "
113 "or specify a path to a source tree, or a "
114 "URI to fetch source from" % srctree)
115 else:
116 raise DevtoolError("You must either specify a source tree "
117 "or a URI to fetch source from")
118
119 if args.version:
120 if '_' in args.version or ' ' in args.version:
121 raise DevtoolError('Invalid version string "%s"' % args.version)
122
123 if args.color == 'auto' and sys.stdout.isatty():
124 color = 'always'
125 else:
126 color = args.color
127 extracmdopts = ''
128 if args.fetchuri:
129 source = args.fetchuri
130 if srctree:
131 extracmdopts += ' -x %s' % srctree
132 else:
133 extracmdopts += ' -x %s' % tmpsrcdir
134 else:
135 source = srctree
136 if args.recipename:
137 extracmdopts += ' -N %s' % args.recipename
138 if args.version:
139 extracmdopts += ' -V %s' % args.version
140 if args.binary:
141 extracmdopts += ' -b'
142 if args.also_native:
143 extracmdopts += ' --also-native'
144 if args.src_subdir:
145 extracmdopts += ' --src-subdir "%s"' % args.src_subdir
146 if args.autorev:
147 extracmdopts += ' -a'
148 if args.npm_dev:
149 extracmdopts += ' --npm-dev'
150 if args.no_pypi:
151 extracmdopts += ' --no-pypi'
152 if args.mirrors:
153 extracmdopts += ' --mirrors'
154 if args.srcrev:
155 extracmdopts += ' --srcrev %s' % args.srcrev
156 if args.srcbranch:
157 extracmdopts += ' --srcbranch %s' % args.srcbranch
158 if args.provides:
159 extracmdopts += ' --provides %s' % args.provides
160
161 tempdir = tempfile.mkdtemp(prefix='devtool')
162 try:
163 try:
164 stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create --devtool -o %s \'%s\' %s' % (color, tempdir, source, extracmdopts), watch=True)
165 except bb.process.ExecutionError as e:
166 if e.exitcode == 15:
167 raise DevtoolError('Could not auto-determine recipe name, please specify it on the command line')
168 else:
169 raise DevtoolError('Command \'%s\' failed' % e.command)
170
171 recipes = glob.glob(os.path.join(tempdir, '*.bb'))
172 if recipes:
173 recipename = os.path.splitext(os.path.basename(recipes[0]))[0].split('_')[0]
174 if recipename in workspace:
175 raise DevtoolError('A recipe with the same name as the one being created (%s) already exists in your workspace' % recipename)
176 recipedir = os.path.join(config.workspace_path, 'recipes', recipename)
177 bb.utils.mkdirhier(recipedir)
178 recipefile = os.path.join(recipedir, os.path.basename(recipes[0]))
179 appendfile = recipe_to_append(recipefile, config)
180 if os.path.exists(appendfile):
181 # This shouldn't be possible, but just in case
182 raise DevtoolError('A recipe with the same name as the one being created already exists in your workspace')
183 if os.path.exists(recipefile):
184 raise DevtoolError('A recipe file %s already exists in your workspace; this shouldn\'t be there - please delete it before continuing' % recipefile)
185 if tmpsrcdir:
186 srctree = os.path.join(srctreeparent, recipename)
187 if os.path.exists(tmpsrcdir):
188 if os.path.exists(srctree):
189 if os.path.isdir(srctree):
190 try:
191 os.rmdir(srctree)
192 except OSError as e:
193 if e.errno == errno.ENOTEMPTY:
194 raise DevtoolError('Source tree path %s already exists and is not empty' % srctree)
195 else:
196 raise
197 else:
198 raise DevtoolError('Source tree path %s already exists and is not a directory' % srctree)
199 logger.info('Using default source tree path %s' % srctree)
200 shutil.move(tmpsrcdir, srctree)
201 else:
202 raise DevtoolError('Couldn\'t find source tree created by recipetool')
203 bb.utils.mkdirhier(recipedir)
204 shutil.move(recipes[0], recipefile)
205 # Move any additional files created by recipetool
206 for fn in os.listdir(tempdir):
207 shutil.move(os.path.join(tempdir, fn), recipedir)
208 else:
209 raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout))
210 attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile))
211 if os.path.exists(attic_recipe):
212 logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe)
213 finally:
214 if tmpsrcdir and os.path.exists(tmpsrcdir):
215 shutil.rmtree(tmpsrcdir)
216 shutil.rmtree(tempdir)
217
218 for fn in os.listdir(recipedir):
219 _add_md5(config, recipename, os.path.join(recipedir, fn))
220
221 tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
222 try:
223 try:
224 rd = tinfoil.parse_recipe_file(recipefile, False)
225 except Exception as e:
226 logger.error(str(e))
227 rd = None
228 if not rd:
229 # Parsing failed. We just created this recipe and we shouldn't
230 # leave it in the workdir or it'll prevent bitbake from starting
231 movefn = '%s.parsefailed' % recipefile
232 logger.error('Parsing newly created recipe failed, moving recipe to %s for reference. If this looks to be caused by the recipe itself, please report this error.' % movefn)
233 shutil.move(recipefile, movefn)
234 return 1
235
236 if args.fetchuri and not args.no_git:
237 setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data)
238
239 initial_rev = {}
240 if os.path.exists(os.path.join(srctree, '.git')):
241 (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
242 initial_rev["."] = stdout.rstrip()
243 (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse HEAD` $PWD\'', cwd=srctree)
244 for line in stdout.splitlines():
245 (rev, submodule) = line.split()
246 initial_rev[os.path.relpath(submodule, srctree)] = rev
247
248 if args.src_subdir:
249 srctree = os.path.join(srctree, args.src_subdir)
250
251 bb.utils.mkdirhier(os.path.dirname(appendfile))
252 with open(appendfile, 'w') as f:
253 f.write('inherit externalsrc\n')
254 f.write('EXTERNALSRC = "%s"\n' % srctree)
255
256 b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd)
257 if b_is_s:
258 f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
259 if initial_rev:
260 for key, value in initial_rev.items():
261 f.write('\n# initial_rev %s: %s\n' % (key, value))
262
263 if args.binary:
264 f.write('do_install:append() {\n')
265 f.write(' rm -rf ${D}/.git\n')
266 f.write(' rm -f ${D}/singletask.lock\n')
267 f.write('}\n')
268
269 if bb.data.inherits_class('npm', rd):
270 f.write('python do_configure:append() {\n')
271 f.write(' pkgdir = d.getVar("NPM_PACKAGE")\n')
272 f.write(' lockfile = os.path.join(pkgdir, "singletask.lock")\n')
273 f.write(' bb.utils.remove(lockfile)\n')
274 f.write('}\n')
275
276 # Check if the new layer provides recipes whose priorities have been
277 # overriden by PREFERRED_PROVIDER.
278 recipe_name = rd.getVar('PN')
279 provides = rd.getVar('PROVIDES')
280 # Search every item defined in PROVIDES
281 for recipe_provided in provides.split():
282 preferred_provider = 'PREFERRED_PROVIDER_' + recipe_provided
283 current_pprovider = rd.getVar(preferred_provider)
284 if current_pprovider and current_pprovider != recipe_name:
285 if args.fixed_setup:
286 #if we are inside the eSDK add the new PREFERRED_PROVIDER in the workspace layer.conf
287 layerconf_file = os.path.join(config.workspace_path, "conf", "layer.conf")
288 with open(layerconf_file, 'a') as f:
289 f.write('%s = "%s"\n' % (preferred_provider, recipe_name))
290 else:
291 logger.warning('Set \'%s\' in order to use the recipe' % preferred_provider)
292 break
293
294 _add_md5(config, recipename, appendfile)
295
296 check_prerelease_version(rd.getVar('PV'), 'devtool add')
297
298 logger.info('Recipe %s has been automatically created; further editing may be required to make it fully functional' % recipefile)
299
300 finally:
301 tinfoil.shutdown()
302
303 return 0
304
305
def _check_compatible_recipe(pn, d):
    """Raise DevtoolError if the recipe cannot be worked on with devtool"""
    # perf never checks out its own source tree, so there is nothing to modify
    if pn == 'perf':
        raise DevtoolError("The perf recipe does not actually check out "
                           "source and thus cannot be supported by this tool",
                           4)

    if pn in ['kernel-devsrc', 'package-index'] or pn.startswith('gcc-source'):
        raise DevtoolError("The %s recipe is not supported by this tool" % pn, 4)

    # Whole categories of recipes (images, SDKs, packagegroups) have no
    # meaningful source tree to modify
    for classname, desc in (('image', 'an image'),
                            ('populate_sdk', 'an SDK'),
                            ('packagegroup', 'a packagegroup')):
        if bb.data.inherits_class(classname, d):
            raise DevtoolError("The %s recipe is %s, and therefore is not supported by this tool" % (pn, desc), 4)

    if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'):
        # Not an incompatibility error per se, so we don't pass the error code
        raise DevtoolError("externalsrc is currently enabled for the %s "
                           "recipe. This prevents the normal do_patch task "
                           "from working. You will need to disable this "
                           "first." % pn)
334
def _dry_run_copy(src, dst, dry_run_outdir, base_outdir):
    """Copy src into the dry-run output tree, mirroring dst's place under base_outdir"""
    rel = os.path.relpath(dst, base_outdir)
    if rel.startswith('..'):
        raise Exception('Incorrect base path %s for path %s' % (base_outdir, dst))
    target = os.path.join(dry_run_outdir, rel)
    parent = os.path.dirname(target)
    if parent:
        bb.utils.mkdirhier(parent)
    # Keep the first version written out: during an upgrade dry-run the
    # modified recipe must not be clobbered by a later unmodified copy
    if os.path.exists(target):
        return
    shutil.copy(src, target)
349
def _move_file(src, dst, dry_run_outdir=None, base_outdir=None):
    """Move src to dst, creating any missing destination directories.

    With dry_run_outdir set, the file is copied into the dry-run tree
    instead of being moved."""
    suffix = ' (dry-run)' if dry_run_outdir else ''
    logger.debug('Moving %s to %s%s' % (src, dst, suffix))
    if dry_run_outdir:
        # We want to copy here, not move
        _dry_run_copy(src, dst, dry_run_outdir, base_outdir)
        return
    parent = os.path.dirname(dst)
    if parent:
        bb.utils.mkdirhier(parent)
    shutil.move(src, dst)
362
def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None):
    """Copy src to dst, creating any missing destination directories.

    With dry_run_outdir set, the copy goes into the dry-run tree instead."""
    suffix = ' (dry-run)' if dry_run_outdir else ''
    logger.debug('Copying %s to %s%s' % (src, dst, suffix))
    if dry_run_outdir:
        _dry_run_copy(src, dst, dry_run_outdir, base_outdir)
        return
    parent = os.path.dirname(dst)
    if parent:
        bb.utils.mkdirhier(parent)
    shutil.copy(src, dst)
374
def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
    """List contents of a git treeish as {path: [mode, type, sha1]}"""
    import bb
    cmd = ['git', 'ls-tree', '-z', treeish]
    if recursive:
        cmd.append('-r')
    out, _ = bb.process.run(cmd, cwd=repodir)
    entries = {}
    # -z gives NUL-separated records: "<mode> <type> <sha1>\t<path>"
    for record in (out or '').split('\0'):
        if not record:
            continue
        fields = record.split(None, 4)
        entries[fields[3]] = fields[0:3]
    return entries
389
def _git_modified(repodir):
    """List the difference between HEAD and the index"""
    import bb
    out, _ = bb.process.run(['git', 'status', '--porcelain'], cwd=repodir)
    if not out:
        return []
    # Each porcelain line is "XY <path>"; skip untracked ("??") entries
    return [line[3:] for line in out.split("\n")
            if line and not line.startswith('??')]
401
402
def _git_exclude_path(srctree, path):
    """Return pathspec (list of paths) that excludes certain path"""
    # NOTE: "Filtering out" files/paths in this way is not entirely reliable -
    # we don't catch files that are deleted, for example. A more reliable way
    # to implement this would be to use "negative pathspecs" which were
    # introduced in Git v1.9.0. Revisit this when/if the required Git version
    # becomes greater than that.
    path = os.path.normpath(path)
    recurse = len(path.split(os.path.sep)) > 1
    tracked = list(_git_ls_tree(srctree, 'HEAD', recurse).keys())
    if path not in tracked:
        # Nothing to exclude - match everything
        return ['.']
    tracked.remove(path)
    return tracked
418
419def _ls_tree(directory):
420 """Recursive listing of files in a directory"""
421 ret = []
422 for root, dirs, files in os.walk(directory):
423 ret.extend([os.path.relpath(os.path.join(root, fname), directory) for
424 fname in files])
425 return ret
426
427
def extract(args, config, basepath, workspace):
    """Entry point for the devtool 'extract' subcommand"""
    import bb

    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    if not tinfoil:
        # setup_tinfoil has already reported the problem
        return 1
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        srctree = os.path.abspath(args.srctree)
        initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch,
                                         False, config, basepath, workspace,
                                         args.fixed_setup, rd, tinfoil,
                                         no_overrides=args.no_overrides)
        logger.info('Source tree extracted to %s' % srctree)
        # A recorded revision means the extraction actually succeeded
        return 0 if initial_rev else 1
    finally:
        tinfoil.shutdown()
451
def sync(args, config, basepath, workspace):
    """Entry point for the devtool 'sync' subcommand"""
    import bb

    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    if not tinfoil:
        # setup_tinfoil has already reported the problem
        return 1
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        srctree = os.path.abspath(args.srctree)
        initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch,
                                         True, config, basepath, workspace,
                                         args.fixed_setup, rd, tinfoil,
                                         no_overrides=True)
        logger.info('Source tree %s synchronized' % srctree)
        # A recorded revision means the sync actually succeeded
        return 0 if initial_rev else 1
    finally:
        tinfoil.shutdown()
475
def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False):
    """Extract sources of a recipe into srctree.

    Writes a temporary bbappend that inherits the devtool-source class,
    runs the recipe's fetch/unpack/patch tasks via tinfoil, then moves
    (or, for sync, git-fetches) the unpacked source into srctree.

    Returns a (initial_rev, srcsubdir_rel) tuple; when do_patch is
    disabled for a non-kernel recipe it returns (True, True) instead,
    since the devtool-source hooks never ran to record a revision.
    Raises DevtoolError on any failure.
    """
    import oe.recipeutils
    import oe.patch
    import oe.path

    pn = d.getVar('PN')

    _check_compatible_recipe(pn, d)

    # Validate the output path up front: sync needs an existing tree,
    # extract needs a non-existent or empty directory
    if sync:
        if not os.path.exists(srctree):
            raise DevtoolError("output path %s does not exist" % srctree)
    else:
        if os.path.exists(srctree):
            if not os.path.isdir(srctree):
                raise DevtoolError("output path %s exists and is not a directory" %
                                   srctree)
            elif os.listdir(srctree):
                raise DevtoolError("output path %s already exists and is "
                                   "non-empty" % srctree)

    if 'noexec' in (d.getVarFlags('do_unpack', False) or []):
        raise DevtoolError("The %s recipe has do_unpack disabled, unable to "
                           "extract source" % pn, 4)

    if not sync:
        # Prepare for shutil.move later on
        bb.utils.mkdirhier(srctree)
        os.rmdir(srctree)

    # Collect overrides used in conditional SRC_URI appends/prepends so
    # that a branch can be created for each one
    extra_overrides = []
    if not no_overrides:
        history = d.varhistory.variable('SRC_URI')
        for event in history:
            if not 'flag' in event:
                if event['op'].startswith((':append[', ':prepend[')):
                    override = event['op'].split('[')[1].split(']')[0]
                    if not override.startswith('pn-'):
                        extra_overrides.append(override)
        # We want to remove duplicate overrides. If a recipe had multiple
        # SRC_URI_override += values it would cause mulitple instances of
        # overrides. This doesn't play nicely with things like creating a
        # branch for every instance of DEVTOOL_EXTRA_OVERRIDES.
        extra_overrides = list(set(extra_overrides))
        if extra_overrides:
            logger.info('SRC_URI contains some conditional appends/prepends - will create branches to represent these')

    initial_rev = None

    recipefile = d.getVar('FILE')
    appendfile = recipe_to_append(recipefile, config)
    is_kernel_yocto = bb.data.inherits_class('kernel-yocto', d)

    # We need to redirect WORKDIR, STAMPS_DIR etc. under a temporary
    # directory so that:
    # (a) we pick up all files that get unpacked to the WORKDIR, and
    # (b) we don't disturb the existing build
    # However, with recipe-specific sysroots the sysroots for the recipe
    # will be prepared under WORKDIR, and if we used the system temporary
    # directory (i.e. usually /tmp) as used by mkdtemp by default, then
    # our attempts to hardlink files into the recipe-specific sysroots
    # will fail on systems where /tmp is a different filesystem, and it
    # would have to fall back to copying the files which is a waste of
    # time. Put the temp directory under the WORKDIR to prevent that from
    # being a problem.
    tempbasedir = d.getVar('WORKDIR')
    bb.utils.mkdirhier(tempbasedir)
    tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir)
    try:
        tinfoil.logger.setLevel(logging.WARNING)

        # FIXME this results in a cache reload under control of tinfoil, which is fine
        # except we don't get the knotty progress bar

        # Back up any pre-existing workspace bbappend so it can be
        # restored in the finally block below
        if os.path.exists(appendfile):
            appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak')
            shutil.copyfile(appendfile, appendbackup)
        else:
            appendbackup = None
            bb.utils.mkdirhier(os.path.dirname(appendfile))
        logger.debug('writing append file %s' % appendfile)
        with open(appendfile, 'a') as f:
            f.write('###--- _extract_source\n')
            f.write('deltask do_recipe_qa\n')
            f.write('deltask do_recipe_qa_setscene\n')
            f.write('ERROR_QA:remove = "patch-fuzz"\n')
            f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir)
            f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch)
            if not is_kernel_yocto:
                f.write('PATCHTOOL = "git"\n')
                f.write('PATCH_COMMIT_FUNCTIONS = "1"\n')
            if extra_overrides:
                f.write('DEVTOOL_EXTRA_OVERRIDES = "%s"\n' % ':'.join(extra_overrides))
            f.write('inherit devtool-source\n')
            f.write('###--- _extract_source\n')

        update_unlockedsigs(basepath, workspace, fixed_setup, [pn])

        sstate_manifests = d.getVar('SSTATE_MANIFESTS')
        bb.utils.mkdirhier(sstate_manifests)
        preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps')
        with open(preservestampfile, 'w') as f:
            f.write(d.getVar('STAMP'))
        tinfoil.modified_files()
        try:
            if is_kernel_yocto:
                # We need to generate the kernel config
                task = 'do_configure'
            else:
                task = 'do_patch'

            if 'noexec' in (d.getVarFlags(task, False) or []) or 'task' not in (d.getVarFlags(task, False) or []):
                logger.info('The %s recipe has %s disabled. Running only '
                            'do_configure task dependencies' % (pn, task))

                if 'depends' in d.getVarFlags('do_configure', False):
                    pn = d.getVarFlags('do_configure', False)['depends']
                    pn = pn.replace('${PV}', d.getVar('PV'))
                    pn = pn.replace('${COMPILERDEP}', d.getVar('COMPILERDEP'))
                    task = None

            # Run the fetch + unpack tasks
            res = tinfoil.build_targets(pn,
                                        task,
                                        handle_events=True)
        finally:
            if os.path.exists(preservestampfile):
                os.remove(preservestampfile)

        if not res:
            raise DevtoolError('Extracting source for %s failed' % pn)

        if not is_kernel_yocto and ('noexec' in (d.getVarFlags('do_patch', False) or []) or 'task' not in (d.getVarFlags('do_patch', False) or [])):
            workshareddir = d.getVar('S')
            if os.path.islink(srctree):
                os.unlink(srctree)

            os.symlink(workshareddir, srctree)

            # The initial_rev file is created in devtool_post_unpack function that will not be executed if
            # do_unpack/do_patch tasks are disabled so we have to directly say that source extraction was successful
            return True, True

        try:
            # These files are written by the devtool-source class hooks
            with open(os.path.join(tempdir, 'initial_rev'), 'r') as f:
                initial_rev = f.read()

            with open(os.path.join(tempdir, 'srcsubdir'), 'r') as f:
                srcsubdir = f.read()
        except FileNotFoundError as e:
            raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e))
        srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir'))

        # Check if work-shared is empty, if yes
        # find source and copy to work-shared
        if is_kernel_yocto:
            workshareddir = d.getVar('STAGING_KERNEL_DIR')
            staging_kerVer = get_staging_kver(workshareddir)
            kernelVersion = d.getVar('LINUX_VERSION')

            # handle dangling symbolic link in work-shared:
            if os.path.islink(workshareddir):
                os.unlink(workshareddir)

            if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer):
                shutil.rmtree(workshareddir)
                oe.path.copyhardlinktree(srcsubdir, workshareddir)
            elif not os.path.exists(workshareddir):
                oe.path.copyhardlinktree(srcsubdir, workshareddir)

        if sync:
            try:
                logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch))
                bb.process.run('git branch -f ' + devbranch + '.bak', cwd=srctree)

                # Use git fetch to update the source with the current recipe
                # To be able to update the currently checked out branch with
                # possibly new history (no fast-forward) git needs to be told
                # that's ok
                logger.info('Syncing source files including patches to git branch: %s' % devbranch)
                bb.process.run('git fetch --update-head-ok --force file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree)
            except bb.process.ExecutionError as e:
                raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e))

        else:
            shutil.move(srcsubdir, srctree)

            if is_kernel_yocto:
                logger.info('Copying kernel config to srctree')
                shutil.copy2(os.path.join(tempdir, '.config'), srctree)

    finally:
        # Restore (or remove) the workspace append and clean up the tempdir
        if appendbackup:
            shutil.copyfile(appendbackup, appendfile)
        elif os.path.exists(appendfile):
            os.remove(appendfile)
        if keep_temp:
            logger.info('Preserving temporary directory %s' % tempdir)
        else:
            shutil.rmtree(tempdir)
    return initial_rev, srcsubdir_rel
678
def _add_md5(config, recipename, filename):
    """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace"""
    import bb.utils

    md5file = os.path.join(config.workspace_path, '.devtool_md5')

    def record(fn):
        checksum = bb.utils.md5_file(fn)
        entry = '%s|%s|%s\n' % (recipename, os.path.relpath(fn, config.workspace_path), checksum)
        with open(md5file, 'a+') as f:
            f.seek(0, os.SEEK_SET)
            # Skip entries already present so repeated calls stay idempotent
            if entry not in f.read():
                f.write(entry)

    if not os.path.isdir(filename):
        record(filename)
        return
    for root, _, files in os.walk(filename):
        for fname in files:
            record(os.path.join(root, fname))
697
def _check_preserve(config, recipename):
    """Check if a file was manually changed and needs to be saved in 'attic'
    directory.

    Rewrites the workspace .devtool_md5 file, dropping every entry for
    recipename; each corresponding file is removed if unmodified, or moved
    into attic/<recipename> if its checksum no longer matches the record.
    """
    import bb.utils
    origfile = os.path.join(config.workspace_path, '.devtool_md5')
    newfile = os.path.join(config.workspace_path, '.devtool_md5_new')
    preservepath = os.path.join(config.workspace_path, 'attic', recipename)
    with open(origfile, 'r') as f:
        with open(newfile, 'w') as tf:
            for line in f.readlines():
                # Each record is "recipename|relative path|md5"
                splitline = line.rstrip().split('|')
                if splitline[0] == recipename:
                    removefile = os.path.join(config.workspace_path, splitline[1])
                    try:
                        md5 = bb.utils.md5_file(removefile)
                    except IOError as err:
                        if err.errno == 2:
                            # File no longer exists, skip it
                            continue
                        else:
                            raise
                    if splitline[2] != md5:
                        # Checksum differs from the recorded one: the user
                        # changed the file, so keep a copy in the attic
                        bb.utils.mkdirhier(preservepath)
                        preservefile = os.path.basename(removefile)
                        logger.warning('File %s modified since it was written, preserving in %s' % (preservefile, preservepath))
                        shutil.move(removefile, os.path.join(preservepath, preservefile))
                    else:
                        os.remove(removefile)
                else:
                    # Entry belongs to another recipe - keep it
                    tf.write(line)
    # Atomically replace the md5 file with the filtered version
    bb.utils.rename(newfile, origfile)
729
def get_staging_kver(srcdir):
    """Return the kernel version recorded in a work-shared checkout's Makefile,
    or an empty string if the checkout is missing or empty."""
    if not (os.path.exists(srcdir) and os.listdir(srcdir)):
        return ""
    with open(os.path.join(srcdir, "Makefile")) as f:
        header = [next(f) for _ in range(5)]
    # Lines 2-4 of the top-level Makefile carry the VERSION, PATCHLEVEL
    # and SUBLEVEL assignments
    components = [line.split('= ')[1].split('\n')[0] for line in header[1:4]]
    return ".".join(components)
741
def get_staging_kbranch(srcdir):
    """Return the checked-out git branch of a work-shared kernel checkout,
    or an empty string if the checkout is missing or empty."""
    if not (os.path.exists(srcdir) and os.listdir(srcdir)):
        return ""
    (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir)
    # Only the first output line is the branch name
    return "".join(branch.split('\n')[0])
748
def get_real_srctree(srctree, s, workdir):
    """Map the recipe's S value onto the corresponding path within srctree.

    If S points at a subdirectory of the source unpacked under WORKDIR
    (i.e. the recipe isn't using a shared workdir), return the matching
    subdirectory of srctree; otherwise return srctree unchanged.
    """
    s = os.path.abspath(s)
    workdir = os.path.abspath(workdir)
    # Compare against the workdir with a trailing separator so that a
    # sibling path whose name merely begins with the workdir string
    # (e.g. /build/wd-extra vs /build/wd) is not mistaken for a subpath
    if s.startswith(os.path.join(workdir, '')) and s != workdir and os.path.dirname(s) != workdir:
        # Handle if S is set to a subdirectory of the source
        srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
        srctree = os.path.join(srctree, srcsubdir)
    return srctree
758
def modify(args, config, basepath, workspace):
    """Entry point for the devtool 'modify' subcommand.

    Sets up an existing recipe to build from an external source tree:
    extracts the source (unless --no-extract), records the initial git
    revision(s) including submodules, and writes a workspace bbappend
    that points the recipe at the source tree via externalsrc.
    Returns 0 on success, 1 on failure.
    """
    import bb
    import oe.recipeutils
    import oe.patch
    import oe.path

    if args.recipename in workspace:
        raise DevtoolError("recipe %s is already in your workspace" %
                           args.recipename)

    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        pn = rd.getVar('PN')
        if pn != args.recipename:
            logger.info('Mapping %s to %s' % (args.recipename, pn))
        if pn in workspace:
            raise DevtoolError("recipe %s is already in your workspace" %
                               pn)

        if args.srctree:
            srctree = os.path.abspath(args.srctree)
        else:
            srctree = get_default_srctree(config, pn)

        if args.no_extract and not os.path.isdir(srctree):
            raise DevtoolError("--no-extract specified and source path %s does "
                               "not exist or is not a directory" %
                               srctree)

        recipefile = rd.getVar('FILE')
        appendfile = recipe_to_append(recipefile, config, args.wildcard)
        if os.path.exists(appendfile):
            raise DevtoolError("Another variant of recipe %s is already in your "
                               "workspace (only one variant of a recipe can "
                               "currently be worked on at once)"
                               % pn)

        _check_compatible_recipe(pn, rd)

        # Per-path (".", submodule paths) base revisions and the commits
        # made on top of them
        initial_revs = {}
        commits = {}
        check_commits = False

        if bb.data.inherits_class('kernel-yocto', rd):
            # Current set kernel version
            kernelVersion = rd.getVar('LINUX_VERSION')
            srcdir = rd.getVar('STAGING_KERNEL_DIR')
            kbranch = rd.getVar('KBRANCH')

            staging_kerVer = get_staging_kver(srcdir)
            staging_kbranch = get_staging_kbranch(srcdir)
            # Reuse the work-shared kernel checkout when its version and
            # branch match what the recipe expects
            if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch):
                oe.path.copyhardlinktree(srcdir, srctree)
                workdir = rd.getVar('WORKDIR')
                unpackdir = rd.getVar('UNPACKDIR')
                srcsubdir = rd.getVar('S')
                localfilesdir = os.path.join(srctree, 'oe-local-files')

                # Add locally copied files to gitignore as we add back to the metadata directly
                local_files = oe.recipeutils.get_recipe_local_files(rd)
                srcabspath = os.path.abspath(srcsubdir)
                local_files = [fname for fname in local_files if
                               os.path.exists(os.path.join(unpackdir, fname)) and
                               srcabspath == unpackdir]
                if local_files:
                    with open(os.path.join(srctree, '.gitignore'), 'a+') as f:
                        # NOTE(review): these two adjacent literals concatenate
                        # without a separator ("...linesif you want...") - looks
                        # like a missing '\n'; confirm before changing
                        f.write('# Ignore local files, by default. Remove following lines'
                                'if you want to commit the directory to Git\n')
                        for fname in local_files:
                            f.write('%s\n' % fname)

                task = 'do_configure'
                res = tinfoil.build_targets(pn, task, handle_events=True)

                # Copy .config to workspace
                kconfpath = rd.getVar('B')
                logger.info('Copying kernel config to workspace')
                shutil.copy2(os.path.join(kconfpath, '.config'), srctree)

                # Set this to true, we still need to get initial_rev
                # by parsing the git repo
                args.no_extract = True

        if not args.no_extract:
            initial_revs["."], _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
            if not initial_revs["."]:
                return 1
            logger.info('Source tree extracted to %s' % srctree)

            if os.path.exists(os.path.join(srctree, '.git')):
                # Get list of commits since this revision
                (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree)
                commits["."] = stdout.split()
                check_commits = True
                try:
                    (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree)
                except bb.process.ExecutionError:
                    stdout = ""
                # Record base revision and commit list per submodule as well
                for line in stdout.splitlines():
                    (rev, submodule_path) = line.split()
                    submodule = os.path.relpath(submodule_path, srctree)
                    initial_revs[submodule] = rev
                    (stdout, _) = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=submodule_path)
                    commits[submodule] = stdout.split()
        else:
            if os.path.exists(os.path.join(srctree, '.git')):
                # Check if it's a tree previously extracted by us. This is done
                # by ensuring that devtool-base and args.branch (devtool) exist.
                # The check_commits logic will cause an exception if either one
                # of these doesn't exist
                try:
                    (stdout, _) = bb.process.run('git branch --contains devtool-base', cwd=srctree)
                    bb.process.run('git rev-parse %s' % args.branch, cwd=srctree)
                except bb.process.ExecutionError:
                    stdout = ''
                if stdout:
                    check_commits = True
                for line in stdout.splitlines():
                    if line.startswith('*'):
                        (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree)
                        initial_revs["."] = stdout.rstrip()
                if "." not in initial_revs:
                    # Otherwise, just grab the head revision
                    (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
                    initial_revs["."] = stdout.rstrip()

        branch_patches = {}
        if check_commits:
            # Check if there are override branches
            (stdout, _) = bb.process.run('git branch', cwd=srctree)
            branches = []
            for line in stdout.rstrip().splitlines():
                branchname = line[2:].rstrip()
                if branchname.startswith(override_branch_prefix):
                    branches.append(branchname)
            if branches:
                logger.warning('SRC_URI is conditionally overridden in this recipe, thus several %s* branches have been created, one for each override that makes changes to SRC_URI. It is recommended that you make changes to the %s branch first, then checkout and rebase each %s* branch and update any unique patches there (duplicates on those branches will be ignored by devtool finish/update-recipe)' % (override_branch_prefix, args.branch, override_branch_prefix))
            branches.insert(0, args.branch)
            # Map each branch to the original patch names applied on it,
            # skipping patches already seen on an earlier branch
            seen_patches = []
            for branch in branches:
                branch_patches[branch] = []
                (stdout, _) = bb.process.run('git rev-list devtool-base..%s' % branch, cwd=srctree)
                for sha1 in stdout.splitlines():
                    notes = oe.patch.GitApplyTree.getNotes(srctree, sha1.strip())
                    origpatch = notes.get(oe.patch.GitApplyTree.original_patch)
                    if origpatch and origpatch not in seen_patches:
                        seen_patches.append(origpatch)
                        branch_patches[branch].append(origpatch)

        # Need to grab this here in case the source is within a subdirectory
        srctreebase = srctree
        srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))

        bb.utils.mkdirhier(os.path.dirname(appendfile))
        with open(appendfile, 'w') as f:
            # if not present, add type=git-dependency to the secondary sources
            # (non local files) so they can be extracted correctly when building a recipe after
            # doing a devtool modify on it
            src_uri = rd.getVar('SRC_URI').split()
            src_uri_append = []
            src_uri_remove = []

            # Assume first entry is main source extracted in ${S} so skip it
            src_uri = src_uri[1::]

            # Add "type=git-dependency" to all non local sources
            for url in src_uri:
                if not url.startswith('file://') and not 'type=' in url:
                    src_uri_remove.append(url)
                    src_uri_append.append('%s;type=git-dependency' % url)

            if src_uri_remove:
                f.write('SRC_URI:remove = "%s"\n' % ' '.join(src_uri_remove))
                f.write('SRC_URI:append = " %s"\n\n' % ' '.join(src_uri_append))

            f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n')
            # Local files can be modified/tracked in separate subdir under srctree
            # Mostly useful for packages with S != WORKDIR
            f.write('FILESPATH:prepend := "%s:"\n' %
                    os.path.join(srctreebase, 'oe-local-files'))
            f.write('# srctreebase: %s\n' % srctreebase)

            f.write('\ninherit externalsrc\n')
            f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n')
            f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))

            b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd)
            if b_is_s:
                f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))

            if bb.data.inherits_class('kernel', rd):
                f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout '
                        'do_fetch do_unpack do_kernel_configcheck"\n')
                f.write('\ndo_patch[noexec] = "1"\n')
                f.write('\ndo_configure:append() {\n'
                        ' cp ${B}/.config ${S}/.config.baseline\n'
                        ' ln -sfT ${B}/.config ${S}/.config.new\n'
                        '}\n')
                f.write('\ndo_kernel_configme:prepend() {\n'
                        ' if [ -e ${S}/.config ]; then\n'
                        ' mv ${S}/.config ${S}/.config.old\n'
                        ' fi\n'
                        '}\n')
            if rd.getVarFlag('do_menuconfig', 'task'):
                f.write('\ndo_configure:append() {\n'
                        ' if [ ${@oe.types.boolean(d.getVar("KCONFIG_CONFIG_ENABLE_MENUCONFIG"))} = True ]; then\n'
                        ' cp ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.baseline\n'
                        ' ln -sfT ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.new\n'
                        ' fi\n'
                        '}\n')
            # Record revisions/commits as comments so devtool finish and
            # update-recipe can recover them later
            if initial_revs:
                for name, rev in initial_revs.items():
                    f.write('\n# initial_rev %s: %s\n' % (name, rev))
                    if name in commits:
                        for commit in commits[name]:
                            f.write('# commit %s: %s\n' % (name, commit))
            if branch_patches:
                for branch in branch_patches:
                    if branch == args.branch:
                        continue
                    f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch])))

        update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])

        _add_md5(config, pn, appendfile)

        logger.info('Recipe %s now set up to build from %s' % (pn, srctree))

    finally:
        tinfoil.shutdown()

    return 0
996
997
def rename(args, config, basepath, workspace):
    """Entry point for the devtool 'rename' subcommand.

    Renames a workspace recipe and/or changes its version, moving the
    recipe file, bbappend, default source tree and any recipe-adjacent
    file directories, and rewriting the workspace .devtool_md5 records
    to match. Returns 0 on success, 1 if the recipe fails to parse;
    raises DevtoolError on invalid usage.
    """
    import bb
    import oe.recipeutils

    check_workspace_recipe(workspace, args.recipename)

    # At least one of name / version must actually be changing
    if not (args.newname or args.version):
        raise DevtoolError('You must specify a new name, a version with -V/--version, or both')

    recipefile = workspace[args.recipename]['recipefile']
    if not recipefile:
        raise DevtoolError('devtool rename can only be used where the recipe file itself is in the workspace (e.g. after devtool add)')

    # Validate the new name if we are renaming (not just re-versioning)
    if args.newname and args.newname != args.recipename:
        reason = oe.recipeutils.validate_pn(args.newname)
        if reason:
            raise DevtoolError(reason)
        newname = args.newname
    else:
        newname = args.recipename

    # Extract the version encoded in the bbappend filename (name_version.bbappend)
    append = workspace[args.recipename]['bbappend']
    appendfn = os.path.splitext(os.path.basename(append))[0]
    splitfn = appendfn.split('_')
    if len(splitfn) > 1:
        origfnver = appendfn.split('_')[1]
    else:
        origfnver = ''

    recipefilemd5 = None
    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        bp = rd.getVar('BP')
        bpn = rd.getVar('BPN')
        if newname != args.recipename:
            # Re-evaluate BPN with the new PN to see how it would change
            localdata = rd.createCopy()
            localdata.setVar('PN', newname)
            newbpn = localdata.getVar('BPN')
        else:
            newbpn = bpn
        s = rd.getVar('S', False)
        src_uri = rd.getVar('SRC_URI', False)
        pv = rd.getVar('PV')

        # Correct variable values that refer to the upstream source - these
        # values must stay the same, so if the name/version are changing then
        # we need to fix them up
        new_s = s
        new_src_uri = src_uri
        if newbpn != bpn:
            # ${PN} here is technically almost always incorrect, but people do use it
            new_s = new_s.replace('${BPN}', bpn)
            new_s = new_s.replace('${PN}', bpn)
            new_s = new_s.replace('${BP}', '%s-${PV}' % bpn)
            new_src_uri = new_src_uri.replace('${BPN}', bpn)
            new_src_uri = new_src_uri.replace('${PN}', bpn)
            new_src_uri = new_src_uri.replace('${BP}', '%s-${PV}' % bpn)
        if args.version and origfnver == pv:
            new_s = new_s.replace('${PV}', pv)
            new_s = new_s.replace('${BP}', '${BPN}-%s' % pv)
            new_src_uri = new_src_uri.replace('${PV}', pv)
            new_src_uri = new_src_uri.replace('${BP}', '${BPN}-%s' % pv)
        patchfields = {}
        if new_s != s:
            patchfields['S'] = new_s
        if new_src_uri != src_uri:
            patchfields['SRC_URI'] = new_src_uri
        if patchfields:
            # Record before/after checksums so the .devtool_md5 entries can
            # be corrected further down
            recipefilemd5 = bb.utils.md5_file(recipefile)
            oe.recipeutils.patch_recipe(rd, recipefile, patchfields)
            newrecipefilemd5 = bb.utils.md5_file(recipefile)
    finally:
        tinfoil.shutdown()

    if args.version:
        newver = args.version
    else:
        newver = origfnver

    if newver:
        newappend = '%s_%s.bbappend' % (newname, newver)
        newfile = '%s_%s.bb' % (newname, newver)
    else:
        newappend = '%s.bbappend' % newname
        newfile = '%s.bb' % newname

    oldrecipedir = os.path.dirname(recipefile)
    newrecipedir = os.path.join(config.workspace_path, 'recipes', newname)
    if oldrecipedir != newrecipedir:
        bb.utils.mkdirhier(newrecipedir)

    newappend = os.path.join(os.path.dirname(append), newappend)
    newfile = os.path.join(newrecipedir, newfile)

    # Rename bbappend
    logger.info('Renaming %s to %s' % (append, newappend))
    bb.utils.rename(append, newappend)
    # Rename recipe file
    logger.info('Renaming %s to %s' % (recipefile, newfile))
    bb.utils.rename(recipefile, newfile)

    # Rename source tree if it's the default path
    appendmd5 = None
    if not args.no_srctree:
        srctree = workspace[args.recipename]['srctree']
        if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename):
            newsrctree = os.path.join(config.workspace_path, 'sources', newname)
            logger.info('Renaming %s to %s' % (srctree, newsrctree))
            shutil.move(srctree, newsrctree)
            # Correct any references (basically EXTERNALSRC*) in the .bbappend
            appendmd5 = bb.utils.md5_file(newappend)
            appendlines = []
            with open(newappend, 'r') as f:
                for line in f:
                    appendlines.append(line)
            with open(newappend, 'w') as f:
                for line in appendlines:
                    if srctree in line:
                        line = line.replace(srctree, newsrctree)
                    f.write(line)
            newappendmd5 = bb.utils.md5_file(newappend)

    # Rename the recipe-adjacent ${BPN} directory, if present
    bpndir = None
    newbpndir = None
    if newbpn != bpn:
        bpndir = os.path.join(oldrecipedir, bpn)
        if os.path.exists(bpndir):
            newbpndir = os.path.join(newrecipedir, newbpn)
            logger.info('Renaming %s to %s' % (bpndir, newbpndir))
            shutil.move(bpndir, newbpndir)

    # Rename the recipe-adjacent ${BP} (name-version) directory, if present
    bpdir = None
    newbpdir = None
    if newver != origfnver or newbpn != bpn:
        bpdir = os.path.join(oldrecipedir, bp)
        if os.path.exists(bpdir):
            newbpdir = os.path.join(newrecipedir, '%s-%s' % (newbpn, newver))
            logger.info('Renaming %s to %s' % (bpdir, newbpdir))
            shutil.move(bpdir, newbpdir)

    if oldrecipedir != newrecipedir:
        # Move any stray files and delete the old recipe directory
        for entry in os.listdir(oldrecipedir):
            oldpath = os.path.join(oldrecipedir, entry)
            newpath = os.path.join(newrecipedir, entry)
            logger.info('Renaming %s to %s' % (oldpath, newpath))
            shutil.move(oldpath, newpath)
        os.rmdir(oldrecipedir)

    # Now take care of entries in .devtool_md5
    md5entries = []
    with open(os.path.join(config.workspace_path, '.devtool_md5'), 'r') as f:
        for line in f:
            md5entries.append(line)

    # Workspace-relative prefixes of the directories moved above, used to
    # rewrite per-file entries below (None if the directory wasn't moved)
    if bpndir and newbpndir:
        relbpndir = os.path.relpath(bpndir, config.workspace_path) + '/'
    else:
        relbpndir = None
    if bpdir and newbpdir:
        relbpdir = os.path.relpath(bpdir, config.workspace_path) + '/'
    else:
        relbpdir = None

    with open(os.path.join(config.workspace_path, '.devtool_md5'), 'w') as f:
        for entry in md5entries:
            # Each entry is "recipename|relative path|md5"
            splitentry = entry.rstrip().split('|')
            if len(splitentry) > 2:
                if splitentry[0] == args.recipename:
                    splitentry[0] = newname
                    if splitentry[1] == os.path.relpath(append, config.workspace_path):
                        splitentry[1] = os.path.relpath(newappend, config.workspace_path)
                        if appendmd5 and splitentry[2] == appendmd5:
                            splitentry[2] = newappendmd5
                    elif splitentry[1] == os.path.relpath(recipefile, config.workspace_path):
                        splitentry[1] = os.path.relpath(newfile, config.workspace_path)
                        if recipefilemd5 and splitentry[2] == recipefilemd5:
                            splitentry[2] = newrecipefilemd5
                    elif relbpndir and splitentry[1].startswith(relbpndir):
                        splitentry[1] = os.path.relpath(os.path.join(newbpndir, splitentry[1][len(relbpndir):]), config.workspace_path)
                    elif relbpdir and splitentry[1].startswith(relbpdir):
                        splitentry[1] = os.path.relpath(os.path.join(newbpdir, splitentry[1][len(relbpdir):]), config.workspace_path)
                entry = '|'.join(splitentry) + '\n'
            f.write(entry)
    return 0
1188
1189
def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refresh=False):
    """Get initial and update rev of a recipe. These are the start point of the
    whole patchset and start point for the patches to be re-generated/updated.

    The revisions are parsed from "# initial_rev"/"# commit"/"# patches_*"
    comment lines recorded in the workspace bbappend (recipe_path). If
    initial_rev is given it overrides the recorded initial rev for the
    top-level tree ("."). With force_patch_refresh, recorded commits and
    the devtool-patched comparison are ignored so all patches regenerate.

    Returns a 4-tuple (initial_revs, update_revs, changed_revs, patches)
    where the first three are dicts keyed by subtree name.
    """
    import bb

    # Get current branch
    stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
                               cwd=srctree)
    branchname = stdout.rstrip()

    # Parse initial rev from recipe if not specified
    commits = {}
    patches = []
    initial_revs = {}
    with open(recipe_path, 'r') as f:
        for line in f:
            # Matches comment lines of the form "# <tag> <name>: <hash>"
            pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$'
            match = re.search(pattern, line)
            if match:
                name = match.group(1)
                rev = match.group(2)
                if line.startswith('# initial_rev'):
                    # An explicit initial_rev argument wins for the top-level tree
                    if not (name == "." and initial_rev):
                        initial_revs[name] = rev
                elif line.startswith('# commit') and not force_patch_refresh:
                    if name not in commits:
                        commits[name] = [rev]
                    else:
                        commits[name].append(rev)
                elif line.startswith('# patches_%s:' % branchname):
                    patches = line.split(':')[-1].strip().split(',')

    update_revs = dict(initial_revs)
    changed_revs = {}
    for name, rev in initial_revs.items():
        # Find first actually changed revision
        stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' %
                                   rev, cwd=os.path.join(srctree, name))
        newcommits = stdout.split()
        if name in commits:
            # Advance the update rev past the leading run of commits that
            # are unchanged since the last update
            for i in range(min(len(commits[name]), len(newcommits))):
                if newcommits[i] == commits[name][i]:
                    update_revs[name] = commits[name][i]

        try:
            # Commits not present in the devtool-patched branch are the
            # ones whose patches actually changed
            stdout, _ = bb.process.run('git cherry devtool-patched',
                                       cwd=os.path.join(srctree, name))
        except bb.process.ExecutionError as err:
            # e.g. the devtool-patched ref doesn't exist; treat as unknown
            stdout = None

        if stdout is not None and not force_patch_refresh:
            for line in stdout.splitlines():
                if line.startswith('+ '):
                    rev = line.split()[1]
                    if rev in newcommits:
                        if name not in changed_revs:
                            changed_revs[name] = [rev]
                        else:
                            changed_revs[name].append(rev)

    return initial_revs, update_revs, changed_revs, patches
1252
1253def _remove_file_entries(srcuri, filelist):
1254 """Remove file:// entries from SRC_URI"""
1255 remaining = filelist[:]
1256 entries = []
1257 for fname in filelist:
1258 basename = os.path.basename(fname)
1259 for i in range(len(srcuri)):
1260 if (srcuri[i].startswith('file://') and
1261 os.path.basename(srcuri[i].split(';')[0]) == basename):
1262 entries.append(srcuri[i])
1263 remaining.remove(fname)
1264 srcuri.pop(i)
1265 break
1266 return entries, remaining
1267
1268def _replace_srcuri_entry(srcuri, filename, newentry):
1269 """Replace entry corresponding to specified file with a new entry"""
1270 basename = os.path.basename(filename)
1271 for i in range(len(srcuri)):
1272 if os.path.basename(srcuri[i].split(';')[0]) == basename:
1273 srcuri.pop(i)
1274 srcuri.insert(i, newentry)
1275 break
1276
1277def _remove_source_files(append, files, destpath, no_report_remove=False, dry_run=False):
1278 """Unlink existing patch files"""
1279
1280 dry_run_suffix = ' (dry-run)' if dry_run else ''
1281
1282 for path in files:
1283 if append:
1284 if not destpath:
1285 raise Exception('destpath should be set here')
1286 path = os.path.join(destpath, os.path.basename(path))
1287
1288 if os.path.exists(path):
1289 if not no_report_remove:
1290 logger.info('Removing file %s%s' % (path, dry_run_suffix))
1291 if not dry_run:
1292 # FIXME "git rm" here would be nice if the file in question is
1293 # tracked
1294 # FIXME there's a chance that this file is referred to by
1295 # another recipe, in which case deleting wouldn't be the
1296 # right thing to do
1297 os.remove(path)
1298 # Remove directory if empty
1299 try:
1300 os.rmdir(os.path.dirname(path))
1301 except OSError as ose:
1302 if ose.errno != errno.ENOTEMPTY:
1303 raise
1304
1305
def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None):
    """Export patches from srctree to given location.
       Returns three-tuple of dicts:
         1. updated - patches that already exist in SRCURI
         2. added - new patches that don't exist in SRCURI
         3. removed - patches that exist in SRCURI but not in exported patches
       In each dict the key is the 'basepath' of the URI and value is:
         - for updated and added dicts, a dict with 2 optional keys:
           - 'path': the absolute path to the existing file in recipe space (if any)
           - 'patchdir': the directory in which the patch should be applied (if any)
         - for removed dict, the absolute path to the existing file in recipe space
    """
    import oe.recipeutils
    from oe.patch import GitApplyTree
    updated = OrderedDict()
    added = OrderedDict()
    # Matches an optional "NNNN-" git format-patch sequence prefix
    seqpatch_re = re.compile('^([0-9]{4}-)?(.+)')

    existing_patches = dict((os.path.basename(path), path) for path in
                            oe.recipeutils.get_recipe_patches(rd))
    logger.debug('Existing patches: %s' % existing_patches)

    # Generate patches from Git, exclude local files directory
    patch_pathspec = _git_exclude_path(srctree, 'oe-local-files')
    GitApplyTree.extractPatches(srctree, start_revs, destdir, patch_pathspec)
    for dirpath, dirnames, filenames in os.walk(destdir):
        new_patches = filenames
        reldirpath = os.path.relpath(dirpath, destdir)
        for new_patch in new_patches:
            # Strip numbering from patch names. If it's a git sequence named patch,
            # the numbers might not match up since we are starting from a different
            # revision This does assume that people are using unique shortlog
            # values, but they ought to be anyway...
            new_basename = seqpatch_re.match(new_patch).group(2)
            match_name = None
            for old_patch in existing_patches:
                old_basename = seqpatch_re.match(old_patch).group(2)
                old_basename_splitext = os.path.splitext(old_basename)
                # Existing compressed patches match against their uncompressed name
                if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename:
                    old_patch_noext = os.path.splitext(old_patch)[0]
                    match_name = old_patch_noext
                    break
                elif new_basename == old_basename:
                    match_name = old_patch
                    break
            if match_name:
                # Rename patch files
                if new_patch != match_name:
                    bb.utils.rename(os.path.join(destdir, new_patch),
                              os.path.join(destdir, match_name))
                # Need to pop it off the list now before checking changed_revs
                oldpath = existing_patches.pop(old_patch)
                if changed_revs is not None and dirpath in changed_revs:
                    # Avoid updating patches that have not actually changed
                    with open(os.path.join(dirpath, match_name), 'r') as f:
                        firstlineitems = f.readline().split()
                        # Looking for "From <hash>" line
                        if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
                            if not firstlineitems[1] in changed_revs[dirpath]:
                                continue
                # Recompress if necessary
                if oldpath.endswith(('.gz', '.Z')):
                    bb.process.run(['gzip', match_name], cwd=destdir)
                    if oldpath.endswith('.gz'):
                        match_name += '.gz'
                    else:
                        match_name += '.Z'
                elif oldpath.endswith('.bz2'):
                    bb.process.run(['bzip2', match_name], cwd=destdir)
                    match_name += '.bz2'
                updated[match_name] = {'path' : oldpath}
                if reldirpath != ".":
                    updated[match_name]['patchdir'] = reldirpath
            else:
                added[new_patch] = {}
                if reldirpath != ".":
                    added[new_patch]['patchdir'] = reldirpath

    # Whatever wasn't matched above no longer exists in the patchset
    return (updated, added, existing_patches)
1385
1386
1387def _create_kconfig_diff(srctree, rd, outfile):
1388 """Create a kconfig fragment"""
1389 # Only update config fragment if both config files exist
1390 orig_config = os.path.join(srctree, '.config.baseline')
1391 new_config = os.path.join(srctree, '.config.new')
1392 if os.path.exists(orig_config) and os.path.exists(new_config):
1393 cmd = ['diff', '--new-line-format=%L', '--old-line-format=',
1394 '--unchanged-line-format=', orig_config, new_config]
1395 pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE,
1396 stderr=subprocess.PIPE)
1397 stdout, stderr = pipe.communicate()
1398 if pipe.returncode == 1:
1399 logger.info("Updating config fragment %s" % outfile)
1400 with open(outfile, 'wb') as fobj:
1401 fobj.write(stdout)
1402 elif pipe.returncode == 0:
1403 logger.info("Would remove config fragment %s" % outfile)
1404 if os.path.exists(outfile):
1405 # Remove fragment file in case of empty diff
1406 logger.info("Removing config fragment %s" % outfile)
1407 os.unlink(outfile)
1408 else:
1409 raise bb.process.ExecutionError(cmd, pipe.returncode, stdout, stderr)
1410 return True
1411 return False
1412
1413
def _export_local_files(srctree, rd, destdir, srctreebase):
    """Copy local files from srctree to given location.
    Returns three-tuple of dicts:
      1. updated - files that already exist in SRCURI
      2. added - new files that don't exist in SRCURI
      3. removed - files that exist in SRCURI but not in exported files
    In each dict the key is the 'basepath' of the URI and value is:
      - for updated and added dicts, a dict with 1 optional key:
        - 'path': the absolute path to the existing file in recipe space (if any)
      - for removed dict, the absolute path to the existing file in recipe space
    """
    import oe.recipeutils

    # Find out local files (SRC_URI files that exist in the "recipe space").
    # Local files that reside in srctree are not included in patch generation.
    # Instead they are directly copied over the original source files (in
    # recipe space).
    existing_files = oe.recipeutils.get_recipe_local_files(rd)

    updated = OrderedDict()
    added = OrderedDict()
    removed = OrderedDict()

    # Get current branch and return early with empty lists
    # if on one of the override branches
    # (local files are provided only for the main branch and processing
    # them against lists from recipe overrides will result in mismatches
    # and broken modifications to recipes).
    stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
                               cwd=srctree)
    branchname = stdout.rstrip()
    if branchname.startswith(override_branch_prefix):
        return (updated, added, removed)

    files = _git_modified(srctree)
    #if not files:
    #    files = _ls_tree(srctree)
    for f in files:
        fullfile = os.path.join(srctree, f)
        if os.path.exists(os.path.join(fullfile, ".git")):
            # submodules handled elsewhere
            continue
        if f not in existing_files:
            added[f] = {}
            if os.path.isdir(os.path.join(srctree, f)):
                shutil.copytree(fullfile, os.path.join(destdir, f))
            else:
                shutil.copy2(fullfile, os.path.join(destdir, f))
        elif not os.path.exists(fullfile):
            removed[f] = existing_files[f]
        elif f in existing_files:
            updated[f] = {'path' : existing_files[f]}
            if os.path.isdir(os.path.join(srctree, f)):
                shutil.copytree(fullfile, os.path.join(destdir, f))
            else:
                shutil.copy2(fullfile, os.path.join(destdir, f))

    # Special handling for kernel config
    if bb.data.inherits_class('kernel-yocto', rd):
        fragment_fn = 'devtool-fragment.cfg'
        fragment_path = os.path.join(destdir, fragment_fn)
        if _create_kconfig_diff(srctree, rd, fragment_path):
            if os.path.exists(fragment_path):
                if fragment_fn in removed:
                    del removed[fragment_fn]
                if fragment_fn not in updated and fragment_fn not in added:
                    added[fragment_fn] = {}
            else:
                if fragment_fn in updated:
                    # Empty diff: the fragment is no longer needed, so report
                    # the recipe-space file as removed rather than updated.
                    # (Fixes a NameError: this previously assigned to a
                    # misspelled 'revoved' variable.)
                    removed[fragment_fn] = updated[fragment_fn]
                    del updated[fragment_fn]

    # Special handling for cml1, ccmake, etc bbclasses that generated
    # configuration fragment files that are consumed as source files
    for frag_class, frag_name in [("cml1", "fragment.cfg"), ("ccmake", "site-file.cmake")]:
        if bb.data.inherits_class(frag_class, rd):
            srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name)
            if os.path.exists(srcpath):
                if frag_name in removed:
                    del removed[frag_name]
                if frag_name not in updated:
                    added[frag_name] = {}
                # copy fragment into destdir
                shutil.copy2(srcpath, destdir)

    return (updated, added, removed)
1501
1502
1503def _determine_files_dir(rd):
1504 """Determine the appropriate files directory for a recipe"""
1505 recipedir = rd.getVar('FILE_DIRNAME')
1506 for entry in rd.getVar('FILESPATH').split(':'):
1507 relpth = os.path.relpath(entry, recipedir)
1508 if not os.sep in relpth:
1509 # One (or zero) levels below only, so we don't put anything in machine-specific directories
1510 if os.path.isdir(entry):
1511 return entry
1512 return os.path.join(recipedir, rd.getVar('BPN'))
1513
1514
def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None):
    """Implement the 'srcrev' mode of update-recipe.

    Points the recipe's SRCREV at the current HEAD of srctree and syncs
    changed/added/deleted local (non-patch) files. When appendlayerdir is
    set the changes go into a bbappend in that layer instead of modifying
    the recipe in place. With dry_run_outdir set, output is redirected
    there and nothing is changed on disk.
    Returns a tuple (updated, appendfile, removed_files).
    """
    import bb
    import oe.recipeutils

    dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''

    recipefile = rd.getVar('FILE')
    recipedir = os.path.basename(recipefile)
    logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix))

    # Get original SRCREV
    old_srcrev = rd.getVar('SRCREV') or ''
    if old_srcrev == "INVALID":
        raise DevtoolError('Update mode srcrev is only valid for recipe fetched from an SCM repository')
    # Keyed by subtree name to match the _export_patches() start_revs format
    old_srcrev = {'.': old_srcrev}

    # Get HEAD revision
    try:
        stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
    except bb.process.ExecutionError as err:
        raise DevtoolError('Failed to get HEAD revision in %s: %s' %
                           (srctree, err))
    srcrev = stdout.strip()
    if len(srcrev) != 40:
        raise DevtoolError('Invalid hash returned by git: %s' % stdout)

    destpath = None
    remove_files = []
    patchfields = {}
    patchfields['SRCREV'] = srcrev
    orig_src_uri = rd.getVar('SRC_URI', False) or ''
    srcuri = orig_src_uri.split()
    tempdir = tempfile.mkdtemp(prefix='devtool')
    update_srcuri = False
    appendfile = None
    try:
        local_files_dir = tempfile.mkdtemp(dir=tempdir)
        srctreebase = workspace[recipename]['srctreebase']
        upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
        if not no_remove:
            # Find list of existing patches in recipe file
            patches_dir = tempfile.mkdtemp(dir=tempdir)
            upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev,
                                                  patches_dir)
            logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p)))

            # Remove deleted local files and "overlapping" patches
            remove_files = list(del_f.values()) + [value["path"] for value in upd_p.values() if "path" in value] + [value["path"] for value in del_p.values() if "path" in value]
            if remove_files:
                removedentries = _remove_file_entries(srcuri, remove_files)[0]
                update_srcuri = True

        if appendlayerdir:
            # Write the changes into a bbappend in the requested layer
            files = dict((os.path.join(local_files_dir, key), val) for
                         key, val in list(upd_f.items()) + list(new_f.items()))
            removevalues = {}
            if update_srcuri:
                removevalues = {'SRC_URI': removedentries}
                patchfields['SRC_URI'] = '\\\n '.join(srcuri)
            if dry_run_outdir:
                logger.info('Creating bbappend (dry-run)')
            appendfile, destpath = oe.recipeutils.bbappend_recipe(
                rd, appendlayerdir, files, wildcardver=wildcard_version,
                extralines=patchfields, removevalues=removevalues,
                redirect_output=dry_run_outdir)
        else:
            # Modify the recipe and its files directory directly
            files_dir = _determine_files_dir(rd)
            for basepath, param in upd_f.items():
                path = param['path']
                logger.info('Updating file %s%s' % (basepath, dry_run_suffix))
                if os.path.isabs(basepath):
                    # Original file (probably with subdir pointing inside source tree)
                    # so we do not want to move it, just copy
                    _copy_file(basepath, path, dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
                else:
                    _move_file(os.path.join(local_files_dir, basepath), path,
                               dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
                update_srcuri= True
            for basepath, param in new_f.items():
                path = param['path']
                logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
                _move_file(os.path.join(local_files_dir, basepath),
                           os.path.join(files_dir, basepath),
                           dry_run_outdir=dry_run_outdir,
                           base_outdir=recipedir)
                srcuri.append('file://%s' % basepath)
                update_srcuri = True
            if update_srcuri:
                patchfields['SRC_URI'] = ' '.join(srcuri)
            ret = oe.recipeutils.patch_recipe(rd, recipefile, patchfields, redirect_output=dry_run_outdir)
    finally:
        shutil.rmtree(tempdir)
    if not 'git://' in orig_src_uri:
        logger.info('You will need to update SRC_URI within the recipe to '
                    'point to a git repository where you have pushed your '
                    'changes')

    _remove_source_files(appendlayerdir, remove_files, destpath, no_report_remove, dry_run=dry_run_outdir)
    return True, appendfile, remove_files
1615
def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False):
    """Implement the 'patch' mode of update-recipe.

    Regenerates the recipe's patch and local files from the commits made
    in srctree on top of the recorded initial revision(s). When
    appendlayerdir is set the updates are written to a bbappend in that
    layer; otherwise the recipe and its files directory are updated in
    place. With dry_run_outdir set, nothing is modified on disk.
    Returns a tuple (updated, appendfile, removed_files).
    """
    import bb
    import oe.recipeutils

    recipefile = rd.getVar('FILE')
    recipedir = os.path.dirname(recipefile)
    append = workspace[recipename]['bbappend']
    if not os.path.exists(append):
        raise DevtoolError('unable to find workspace bbappend for recipe %s' %
                           recipename)
    srctreebase = workspace[recipename]['srctreebase']
    # If the recipe's source is a subdirectory of the source tree, patches
    # need a patchdir= parameter pointing back up to the tree base
    relpatchdir = os.path.relpath(srctreebase, srctree)
    if relpatchdir == '.':
        patchdir_params = {}
    else:
        patchdir_params = {'patchdir': relpatchdir}

    def srcuri_entry(basepath, patchdir_params):
        # Build a file:// SRC_URI entry, appending any patchdir parameters
        if patchdir_params:
            paramstr = ';' + ';'.join('%s=%s' % (k,v) for k,v in patchdir_params.items())
        else:
            paramstr = ''
        return 'file://%s%s' % (basepath, paramstr)

    initial_revs, update_revs, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh)
    if not initial_revs:
        raise DevtoolError('Unable to find initial revision - please specify '
                           'it with --initial-rev')

    appendfile = None
    dl_dir = rd.getVar('DL_DIR')
    if not dl_dir.endswith('/'):
        dl_dir += '/'

    dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''

    tempdir = tempfile.mkdtemp(prefix='devtool')
    try:
        local_files_dir = tempfile.mkdtemp(dir=tempdir)
        upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)

        # Get updated patches from source tree
        patches_dir = tempfile.mkdtemp(dir=tempdir)
        upd_p, new_p, _ = _export_patches(srctree, rd, update_revs,
                                          patches_dir, changed_revs)
        # Get all patches from source tree and check if any should be removed
        all_patches_dir = tempfile.mkdtemp(dir=tempdir)
        _, _, del_p = _export_patches(srctree, rd, initial_revs,
                                      all_patches_dir)
        logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p)))
        if filter_patches:
            # Restrict to the patches recorded for this (override) branch
            new_p = OrderedDict()
            upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches)
            del_p = OrderedDict((k,v) for k,v in del_p.items() if k in filter_patches)
        remove_files = []
        if not no_remove:
            # Remove deleted local files and patches
            remove_files = list(del_f.values()) + list(del_p.values())
        updatefiles = False
        updaterecipe = False
        destpath = None
        srcuri = (rd.getVar('SRC_URI', False) or '').split()

        if appendlayerdir:
            # Write the updates into a bbappend in the requested layer
            files = OrderedDict((os.path.join(local_files_dir, key), val) for
                                key, val in list(upd_f.items()) + list(new_f.items()))
            files.update(OrderedDict((os.path.join(patches_dir, key), val) for
                                     key, val in list(upd_p.items()) + list(new_p.items())))

            # Per-file SRC_URI parameters, combining the base patchdir with
            # any per-patch subdirectory
            params = []
            for file, param in files.items():
                patchdir_param = dict(patchdir_params)
                patchdir = param.get('patchdir', ".")
                if patchdir != "." :
                    if patchdir_param:
                        patchdir_param['patchdir'] += patchdir
                    else:
                        patchdir_param['patchdir'] = patchdir
                params.append(patchdir_param)

            if files or remove_files:
                removevalues = None
                if remove_files:
                    removedentries, remaining = _remove_file_entries(
                        srcuri, remove_files)
                    if removedentries or remaining:
                        remaining = [srcuri_entry(os.path.basename(item), patchdir_params) for
                                     item in remaining]
                        removevalues = {'SRC_URI': removedentries + remaining}
                appendfile, destpath = oe.recipeutils.bbappend_recipe(
                    rd, appendlayerdir, files,
                    wildcardver=wildcard_version,
                    removevalues=removevalues,
                    redirect_output=dry_run_outdir,
                    params=params)
            else:
                logger.info('No patches or local source files needed updating')
        else:
            # Update existing files
            files_dir = _determine_files_dir(rd)
            for basepath, param in upd_f.items():
                path = param['path']
                logger.info('Updating file %s' % basepath)
                if os.path.isabs(basepath):
                    # Original file (probably with subdir pointing inside source tree)
                    # so we do not want to move it, just copy
                    _copy_file(basepath, path,
                               dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
                else:
                    _move_file(os.path.join(local_files_dir, basepath), path,
                               dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
                updatefiles = True
            for basepath, param in upd_p.items():
                path = param['path']
                patchdir = param.get('patchdir', ".")
                if patchdir != "." :
                    patchdir_param = dict(patchdir_params)
                    if patchdir_param:
                        patchdir_param['patchdir'] += patchdir
                    else:
                        patchdir_param['patchdir'] = patchdir
                patchfn = os.path.join(patches_dir, patchdir, basepath)
                if os.path.dirname(path) + '/' == dl_dir:
                    # This is a downloaded patch file - we now need to
                    # replace the entry in SRC_URI with our local version
                    logger.info('Replacing remote patch %s with updated local version' % basepath)
                    path = os.path.join(files_dir, basepath)
                    _replace_srcuri_entry(srcuri, basepath, srcuri_entry(basepath, patchdir_param))
                    updaterecipe = True
                else:
                    logger.info('Updating patch %s%s' % (basepath, dry_run_suffix))
                _move_file(patchfn, path,
                           dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
                updatefiles = True
            # Add any new files
            for basepath, param in new_f.items():
                logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
                _move_file(os.path.join(local_files_dir, basepath),
                           os.path.join(files_dir, basepath),
                           dry_run_outdir=dry_run_outdir,
                           base_outdir=recipedir)
                srcuri.append(srcuri_entry(basepath, patchdir_params))
                updaterecipe = True
            for basepath, param in new_p.items():
                patchdir = param.get('patchdir', ".")
                logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix))
                _move_file(os.path.join(patches_dir, patchdir, basepath),
                           os.path.join(files_dir, basepath),
                           dry_run_outdir=dry_run_outdir,
                           base_outdir=recipedir)
                params = dict(patchdir_params)
                if patchdir != "." :
                    if params:
                        params['patchdir'] += patchdir
                    else:
                        params['patchdir'] = patchdir

                srcuri.append(srcuri_entry(basepath, params))
                updaterecipe = True
            # Update recipe, if needed
            if _remove_file_entries(srcuri, remove_files)[0]:
                updaterecipe = True
            if updaterecipe:
                if not dry_run_outdir:
                    logger.info('Updating recipe %s' % os.path.basename(recipefile))
                ret = oe.recipeutils.patch_recipe(rd, recipefile,
                                                  {'SRC_URI': ' '.join(srcuri)},
                                                  redirect_output=dry_run_outdir)
            elif not updatefiles:
                # Neither patches nor recipe were updated
                logger.info('No patches or files need updating')
                return False, None, []
    finally:
        shutil.rmtree(tempdir)

    _remove_source_files(appendlayerdir, remove_files, destpath, no_report_remove, dry_run=dry_run_outdir)
    return True, appendfile, remove_files
1794
def _guess_recipe_update_mode(srctree, rdata):
    """Choose between the 'srcrev' and 'patch' update modes.

    If the recipe fetches from git and the current HEAD of srctree is
    already reachable from the upstream branch, a simple SRCREV bump
    suffices ('srcrev'); otherwise patches must be generated ('patch').
    """
    git_uris = [entry for entry in (rdata.getVar('SRC_URI') or '').split()
                if entry.startswith('git://')]
    if not git_uris:
        return 'patch'
    # Just use the first URI for now
    params = bb.fetch.decodeurl(git_uris[0])[5]
    upstr_branch = params.get('branch', 'master')
    # Check if current branch HEAD is found in upstream branch
    stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
    head_rev = stdout.rstrip()
    stdout, _ = bb.process.run('git branch -r --contains %s' % head_rev,
                               cwd=srctree)
    remote_branches = [br.strip() for br in stdout.splitlines()]
    if 'origin/' + upstr_branch in remote_branches:
        return 'srcrev'

    return 'patch'
1816
def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False):
    """Update the recipe (or a bbappend) from the workspace source tree.

    Handles the main branch plus any devtool override branches (unless
    no_overrides is set), dispatching to _update_recipe_srcrev() or
    _update_recipe_patch() depending on mode ('auto' guesses).

    Returns a tuple (anyupdated, appendfile, allremoved) where anyupdated
    indicates whether anything was changed, appendfile is the path to any
    bbappend written (or None) and allremoved lists removed files.
    """
    srctree = workspace[recipename]['srctree']
    if mode == 'auto':
        mode = _guess_recipe_update_mode(srctree, rd)

    override_branches = []
    mainbranch = None
    startbranch = None
    if not no_overrides:
        stdout, _ = bb.process.run('git branch', cwd=srctree)
        other_branches = []
        for line in stdout.splitlines():
            # 'git branch' output prefixes each name with two characters
            # ('* ' for the current branch)
            branchname = line[2:]
            if line.startswith('* '):
                if 'HEAD' in line:
                    raise DevtoolError('Detached HEAD - please check out a branch, e.g., "devtool"')
                startbranch = branchname
            if branchname.startswith(override_branch_prefix):
                override_branches.append(branchname)
            else:
                other_branches.append(branchname)

        if override_branches:
            logger.debug('_update_recipe: override branches: %s' % override_branches)
            logger.debug('_update_recipe: other branches: %s' % other_branches)
            if startbranch.startswith(override_branch_prefix):
                # We're on an override branch - figure out which branch is
                # the main one
                if len(other_branches) == 1:
                    # BUGFIX: the single non-override branch is at index 0;
                    # indexing with [1] raised IndexError here
                    mainbranch = other_branches[0]
                else:
                    raise DevtoolError('Unable to determine main branch - please check out the main branch in source tree first')
            else:
                mainbranch = startbranch

    checkedout = None
    anyupdated = False
    appendfile = None
    allremoved = []
    if override_branches:
        logger.info('Handling main branch (%s)...' % mainbranch)
        if startbranch != mainbranch:
            bb.process.run('git checkout %s' % mainbranch, cwd=srctree)
        checkedout = mainbranch
    try:
        # When there are no overrides, mainbranch is None and the loop runs
        # once against the currently checked-out branch
        branchlist = [mainbranch] + override_branches
        for branch in branchlist:
            crd = bb.data.createCopy(rd)
            if branch != mainbranch:
                logger.info('Handling branch %s...' % branch)
                override = branch[len(override_branch_prefix):]
                crd.appendVar('OVERRIDES', ':%s' % override)
                bb.process.run('git checkout %s' % branch, cwd=srctree)
                checkedout = branch

            if mode == 'srcrev':
                updated, appendf, removed = _update_recipe_srcrev(recipename, workspace, srctree, crd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir)
            elif mode == 'patch':
                updated, appendf, removed = _update_recipe_patch(recipename, workspace, srctree, crd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir, force_patch_refresh)
            else:
                raise DevtoolError('update_recipe: invalid mode %s' % mode)
            if updated:
                anyupdated = True
            if appendf:
                appendfile = appendf
            allremoved.extend(removed)
    finally:
        # Always restore the branch the user started on
        if startbranch and checkedout != startbranch:
            bb.process.run('git checkout %s' % startbranch, cwd=srctree)

    return anyupdated, appendfile, allremoved
1886
def update_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'update-recipe' subcommand"""
    check_workspace_recipe(workspace, args.recipename)

    if args.append:
        # Validate the bbappend destination layer up front
        if not os.path.exists(args.append):
            raise DevtoolError('bbappend destination layer directory "%s" '
                               'does not exist' % args.append)
        if not os.path.exists(os.path.join(args.append, 'conf', 'layer.conf')):
            raise DevtoolError('conf/layer.conf not found in bbappend '
                               'destination layer "%s"' % args.append)

    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        # Keep the TemporaryDirectory object referenced for the duration of
        # the update so the directory is not cleaned up prematurely
        dry_run_output = None
        dry_run_outdir = None
        if args.dry_run:
            dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
            dry_run_outdir = dry_run_output.name

        updated, _, _ = _update_recipe(args.recipename, workspace, rd,
                                       args.mode, args.append,
                                       args.wildcard_version, args.no_remove,
                                       args.initial_rev,
                                       dry_run_outdir=dry_run_outdir,
                                       no_overrides=args.no_overrides,
                                       force_patch_refresh=args.force_patch_refresh)

        if updated:
            rf = rd.getVar('FILE')
            if rf.startswith(config.workspace_path):
                logger.warning('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % rf)
    finally:
        tinfoil.shutdown()

    return 0
1921
1922
def status(args, config, basepath, workspace):
    """Entry point for the devtool 'status' subcommand"""
    if not workspace:
        logger.info('No recipes currently in your workspace - you can use "devtool modify" to work on an existing recipe or "devtool add" to add a new one')
        return 0
    # One line per recipe, sorted by name, with the recipe file shown in
    # parentheses when the recipe itself lives in the workspace
    for recipe, details in sorted(workspace.items()):
        recipefile = details['recipefile']
        recipestr = ' (%s)' % recipefile if recipefile else ''
        print("%s: %s%s" % (recipe, details['srctree'], recipestr))
    return 0
1936
1937
def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
    """Reset one or more recipes

    For each recipe this (1) optionally runs 'bitbake -c clean' (plus any
    BBCLASSEXTEND variants), (2) removes the workspace bbappend, (3) moves
    any workspace recipe files into the workspace 'attic' directory, and
    (4) removes, preserves or leaves the source tree depending on
    remove_work and where the tree lives.
    """
    import oe.path

    def clean_preferred_provider(pn, layerconf_path):
        """Remove PREFERRED_PROVIDER from layer.conf'"""
        import re
        layerconf_file = os.path.join(layerconf_path, 'conf', 'layer.conf')
        new_layerconf_file = os.path.join(layerconf_path, 'conf', '.layer.conf')
        pprovider_found = False
        # Copy every line except matching PREFERRED_PROVIDER_* assignments
        # into a hidden temp file, then swap it in only if one was removed
        with open(layerconf_file, 'r') as f:
            lines = f.readlines()
        with open(new_layerconf_file, 'a') as nf:
            for line in lines:
                pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + pn + r'"$'
                if not re.match(pprovider_exp, line):
                    nf.write(line)
                else:
                    pprovider_found = True
        if pprovider_found:
            shutil.move(new_layerconf_file, layerconf_file)
        else:
            os.remove(new_layerconf_file)

    if recipes and not no_clean:
        if len(recipes) == 1:
            logger.info('Cleaning sysroot for recipe %s...' % recipes[0])
        else:
            logger.info('Cleaning sysroot for recipes %s...' % ', '.join(recipes))
        # If the recipe file itself was created in the workspace, and
        # it uses BBCLASSEXTEND, then we need to also clean the other
        # variants
        targets = []
        for recipe in recipes:
            targets.append(recipe)
            recipefile = workspace[recipe]['recipefile']
            if recipefile and os.path.exists(recipefile):
                targets.extend(get_bbclassextend_targets(recipefile, recipe))
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake -c clean %s' % ' '.join(targets))
        except bb.process.ExecutionError as e:
            raise DevtoolError('Command \'%s\' failed, output:\n%s\nIf you '
                                'wish, you may specify -n/--no-clean to '
                                'skip running this command when resetting' %
                                (e.command, e.stdout))

    for pn in recipes:
        _check_preserve(config, pn)

        appendfile = workspace[pn]['bbappend']
        if os.path.exists(appendfile):
            # This shouldn't happen, but is possible if devtool errored out prior to
            # writing the md5 file. We need to delete this here or the recipe won't
            # actually be reset
            os.remove(appendfile)

        preservepath = os.path.join(config.workspace_path, 'attic', pn, pn)
        # Recursively move files out of origdir into the attic, depth-first,
        # then remove the (now empty) directory tree
        def preservedir(origdir):
            if os.path.exists(origdir):
                for root, dirs, files in os.walk(origdir):
                    for fn in files:
                        logger.warning('Preserving %s in %s' % (fn, preservepath))
                        _move_file(os.path.join(origdir, fn),
                                   os.path.join(preservepath, fn))
                    for dn in dirs:
                        preservedir(os.path.join(root, dn))
                os.rmdir(origdir)

        recipefile = workspace[pn]['recipefile']
        if recipefile and oe.path.is_path_parent(config.workspace_path, recipefile):
            # This should always be true if recipefile is set, but just in case
            preservedir(os.path.dirname(recipefile))
        # We don't automatically create this dir next to appends, but the user can
        preservedir(os.path.join(config.workspace_path, 'appends', pn))

        srctreebase = workspace[pn]['srctreebase']
        if os.path.isdir(srctreebase):
            if os.listdir(srctreebase):
                if remove_work:
                    logger.info('-r argument used on %s, removing source tree.'
                                ' You will lose any unsaved work' %pn)
                    shutil.rmtree(srctreebase)
                else:
                    # We don't want to risk wiping out any work in progress
                    if srctreebase.startswith(os.path.join(config.workspace_path, 'sources')):
                        from datetime import datetime
                        # Timestamped destination so repeated resets don't collide
                        preservesrc = os.path.join(config.workspace_path, 'attic', 'sources', "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S")))
                        logger.info('Preserving source tree in %s\nIf you no '
                                    'longer need it then please delete it manually.\n'
                                    'It is also possible to reuse it via devtool source tree argument.'
                                    % preservesrc)
                        bb.utils.mkdirhier(os.path.dirname(preservesrc))
                        shutil.move(srctreebase, preservesrc)
                    else:
                        logger.info('Leaving source tree %s as-is; if you no '
                                    'longer need it then please delete it manually'
                                    % srctreebase)
            else:
                # This is unlikely, but if it's empty we can just remove it
                os.rmdir(srctreebase)

        clean_preferred_provider(pn, config.workspace_path)
2040
def reset(args, config, basepath, workspace):
    """Entry point for the devtool 'reset' subcommand"""
    import bb
    import shutil

    # Validate the combination of recipe names and -a/--all before acting
    if args.recipename:
        if args.all:
            raise DevtoolError("Recipe cannot be specified if -a/--all is used")
        for recipe in args.recipename:
            check_workspace_recipe(workspace, recipe, checksrc=False)
    elif not args.all:
        raise DevtoolError("Recipe must be specified, or specify -a/--all to "
                           "reset all recipes")

    recipes = list(workspace.keys()) if args.all else args.recipename
    _reset(recipes, args.no_clean, args.remove_work, config, basepath, workspace)

    return 0
2065
2066
2067def _get_layer(layername, d):
2068 """Determine the base layer path for the specified layer name/path"""
2069 layerdirs = d.getVar('BBLAYERS').split()
2070 layers = {} # {basename: layer_paths}
2071 for p in layerdirs:
2072 bn = os.path.basename(p)
2073 if bn not in layers:
2074 layers[bn] = [p]
2075 else:
2076 layers[bn].append(p)
2077 # Provide some shortcuts
2078 if layername.lower() in ['oe-core', 'openembedded-core']:
2079 layername = 'meta'
2080 layer_paths = layers.get(layername, None)
2081 if not layer_paths:
2082 return os.path.abspath(layername)
2083 elif len(layer_paths) == 1:
2084 return os.path.abspath(layer_paths[0])
2085 else:
2086 # multiple layers having the same base name
2087 logger.warning("Multiple layers have the same base name '%s', use the first one '%s'." % (layername, layer_paths[0]))
2088 logger.warning("Consider using path instead of base name to specify layer:\n\t\t%s" % '\n\t\t'.join(layer_paths))
2089 return os.path.abspath(layer_paths[0])
2090
2091
def finish(args, config, basepath, workspace):
    """Entry point for the devtool 'finish' subcommand

    Pushes committed changes for the recipe to the destination layer
    (updating the recipe in place, moving it out of the workspace, or
    writing a bbappend as appropriate) and then resets the recipe.
    Returns 0 on success, 1 if the recipe could not be parsed.
    """
    import bb
    import oe.recipeutils

    check_workspace_recipe(workspace, args.recipename)

    dry_run_suffix = ' (dry-run)' if args.dry_run else ''

    # Grab the equivalent of COREBASE without having to initialise tinfoil
    corebasedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))

    srctree = workspace[args.recipename]['srctree']
    check_git_repo_op(srctree, [corebasedir])
    dirty = check_git_repo_dirty(srctree)
    if dirty:
        if args.force:
            logger.warning('Source tree is not clean, continuing as requested by -f/--force')
        else:
            raise DevtoolError('Source tree is not clean:\n\n%s\nEnsure you have committed your changes or use -f/--force if you are sure there\'s nothing that needs to be committed' % dirty)

    no_clean = args.no_clean
    remove_work=args.remove_work
    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        destlayerdir = _get_layer(args.destination, tinfoil.config_data)
        recipefile = rd.getVar('FILE')
        recipedir = os.path.dirname(recipefile)
        origlayerdir = oe.recipeutils.find_layerdir(recipefile)

        if not os.path.isdir(destlayerdir):
            raise DevtoolError('Unable to find layer or directory matching "%s"' % args.destination)

        if os.path.abspath(destlayerdir) == config.workspace_path:
            raise DevtoolError('"%s" specifies the workspace layer - that is not a valid destination' % args.destination)

        # If it's an upgrade, grab the original path
        # (these marker comments are written into the workspace bbappend by
        # other devtool commands - presumably devtool upgrade; TODO confirm)
        origpath = None
        origfilelist = None
        append = workspace[args.recipename]['bbappend']
        with open(append, 'r') as f:
            for line in f:
                if line.startswith('# original_path:'):
                    origpath = line.split(':')[1].strip()
                elif line.startswith('# original_files:'):
                    origfilelist = line.split(':')[1].split()

        destlayerbasedir = oe.recipeutils.find_layerdir(destlayerdir)

        if origlayerdir == config.workspace_path:
            # Recipe file itself is in workspace, update it there first
            appendlayerdir = None
            origrelpath = None
            if origpath:
                origlayerpath = oe.recipeutils.find_layerdir(origpath)
                if origlayerpath:
                    origrelpath = os.path.relpath(origpath, origlayerpath)
            destpath = oe.recipeutils.get_bbfile_path(rd, destlayerdir, origrelpath)
            if not destpath:
                raise DevtoolError("Unable to determine destination layer path - check that %s specifies an actual layer and %s/conf/layer.conf specifies BBFILES. You may also need to specify a more complete path." % (args.destination, destlayerdir))
            # Warn if the layer isn't in bblayers.conf (the code to create a bbappend will do this in other cases)
            layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()]
            if not os.path.abspath(destlayerbasedir) in layerdirs:
                bb.warn('Specified destination layer is not currently enabled in bblayers.conf, so the %s recipe will now be unavailable in your current configuration until you add the layer there' % args.recipename)

        elif destlayerdir == origlayerdir:
            # Same layer, update the original recipe
            appendlayerdir = None
            destpath = None
        else:
            # Create/update a bbappend in the specified layer
            appendlayerdir = destlayerdir
            destpath = None

        # Actually update the recipe / bbappend
        removing_original = (origpath and origfilelist and oe.recipeutils.find_layerdir(origpath) == destlayerbasedir)
        dry_run_output = None
        dry_run_outdir = None
        if args.dry_run:
            dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
            dry_run_outdir = dry_run_output.name
        updated, appendfile, removed = _update_recipe(args.recipename, workspace, rd, args.mode, appendlayerdir, wildcard_version=True, no_remove=False, no_report_remove=removing_original, initial_rev=args.initial_rev, dry_run_outdir=dry_run_outdir, no_overrides=args.no_overrides, force_patch_refresh=args.force_patch_refresh)
        removed = [os.path.relpath(pth, recipedir) for pth in removed]

        # Remove any old files in the case of an upgrade
        if removing_original:
            for fn in origfilelist:
                fnp = os.path.join(origpath, fn)
                if fn in removed or not os.path.exists(os.path.join(recipedir, fn)):
                    logger.info('Removing file %s%s' % (fnp, dry_run_suffix))
                    if not args.dry_run:
                        try:
                            os.remove(fnp)
                        except FileNotFoundError:
                            pass

        if origlayerdir == config.workspace_path and destpath:
            # Recipe file itself is in the workspace - need to move it and any
            # associated files to the specified layer
            no_clean = True
            logger.info('Moving recipe file to %s%s' % (destpath, dry_run_suffix))
            for root, _, files in os.walk(recipedir):
                for fn in files:
                    srcpath = os.path.join(root, fn)
                    relpth = os.path.relpath(os.path.dirname(srcpath), recipedir)
                    destdir = os.path.abspath(os.path.join(destpath, relpth))
                    destfp = os.path.join(destdir, fn)
                    _move_file(srcpath, destfp, dry_run_outdir=dry_run_outdir, base_outdir=destpath)

        if dry_run_outdir:
            # Dry-run: show a unified diff of what would have been written,
            # matching each output file to the file it would replace
            import difflib
            comparelist = []
            for root, _, files in os.walk(dry_run_outdir):
                for fn in files:
                    outf = os.path.join(root, fn)
                    relf = os.path.relpath(outf, dry_run_outdir)
                    logger.debug('dry-run: output file %s' % relf)
                    if fn.endswith('.bb'):
                        if origfilelist and origpath and destpath:
                            # Need to match this up with the pre-upgrade recipe file
                            for origf in origfilelist:
                                if origf.endswith('.bb'):
                                    comparelist.append((os.path.abspath(os.path.join(origpath, origf)),
                                                        outf,
                                                        os.path.abspath(os.path.join(destpath, relf))))
                                    break
                        else:
                            # Compare to the existing recipe
                            comparelist.append((recipefile, outf, recipefile))
                    elif fn.endswith('.bbappend'):
                        if appendfile:
                            if os.path.exists(appendfile):
                                comparelist.append((appendfile, outf, appendfile))
                            else:
                                comparelist.append((None, outf, appendfile))
                    else:
                        if destpath:
                            recipedest = destpath
                        elif appendfile:
                            recipedest = os.path.dirname(appendfile)
                        else:
                            recipedest = os.path.dirname(recipefile)
                        destfp = os.path.join(recipedest, relf)
                        if os.path.exists(destfp):
                            comparelist.append((destfp, outf, destfp))
            output = ''
            for oldfile, newfile, newfileshow in comparelist:
                if oldfile:
                    with open(oldfile, 'r') as f:
                        oldlines = f.readlines()
                else:
                    # New file - diff against /dev/null
                    oldfile = '/dev/null'
                    oldlines = []
                with open(newfile, 'r') as f:
                    newlines = f.readlines()
                if not newfileshow:
                    newfileshow = newfile
                diff = difflib.unified_diff(oldlines, newlines, oldfile, newfileshow)
                difflines = list(diff)
                if difflines:
                    output += ''.join(difflines)
            if output:
                logger.info('Diff of changed files:\n%s' % output)
    finally:
        tinfoil.shutdown()

    # Everything else has succeeded, we can now reset
    if args.dry_run:
        logger.info('Resetting recipe (dry-run)')
    else:
        _reset([args.recipename], no_clean=no_clean, remove_work=remove_work, config=config, basepath=basepath, workspace=workspace)

    return 0
2269
2270
def get_default_srctree(config, recipename=''):
    """Get the default srctree path

    Returns <parent>/sources/<recipename> (or just <parent>/sources when no
    recipename is given), where <parent> comes from the
    'default_source_parent_dir' config option, defaulting to the workspace.
    """
    parent = config.get('General', 'default_source_parent_dir', config.workspace_path)
    if not recipename:
        return os.path.join(parent, 'sources')
    return os.path.join(parent, 'sources', recipename)
2278
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin

    Adds the add, modify, extract, sync, rename, update-recipe, status,
    reset and finish subcommands to the supplied argparse subparsers.
    """

    defsrctree = get_default_srctree(context.config)

    # devtool add: create a new recipe in the workspace
    parser_add = subparsers.add_parser('add', help='Add a new recipe',
                                       description='Adds a new recipe to the workspace to build a specified source tree. Can optionally fetch a remote URI and unpack it to create the source tree.',
                                       group='starting', order=100)
    parser_add.add_argument('recipename', nargs='?', help='Name for new recipe to add (just name - no version, path or extension). If not specified, will attempt to auto-detect it.')
    parser_add.add_argument('srctree', nargs='?', help='Path to external source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
    parser_add.add_argument('fetchuri', nargs='?', help='Fetch the specified URI and extract it to create the source tree')
    group = parser_add.add_mutually_exclusive_group()
    group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI')
    parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true")
    parser_add.add_argument('--no-pypi', help='Do not inherit pypi class', action="store_true")
    parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
    parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
    group = parser_add.add_mutually_exclusive_group()
    group.add_argument('--srcrev', '-S', help='Source revision to fetch if fetching from an SCM such as git (default latest)')
    group.add_argument('--autorev', '-a', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true")
    parser_add.add_argument('--srcbranch', '-B', help='Branch in source repository if fetching from an SCM such as git (default master)')
    parser_add.add_argument('--binary', '-b', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure). Useful with binary packages e.g. RPMs.', action='store_true')
    parser_add.add_argument('--also-native', help='Also add native variant (i.e. support building recipe for the build host as well as the target machine)', action='store_true')
    parser_add.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR')
    parser_add.add_argument('--mirrors', help='Enable PREMIRRORS and MIRRORS for source tree fetching (disable by default).', action="store_true")
    parser_add.add_argument('--provides', '-p', help='Specify an alias for the item provided by the recipe. E.g. virtual/libgl')
    parser_add.set_defaults(func=add, fixed_setup=context.fixed_setup)

    # devtool modify: work on the source of an existing recipe
    parser_modify = subparsers.add_parser('modify', help='Modify the source for an existing recipe',
                                          description='Sets up the build environment to modify the source for an existing recipe. The default behaviour is to extract the source being fetched by the recipe into a git tree so you can work on it; alternatively if you already have your own pre-prepared source tree you can specify -n/--no-extract.',
                                          group='starting', order=90)
    parser_modify.add_argument('recipename', help='Name of existing recipe to edit (just name - no version, path or extension)')
    parser_modify.add_argument('srctree', nargs='?', help='Path to external source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
    parser_modify.add_argument('--wildcard', '-w', action="store_true", help='Use wildcard for unversioned bbappend')
    group = parser_modify.add_mutually_exclusive_group()
    group.add_argument('--extract', '-x', action="store_true", help='Extract source for recipe (default)')
    group.add_argument('--no-extract', '-n', action="store_true", help='Do not extract source, expect it to exist')
    group = parser_modify.add_mutually_exclusive_group()
    group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")')
    parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
    parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true")
    parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup)

    # devtool extract: just extract a recipe's source
    parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe',
                                           description='Extracts the source for an existing recipe',
                                           group='advanced')
    parser_extract.add_argument('recipename', help='Name of recipe to extract the source for')
    parser_extract.add_argument('srctree', help='Path to where to extract the source tree')
    parser_extract.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (default "%(default)s")')
    parser_extract.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
    parser_extract.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser_extract.set_defaults(func=extract, fixed_setup=context.fixed_setup)

    # devtool sync: refresh a previously extracted source tree
    parser_sync = subparsers.add_parser('sync', help='Synchronize the source tree for an existing recipe',
                                        description='Synchronize the previously extracted source tree for an existing recipe',
                                        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
                                        group='advanced')
    parser_sync.add_argument('recipename', help='Name of recipe to sync the source for')
    parser_sync.add_argument('srctree', help='Path to the source tree')
    parser_sync.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout')
    parser_sync.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser_sync.set_defaults(func=sync, fixed_setup=context.fixed_setup)

    # devtool rename: rename a workspace recipe file
    parser_rename = subparsers.add_parser('rename', help='Rename a recipe file in the workspace',
                                          description='Renames the recipe file for a recipe in the workspace, changing the name or version part or both, ensuring that all references within the workspace are updated at the same time. Only works when the recipe file itself is in the workspace, e.g. after devtool add. Particularly useful when devtool add did not automatically determine the correct name.',
                                          group='working', order=10)
    parser_rename.add_argument('recipename', help='Current name of recipe to rename')
    parser_rename.add_argument('newname', nargs='?', help='New name for recipe (optional, not needed if you only want to change the version)')
    parser_rename.add_argument('--version', '-V', help='Change the version (NOTE: this does not change the version fetched by the recipe, just the version in the recipe file name)')
    parser_rename.add_argument('--no-srctree', '-s', action='store_true', help='Do not rename the source tree directory (if the default source tree path has been used) - keeping the old name may be desirable if there are internal/other external references to this path')
    parser_rename.set_defaults(func=rename)

    # devtool update-recipe: write source tree changes back to the recipe
    parser_update_recipe = subparsers.add_parser('update-recipe', help='Apply changes from external source tree to recipe',
                                                 description='Applies changes from external source tree to a recipe (updating/adding/removing patches as necessary, or by updating SRCREV). Note that these changes need to have been committed to the git repository in order to be recognised.',
                                                 group='working', order=-90)
    parser_update_recipe.add_argument('recipename', help='Name of recipe to update')
    parser_update_recipe.add_argument('--mode', '-m', choices=['patch', 'srcrev', 'auto'], default='auto', help='Update mode (where %(metavar)s is %(choices)s; default is %(default)s)', metavar='MODE')
    parser_update_recipe.add_argument('--initial-rev', help='Override starting revision for patches')
    parser_update_recipe.add_argument('--append', '-a', help='Write changes to a bbappend in the specified layer instead of the recipe', metavar='LAYERDIR')
    parser_update_recipe.add_argument('--wildcard-version', '-w', help='In conjunction with -a/--append, use a wildcard to make the bbappend apply to any recipe version', action='store_true')
    parser_update_recipe.add_argument('--no-remove', '-n', action="store_true", help='Don\'t remove patches, only add or update')
    parser_update_recipe.add_argument('--no-overrides', '-O', action="store_true", help='Do not handle other override branches (if they exist)')
    parser_update_recipe.add_argument('--dry-run', '-N', action="store_true", help='Dry-run (just report changes instead of writing them)')
    parser_update_recipe.add_argument('--force-patch-refresh', action="store_true", help='Update patches in the layer even if they have not been modified (useful for refreshing patch context)')
    parser_update_recipe.set_defaults(func=update_recipe)

    # devtool status: list recipes in the workspace
    parser_status = subparsers.add_parser('status', help='Show workspace status',
                                          description='Lists recipes currently in your workspace and the paths to their respective external source trees',
                                          group='info', order=100)
    parser_status.set_defaults(func=status)

    # devtool reset: remove recipe(s) from the workspace
    parser_reset = subparsers.add_parser('reset', help='Remove a recipe from your workspace',
                                         description='Removes the specified recipe(s) from your workspace (resetting its state back to that defined by the metadata).',
                                         group='working', order=-100)
    parser_reset.add_argument('recipename', nargs='*', help='Recipe to reset')
    parser_reset.add_argument('--all', '-a', action="store_true", help='Reset all recipes (clear workspace)')
    parser_reset.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
    parser_reset.add_argument('--remove-work', '-r', action="store_true", help='Clean the sources directory along with append')
    parser_reset.set_defaults(func=reset)

    # devtool finish: push changes to a layer and reset the recipe
    parser_finish = subparsers.add_parser('finish', help='Finish working on a recipe in your workspace',
                                          description='Pushes any committed changes to the specified recipe to the specified layer and removes it from your workspace. Roughly equivalent to an update-recipe followed by reset, except the update-recipe step will do the "right thing" depending on the recipe and the destination layer specified. Note that your changes must have been committed to the git repository in order to be recognised.',
                                          group='working', order=-100)
    parser_finish.add_argument('recipename', help='Recipe to finish')
    parser_finish.add_argument('destination', help='Layer/path to put recipe into. Can be the name of a layer configured in your bblayers.conf, the path to the base of a layer, or a partial path inside a layer. %(prog)s will attempt to complete the path based on the layer\'s structure.')
    parser_finish.add_argument('--mode', '-m', choices=['patch', 'srcrev', 'auto'], default='auto', help='Update mode (where %(metavar)s is %(choices)s; default is %(default)s)', metavar='MODE')
    parser_finish.add_argument('--initial-rev', help='Override starting revision for patches')
    parser_finish.add_argument('--force', '-f', action="store_true", help='Force continuing even if there are uncommitted changes in the source tree repository')
    parser_finish.add_argument('--remove-work', '-r', action="store_true", help='Clean the sources directory under workspace')
    parser_finish.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
    parser_finish.add_argument('--no-overrides', '-O', action="store_true", help='Do not handle other override branches (if they exist)')
    parser_finish.add_argument('--dry-run', '-N', action="store_true", help='Dry-run (just report changes instead of writing them)')
    parser_finish.add_argument('--force-patch-refresh', action="store_true", help='Update patches in the layer even if they have not been modified (useful for refreshing patch context)')
    parser_finish.set_defaults(func=finish)
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py
deleted file mode 100644
index fa5b8ef3c7..0000000000
--- a/scripts/lib/devtool/upgrade.py
+++ /dev/null
@@ -1,693 +0,0 @@
1# Development tool - upgrade command plugin
2#
3# Copyright (C) 2014-2017 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool upgrade plugin"""
8
9import os
10import sys
11import re
12import shutil
13import tempfile
14import logging
15import argparse
16import scriptutils
17import errno
18import bb
19
20devtool_path = os.path.dirname(os.path.realpath(__file__)) + '/../../../meta/lib'
21sys.path = sys.path + [devtool_path]
22
23import oe.recipeutils
24from devtool import standard
25from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build, update_unlockedsigs, check_prerelease_version
26
27logger = logging.getLogger('devtool')
28
def _run(cmd, cwd=''):
    """Run *cmd* via bb.process.run (in *cwd* if given); returns (stdout, stderr)."""
    logger.debug("Running command %s> %s", cwd, cmd)
    return bb.process.run('%s' % cmd, cwd=cwd)
32
def _get_srctree(tmpdir):
    """Return the single source subdirectory unpacked under *tmpdir*.

    Raises DevtoolError unless exactly one candidate subdirectory is found
    by scriptutils.filter_src_subdirs().
    """
    candidates = scriptutils.filter_src_subdirs(tmpdir)
    if len(candidates) != 1:
        raise DevtoolError("Cannot determine where the source tree is after unpacking in {}: {}".format(tmpdir, candidates))
    return os.path.join(tmpdir, candidates[0])
41
def _copy_source_code(orig, dest):
    """Move every tracked file from *orig* into *dest*, creating any needed directories."""
    for relpath in standard._ls_tree(orig):
        target = os.path.join(dest, relpath)
        bb.utils.mkdirhier(os.path.dirname(target))
        shutil.move(os.path.join(orig, relpath), target)
48
49def _remove_patch_dirs(recipefolder):
50 for root, dirs, files in os.walk(recipefolder):
51 for d in dirs:
52 shutil.rmtree(os.path.join(root,d))
53
def _recipe_contains(rd, var):
    """Return True if *var* is set from the recipe's own directory.

    A variable counts as "contained" when get_var_files() reports it is set
    in a file located alongside (or below) the recipe file itself, rather
    than e.g. in a class or configuration file.
    """
    recipefile = rd.getVar('FILE')
    recipedir = os.path.dirname(recipefile) + os.sep
    varfiles = oe.recipeutils.get_var_files(recipefile, [var], rd)
    return any(fn and fn.startswith(recipedir) for fn in varfiles.values())
61
def _rename_recipe_dirs(oldpv, newpv, path):
    """Rename directories and .inc files under *path* that embed *oldpv*,
    substituting *newpv* in their names."""
    for root, subdirs, files in os.walk(path):
        # Directories carrying the version in their name
        for dirname in subdirs:
            if oldpv in dirname:
                renamed = dirname.replace(oldpv, newpv)
                if renamed != dirname:
                    shutil.move(os.path.join(path, dirname), os.path.join(path, renamed))
        # .inc files with the version in their name (unusual, but possible)
        for fname in files:
            if fname.endswith('.inc') and oldpv in fname:
                renamed = fname.replace(oldpv, newpv)
                if renamed != fname:
                    bb.utils.rename(os.path.join(path, fname),
                                    os.path.join(path, renamed))
78
79def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path):
80 oldrecipe = os.path.basename(oldrecipe)
81 if oldrecipe.endswith('_%s.bb' % oldpv):
82 newrecipe = '%s_%s.bb' % (bpn, newpv)
83 if oldrecipe != newrecipe:
84 shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe))
85 else:
86 newrecipe = oldrecipe
87 return os.path.join(path, newrecipe)
88
def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path):
    """Rename versioned dirs/.inc files under *path*, then the recipe file
    itself; returns the new recipe file's full path."""
    _rename_recipe_dirs(oldpv, newpv, path)
    return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path)
92
def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d):
    """Write a workspace bbappend for the upgraded recipe file *rc*.

    The append points the build at the external source tree *srctree* via
    externalsrc and records the initial revisions (*revs*) and *copied*
    files so devtool can later update or reset the recipe.  Returns the
    path of the bbappend that was written.  Raises DevtoolError if *rc*
    does not exist.
    """
    if not os.path.exists(rc):
        raise DevtoolError("bbappend not created because %s does not exist" % rc)

    appendpath = os.path.join(workspace, 'appends')
    if not os.path.exists(appendpath):
        bb.utils.mkdirhier(appendpath)

    brf = os.path.basename(os.path.splitext(rc)[0]) # rc basename

    srctree = os.path.abspath(srctree)
    pn = d.getVar('PN')
    af = os.path.join(appendpath, '%s.bbappend' % brf)
    with open(af, 'w') as f:
        f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n')
        # Local files can be modified/tracked in separate subdir under srctree
        # Mostly useful for packages with S != WORKDIR
        f.write('FILESPATH:prepend := "%s:"\n' %
                os.path.join(srctreebase, 'oe-local-files'))
        f.write('# srctreebase: %s\n' % srctreebase)
        f.write('inherit externalsrc\n')
        # FIX: the two adjacent string literals previously lacked a separating
        # space, producing "affectingmultiple" in the generated comment
        f.write(('# NOTE: We use pn- overrides here to avoid affecting '
                 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
        f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))
        b_is_s = use_external_build(same_dir, no_same_dir, d)
        if b_is_s:
            f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
        f.write('\n')
        if revs:
            for name, rev in revs.items():
                f.write('# initial_rev %s: %s\n' % (name, rev))
        if copied:
            f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE')))
            f.write('# original_files: %s\n' % ' '.join(copied))
    return af
129
130def _cleanup_on_error(rd, srctree):
131 if os.path.exists(rd):
132 shutil.rmtree(rd)
133 srctree = os.path.abspath(srctree)
134 if os.path.exists(srctree):
135 shutil.rmtree(srctree)
136
def _upgrade_error(e, rd, srctree, keep_failure=False, extramsg=None):
    """Report a failed upgrade and terminate the process.

    Unless *keep_failure* is set, the partial recipe dir *rd* and source
    tree *srctree* are removed first.  Always calls sys.exit(1).
    """
    if not keep_failure:
        _cleanup_on_error(rd, srctree)
    for msg in filter(None, (e, extramsg)):
        logger.error(msg)
    if keep_failure:
        logger.info('Preserving failed upgrade files (--keep-failure)')
    sys.exit(1)
146
147def _get_uri(rd):
148 srcuris = rd.getVar('SRC_URI').split()
149 if not len(srcuris):
150 raise DevtoolError('SRC_URI not found on recipe')
151 # Get first non-local entry in SRC_URI - usually by convention it's
152 # the first entry, but not always!
153 srcuri = None
154 for entry in srcuris:
155 if not entry.startswith('file://'):
156 srcuri = entry
157 break
158 if not srcuri:
159 raise DevtoolError('Unable to find non-local entry in SRC_URI')
160 srcrev = '${AUTOREV}'
161 if '://' in srcuri:
162 # Fetch a URL
163 rev_re = re.compile(';rev=([^;]+)')
164 res = rev_re.search(srcuri)
165 if res:
166 srcrev = res.group(1)
167 srcuri = rev_re.sub('', srcuri)
168 return srcuri, srcrev
169
def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd):
    """Extract sources of a recipe with a new version.

    For git/gitsm recipes the existing clone in *srctree* is updated to
    *srcrev*; for other URIs a new tarball is fetched, unpacked and its
    contents swapped into *srctree*.  Unless *no_patch* is set, the
    devtool-patched branch (and any devtool-override-* branches) are
    rebased onto the new base.

    Returns a tuple (revs, checksums, srcbranch, srcsubdir_rel):
      revs          -- dict of relative path -> HEAD revision per repo/submodule
      checksums     -- fetch checksums (empty dict for git recipes)
      srcbranch     -- resolved source branch (may have been auto-detected)
      srcsubdir_rel -- subdirectory the source unpacks into, relative to WORKDIR
    """

    def __run(cmd):
        """Simple wrapper which calls _run with srctree as cwd"""
        return _run(cmd, srctree)

    # Copy the datastore so PV can be overridden without affecting rd
    crd = rd.createCopy()

    pv = crd.getVar('PV')
    crd.setVar('PV', newpv)

    tmpsrctree = None
    uri, rev = _get_uri(crd)
    if srcrev:
        rev = srcrev
    paths = [srctree]
    if uri.startswith('git://') or uri.startswith('gitsm://'):
        __run('git fetch')
        __run('git checkout %s' % rev)
        __run('git tag -f devtool-base-new')
        __run('git submodule update --recursive')
        __run('git submodule foreach \'git tag -f devtool-base-new\'')
        (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'')
        paths += [os.path.join(srctree, p) for p in stdout.splitlines()]
        # git fetches have no tarball checksums
        checksums = {}
        _, _, _, _, _, params = bb.fetch2.decodeurl(uri)
        srcsubdir_rel = params.get('destsuffix', 'git')
        if not srcbranch:
            # Auto-detect the branch containing srcrev
            check_branch, check_branch_err = __run('git branch -r --contains %s' % srcrev)
            get_branch = [x.strip() for x in check_branch.splitlines()]
            # Remove HEAD reference point and drop remote prefix
            get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
            if len(get_branch) == 1:
                # If srcrev is on only ONE branch, then use that branch
                srcbranch = get_branch[0]
            elif 'main' in get_branch:
                # If srcrev is on multiple branches, then choose 'main' if it is one of them
                srcbranch = 'main'
            elif 'master' in get_branch:
                # Otherwise choose 'master' if it is one of the branches
                srcbranch = 'master'
            else:
                # If get_branch contains more than one objects, then display error and exit.
                mbrch = '\n ' + '\n '.join(get_branch)
                raise DevtoolError('Revision %s was found on multiple branches: %s\nPlease provide the correct branch in the devtool command with "--srcbranch" or "-B" option.' % (srcrev, mbrch))
    else:
        # Non-git URI: start a fresh branch from the old base, fetch and
        # unpack the new tarball, then replace the tree contents
        __run('git checkout devtool-base -b devtool-%s' % newpv)

        tmpdir = tempfile.mkdtemp(prefix='devtool')
        try:
            checksums, ftmpdir = scriptutils.fetch_url(tinfoil, uri, rev, tmpdir, logger, preserve_tmp=keep_temp)
        except scriptutils.FetchUrlFailure as e:
            raise DevtoolError(e)

        if ftmpdir and keep_temp:
            logger.info('Fetch temp directory is %s' % ftmpdir)

        tmpsrctree = _get_srctree(tmpdir)
        srctree = os.path.abspath(srctree)
        srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir)

        # Delete all sources so we ensure no stray files are left over
        for item in os.listdir(srctree):
            if item in ['.git', 'oe-local-files']:
                continue
            itempath = os.path.join(srctree, item)
            if os.path.isdir(itempath):
                shutil.rmtree(itempath)
            else:
                os.remove(itempath)

        # Copy in new ones
        _copy_source_code(tmpsrctree, srctree)

        (stdout,_) = __run('git ls-files --modified --others')
        filelist = stdout.splitlines()
        pbar = bb.ui.knotty.BBProgress('Adding changed files', len(filelist))
        pbar.start()
        # Batch the git adds to keep command lines a manageable length
        batchsize = 100
        for i in range(0, len(filelist), batchsize):
            batch = filelist[i:i+batchsize]
            __run('git add -f -A %s' % ' '.join(['"%s"' % item for item in batch]))
            pbar.update(i)
        pbar.finish()

        useroptions = []
        oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
        __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv))
        __run('git tag -f devtool-base-%s' % newpv)

    # Record the new base revision for the main repo and each submodule
    revs = {}
    for path in paths:
        (stdout, _) = _run('git rev-parse HEAD', cwd=path)
        revs[os.path.relpath(path, srctree)] = stdout.rstrip()

    if no_patch:
        patches = oe.recipeutils.get_recipe_patches(crd)
        if patches:
            logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches]))
    else:
        for path in paths:
            _run('git checkout devtool-patched -b %s' % branch, cwd=path)
            (stdout, _) = _run('git branch --list devtool-override-*', cwd=path)
            branches_to_rebase = [branch] + stdout.split()
            target_branch = revs[os.path.relpath(path, srctree)]

            # There is a bug (or feature?) in git rebase where if a commit with
            # a note is fully rebased away by being part of an old commit, the
            # note is still attached to the old commit. Avoid this by making
            # sure all old devtool related commits have a note attached to them
            # (this assumes git config notes.rewriteMode is set to ignore).
            (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
            for rev in stdout.splitlines():
                if not oe.patch.GitApplyTree.getNotes(path, rev):
                    oe.patch.GitApplyTree.addNote(path, rev, "dummy")

            for b in branches_to_rebase:
                logger.info("Rebasing {} onto {}".format(b, target_branch))
                _run('git checkout %s' % b, cwd=path)
                try:
                    _run('git rebase %s' % target_branch, cwd=path)
                except bb.process.ExecutionError as e:
                    if 'conflict' in e.stdout:
                        logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
                        _run('git rebase --abort', cwd=path)
                    else:
                        logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))

            # Remove any dummy notes added above.
            (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
            for rev in stdout.splitlines():
                oe.patch.GitApplyTree.removeNote(path, rev, "dummy")

            _run('git checkout %s' % branch, cwd=path)

    if tmpsrctree:
        if keep_temp:
            logger.info('Preserving temporary directory %s' % tmpsrctree)
        else:
            shutil.rmtree(tmpsrctree)
            if tmpdir != tmpsrctree:
                shutil.rmtree(tmpdir)

    return (revs, checksums, srcbranch, srcsubdir_rel)
315
316def _add_license_diff_to_recipe(path, diff):
317 notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'.
318# The following is the difference between the old and the new license text.
319# Please update the LICENSE value if needed, and summarize the changes in
320# the commit message via 'License-Update:' tag.
321# (example: 'License-Update: copyright years updated.')
322#
323# The changes:
324#
325"""
326 commented_diff = "\n".join(["# {}".format(l) for l in diff.split('\n')])
327 with open(path, 'rb') as f:
328 orig_content = f.read()
329 with open(path, 'wb') as f:
330 f.write(notice_text.encode())
331 f.write(commented_diff.encode())
332 f.write("\n#\n\n".encode())
333 f.write(orig_content)
334
def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
    """Creates the new recipe under workspace.

    Copies the recipe files into the workspace, renames them for *newpv*,
    then patches the new recipe: PV/SRCREV/SRC_URI branch updates, checksum
    refresh, S adjustment if the unpack subdirectory changed, and
    LIC_FILES_CHKSUM updates when *license_diff* is set.

    Returns (fullpath, copied): the path of the new recipe file and the
    list of files copied into the workspace.  On a parse failure of the
    upgraded recipe, _upgrade_error() is invoked (which exits).
    """

    bpn = rd.getVar('BPN')
    path = os.path.join(workspace, 'recipes', bpn)
    bb.utils.mkdirhier(path)
    copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True)
    if not copied:
        raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn)
    logger.debug('Copied %s to %s' % (copied, path))

    oldpv = rd.getVar('PV')
    if not newpv:
        newpv = oldpv
    origpath = rd.getVar('FILE')
    fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path)
    logger.debug('Upgraded %s => %s' % (origpath, fullpath))

    # newvalues maps variable names (or SRC_URI[...] flags) to new values;
    # a value of None means "remove this setting" for patch_recipe()
    newvalues = {}
    if _recipe_contains(rd, 'PV') and newpv != oldpv:
        newvalues['PV'] = newpv

    if srcrev:
        newvalues['SRCREV'] = srcrev

    if srcbranch:
        src_uri = oe.recipeutils.split_var_value(rd.getVar('SRC_URI', False) or '')
        changed = False
        replacing = True
        new_src_uri = []
        for entry in src_uri:
            try:
                scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
            except bb.fetch2.MalformedUrl as e:
                raise DevtoolError("Could not decode SRC_URI: {}".format(e))
            if replacing and scheme in ['git', 'gitsm']:
                branch = params.get('branch', 'master')
                if rd.expand(branch) != srcbranch:
                    # Handle case where branch is set through a variable
                    res = re.match(r'\$\{([^}@]+)\}', branch)
                    if res:
                        newvalues[res.group(1)] = srcbranch
                        # We know we won't change SRC_URI now, so break out
                        break
                    else:
                        params['branch'] = srcbranch
                        entry = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
                        changed = True
                replacing = False
            new_src_uri.append(entry)
        if changed:
            newvalues['SRC_URI'] = ' '.join(new_src_uri)

    # Always drop PR on upgrade
    newvalues['PR'] = None

    # Work out which SRC_URI entries have changed in case the entry uses a name
    crd = rd.createCopy()
    crd.setVar('PV', newpv)
    for var, value in newvalues.items():
        crd.setVar(var, value)
    old_src_uri = (rd.getVar('SRC_URI') or '').split()
    new_src_uri = (crd.getVar('SRC_URI') or '').split()
    newnames = []
    addnames = []
    for newentry in new_src_uri:
        _, _, _, _, _, params = bb.fetch2.decodeurl(newentry)
        if 'name' in params:
            newnames.append(params['name'])
            if newentry not in old_src_uri:
                addnames.append(params['name'])
    # Find what's been set in the original recipe
    oldnames = []
    oldsums = []
    noname = False
    for varflag in rd.getVarFlags('SRC_URI'):
        for checksum in checksums:
            if varflag.endswith('.' + checksum):
                name = varflag.rsplit('.', 1)[0]
                if name not in oldnames:
                    oldnames.append(name)
                oldsums.append(checksum)
            elif varflag == checksum:
                noname = True
                oldsums.append(checksum)
    # Even if SRC_URI has named entries it doesn't have to actually use the name
    if noname and addnames and addnames[0] not in oldnames:
        addnames = []
    # Drop any old names (the name actually might include ${PV})
    for name in oldnames:
        if name not in newnames:
            for checksum in oldsums:
                newvalues['SRC_URI[%s.%s]' % (name, checksum)] = None

    nameprefix = '%s.' % addnames[0] if addnames else ''

    # md5sum is deprecated, remove any traces of it. If it was the only old
    # checksum, then replace it with the default checksums.
    if 'md5sum' in oldsums:
        newvalues['SRC_URI[%smd5sum]' % nameprefix] = None
        oldsums.remove('md5sum')
        if not oldsums:
            oldsums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]

    for checksum in oldsums:
        newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum]

    if srcsubdir_new != srcsubdir_old:
        s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR'))
        s_subdir_new = os.path.relpath(os.path.abspath(crd.getVar('S')), crd.getVar('WORKDIR'))
        if srcsubdir_old == s_subdir_old and srcsubdir_new != s_subdir_new:
            # Subdir for old extracted source matches what S points to (it should!)
            # but subdir for new extracted source doesn't match what S will be
            newvalues['S'] = '${WORKDIR}/%s' % srcsubdir_new.replace(newpv, '${PV}')
            if crd.expand(newvalues['S']) == crd.expand('${WORKDIR}/${BP}'):
                # It's the default, drop it
                # FIXME what if S is being set in a .inc?
                newvalues['S'] = None
                logger.info('Source subdirectory has changed, dropping S value since it now matches the default ("${WORKDIR}/${BP}")')
            else:
                logger.info('Source subdirectory has changed, updating S value')

    if license_diff:
        # Rebuild LIC_FILES_CHKSUM from the actually-computed md5s
        newlicchksum = " ".join(["file://{}".format(l['path']) +
                                 (";beginline={}".format(l['beginline']) if l['beginline'] else "") +
                                 (";endline={}".format(l['endline']) if l['endline'] else "") +
                                 (";md5={}".format(l['actual_md5'])) for l in new_licenses])
        newvalues["LIC_FILES_CHKSUM"] = newlicchksum
        _add_license_diff_to_recipe(fullpath, license_diff)

    tinfoil.modified_files()
    try:
        rd = tinfoil.parse_recipe_file(fullpath, False)
    except bb.tinfoil.TinfoilCommandFailed as e:
        _upgrade_error(e, os.path.dirname(fullpath), srctree, keep_failure, 'Parsing of upgraded recipe failed')
    oe.recipeutils.patch_recipe(rd, fullpath, newvalues)

    return fullpath, copied
472
473
def _check_git_config():
    """Verify git user.name and user.email are configured.

    Raises DevtoolError listing the missing settings, since rebases
    performed during upgrade would otherwise fail.
    """
    def getconfig(name):
        try:
            return bb.process.run('git config %s' % name)[0].strip()
        except bb.process.ExecutionError as e:
            # Exit code 1 just means the key is unset
            if e.exitcode != 1:
                raise
            return None

    configerr = []
    if not getconfig('user.name'):
        configerr.append('Please set your name using:\n git config --global user.name')
    if not getconfig('user.email'):
        configerr.append('Please set your email using:\n git config --global user.email')
    if configerr:
        raise DevtoolError('Your git configuration is incomplete which will prevent rebases from working:\n' + '\n'.join(configerr))
494
def _extract_licenses(srcpath, recipe_licenses):
    """Parse LIC_FILES_CHKSUM entries and read the referenced files.

    Returns a list of dicts, one per entry, holding the declared path, md5
    and begin/end line range, the license text within that range, and the
    md5 actually computed over it ('actual_md5').
    """
    import hashlib
    licenses = []
    for url in recipe_licenses.split():
        (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
        lic = {
            'path': path,
            'md5': parm.get('md5', ''),
            'beginline': int(parm['beginline']) if 'beginline' in parm else 0,
            'endline': int(parm['endline']) if 'endline' in parm else 0,
            'text': [],
        }
        actual_md5 = hashlib.md5()
        with open(os.path.join(srcpath, path), 'rb') as f:
            for lineno, line in enumerate(f, start=1):
                # endline == 0 means "to end of file"
                if lineno >= lic['beginline'] and ((lineno <= lic['endline']) or not lic['endline']):
                    lic['text'].append(line.decode(errors='ignore'))
                    actual_md5.update(line)
        lic['actual_md5'] = actual_md5.hexdigest()
        licenses.append(lic)
    return licenses
520
521def _generate_license_diff(old_licenses, new_licenses):
522 need_diff = False
523 for l in new_licenses:
524 if l['md5'] != l['actual_md5']:
525 need_diff = True
526 break
527 if need_diff == False:
528 return None
529
530 import difflib
531 diff = ''
532 for old, new in zip(old_licenses, new_licenses):
533 for line in difflib.unified_diff(old['text'], new['text'], old['path'], new['path']):
534 diff = diff + line
535 return diff
536
def upgrade(args, config, basepath, workspace):
    """Entry point for the devtool 'upgrade' subcommand.

    Extracts the current and upgraded source trees, creates the upgraded
    recipe in the workspace and writes the bbappend pointing the build at
    the extracted source.  Returns 0 on success, 1 if the recipe could not
    be parsed; raises DevtoolError for user errors.
    """

    if args.recipename in workspace:
        raise DevtoolError("recipe %s is already in your workspace" % args.recipename)
    if args.srcbranch and not args.srcrev:
        # FIX: this message previously had "% args.recipename" applied to a
        # format string with no placeholder, raising TypeError instead of
        # the intended DevtoolError
        raise DevtoolError("If you specify --srcbranch/-B then you must use --srcrev/-S to specify the revision")

    _check_git_config()

    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        pn = rd.getVar('PN')
        if pn != args.recipename:
            logger.info('Mapping %s to %s' % (args.recipename, pn))
        if pn in workspace:
            raise DevtoolError("recipe %s is already in your workspace" % pn)

        if args.srctree:
            srctree = os.path.abspath(args.srctree)
        else:
            srctree = standard.get_default_srctree(config, pn)

        srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))

        # try to automatically discover latest version and revision if not provided on command line
        if not args.version and not args.srcrev:
            version_info = oe.recipeutils.get_recipe_upstream_version(rd)
            if version_info['version'] and not version_info['version'].endswith("new-commits-available"):
                args.version = version_info['version']
            if version_info['revision']:
                args.srcrev = version_info['revision']
        if not args.version and not args.srcrev:
            raise DevtoolError("Automatic discovery of latest version/revision failed - you must provide a version using the --version/-V option, or for recipes that fetch from an SCM such as git, the --srcrev/-S option.")

        standard._check_compatible_recipe(pn, rd)
        old_srcrev = rd.getVar('SRCREV')
        if old_srcrev == 'INVALID':
            old_srcrev = None
        if old_srcrev and not args.srcrev:
            raise DevtoolError("Recipe specifies a SRCREV value; you must specify a new one when upgrading")
        old_ver = rd.getVar('PV')
        if old_ver == args.version and old_srcrev == args.srcrev:
            raise DevtoolError("Current and upgrade versions are the same version")
        if args.version:
            if bb.utils.vercmp_string(args.version, old_ver) < 0:
                logger.warning('Upgrade version %s compares as less than the current version %s. If you are using a package feed for on-target upgrades or providing this recipe for general consumption, then you should increment PE in the recipe (or if there is no current PE value set, set it to "1")' % (args.version, old_ver))
            check_prerelease_version(args.version, 'devtool upgrade')

        rf = None
        license_diff = None
        try:
            logger.info('Extracting current version source...')
            rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
            old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
            logger.info('Extracting upgraded version source...')
            rev2, checksums, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
                                                        args.srcrev, args.srcbranch, args.branch, args.keep_temp,
                                                        tinfoil, rd)
            new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
            license_diff = _generate_license_diff(old_licenses, new_licenses)
            rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
        except (bb.process.CmdError, DevtoolError) as e:
            # _upgrade_error cleans up (unless --keep-failure) and exits
            recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('BPN'))
            _upgrade_error(e, recipedir, srctree, args.keep_failure)
        standard._add_md5(config, pn, os.path.dirname(rf))

        af = _write_append(rf, srctree, srctree_s, args.same_dir, args.no_same_dir, rev2,
                           copied, config.workspace_path, rd)
        standard._add_md5(config, pn, af)

        update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])

        logger.info('Upgraded source extracted to %s' % srctree)
        logger.info('New recipe is %s' % rf)
        if license_diff:
            logger.info('License checksums have been updated in the new recipe; please refer to it for the difference between the old and the new license texts.')
        preferred_version = rd.getVar('PREFERRED_VERSION_%s' % rd.getVar('PN'))
        if preferred_version:
            logger.warning('Version is pinned to %s via PREFERRED_VERSION; it may need adjustment to match the new version before any further steps are taken' % preferred_version)
    finally:
        tinfoil.shutdown()
    return 0
624
def latest_version(args, config, basepath, workspace):
    """Entry point for the devtool 'latest_version' subcommand"""
    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1
        version_info = oe.recipeutils.get_recipe_upstream_version(rd)
        # "new-commits-available" is an indication that upstream never issues version tags
        if version_info['version'].endswith("new-commits-available"):
            logger.info("Latest commit: {}".format(version_info['revision']))
        else:
            logger.info("Current version: {}".format(version_info['current_version']))
            logger.info("Latest version: {}".format(version_info['version']))
            if version_info['revision']:
                logger.info("Latest version's commit: {}".format(version_info['revision']))
    finally:
        tinfoil.shutdown()
    return 0
644
def check_upgrade_status(args, config, basepath, workspace):
    """Entry point for the devtool 'check-upgrade-status' subcommand.

    Prints a row per recipe needing an upgrade (all recipes with --all).
    """
    if not args.recipe:
        logger.info("Checking the upstream status for all recipes may take a few minutes")
    for result in oe.recipeutils.get_recipe_upgrade_status(args.recipe):
        # result: (pn, update_status, current, latest, maintainer, latest_commit, no_update_reason)
        pn, status, current, latest, maintainer, commit, reason = result
        if not args.all and status == 'MATCH':
            continue
        if status == 'UPDATE':
            shown_status = latest if not latest.endswith("new-commits-available") else "new commits"
        else:
            shown_status = status
        print("{:25} {:15} {:15} {} {} {}".format(pn,
                                                  current,
                                                  shown_status,
                                                  maintainer,
                                                  commit if commit != 'N/A' else "",
                                                  "cannot be updated due to: %s" % reason if reason else ""))
658
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin.

    Adds the 'upgrade', 'latest-version' and 'check-upgrade-status'
    subcommands, wiring each to its entry-point function above.
    """

    defsrctree = standard.get_default_srctree(context.config)

    # devtool upgrade
    parser_upgrade = subparsers.add_parser('upgrade', help='Upgrade an existing recipe',
                                           description='Upgrades an existing recipe to a new upstream version. Puts the upgraded recipe file into the workspace along with any associated files, and extracts the source tree to a specified location (in case patches need rebasing or adding to as a result of the upgrade).',
                                           group='starting')
    parser_upgrade.add_argument('recipename', help='Name of recipe to upgrade (just name - no version, path or extension)')
    parser_upgrade.add_argument('srctree', nargs='?', help='Path to where to extract the source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
    parser_upgrade.add_argument('--version', '-V', help='Version to upgrade to (PV). If omitted, latest upstream version will be determined and used, if possible.')
    parser_upgrade.add_argument('--srcrev', '-S', help='Source revision to upgrade to (useful when fetching from an SCM such as git)')
    parser_upgrade.add_argument('--srcbranch', '-B', help='Branch in source repository containing the revision to use (if fetching from an SCM such as git)')
    parser_upgrade.add_argument('--branch', '-b', default="devtool", help='Name for new development branch to checkout (default "%(default)s")')
    parser_upgrade.add_argument('--no-patch', action="store_true", help='Do not apply patches from the recipe to the new source code')
    parser_upgrade.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
    # --same-dir and --no-same-dir are mutually exclusive
    group = parser_upgrade.add_mutually_exclusive_group()
    group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser_upgrade.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser_upgrade.add_argument('--keep-failure', action="store_true", help='Keep failed upgrade recipe and associated files (for debugging)')
    parser_upgrade.set_defaults(func=upgrade, fixed_setup=context.fixed_setup)

    # devtool latest-version
    parser_latest_version = subparsers.add_parser('latest-version', help='Report the latest version of an existing recipe',
                                                  description='Queries the upstream server for what the latest upstream release is (for git, tags are checked, for tarballs, a list of them is obtained, and one with the highest version number is reported)',
                                                  group='info')
    parser_latest_version.add_argument('recipename', help='Name of recipe to query (just name - no version, path or extension)')
    parser_latest_version.set_defaults(func=latest_version)

    # devtool check-upgrade-status
    parser_check_upgrade_status = subparsers.add_parser('check-upgrade-status', help="Report upgradability for multiple (or all) recipes",
                                                        description="Prints a table of recipes together with versions currently provided by recipes, and latest upstream versions, when there is a later version available",
                                                        group='info')
    parser_check_upgrade_status.add_argument('recipe', help='Name of the recipe to report (omit to report upgrade info for all recipes)', nargs='*')
    parser_check_upgrade_status.add_argument('--all', '-a', help='Show all recipes, not just recipes needing upgrade', action="store_true")
    parser_check_upgrade_status.set_defaults(func=check_upgrade_status)
diff --git a/scripts/lib/devtool/utilcmds.py b/scripts/lib/devtool/utilcmds.py
deleted file mode 100644
index 964817766b..0000000000
--- a/scripts/lib/devtool/utilcmds.py
+++ /dev/null
@@ -1,242 +0,0 @@
1# Development tool - utility commands plugin
2#
3# Copyright (C) 2015-2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8"""Devtool utility plugins"""
9
10import os
11import sys
12import shutil
13import tempfile
14import logging
15import argparse
16import subprocess
17import scriptutils
18from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
19from devtool import parse_recipe
20
21logger = logging.getLogger('devtool')
22
23def _find_recipe_path(args, config, basepath, workspace):
24 if args.any_recipe:
25 logger.warning('-a/--any-recipe option is now always active, and thus the option will be removed in a future release')
26 if args.recipename in workspace:
27 recipefile = workspace[args.recipename]['recipefile']
28 else:
29 recipefile = None
30 if not recipefile:
31 tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
32 try:
33 rd = parse_recipe(config, tinfoil, args.recipename, True)
34 if not rd:
35 raise DevtoolError("Failed to find specified recipe")
36 recipefile = rd.getVar('FILE')
37 finally:
38 tinfoil.shutdown()
39 return recipefile
40
41
def find_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'find-recipe' subcommand"""
    # Resolve the recipe file (workspace first, then cache) and print it
    print(_find_recipe_path(args, config, basepath, workspace))
    return 0
47
48
def edit_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'edit-recipe' subcommand"""
    # Locate the recipe file and hand it to the user's editor (EDITOR var)
    recipefile = _find_recipe_path(args, config, basepath, workspace)
    return scriptutils.run_editor(recipefile, logger)
52
53
def configure_help(args, config, basepath, workspace):
    """Entry point for the devtool 'configure-help' subcommand

    Builds an explanatory header describing how the recipe's configure
    step is currently invoked (autotools, cmake, or a custom
    do_configure), then runs the underlying configure tool's own help
    output and pipes everything through a pager.

    Returns 0 on success, 1 if the recipe cannot be parsed, or the exit
    code of the configure/cmake help invocation if it fails.
    Raises DevtoolError when do_configure is disabled or there is
    nothing to display.
    """
    import oe.utils

    # Only works on recipes currently in the devtool workspace
    check_workspace_recipe(workspace, args.recipename)
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            return 1
        # Snapshot everything we need from the datastore before shutting
        # tinfoil down, so no rd access happens outside this try block
        b = rd.getVar('B')
        s = rd.getVar('S')
        configurescript = os.path.join(s, 'configure')
        # Configure counts as disabled if the task is noexec or absent entirely
        confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or [])
        configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '')
        extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '')
        extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '')
        do_configure = rd.getVar('do_configure') or ''
        # Unexpanded version, used to detect a literal ${EXTRA_OECONF} reference
        do_configure_noexpand = rd.getVar('do_configure', False) or ''
        packageconfig = rd.getVarFlags('PACKAGECONFIG') or []
        # NOTE(review): 'bb' is not imported at module level in this file;
        # presumably it is made importable/visible by devtool's environment
        # setup — confirm before reusing this pattern elsewhere
        autotools = bb.data.inherits_class('autotools', rd) and ('oe_runconf' in do_configure or 'autotools_do_configure' in do_configure)
        cmake = bb.data.inherits_class('cmake', rd) and ('cmake_do_configure' in do_configure)
        cmake_do_configure = rd.getVar('cmake_do_configure')
        pn = rd.getVar('PN')
    finally:
        tinfoil.shutdown()

    # 'doc' is a doc-string flag, not a real PACKAGECONFIG option
    if 'doc' in packageconfig:
        del packageconfig['doc']

    # Autotools recipes may not have generated ./configure yet; best-effort
    # run of do_configure to create it (failure deliberately ignored)
    if autotools and not os.path.exists(configurescript):
        logger.info('Running do_configure to generate configure script')
        try:
            stdout, _ = exec_build_env_command(config.init_path, basepath,
                       'bitbake -c configure %s' % args.recipename,
                       stderr=subprocess.STDOUT)
        except bb.process.ExecutionError:
            pass

    if confdisabled or do_configure.strip() in ('', ':'):
        raise DevtoolError("do_configure task has been disabled for this recipe")
    elif args.no_pager and not os.path.exists(configurescript):
        raise DevtoolError("No configure script found and no other information to display")
    else:
        # Build the explanatory header text for the detected build system
        configopttext = ''
        if autotools and configureopts:
            configopttext = '''
Arguments currently passed to the configure script:

%s

Some of those are fixed.''' % (configureopts + ' ' + extra_oeconf)
            if extra_oeconf:
                configopttext += ''' The ones that are specified through EXTRA_OECONF (which you can change or add to easily):

%s''' % extra_oeconf

        elif cmake:
            # Scan cmake_do_configure for the actual 'cmake ...' command,
            # joining backslash-continued lines into one string
            in_cmake = False
            cmake_cmd = ''
            for line in cmake_do_configure.splitlines():
                if in_cmake:
                    cmake_cmd = cmake_cmd + ' ' + line.strip().rstrip('\\')
                    if not line.endswith('\\'):
                        break
                if line.lstrip().startswith('cmake '):
                    cmake_cmd = line.strip().rstrip('\\')
                    if line.endswith('\\'):
                        in_cmake = True
                    else:
                        break
            if cmake_cmd:
                configopttext = '''
The current cmake command line:

%s

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (oe.utils.squashspaces(cmake_cmd), extra_oecmake)
            else:
                # No recognisable cmake invocation; show the whole function
                configopttext = '''
The current implementation of cmake_do_configure:

cmake_do_configure() {
%s
}

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (cmake_do_configure.rstrip(), extra_oecmake)

        elif do_configure:
            # Custom do_configure: show it verbatim
            configopttext = '''
The current implementation of do_configure:

do_configure() {
%s
}''' % do_configure.rstrip()
            if '${EXTRA_OECONF}' in do_configure_noexpand:
                configopttext += '''

Arguments specified through EXTRA_OECONF (which you can change or add to easily):

%s''' % extra_oeconf

        if packageconfig:
            configopttext += '''

Some of these options may be controlled through PACKAGECONFIG; for more details please see the recipe.'''

        # Arguments to pass to the configure tool itself (default: its help)
        if args.arg:
            helpargs = ' '.join(args.arg)
        elif cmake:
            helpargs = '-LH'
        else:
            helpargs = '--help'

        msg = '''configure information for %s
------------------------------------------
%s''' % (pn, configopttext)

        if cmake:
            msg += '''

The cmake %s output for %s follows. After "-- Cache values" you should see a list of variables you can add to EXTRA_OECMAKE (prefixed with -D and suffixed with = followed by the desired value, without any spaces).
------------------------------------------''' % (helpargs, pn)
        elif os.path.exists(configurescript):
            msg += '''

The ./configure %s output for %s follows.
------------------------------------------''' % (helpargs, pn)

        olddir = os.getcwd()
        tmppath = tempfile.mkdtemp()
        # Header goes into a temp file so it can be cat'ed ahead of the
        # configure/cmake output within a single piped shell command
        with tempfile.NamedTemporaryFile('w', delete=False) as tf:
            if not args.no_header:
                tf.write(msg + '\n')
            tf.close()
            try:
                try:
                    cmd = 'cat %s' % tf.name
                    if cmake:
                        cmd += '; cmake %s %s 2>&1' % (helpargs, s)
                        # cmake must run from the build directory
                        os.chdir(b)
                    elif os.path.exists(configurescript):
                        cmd += '; %s %s' % (configurescript, helpargs)
                    # Page the output only for interactive terminals
                    if sys.stdout.isatty() and not args.no_pager:
                        pager = os.environ.get('PAGER', 'less')
                        cmd = '(%s) | %s' % (cmd, pager)
                    subprocess.check_call(cmd, shell=True)
                except subprocess.CalledProcessError as e:
                    return e.returncode
            finally:
                # Restore cwd and clean up the temp dir/file in all cases
                os.chdir(olddir)
                shutil.rmtree(tmppath)
                os.remove(tf.name)
211
212
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    # edit-recipe
    p_edit = subparsers.add_parser('edit-recipe', help='Edit a recipe file',
                                   description='Runs the default editor (as specified by the EDITOR variable) on the specified recipe. Note that this will be quicker for recipes in the workspace as the cache does not need to be loaded in that case.',
                                   group='working')
    p_edit.add_argument('recipename', help='Recipe to edit')
    # FIXME drop -a at some point in future
    p_edit.add_argument('--any-recipe', '-a', action="store_true", help='Does nothing (exists for backwards-compatibility)')
    p_edit.set_defaults(func=edit_recipe)

    # find-recipe
    p_find = subparsers.add_parser('find-recipe', help='Find a recipe file',
                                   description='Finds a recipe file. Note that this will be quicker for recipes in the workspace as the cache does not need to be loaded in that case.',
                                   group='working')
    p_find.add_argument('recipename', help='Recipe to find')
    # FIXME drop -a at some point in future
    p_find.add_argument('--any-recipe', '-a', action="store_true", help='Does nothing (exists for backwards-compatibility)')
    p_find.set_defaults(func=find_recipe)

    # configure-help
    # NOTE: Needed to override the usage string here since the default
    # gets the order wrong - recipename must come before --arg
    p_confhelp = subparsers.add_parser('configure-help', help='Get help on configure script options',
                                       usage='devtool configure-help [options] recipename [--arg ...]',
                                       description='Displays the help for the configure script for the specified recipe (i.e. runs ./configure --help) prefaced by a header describing the current options being specified. Output is piped through less (or whatever PAGER is set to, if set) for easy browsing.',
                                       group='working')
    p_confhelp.add_argument('recipename', help='Recipe to show configure help for')
    p_confhelp.add_argument('-p', '--no-pager', help='Disable paged output', action="store_true")
    p_confhelp.add_argument('-n', '--no-header', help='Disable explanatory header text', action="store_true")
    p_confhelp.add_argument('--arg', help='Pass remaining arguments to the configure script instead of --help (useful if the script has additional help options)', nargs=argparse.REMAINDER)
    p_confhelp.set_defaults(func=configure_help)