summaryrefslogtreecommitdiffstats
path: root/scripts/lib/devtool
diff options
context:
space:
mode:
Diffstat (limited to 'scripts/lib/devtool')
-rw-r--r--scripts/lib/devtool/__init__.py404
-rw-r--r--scripts/lib/devtool/build.py92
-rw-r--r--scripts/lib/devtool/build_image.py164
-rw-r--r--scripts/lib/devtool/build_sdk.py48
-rw-r--r--scripts/lib/devtool/deploy.py378
-rw-r--r--scripts/lib/devtool/export.py109
-rw-r--r--scripts/lib/devtool/ide_plugins/__init__.py282
-rw-r--r--scripts/lib/devtool/ide_plugins/ide_code.py463
-rw-r--r--scripts/lib/devtool/ide_plugins/ide_none.py53
-rwxr-xr-xscripts/lib/devtool/ide_sdk.py1009
-rw-r--r--scripts/lib/devtool/import.py134
-rw-r--r--scripts/lib/devtool/menuconfig.py76
-rw-r--r--scripts/lib/devtool/package.py50
-rw-r--r--scripts/lib/devtool/runqemu.py64
-rw-r--r--scripts/lib/devtool/sdk.py330
-rw-r--r--scripts/lib/devtool/search.py109
-rw-r--r--scripts/lib/devtool/standard.py2396
-rw-r--r--scripts/lib/devtool/upgrade.py715
-rw-r--r--scripts/lib/devtool/utilcmds.py242
19 files changed, 0 insertions, 7118 deletions
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
deleted file mode 100644
index fa6e1a34fd..0000000000
--- a/scripts/lib/devtool/__init__.py
+++ /dev/null
@@ -1,404 +0,0 @@
1#!/usr/bin/env python3
2
3# Development tool - utility functions for plugins
4#
5# Copyright (C) 2014 Intel Corporation
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9"""Devtool plugins module"""
10
11import os
12import sys
13import subprocess
14import logging
15import re
16import codecs
17
18logger = logging.getLogger('devtool')
19
class DevtoolError(Exception):
    """Exception for handling devtool errors.

    Carries an exitcode (default 1) that the top-level driver can use
    as the process exit status.
    """
    def __init__(self, message, exitcode=1):
        super().__init__(message)
        self.exitcode = exitcode
25
26
def exec_build_env_command(init_path, builddir, cmd, watch=False, **options):
    """Run a program in bitbake build context.

    Arguments:
        init_path -- path to the OE init script; if set, it is sourced
                     (under bash) before running cmd
        builddir  -- build directory; default cwd and argument to the
                     init script
        cmd       -- command line to run (string)
        watch     -- if True, stream output live via exec_watch instead
                     of capturing it with bb.process.run
        options   -- extra keyword arguments for the underlying runner

    Returns whatever exec_watch or bb.process.run returns (output tuple).
    """
    import bb
    if not 'cwd' in options:
        options["cwd"] = builddir
    if init_path:
        # As the OE init script makes use of BASH_SOURCE to determine OEROOT,
        # and can't determine it when running under dash, we need to set
        # the executable to bash to correctly set things up
        if not 'executable' in options:
            options['executable'] = 'bash'
        logger.debug('Executing command: "%s" using init path %s' % (cmd, init_path))
        init_prefix = '. %s %s > /dev/null && ' % (init_path, builddir)
    else:
        logger.debug('Executing command "%s"' % cmd)
        init_prefix = ''
    if watch:
        if sys.stdout.isatty():
            # Fool bitbake into thinking it's outputting to a terminal (because it is, indirectly)
            cmd = 'script -e -q -c "%s" /dev/null' % cmd
        return exec_watch('%s%s' % (init_prefix, cmd), **options)
    else:
        return bb.process.run('%s%s' % (init_prefix, cmd), **options)
50
def exec_watch(cmd, **options):
    """Run a program with stdout echoed to sys.stdout as it is produced.

    Arguments:
        cmd     -- command to run; a string is executed through the shell
                   (shell=True is set automatically unless overridden)
        options -- extra keyword arguments for subprocess.Popen

    Returns a (buf, None) tuple where buf is the accumulated output.
    Raises bb.process.ExecutionError if the command exits non-zero.
    """
    if isinstance(cmd, str) and 'shell' not in options:
        options['shell'] = True

    process = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **options
    )

    # Decode incrementally so a multi-byte UTF-8 sequence split across
    # read boundaries does not raise
    reader = codecs.getreader('utf-8')(process.stdout)
    buf = ''
    while True:
        out = reader.read(1, 1)
        if out:
            sys.stdout.write(out)
            sys.stdout.flush()
            buf += out
        elif out == '' and process.poll() is not None:
            break

    if process.returncode != 0:
        # Deferred import: only the failure path needs bitbake's exception
        # type, so successful runs work without bitbake on sys.path
        import bb
        raise bb.process.ExecutionError(cmd, process.returncode, buf, None)

    return buf, None
76
def exec_fakeroot(d, cmd, **kwargs):
    """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions"""
    # Grab the pseudo command/environment from the datastore and check it actually exists
    fakerootcmd = d.getVar('FAKEROOTCMD')
    fakerootenv = d.getVar('FAKEROOTENV')
    # Fix: forward kwargs as keyword arguments (the dict was previously passed
    # as a fourth positional argument, raising TypeError) and propagate the
    # return code to the caller instead of discarding it
    return exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs)
83
def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs):
    """Run cmd under the given pseudo executable and environment.

    Arguments:
        fakerootcmd -- path to the pseudo executable (FAKEROOTCMD)
        fakerootenv -- space-separated VAR=value settings (FAKEROOTENV)
        cmd         -- command line appended after fakerootcmd
        kwargs      -- extra keyword arguments for subprocess.call

    Returns the subprocess return code, or 2 if fakerootcmd is missing.
    """
    if not os.path.exists(fakerootcmd):
        # Fix: the message contained a %s placeholder but the argument was
        # never supplied, so the literal '%s' appeared in the log
        logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built', fakerootcmd)
        return 2
    # Set up the appropriate environment
    newenv = dict(os.environ)
    for varvalue in fakerootenv.split():
        if '=' in varvalue:
            splitval = varvalue.split('=', 1)
            newenv[splitval[0]] = splitval[1]
    return subprocess.call("%s %s" % (fakerootcmd, cmd), env=newenv, **kwargs)
95
def setup_tinfoil(config_only=False, basepath=None, tracking=False):
    """Initialize tinfoil api from bitbake.

    Arguments:
        config_only -- only parse configuration, not recipes
        basepath    -- directory to chdir into while locating bitbake
                       (the original cwd is restored before returning)
        tracking    -- enable variable-history tracking in tinfoil

    Returns a prepared bb.tinfoil.Tinfoil instance; the caller is
    responsible for calling shutdown() on it.
    """
    import scriptpath
    orig_cwd = os.path.abspath(os.curdir)
    try:
        if basepath:
            os.chdir(basepath)
        bitbakepath = scriptpath.add_bitbake_lib_path()
        if not bitbakepath:
            logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
            sys.exit(1)

        # Import deferred until bitbake's lib dir is on sys.path (above)
        import bb.tinfoil
        tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
        try:
            tinfoil.logger.setLevel(logger.getEffectiveLevel())
            tinfoil.prepare(config_only)
        except bb.tinfoil.TinfoilUIException:
            tinfoil.shutdown()
            raise DevtoolError('Failed to start bitbake environment')
        except:
            # Any other failure: shut tinfoil down cleanly, then re-raise
            tinfoil.shutdown()
            raise
    finally:
        # Always restore the caller's working directory
        os.chdir(orig_cwd)
    return tinfoil
122
def parse_recipe(config, tinfoil, pn, appends, filter_workspace=True):
    """Parse the specified recipe.

    Arguments:
        config           -- devtool configuration (provides workspace_path)
        tinfoil          -- prepared tinfoil instance to query bitbake with
        pn               -- recipe name to parse
        appends          -- if True, apply bbappends when parsing
        filter_workspace -- drop bbappends that live inside the workspace

    Returns the parsed recipe datastore, or None on error (already logged).
    """
    try:
        recipefile = tinfoil.get_recipe_file(pn)
    except bb.providers.NoProvider as e:
        # NOTE(review): no module-level 'import bb' is visible in this file;
        # presumably 'bb' is importable once tinfoil is set up - confirm
        logger.error(str(e))
        return None
    if appends:
        append_files = tinfoil.get_file_appends(recipefile)
        if filter_workspace:
            # Filter out appends from the workspace
            append_files = [path for path in append_files if
                            not path.startswith(config.workspace_path)]
    else:
        append_files = None
    try:
        rd = tinfoil.parse_recipe_file(recipefile, appends, append_files)
    except Exception as e:
        logger.error(str(e))
        return None
    return rd
144
def check_workspace_recipe(workspace, pn, checksrc=True, bbclassextend=False):
    """
    Check that a recipe is in the workspace and (optionally) that source
    is present.

    Arguments:
        workspace     -- mapping of recipe name -> workspace entry dict
                         (entries carry at least 'recipefile' and 'srctree')
        pn            -- recipe (or BBCLASSEXTEND variant) name to look up
        checksrc      -- also verify the source tree exists and is non-empty
        bbclassextend -- also match variant targets (e.g. foo-native)
                         derived from each recipe's BBCLASSEXTEND

    Returns the matching workspace recipe name (the base recipe when pn
    was a variant). Raises DevtoolError when no match or source missing.
    """

    workspacepn = pn

    # for..else: the else clause runs only if no break fired, i.e. no match
    for recipe, value in workspace.items():
        if recipe == pn:
            break
        if bbclassextend:
            recipefile = value['recipefile']
            if recipefile:
                targets = get_bbclassextend_targets(recipefile, recipe)
                if pn in targets:
                    # pn is a BBCLASSEXTEND variant of this workspace recipe
                    workspacepn = recipe
                    break
    else:
        raise DevtoolError("No recipe named '%s' in your workspace" % pn)

    if checksrc:
        srctree = workspace[workspacepn]['srctree']
        if not os.path.exists(srctree):
            raise DevtoolError("Source tree %s for recipe %s does not exist" % (srctree, workspacepn))
        if not os.listdir(srctree):
            raise DevtoolError("Source tree %s for recipe %s is empty" % (srctree, workspacepn))

    return workspacepn
174
def use_external_build(same_dir, no_same_dir, d):
    """
    Determine if we should use B!=S (separate build and source directories) or not

    Arguments:
        same_dir    -- --same-dir passed: prefer B==S
        no_same_dir -- --no-same-dir passed: force B!=S (takes precedence)
        d           -- recipe datastore, consulted for the automatic choice

    Returns True when the source tree should be used as the build directory.
    """
    b_is_s = True
    if no_same_dir:
        logger.info('Using separate build directory since --no-same-dir specified')
        b_is_s = False
    elif same_dir:
        logger.info('Using source tree as build directory since --same-dir specified')
    elif bb.data.inherits_class('autotools-brokensep', d):
        # NOTE(review): relies on 'bb' being in scope at call time; no
        # module-level import is visible in this file - confirm
        logger.info('Using source tree as build directory since recipe inherits autotools-brokensep')
    elif os.path.abspath(d.getVar('B')) == os.path.abspath(d.getVar('S')):
        logger.info('Using source tree as build directory since that would be the default for this recipe')
    else:
        b_is_s = False
    return b_is_s
192
def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
    """
    Set up the git repository for the source tree

    Arguments:
        repodir   -- source tree directory (a repo is created if one does
                     not already exist)
        version   -- upstream version, used only in the initial commit message
        devbranch -- branch name to create and check out for development
        basetag   -- tag name marking the pristine upstream state
        d         -- optional datastore, used for git user options on commits
    """
    import bb.process
    import oe.patch
    if not os.path.exists(os.path.join(repodir, '.git')):
        bb.process.run('git init', cwd=repodir)
        bb.process.run('git config --local gc.autodetach 0', cwd=repodir)
        bb.process.run('git add -f -A .', cwd=repodir)
        commit_cmd = ['git']
        oe.patch.GitApplyTree.gitCommandUserOptions(commit_cmd, d=d)
        commit_cmd += ['commit', '-q']
        stdout, _ = bb.process.run('git status --porcelain', cwd=repodir)
        if not stdout:
            # Nothing staged at all - commit an empty placeholder
            commit_cmd.append('--allow-empty')
            commitmsg = "Initial empty commit with no upstream sources"
        elif version:
            commitmsg = "Initial commit from upstream at version %s" % version
        else:
            commitmsg = "Initial commit from upstream"
        commit_cmd += ['-m', commitmsg]
        bb.process.run(commit_cmd, cwd=repodir)

    # Ensure singletask.lock (as used by externalsrc.bbclass) is ignored by git
    gitinfodir = os.path.join(repodir, '.git', 'info')
    try:
        os.mkdir(gitinfodir)
    except FileExistsError:
        pass
    excludes = []
    excludefile = os.path.join(gitinfodir, 'exclude')
    try:
        with open(excludefile, 'r') as f:
            excludes = f.readlines()
    except FileNotFoundError:
        pass
    if 'singletask.lock\n' not in excludes:
        excludes.append('singletask.lock\n')
    with open(excludefile, 'w') as f:
        for line in excludes:
            f.write(line)

    bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
    bb.process.run('git tag -f --no-sign %s' % basetag, cwd=repodir)

    # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now,
    # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe
    stdout, _ = bb.process.run("git status --porcelain", cwd=repodir)
    found = False
    for line in stdout.splitlines():
        if line.endswith("/"):
            # Untracked directory - see if it is itself a git repo
            new_dir = line.split()[1]
            for root, dirs, files in os.walk(os.path.join(repodir, new_dir)):
                if ".git" in dirs + files:
                    (stdout, _) = bb.process.run('git remote', cwd=root)
                    remote = stdout.splitlines()[0]
                    (stdout, _) = bb.process.run('git remote get-url %s' % remote, cwd=root)
                    remote_url = stdout.splitlines()[0]
                    # NOTE(review): this logger.error looks like leftover debug
                    # output rather than an actual error - confirm
                    logger.error(os.path.relpath(os.path.join(root, ".."), root))
                    bb.process.run('git submodule add %s %s' % (remote_url, os.path.relpath(root, os.path.join(root, ".."))), cwd=os.path.join(root, ".."))
                    found = True
                if found:
                    oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d)
                    found = False
    if os.path.exists(os.path.join(repodir, '.gitmodules')):
        # Tag the base state in every submodule too
        bb.process.run('git submodule foreach --recursive "git tag -f --no-sign %s"' % basetag, cwd=repodir)
260
def recipe_to_append(recipefile, config, wildcard=False):
    """
    Convert a recipe file to a bbappend file path within the workspace.
    NOTE: if the bbappend already exists, you should be using
    workspace[args.recipename]['bbappend'] instead of calling this
    function.
    """
    stem = os.path.splitext(os.path.basename(recipefile))[0]
    if wildcard:
        # Replace the version suffix with a wildcard so the append
        # matches any version of the recipe
        stem = re.sub(r'_.*', '_%', stem)
    appenddir = os.path.join(config.workspace_path, 'appends')
    return os.path.join(appenddir, stem + '.bbappend')
274
def get_bbclassextend_targets(recipefile, pn):
    """
    Cheap function to get BBCLASSEXTEND and then convert that to the
    list of targets that would result.
    """
    import bb.utils

    captured = {}
    def _capture(varname, origvalue, op, newlines):
        # Record the value without modifying the metadata
        captured[varname] = origvalue
        return origvalue, None, 0, True

    with open(recipefile, 'r') as f:
        bb.utils.edit_metadata(f, ['BBCLASSEXTEND'], _capture)

    targets = []
    for variant in captured.get('BBCLASSEXTEND', '').split():
        if variant == 'nativesdk':
            # nativesdk prefixes the recipe name
            targets.append('%s-%s' % (variant, pn))
        elif variant in ('native', 'cross', 'crosssdk'):
            # these variants suffix the recipe name
            targets.append('%s-%s' % (pn, variant))
    return targets
298
def replace_from_file(path, old, new):
    """Replace all occurrences of old with new in the file at path.

    Arguments:
        path -- file to edit in place
        old  -- substring to replace; if None, the function is a no-op
        new  -- replacement substring

    A missing file is silently ignored; other I/O errors propagate.
    The rewritten file is right-stripped and ends with a single newline.
    """
    # In case old is None, return immediately
    if old is None:
        return
    try:
        with open(path) as f:
            data = f.read()
    except FileNotFoundError:
        # Fix: the original compared e.errno against errno.ENOENT but this
        # module never imports errno, so that path raised NameError
        return

    new_contents = [line.replace(old, new) for line in data.splitlines()]
    wdata = "\n".join(new_contents).rstrip() + "\n"
    with open(path, "w") as f:
        f.write(wdata)
335
336
def update_unlockedsigs(basepath, workspace, fixed_setup, extra=None):
    """ This function will make unlocked-sigs.inc match the recipes in the
    workspace plus any extras we want unlocked.

    Arguments:
        basepath    -- base directory; conf/unlocked-sigs.inc lives beneath it
        workspace   -- mapping of workspace recipe names
        fixed_setup -- True when running within the eSDK; no-op otherwise
        extra       -- optional list of additional recipe names to unlock
    """

    if not fixed_setup:
        # Only need to write this out within the eSDK
        return

    if not extra:
        extra = []

    confdir = os.path.join(basepath, 'conf')
    unlockedsigs = os.path.join(confdir, 'unlocked-sigs.inc')

    # Get current unlocked list if any
    values = {}
    def get_unlockedsigs_varfunc(varname, origvalue, op, newlines):
        # Capture the value; leave the file contents untouched
        values[varname] = origvalue
        return origvalue, None, 0, True
    if os.path.exists(unlockedsigs):
        with open(unlockedsigs, 'r') as f:
            # NOTE(review): relies on 'bb' being in scope at call time; no
            # module-level import is visible in this file - confirm
            bb.utils.edit_metadata(f, ['SIGGEN_UNLOCKED_RECIPES'], get_unlockedsigs_varfunc)
    unlocked = sorted(values.get('SIGGEN_UNLOCKED_RECIPES', []))

    # If the new list is different to the current list, write it out
    newunlocked = sorted(list(workspace.keys()) + extra)
    if unlocked != newunlocked:
        bb.utils.mkdirhier(confdir)
        with open(unlockedsigs, 'w') as f:
            f.write("# DO NOT MODIFY! YOUR CHANGES WILL BE LOST.\n" +
                    "# This layer was created by the OpenEmbedded devtool" +
                    " utility in order to\n" +
                    "# contain recipes that are unlocked.\n")

            f.write('SIGGEN_UNLOCKED_RECIPES += "\\\n')
            for pn in newunlocked:
                f.write(' ' + pn)
            f.write('"')
375
def check_prerelease_version(ver, operation):
    """Warn if ver looks like a pre-release version ('pre' or 'rc' in it).

    Arguments:
        ver       -- version string to inspect
        operation -- name of the devtool operation, quoted in the warning
    """
    if 'pre' in ver or 'rc' in ver:
        # Fix: corrected "recommmended" typo in the user-facing message
        logger.warning('Version "%s" looks like a pre-release version. '
                       'If that is the case, in order to ensure that the '
                       'version doesn\'t appear to go backwards when you '
                       'later upgrade to the final release version, it is '
                       'recommended that instead you use '
                       '<current version>+<pre-release version> e.g. if '
                       'upgrading from 1.9 to 2.0-rc2 use "1.9+2.0-rc2". '
                       'If you prefer not to reset and re-try, you can change '
                       'the version after %s succeeds using "devtool rename" '
                       'with -V/--version.' % (ver, operation))
388
def check_git_repo_dirty(repodir):
    """Check if a git repository is clean or not.

    Returns the 'git status --porcelain' output: an empty string means
    the tree is clean; anything else lists the dirty entries.
    """
    output, _ = bb.process.run('git status --porcelain', cwd=repodir)
    return output
393
def check_git_repo_op(srctree, ignoredirs=None):
    """Check if a git repository is in the middle of a rebase.

    Raises DevtoolError when the repository containing srctree has a
    rebase or am/apply in progress; repositories whose toplevel is in
    ignoredirs are skipped.
    """
    out, _ = bb.process.run('git rev-parse --show-toplevel', cwd=srctree)
    toplevel = out.strip()
    if ignoredirs and toplevel in ignoredirs:
        return
    gitdir = os.path.join(toplevel, '.git')
    # Each marker directory indicates a specific in-progress operation
    checks = (
        ('rebase-merge', "Source tree %s appears to be in the middle of a rebase - please resolve this first"),
        ('rebase-apply', "Source tree %s appears to be in the middle of 'git am' or 'git apply' - please resolve this first"),
    )
    for marker, message in checks:
        if os.path.exists(os.path.join(gitdir, marker)):
            raise DevtoolError(message % srctree)
diff --git a/scripts/lib/devtool/build.py b/scripts/lib/devtool/build.py
deleted file mode 100644
index 0b2c3d33dc..0000000000
--- a/scripts/lib/devtool/build.py
+++ /dev/null
@@ -1,92 +0,0 @@
1# Development tool - build command plugin
2#
3# Copyright (C) 2014-2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool build plugin"""
8
9import os
10import bb
11import logging
12import argparse
13import tempfile
14from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
15from devtool import parse_recipe
16
17logger = logging.getLogger('devtool')
18
19
def _set_file_values(fn, values):
    """Set variable assignments in file fn.

    values maps variable name -> new value; a value of None removes the
    assignment (bb.utils.edit_metadata drops the line when newvalue is
    None). Variables not already present in the file are appended.
    Returns True if the file was modified.
    """
    remaining = list(values.keys())

    def varfunc(varname, origvalue, op, newlines):
        newvalue = values.get(varname, origvalue)
        # NOTE(review): raises ValueError if the same variable is assigned
        # twice in the file (second remove() fails) - confirm that cannot
        # happen for the bbappends this is used on
        remaining.remove(varname)
        return (newvalue, '=', 0, True)

    with open(fn, 'r') as f:
        (updated, newlines) = bb.utils.edit_metadata(f, values, varfunc)

    # Append any requested variables not already present in the file
    for item in remaining:
        updated = True
        newlines.append('%s = "%s"' % (item, values[item]))

    if updated:
        with open(fn, 'w') as f:
            f.writelines(newlines)
    return updated
39
40def _get_build_tasks(config):
41 tasks = config.get('Build', 'build_task', 'populate_sysroot,packagedata').split(',')
42 return ['do_%s' % task.strip() for task in tasks]
43
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand.

    Builds the named workspace recipe with bitbake, optionally disabling
    make parallelism for the duration. Returns 0 on success, or the
    bitbake exit code / 1 on failure.
    """
    workspacepn = check_workspace_recipe(workspace, args.recipename, bbclassextend=True)
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            # Error already logged by parse_recipe
            return 1
        deploytask = 'do_deploy' in bb.build.listtasks(rd)
    finally:
        tinfoil.shutdown()

    if args.clean:
        # use clean instead of cleansstate to avoid messing things up in eSDK
        build_tasks = ['do_clean']
    else:
        build_tasks = _get_build_tasks(config)
        if deploytask:
            build_tasks.append('do_deploy')

    bbappend = workspace[workspacepn]['bbappend']
    if args.disable_parallel_make:
        logger.info("Disabling 'make' parallelism")
        _set_file_values(bbappend, {'PARALLEL_MAKE': ''})
    try:
        bbargs = []
        for task in build_tasks:
            # native recipes do not have packaging tasks
            if args.recipename.endswith('-native') and 'package' in task:
                continue
            bbargs.append('%s:%s' % (args.recipename, task))
        exec_build_env_command(config.init_path, basepath, 'bitbake %s' % ' '.join(bbargs), watch=True)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode
    finally:
        if args.disable_parallel_make:
            # Restore PARALLEL_MAKE (None removes the override line)
            _set_file_values(bbappend, {'PARALLEL_MAKE': None})

    return 0
83
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    # NOTE(review): 'group' and 'order' are not standard argparse keywords;
    # they come from devtool's argparse extension - confirm
    parser_build = subparsers.add_parser('build', help='Build a recipe',
                                         description='Builds the specified recipe using bitbake (up to and including %s)' % ', '.join(_get_build_tasks(context.config)),
                                         group='working', order=50)
    parser_build.add_argument('recipename', help='Recipe to build')
    parser_build.add_argument('-s', '--disable-parallel-make', action="store_true", help='Disable make parallelism')
    parser_build.add_argument('-c', '--clean', action='store_true', help='clean up recipe building results')
    parser_build.set_defaults(func=build)
diff --git a/scripts/lib/devtool/build_image.py b/scripts/lib/devtool/build_image.py
deleted file mode 100644
index 980f90ddd6..0000000000
--- a/scripts/lib/devtool/build_image.py
+++ /dev/null
@@ -1,164 +0,0 @@
1# Development tool - build-image plugin
2#
3# Copyright (C) 2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8"""Devtool plugin containing the build-image subcommand."""
9
10import os
11import errno
12import logging
13
14from bb.process import ExecutionError
15from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
16
17logger = logging.getLogger('devtool')
18
class TargetNotImageError(Exception):
    """Raised when the recipe given to build-image is not an image recipe."""
21
def _get_packages(tinfoil, workspace, config):
    """Get list of packages from recipes in the workspace.

    Only target recipes (OVERRIDES contains class-target) producing a
    package with the same name as the recipe are included; others are
    skipped with a warning.
    """
    result = []
    for recipe in workspace:
        data = parse_recipe(config, tinfoil, recipe, True)
        if 'class-target' in data.getVar('OVERRIDES').split(':'):
            if recipe in data.getVar('PACKAGES').split():
                result.append(recipe)
            else:
                logger.warning("Skipping recipe %s as it doesn't produce a "
                               "package with the same name", recipe)
    return result
34
def build_image(args, config, basepath, workspace):
    """Entry point for the devtool 'build-image' subcommand.

    Determines the image to build (from args.imagename or, failing that,
    the first configured SDK target) and delegates to build_image_task().
    Returns its result code.
    """

    image = args.imagename
    auto_image = False
    if not image:
        # Fall back to the first configured SDK target image
        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
        if sdk_targets:
            image = sdk_targets[0]
            auto_image = True
    if not image:
        raise DevtoolError('Unable to determine image to build, please specify one')

    try:
        if args.add_packages:
            # Comma-separated list on the command line
            add_packages = args.add_packages.split(',')
        else:
            add_packages = None
        result, outputdir = build_image_task(config, basepath, workspace, image, add_packages)
    except TargetNotImageError:
        if auto_image:
            raise DevtoolError('Unable to determine image to build, please specify one')
        else:
            raise DevtoolError('Specified recipe %s is not an image recipe' % image)

    if result == 0:
        logger.info('Successfully built %s. You can find output files in %s'
                    % (image, outputdir))
    return result
64
def build_image_task(config, basepath, workspace, image, add_packages=None, task=None, extra_append=None):
    """Build an image (or run the given task on it), optionally extended
    with packages from the workspace.

    Arguments:
        config       -- devtool configuration object
        basepath     -- base path of the build
        workspace    -- mapping of workspace recipes
        image        -- image recipe name to build
        add_packages -- explicit package list to add instead of deriving
                        one from the workspace
        task         -- optional task to run (e.g. populate_sdk_ext)
        extra_append -- extra lines to write into the temporary bbappend

    Returns a (result, outputdir) tuple; outputdir is None on failure.
    Raises TargetNotImageError if the recipe is not an image recipe.
    """
    # remove <image>.bbappend to make sure setup_tinfoil doesn't
    # break because of it
    target_basename = config.get('SDK', 'target_basename', '')
    if target_basename:
        appendfile = os.path.join(config.workspace_path, 'appends',
                                  '%s.bbappend' % target_basename)
        try:
            os.unlink(appendfile)
        except OSError as exc:
            # A missing append is fine; anything else is a real error
            if exc.errno != errno.ENOENT:
                raise

    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, image, True)
        if not rd:
            # Error already shown
            return (1, None)
        if not bb.data.inherits_class('image', rd):
            # NOTE(review): relies on 'bb' being in scope; only bb.process is
            # visibly imported in this file - confirm
            raise TargetNotImageError()

        # Get the actual filename used and strip the .bb and full path
        target_basename = rd.getVar('FILE')
        target_basename = os.path.splitext(os.path.basename(target_basename))[0]
        config.set('SDK', 'target_basename', target_basename)
        config.write()

        appendfile = os.path.join(config.workspace_path, 'appends',
                                  '%s.bbappend' % target_basename)

        outputdir = None
        try:
            if workspace or add_packages:
                if add_packages:
                    packages = add_packages
                else:
                    packages = _get_packages(tinfoil, workspace, config)
            else:
                packages = None
            if not task:
                if not packages and not add_packages and workspace:
                    logger.warning('No recipes in workspace, building image %s unmodified', image)
                elif not packages:
                    logger.warning('No packages to add, building image %s unmodified', image)

            if packages or extra_append:
                bb.utils.mkdirhier(os.path.dirname(appendfile))
                with open(appendfile, 'w') as afile:
                    if packages:
                        # include packages from workspace recipes into the image
                        afile.write('IMAGE_INSTALL:append = " %s"\n' % ' '.join(packages))
                        if not task:
                            logger.info('Building image %s with the following '
                                        'additional packages: %s', image, ' '.join(packages))
                    if extra_append:
                        for line in extra_append:
                            afile.write('%s\n' % line)

            if task in ['populate_sdk', 'populate_sdk_ext']:
                outputdir = rd.getVar('SDK_DEPLOY')
            else:
                outputdir = rd.getVar('DEPLOY_DIR_IMAGE')

            # Shut tinfoil down before invoking bitbake (it holds the lock);
            # clear the variable so the outer finally does not do it twice
            tmp_tinfoil = tinfoil
            tinfoil = None
            tmp_tinfoil.shutdown()

            options = ''
            if task:
                options += '-c %s' % task

            # run bitbake to build image (or specified task)
            try:
                exec_build_env_command(config.init_path, basepath,
                                       'bitbake %s %s' % (options, image), watch=True)
            except ExecutionError as err:
                return (err.exitcode, None)
        finally:
            # Always clean up the temporary bbappend
            if os.path.isfile(appendfile):
                os.unlink(appendfile)
    finally:
        if tinfoil:
            tinfoil.shutdown()
    return (0, outputdir)
150
151
def register_commands(subparsers, context):
    """Register devtool subcommands from the build-image plugin"""
    # NOTE(review): 'group' and 'order' are not standard argparse keywords;
    # they come from devtool's argparse extension - confirm
    parser = subparsers.add_parser('build-image',
                                   help='Build image including workspace recipe packages',
                                   description='Builds an image, extending it to include '
                                               'packages from recipes in the workspace',
                                   group='testbuild', order=-10)
    parser.add_argument('imagename', help='Image recipe to build', nargs='?')
    parser.add_argument('-p', '--add-packages', help='Instead of adding packages for the '
                        'entire workspace, specify packages to be added to the image '
                        '(separate multiple packages by commas)',
                        metavar='PACKAGES')
    parser.set_defaults(func=build_image)
diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py
deleted file mode 100644
index 990303982c..0000000000
--- a/scripts/lib/devtool/build_sdk.py
+++ /dev/null
@@ -1,48 +0,0 @@
1# Development tool - build-sdk command plugin
2#
3# Copyright (C) 2015-2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import logging
9from devtool import DevtoolError
10from devtool import build_image
11
12logger = logging.getLogger('devtool')
13
14
def build_sdk(args, config, basepath, workspace):
    """Entry point for the devtool build-sdk command.

    Builds a derivative extensible SDK from the first configured SDK
    target image. Returns the result code of the underlying build.
    """
    targets = config.get('SDK', 'sdk_targets', '').split()
    if not targets:
        raise DevtoolError('Unable to determine image to build SDK for')
    image = targets[0]

    try:
        # Mark the build as an SDK derivative via the temporary bbappend
        result, outputdir = build_image.build_image_task(
            config, basepath, workspace, image,
            task='populate_sdk_ext',
            extra_append=['SDK_DERIVATIVE = "1"'])
    except build_image.TargetNotImageError:
        raise DevtoolError('Unable to determine image to build SDK for')

    if result == 0:
        logger.info('Successfully built SDK. You can find output files in %s'
                    % outputdir)
    return result
38 return result
39
40
def register_commands(subparsers, context):
    """Register devtool subcommands"""
    # Building a derivative SDK only makes sense within a fixed (eSDK) setup
    if not context.fixed_setup:
        return
    parser_build_sdk = subparsers.add_parser('build-sdk',
                                             help='Build a derivative SDK of this one',
                                             description='Builds an extensible SDK based upon this one and the items in your workspace',
                                             group='advanced')
    parser_build_sdk.set_defaults(func=build_sdk)
diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py
deleted file mode 100644
index b5ca8f2c2f..0000000000
--- a/scripts/lib/devtool/deploy.py
+++ /dev/null
@@ -1,378 +0,0 @@
1# Development tool - deploy/undeploy command plugin
2#
3# Copyright (C) 2014-2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool plugin containing the deploy subcommands"""
8
9import logging
10import os
11import shutil
12import subprocess
13import tempfile
14
15import bb.utils
16import argparse_oe
17import oe.types
18
19from devtool import exec_fakeroot_no_d, setup_tinfoil, check_workspace_recipe, DevtoolError
20
21logger = logging.getLogger('devtool')
22
23deploylist_path = '/.devtool'
24
def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=False, nopreserve=False, nocheckspace=False):
    """
    Prepare a shell script for running on the target to
    deploy/undeploy files. We have to be careful what we put in this
    script - only commands that are likely to be available on the
    target are suitable (the target might be constrained, e.g. using
    busybox rather than bash with coreutils).

    Script positional parameters (see the callers deploy_no_d()/undeploy()):
        $1 -- recipe name
        $2 -- destination directory on the target (deploy mode)
        $3 -- file list path on the target (deploy mode)

    Arguments:
        deploy       -- True to generate a deploy script, False for undeploy
        verbose      -- show extracted files while deploying (tee the manifest)
        dryrun       -- only list what would be removed, do not touch files
        undeployall  -- loop over every recorded manifest instead of one recipe
        nopreserve   -- do not preserve pre-existing files before deploying
        nocheckspace -- skip the free-space check before deploying

    Returns the script text as a single newline-joined string.
    """
    lines = []
    lines.append('#!/bin/sh')
    lines.append('set -e')
    if undeployall:
        # Yes, I know this is crude - but it does work
        lines.append('for entry in %s/*.list; do' % deploylist_path)
        lines.append('[ ! -f $entry ] && exit')
        lines.append('set `basename $entry | sed "s/.list//"`')
    if dryrun:
        if not deploy:
            lines.append('echo "Previously deployed files for $1:"')
    lines.append('manifest="%s/$1.list"' % deploylist_path)
    lines.append('preservedir="%s/$1.preserve"' % deploylist_path)
    lines.append('if [ -f $manifest ] ; then')
    # Read manifest in reverse and delete files / remove empty dirs
    lines.append(' sed \'1!G;h;$!d\' $manifest | while read file')
    lines.append(' do')
    if dryrun:
        lines.append(' if [ ! -d $file ] ; then')
        lines.append(' echo $file')
        lines.append(' fi')
    else:
        lines.append(' if [ -d $file ] ; then')
        # Avoid deleting a preserved directory in case it has special perms
        lines.append(' if [ ! -d $preservedir/$file ] ; then')
        lines.append(' rmdir $file > /dev/null 2>&1 || true')
        lines.append(' fi')
        lines.append(' else')
        lines.append(' rm -f $file')
        lines.append(' fi')
    lines.append(' done')
    if not dryrun:
        lines.append(' rm $manifest')
    if not deploy and not dryrun:
        # May as well remove all traces
        lines.append(' rmdir `dirname $manifest` > /dev/null 2>&1 || true')
    lines.append('fi')

    if deploy:
        if not nocheckspace:
            # Check for available space
            # FIXME This doesn't take into account files spread across multiple
            # partitions, but doing that is non-trivial
            # Find the part of the destination path that exists
            lines.append('checkpath="$2"')
            lines.append('while [ "$checkpath" != "/" ] && [ ! -e $checkpath ]')
            lines.append('do')
            lines.append(' checkpath=`dirname "$checkpath"`')
            lines.append('done')
            lines.append(r'freespace=$(df -P $checkpath | sed -nre "s/^(\S+\s+){3}([0-9]+).*/\2/p")')
            # First line of the file is the total space
            lines.append('total=`head -n1 $3`')
            lines.append('if [ $total -gt $freespace ] ; then')
            lines.append(' echo "ERROR: insufficient space on target (available ${freespace}, needed ${total})"')
            lines.append(' exit 1')
            lines.append('fi')
        if not nopreserve:
            # Preserve any files that exist. Note that this will add to the
            # preserved list with successive deployments if the list of files
            # deployed changes, but because we've deleted any previously
            # deployed files at this point it will never preserve anything
            # that was deployed, only files that existed prior to any deploying
            # (which makes the most sense)
            lines.append('cat $3 | sed "1d" | while read file fsize')
            lines.append('do')
            lines.append(' if [ -e $file ] ; then')
            lines.append(' dest="$preservedir/$file"')
            lines.append(' mkdir -p `dirname $dest`')
            lines.append(' mv $file $dest')
            lines.append(' fi')
            lines.append('done')
        lines.append('rm $3')
        lines.append('mkdir -p `dirname $manifest`')
        lines.append('mkdir -p $2')
        # The deployed content arrives on stdin as a tar stream (see
        # deploy_no_d()); the manifest records every extracted path
        if verbose:
            lines.append(' tar xv -C $2 -f - | tee $manifest')
        else:
            lines.append(' tar xv -C $2 -f - > $manifest')
        lines.append('sed -i "s!^./!$2!" $manifest')
    elif not dryrun:
        # Put any preserved files back
        lines.append('if [ -d $preservedir ] ; then')
        lines.append(' cd $preservedir')
        # find from busybox might not have -exec, so we don't use that
        lines.append(' find . -type f | while read file')
        lines.append(' do')
        lines.append(' mv $file /$file')
        lines.append(' done')
        lines.append(' cd /')
        lines.append(' rm -rf $preservedir')
        lines.append('fi')

    if undeployall:
        if not dryrun:
            lines.append('echo "NOTE: Successfully undeployed $1"')
        lines.append('done')

    # Delete the script itself
    lines.append('rm $0')
    lines.append('')

    return '\n'.join(lines)
135
def deploy(args, config, basepath, workspace):
    """Entry point for the devtool 'deploy' subcommand"""
    import oe.utils

    check_workspace_recipe(workspace, args.recipename, checksrc=False)

    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        try:
            recipe_data = tinfoil.parse_recipe(args.recipename)
        except Exception as e:
            raise DevtoolError('Exception parsing recipe %s: %s' %
                               (args.recipename, e))

        # Capture everything needed from the datastore before shutting
        # tinfoil down, so the actual deployment can run without it
        srcdir, workdir, path, strip_cmd, libdir, base_libdir = (
            recipe_data.getVar(var) for var in
            ('D', 'WORKDIR', 'PATH', 'STRIP', 'libdir', 'base_libdir'))
        max_process = oe.utils.get_bb_number_threads(recipe_data)
        fakerootcmd = recipe_data.getVar('FAKEROOTCMD')
        fakerootenv = recipe_data.getVar('FAKEROOTENV')
    finally:
        tinfoil.shutdown()

    return deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir,
                       max_process, fakerootcmd, fakerootenv, args)
163
def deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args):
    """Deploy a recipe's install output to the target without a datastore.

    All values normally read from the recipe datastore are passed in
    explicitly (srcdir is ${D}, workdir is ${WORKDIR}, etc. - see deploy()),
    so this can run after tinfoil has been shut down.

    Returns 0 on success; raises DevtoolError on failure.
    """
    import math
    import oe.package

    # args.target is user@hostname[:destdir]; split off the optional destdir
    try:
        host, destdir = args.target.split(':')
    except ValueError:
        destdir = '/'
    else:
        args.target = host
    if not destdir.endswith('/'):
        destdir += '/'

    recipe_outdir = srcdir
    if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
        raise DevtoolError('No files to deploy - have you built the %s '
                           'recipe? If so, the install step has not installed '
                           'any files.' % args.recipename)

    if args.strip and not args.dry_run:
        # Fakeroot copy to new destination
        srcdir = recipe_outdir
        recipe_outdir = os.path.join(workdir, 'devtool-deploy-target-stripped')
        if os.path.isdir(recipe_outdir):
            exec_fakeroot_no_d(fakerootcmd, fakerootenv, "rm -rf %s" % recipe_outdir, shell=True)
        exec_fakeroot_no_d(fakerootcmd, fakerootenv, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
        os.environ['PATH'] = ':'.join([os.environ['PATH'], path or ''])
        oe.package.strip_execs(args.recipename, recipe_outdir, strip_cmd, libdir, base_libdir, max_process)

    # Build the list of (target path, size in kiB) pairs plus the total size,
    # which the remote script uses for its free-space check
    filelist = []
    inodes = set({})
    ftotalsize = 0
    for root, _, files in os.walk(recipe_outdir):
        for fn in files:
            fstat = os.lstat(os.path.join(root, fn))
            # Get the size in kiB (since we'll be comparing it to the output of du -k)
            # MUST use lstat() here not stat() or getfilesize() since we don't want to
            # dereference symlinks
            if fstat.st_ino in inodes:
                # Hardlink to an already-counted file - don't count it twice
                fsize = 0
            else:
                fsize = int(math.ceil(float(fstat.st_size)/1024))
                inodes.add(fstat.st_ino)
            ftotalsize += fsize
            # The path as it would appear on the target
            fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
            filelist.append((fpath, fsize))

    if args.dry_run:
        print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
        for item, _ in filelist:
            print(' %s' % item)
        return 0

    extraoptions = ''
    if args.no_host_check:
        extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
    if not args.show_status:
        extraoptions += ' -q'

    scp_sshexec = ''
    ssh_sshexec = 'ssh'
    if args.ssh_exec:
        scp_sshexec = "-S %s" % args.ssh_exec
        ssh_sshexec = args.ssh_exec
    scp_port = ''
    ssh_port = ''
    if args.port:
        scp_port = "-P %s" % args.port
        ssh_port = "-p %s" % args.port

    if args.key:
        extraoptions += ' -i %s' % args.key

    # In order to delete previously deployed files and have the manifest file on
    # the target, we write out a shell script and then copy it to the target
    # so we can then run it (piping tar output to it).
    # (We cannot use scp here, because it doesn't preserve symlinks.)
    tmpdir = tempfile.mkdtemp(prefix='devtool')
    try:
        tmpscript = '/tmp/devtool_deploy.sh'
        tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
        shellscript = _prepare_remote_script(deploy=True,
                                             verbose=args.show_status,
                                             nopreserve=args.no_preserve,
                                             nocheckspace=args.no_check_space)
        # Write out the script to a file
        with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
            f.write(shellscript)
        # Write out the file list
        with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
            # First line is the total size; remote script reads it with head -n1
            f.write('%d\n' % ftotalsize)
            for fpath, fsize in filelist:
                f.write('%s %d\n' % (fpath, fsize))
        # Copy them to the target
        ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
        if ret != 0:
            raise DevtoolError('Failed to copy script to %s - rerun with -s to '
                               'get a complete error message' % args.target)
    finally:
        shutil.rmtree(tmpdir)

    # Now run the script, piping the tar'ed up output of do_install to it
    # ($1=recipename, $2=destdir, $3=file list - see _prepare_remote_script)
    ret = exec_fakeroot_no_d(fakerootcmd, fakerootenv, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
    if ret != 0:
        raise DevtoolError('Deploy failed - rerun with -s to get a complete '
                           'error message')

    logger.info('Successfully deployed %s' % recipe_outdir)

    files_list = []
    for root, _, files in os.walk(recipe_outdir):
        for filename in files:
            filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
            files_list.append(os.path.join(destdir, filename))

    return 0
281
def undeploy(args, config, basepath, workspace):
    """Entry point for the devtool 'undeploy' subcommand"""
    # Either a single recipe or --all must be requested, never both
    if args.all and args.recipename:
        raise argparse_oe.ArgumentUsageError('Cannot specify -a/--all with a recipe name', 'undeploy-target')
    if not args.recipename and not args.all:
        raise argparse_oe.ArgumentUsageError('If you don\'t specify a recipe, you must specify -a/--all', 'undeploy-target')

    extraoptions = ''
    if args.no_host_check:
        extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
    if not args.show_status:
        extraoptions += ' -q'

    ssh_cmd = 'ssh'
    scp_exec_opt = ''
    if args.ssh_exec:
        ssh_cmd = args.ssh_exec
        scp_exec_opt = "-S %s" % args.ssh_exec
    scp_port_opt = ''
    ssh_port_opt = ''
    if args.port:
        scp_port_opt = "-P %s" % args.port
        ssh_port_opt = "-p %s" % args.port

    # Only the host part of user@hostname[:destdir] is relevant here
    args.target = args.target.split(':')[0]

    stagedir = tempfile.mkdtemp(prefix='devtool')
    try:
        tmpscript = '/tmp/devtool_undeploy.sh'
        shellscript = _prepare_remote_script(deploy=False, dryrun=args.dry_run, undeployall=args.all)
        # Stage the generated script locally, then copy it over
        with open(os.path.join(stagedir, os.path.basename(tmpscript)), 'w') as f:
            f.write(shellscript)
        if subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_exec_opt, scp_port_opt, extraoptions, stagedir, args.target, os.path.dirname(tmpscript)), shell=True) != 0:
            raise DevtoolError('Failed to copy script to %s - rerun with -s to '
                               'get a complete error message' % args.target)
    finally:
        shutil.rmtree(stagedir)

    # Execute the script on the target
    if subprocess.call('%s %s %s %s \'sh %s %s\'' % (ssh_cmd, ssh_port_opt, extraoptions, args.target, tmpscript, args.recipename), shell=True) != 0:
        raise DevtoolError('Undeploy failed - rerun with -s to get a complete '
                           'error message')

    if not args.all and not args.dry_run:
        logger.info('Successfully undeployed %s' % args.recipename)
    return 0
332
333
def register_commands(subparsers, context):
    """Register devtool subcommands from the deploy plugin"""

    def _add_status_opts(parser):
        # Options shared verbatim by deploy-target and undeploy-target
        parser.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
        parser.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')

    def _add_connection_opts(parser):
        # ssh connection options shared verbatim by both subcommands
        parser.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
        parser.add_argument('-P', '--port', help='Specify port to use for connection to the target')
        parser.add_argument('-I', '--key',
                            help='Specify ssh private key for connection to the target')

    parser_deploy = subparsers.add_parser('deploy-target',
                                          help='Deploy recipe output files to live target machine',
                                          description='Deploys a recipe\'s build output (i.e. the output of the do_install task) to a live target machine over ssh. By default, any existing files will be preserved instead of being overwritten and will be restored if you run devtool undeploy-target. Note: this only deploys the recipe itself and not any runtime dependencies, so it is assumed that those have been installed on the target beforehand.',
                                          group='testbuild')
    parser_deploy.add_argument('recipename', help='Recipe to deploy')
    parser_deploy.add_argument('target', help='Live target machine running an ssh server: user@hostname[:destdir]')
    _add_status_opts(parser_deploy)
    parser_deploy.add_argument('-n', '--dry-run', help='List files to be deployed only', action='store_true')
    parser_deploy.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
    parser_deploy.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true')
    _add_connection_opts(parser_deploy)

    strip_opts = parser_deploy.add_mutually_exclusive_group(required=False)
    strip_opts.add_argument('-S', '--strip',
                            help='Strip executables prior to deploying (default: %(default)s). '
                                 'The default value of this option can be controlled by setting the strip option in the [Deploy] section to True or False.',
                            default=oe.types.boolean(context.config.get('Deploy', 'strip', default='0')),
                            action='store_true')
    strip_opts.add_argument('--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')

    parser_deploy.set_defaults(func=deploy)

    parser_undeploy = subparsers.add_parser('undeploy-target',
                                            help='Undeploy recipe output files in live target machine',
                                            description='Un-deploys recipe output files previously deployed to a live target machine by devtool deploy-target.',
                                            group='testbuild')
    parser_undeploy.add_argument('recipename', help='Recipe to undeploy (if not using -a/--all)', nargs='?')
    parser_undeploy.add_argument('target', help='Live target machine running an ssh server: user@hostname')
    _add_status_opts(parser_undeploy)
    parser_undeploy.add_argument('-a', '--all', help='Undeploy all recipes deployed on the target', action='store_true')
    parser_undeploy.add_argument('-n', '--dry-run', help='List files to be undeployed only', action='store_true')
    _add_connection_opts(parser_undeploy)

    parser_undeploy.set_defaults(func=undeploy)
diff --git a/scripts/lib/devtool/export.py b/scripts/lib/devtool/export.py
deleted file mode 100644
index 01174edae5..0000000000
--- a/scripts/lib/devtool/export.py
+++ /dev/null
@@ -1,109 +0,0 @@
1# Development tool - export command plugin
2#
3# Copyright (C) 2014-2017 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool export plugin"""
8
9import os
10import argparse
11import tarfile
12import logging
13import datetime
14import json
15
16logger = logging.getLogger('devtool')
17
18# output files
19default_arcname_prefix = "workspace-export"
20metadata = '.export_metadata'
21
def export(args, config, basepath, workspace):
    """Entry point for the devtool 'export' subcommand

    Archives the workspace (or an included/excluded subset of its recipes)
    into a gzipped tarball together with a metadata file so the archive can
    be re-imported later. Returns 0 on success or nothing-to-do, 1 on error.
    """

    def add_metadata(tar):
        """Archive the workspace object"""
        # finally store the workspace metadata
        # NOTE: the metadata file is written (and removed) in the current
        # working directory so that the archive member has a bare name
        with open(metadata, 'w') as fd:
            fd.write(json.dumps((config.workspace_path, workspace)))
        tar.add(metadata)
        os.unlink(metadata)

    def add_recipe(tar, recipe, data):
        """Archive recipe with proper arcname"""
        # Create a map of name/arcnames
        arcnames = []
        for key, name in data.items():
            if name:
                if key == 'srctree':
                    # all sources, no matter where are located, goes into the sources directory
                    arcname = 'sources/%s' % recipe
                else:
                    # workspace-relative paths keep their layout in the archive
                    arcname = name.replace(config.workspace_path, '')
                arcnames.append((name, arcname))

        for name, arcname in arcnames:
            tar.add(name, arcname=arcname)

    # Make sure workspace is non-empty and possible listed include/excluded recipes are in workspace
    if not workspace:
        logger.info('Workspace contains no recipes, nothing to export')
        return 0
    for param, recipes in {'include': args.include, 'exclude': args.exclude}.items():
        for recipe in recipes:
            if recipe not in workspace:
                logger.error('Recipe (%s) on %s argument not in the current workspace' % (recipe, param))
                return 1

    name = args.file

    default_name = "%s-%s.tar.gz" % (default_arcname_prefix, datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
    if not name:
        name = default_name
    else:
        # if name is a directory, append the default name
        if os.path.isdir(name):
            name = os.path.join(name, default_name)

    if os.path.exists(name) and not args.overwrite:
        # BUGFIX: the original call never passed the archive name, so a
        # literal '%s' was logged; supply it as a lazy logging argument
        logger.error('Tar archive %s exists. Use --overwrite/-o to overwrite it', name)
        return 1

    # if all workspace is excluded, quit
    if not len(set(workspace.keys()).difference(set(args.exclude))):
        logger.warning('All recipes in workspace excluded, nothing to export')
        return 0

    exported = []
    with tarfile.open(name, 'w:gz') as tar:
        if args.include:
            for recipe in args.include:
                add_recipe(tar, recipe, workspace[recipe])
                exported.append(recipe)
        else:
            # Default: export everything except explicitly excluded recipes
            for recipe, data in workspace.items():
                if recipe not in args.exclude:
                    add_recipe(tar, recipe, data)
                    exported.append(recipe)

        add_metadata(tar)

    logger.info('Tar archive created at %s with the following recipes: %s' % (name, ', '.join(exported)))
    return 0
96
def register_commands(subparsers, context):
    """Register devtool export subcommands"""
    parser = subparsers.add_parser('export',
                                   help='Export workspace into a tar archive',
                                   description='Export one or more recipes from current workspace into a tar archive',
                                   group='advanced')

    parser.add_argument('--file', '-f', help='Output archive file name')
    parser.add_argument('--overwrite', '-o', action="store_true", help='Overwrite previous export tar archive')
    # --include and --exclude are mutually exclusive filters on the workspace
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--include', '-i', nargs='+', default=[], help='Include recipes into the tar archive')
    # Fixed help wording: recipes are excluded FROM the archive, not "into" it
    group.add_argument('--exclude', '-e', nargs='+', default=[], help='Exclude recipes from the tar archive')
    parser.set_defaults(func=export)
diff --git a/scripts/lib/devtool/ide_plugins/__init__.py b/scripts/lib/devtool/ide_plugins/__init__.py
deleted file mode 100644
index 19c2f61c5f..0000000000
--- a/scripts/lib/devtool/ide_plugins/__init__.py
+++ /dev/null
@@ -1,282 +0,0 @@
1#
2# Copyright (C) 2023-2024 Siemens AG
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Devtool ide-sdk IDE plugin interface definition and helper functions"""
7
8import errno
9import json
10import logging
11import os
12import stat
13from enum import Enum, auto
14from devtool import DevtoolError
15from bb.utils import mkdirhier
16
17logger = logging.getLogger('devtool')
18
19
class BuildTool(Enum):
    """Build system driving the recipe (as detected by ide-sdk)."""
    UNDEFINED = auto()
    CMAKE = auto()
    MESON = auto()

    @property
    def is_c_ccp(self):
        """True for build tools that drive C/C++ projects (CMake, Meson)."""
        return self in (BuildTool.CMAKE, BuildTool.MESON)
32
33
class GdbCrossConfig:
    """Base class defining the GDB configuration generator interface

    Generate a GDB configuration for a binary on the target device.
    Only one instance per binary is allowed. This allows to assign unique port
    numbers for all gdbserver instances.
    """
    # Class-wide counter: next TCP port handed out to a gdbserver instance
    _gdbserver_port_next = 1234
    # Class-wide registry of binaries a config was already generated for
    _binaries = []

    def __init__(self, image_recipe, modified_recipe, binary, gdbserver_multi=True):
        self.image_recipe = image_recipe
        self.modified_recipe = modified_recipe
        self.gdb_cross = modified_recipe.gdb_cross
        self.binary = binary
        # Enforce the one-instance-per-binary invariant documented above
        if binary in GdbCrossConfig._binaries:
            raise DevtoolError(
                "gdbserver config for binary %s is already generated" % binary)
        GdbCrossConfig._binaries.append(binary)
        self.script_dir = modified_recipe.ide_sdk_scripts_dir
        self.gdbinit_dir = os.path.join(self.script_dir, 'gdbinit')
        self.gdbserver_multi = gdbserver_multi
        # Filesystem-safe identifier derived from the binary path
        self.binary_pretty = self.binary.replace(os.sep, '-').lstrip('-')
        self.gdbserver_port = GdbCrossConfig._gdbserver_port_next
        GdbCrossConfig._gdbserver_port_next += 1
        self.id_pretty = "%d_%s" % (self.gdbserver_port, self.binary_pretty)
        # gdbserver start script
        gdbserver_script_file = 'gdbserver_' + self.id_pretty
        if self.gdbserver_multi:
            gdbserver_script_file += "_m"
        self.gdbserver_script = os.path.join(
            self.script_dir, gdbserver_script_file)
        # gdbinit file
        self.gdbinit = os.path.join(
            self.gdbinit_dir, 'gdbinit_' + self.id_pretty)
        # gdb start script
        self.gdb_script = os.path.join(
            self.script_dir, 'gdb_' + self.id_pretty)

    def _gen_gdbserver_start_script(self):
        """Generate a shell command starting the gdbserver on the remote device via ssh

        GDB supports two modes:
        multi: gdbserver remains running over several debug sessions
        once: gdbserver terminates after the debugged process terminates
        """
        cmd_lines = ['#!/bin/sh']
        if self.gdbserver_multi:
            # Track the running gdbserver via a pid file in a per-config
            # temp dir on the target ('\\$' so $ survives until the remote sh)
            temp_dir = "TEMP_DIR=/tmp/gdbserver_%s; " % self.id_pretty
            gdbserver_cmd_start = temp_dir
            gdbserver_cmd_start += "test -f \\$TEMP_DIR/pid && exit 0; "
            gdbserver_cmd_start += "mkdir -p \\$TEMP_DIR; "
            gdbserver_cmd_start += "%s --multi :%s > \\$TEMP_DIR/log 2>&1 & " % (
                self.gdb_cross.gdbserver_path, self.gdbserver_port)
            gdbserver_cmd_start += "echo \\$! > \\$TEMP_DIR/pid;"

            gdbserver_cmd_stop = temp_dir
            gdbserver_cmd_stop += "test -f \\$TEMP_DIR/pid && kill \\$(cat \\$TEMP_DIR/pid); "
            gdbserver_cmd_stop += "rm -rf \\$TEMP_DIR; "

            # Generated script accepts an optional 'stop' argument
            gdbserver_cmd_l = []
            gdbserver_cmd_l.append('if [ "$1" = "stop" ]; then')
            gdbserver_cmd_l.append(' shift')
            gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % (
                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_stop))
            gdbserver_cmd_l.append('else')
            gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % (
                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start))
            gdbserver_cmd_l.append('fi')
            gdbserver_cmd = os.linesep.join(gdbserver_cmd_l)
        else:
            # 'once' mode: gdbserver runs the binary directly and exits with it
            gdbserver_cmd_start = "%s --once :%s %s" % (
                self.gdb_cross.gdbserver_path, self.gdbserver_port, self.binary)
            gdbserver_cmd = "%s %s %s %s 'sh -c \"%s\"'" % (
                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start)
        cmd_lines.append(gdbserver_cmd)
        GdbCrossConfig.write_file(self.gdbserver_script, cmd_lines, True)

    def _gen_gdbinit_config(self):
        """Generate a gdbinit file for this binary and the corresponding gdbserver configuration"""
        gdbinit_lines = ['# This file is generated by devtool ide-sdk']
        if self.gdbserver_multi:
            target_help = '# gdbserver --multi :%d' % self.gdbserver_port
            remote_cmd = 'target extended-remote'
        else:
            target_help = '# gdbserver :%d %s' % (
                self.gdbserver_port, self.binary)
            remote_cmd = 'target remote'
        # Usage instructions embedded as comments in the generated gdbinit
        gdbinit_lines.append('# On the remote target:')
        gdbinit_lines.append(target_help)
        gdbinit_lines.append('# On the build machine:')
        gdbinit_lines.append('# cd ' + self.modified_recipe.real_srctree)
        gdbinit_lines.append(
            '# ' + self.gdb_cross.gdb + ' -ix ' + self.gdbinit)

        gdbinit_lines.append('set sysroot ' + self.modified_recipe.d)
        gdbinit_lines.append('set substitute-path "/usr/include" "' +
                             os.path.join(self.modified_recipe.recipe_sysroot, 'usr', 'include') + '"')
        # Disable debuginfod for now, the IDE configuration uses rootfs-dbg from the image workdir.
        gdbinit_lines.append('set debuginfod enabled off')
        if self.image_recipe.rootfs_dbg:
            gdbinit_lines.append(
                'set solib-search-path "' + self.modified_recipe.solib_search_path_str(self.image_recipe) + '"')
            # First: Search for sources of this recipe in the workspace folder
            if self.modified_recipe.pn in self.modified_recipe.target_dbgsrc_dir:
                gdbinit_lines.append('set substitute-path "%s" "%s"' %
                                     (self.modified_recipe.target_dbgsrc_dir, self.modified_recipe.real_srctree))
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must contain the recipe name PN.")
            # Second: Search for sources of other recipes in the rootfs-dbg
            if self.modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
                gdbinit_lines.append('set substitute-path "/usr/src/debug" "%s"' % os.path.join(
                    self.image_recipe.rootfs_dbg, "usr", "src", "debug"))
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
        else:
            logger.warning(
                "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
        gdbinit_lines.append(
            '%s %s:%d' % (remote_cmd, self.gdb_cross.host, self.gdbserver_port))
        gdbinit_lines.append('set remote exec-file ' + self.binary)
        gdbinit_lines.append(
            'run ' + os.path.join(self.modified_recipe.d, self.binary))

        GdbCrossConfig.write_file(self.gdbinit, gdbinit_lines)

    def _gen_gdb_start_script(self):
        """Generate a script starting GDB with the corresponding gdbinit configuration."""
        cmd_lines = ['#!/bin/sh']
        cmd_lines.append('cd ' + self.modified_recipe.real_srctree)
        cmd_lines.append(self.gdb_cross.gdb + ' -ix ' +
                         self.gdbinit + ' "$@"')
        GdbCrossConfig.write_file(self.gdb_script, cmd_lines, True)

    def initialize(self):
        # Generate all three artifacts: gdbserver script, gdbinit, gdb script
        self._gen_gdbserver_start_script()
        self._gen_gdbinit_config()
        self._gen_gdb_start_script()

    @staticmethod
    def write_file(script_file, cmd_lines, executable=False):
        """Write cmd_lines to script_file, optionally marking it executable."""
        script_dir = os.path.dirname(script_file)
        mkdirhier(script_dir)
        with open(script_file, 'w') as script_f:
            script_f.write(os.linesep.join(cmd_lines))
            script_f.write(os.linesep)
        if executable:
            st = os.stat(script_file)
            os.chmod(script_file, st.st_mode | stat.S_IEXEC)
        logger.info("Created: %s" % script_file)
186
187
class IdeBase:
    """Base class defining the interface for IDE plugins

    Concrete plugins override setup_shared_sysroots() and/or
    setup_modified_recipe(); the remaining helpers are shared.
    """

    def __init__(self):
        self.ide_name = 'undefined'
        # GdbCrossConfig instances created by initialize_gdb_cross_configs()
        self.gdb_cross_configs = []

    @classmethod
    def ide_plugin_priority(cls):
        """Used to find the default ide handler if --ide is not passed"""
        return 10

    def setup_shared_sysroots(self, shared_env):
        # logger.warn() is a deprecated alias; use warning()
        logger.warning("Shared sysroot mode is not supported for IDE %s" %
                       self.ide_name)

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        logger.warning("Modified recipe mode is not supported for IDE %s" %
                       self.ide_name)

    def initialize_gdb_cross_configs(self, image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfig):
        """Create and initialize one GDB config per installed binary"""
        binaries = modified_recipe.find_installed_binaries()
        for binary in binaries:
            gdb_cross_config = gdb_cross_config_class(
                image_recipe, modified_recipe, binary)
            gdb_cross_config.initialize()
            self.gdb_cross_configs.append(gdb_cross_config)

    @staticmethod
    def gen_oe_scrtips_sym_link(modified_recipe):
        # NOTE: method name misspells 'scripts' but is public API - keep it
        # create a sym-link from sources to the scripts directory
        if os.path.isdir(modified_recipe.ide_sdk_scripts_dir):
            IdeBase.symlink_force(modified_recipe.ide_sdk_scripts_dir,
                                  os.path.join(modified_recipe.real_srctree, 'oe-scripts'))

    @staticmethod
    def update_json_file(json_dir, json_file, update_dict):
        """Update a json file

        By default it uses the dict.update function. If this is not suitable
        the update function might be passed via update_func parameter.
        """
        json_path = os.path.join(json_dir, json_file)
        logger.info("Updating IDE config file: %s (%s)" %
                    (json_file, json_path))
        # exist_ok avoids the check-then-create race of the original code
        os.makedirs(json_dir, exist_ok=True)
        try:
            with open(json_path) as f:
                orig_dict = json.load(f)
        except json.decoder.JSONDecodeError:
            logger.info(
                "Decoding %s failed. Probably because of comments in the json file" % json_path)
            orig_dict = {}
        except FileNotFoundError:
            orig_dict = {}
        orig_dict.update(update_dict)
        with open(json_path, 'w') as f:
            json.dump(orig_dict, f, indent=4)

    @staticmethod
    def symlink_force(tgt, dst):
        """Create symlink dst -> tgt, replacing an existing stale link"""
        try:
            os.symlink(tgt, dst)
        except OSError as err:
            if err.errno == errno.EEXIST:
                # Recreate only if the existing link points elsewhere
                if os.readlink(dst) != tgt:
                    os.remove(dst)
                    os.symlink(tgt, dst)
            else:
                raise err
259
260
def get_devtool_deploy_opts(args):
    """Filter args for devtool deploy-target args

    Translates ide-sdk command line options into the equivalent
    'devtool deploy-target' option list, or None if no target was given.
    """
    if not args.target:
        return None
    devtool_deploy_opts = [args.target]
    if args.no_host_check:
        devtool_deploy_opts += ["-c"]
    if args.show_status:
        devtool_deploy_opts += ["-s"]
    if args.no_preserve:
        devtool_deploy_opts += ["-p"]
    if args.no_check_space:
        devtool_deploy_opts += ["--no-check-space"]
    if args.ssh_exec:
        # BUGFIX: was 'args.ssh.exec', which raised AttributeError whenever
        # -e/--ssh-exec was used; the attribute is args.ssh_exec
        devtool_deploy_opts += ["-e", args.ssh_exec]
    if args.port:
        devtool_deploy_opts += ["-P", args.port]
    if args.key:
        devtool_deploy_opts += ["-I", args.key]
    if args.strip is False:
        devtool_deploy_opts += ["--no-strip"]
    return devtool_deploy_opts
diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py
deleted file mode 100644
index a62b93224e..0000000000
--- a/scripts/lib/devtool/ide_plugins/ide_code.py
+++ /dev/null
@@ -1,463 +0,0 @@
1#
2# Copyright (C) 2023-2024 Siemens AG
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Devtool ide-sdk IDE plugin for VSCode and VSCodium"""
7
8import json
9import logging
10import os
11import shutil
12from devtool.ide_plugins import BuildTool, IdeBase, GdbCrossConfig, get_devtool_deploy_opts
13
14logger = logging.getLogger('devtool')
15
16
class GdbCrossConfigVSCode(GdbCrossConfig):
    """GDB cross-configuration variant used by the VSCode plugin.

    The base class is constructed with its fourth positional flag set to
    False, and initialize() only generates the gdbserver start script
    (presumably VSCode's debugger config replaces the gdbinit-based flow
    of the base class — confirm against GdbCrossConfig).
    """

    def __init__(self, image_recipe, modified_recipe, binary):
        super().__init__(image_recipe, modified_recipe, binary, False)

    def initialize(self):
        self._gen_gdbserver_start_script()
23
24
class IdeVSCode(IdeBase):
    """Manage IDE configurations for VSCode

    Modified recipe mode:
    - cmake: use the cmake-preset generated by devtool ide-sdk
    - meson: meson is called via a wrapper script generated by devtool ide-sdk

    Shared sysroot mode:
    In shared sysroot mode, the cross tool-chain is exported to the user's global configuration.
    A workspace cannot be created because there is no recipe that defines how a workspace could
    be set up.
    - cmake: adds a cmake-kit to .local/share/CMakeTools/cmake-tools-kits.json
      The cmake-kit uses the environment script and the tool-chain file
      generated by meta-ide-support.
    - meson: Meson needs manual workspace configuration.
    """

    @classmethod
    def ide_plugin_priority(cls):
        """If --ide is not passed this is the default plugin"""
        if shutil.which('code'):
            return 100
        return 0

    def setup_shared_sysroots(self, shared_env):
        """Expose the toolchain of the shared sysroots SDK

        Adds (or replaces) a cmake-kit entry in the user's global
        cmake-tools-kits.json, pointing at the environment-setup script and
        the OEToolchainConfig.cmake from the standalone native sysroot.
        """
        datadir = shared_env.ide_support.datadir
        deploy_dir_image = shared_env.ide_support.deploy_dir_image
        real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
        standalone_sysroot_native = shared_env.build_sysroots.standalone_sysroot_native
        vscode_ws_path = os.path.join(
            os.environ['HOME'], '.local', 'share', 'CMakeTools')
        cmake_kits_path = os.path.join(vscode_ws_path, 'cmake-tools-kits.json')
        oecmake_generator = "Ninja"
        env_script = os.path.join(
            deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)

        if not os.path.isdir(vscode_ws_path):
            os.makedirs(vscode_ws_path)
        cmake_kits_old = []
        if os.path.exists(cmake_kits_path):
            with open(cmake_kits_path, 'r', encoding='utf-8') as cmake_kits_file:
                cmake_kits_old = json.load(cmake_kits_file)
        cmake_kits = cmake_kits_old.copy()

        cmake_kit_new = {
            "name": "OE " + real_multimach_target_sys,
            "environmentSetupScript": env_script,
            "toolchainFile": standalone_sysroot_native + datadir + "/cmake/OEToolchainConfig.cmake",
            "preferredGenerator": {
                "name": oecmake_generator
            }
        }

        def merge_kit(cmake_kits, cmake_kit_new):
            # Replace an existing kit with the same environmentSetupScript
            # in place; append the new kit only if none matched.
            i = 0
            while i < len(cmake_kits):
                if 'environmentSetupScript' in cmake_kits[i] and \
                        cmake_kits[i]['environmentSetupScript'] == cmake_kit_new['environmentSetupScript']:
                    cmake_kits[i] = cmake_kit_new
                    return
                i += 1
            cmake_kits.append(cmake_kit_new)
        merge_kit(cmake_kits, cmake_kit_new)

        if cmake_kits != cmake_kits_old:
            logger.info("Updating: %s" % cmake_kits_path)
            with open(cmake_kits_path, 'w', encoding='utf-8') as cmake_kits_file:
                json.dump(cmake_kits, cmake_kits_file, indent=4)
        else:
            logger.info("Already up to date: %s" % cmake_kits_path)

        cmake_native = os.path.join(
            shared_env.build_sysroots.standalone_sysroot_native, 'usr', 'bin', 'cmake')
        if os.path.isfile(cmake_native):
            logger.info('cmake-kits call cmake by default. If the cmake provided by this SDK should be used, please add the following line to ".vscode/settings.json" file: "cmake.cmakePath": "%s"' % cmake_native)
        else:
            logger.error("Cannot find cmake native at: %s" % cmake_native)

    def dot_code_dir(self, modified_recipe):
        # All generated VSCode config files live in .vscode/ of the srctree.
        return os.path.join(modified_recipe.srctree, '.vscode')

    def __vscode_settings_meson(self, settings_dict, modified_recipe):
        """Add meson specific entries to settings.json (no-op otherwise)."""
        if modified_recipe.build_tool is not BuildTool.MESON:
            return
        settings_dict["mesonbuild.mesonPath"] = modified_recipe.meson_wrapper

        confopts = modified_recipe.mesonopts.split()
        confopts += modified_recipe.meson_cross_file.split()
        confopts += modified_recipe.extra_oemeson.split()
        settings_dict["mesonbuild.configureOptions"] = confopts
        settings_dict["mesonbuild.buildFolder"] = modified_recipe.b

    def __vscode_settings_cmake(self, settings_dict, modified_recipe):
        """Add cmake specific settings to settings.json.

        Note: most settings are passed to the cmake preset.
        """
        if modified_recipe.build_tool is not BuildTool.CMAKE:
            return
        settings_dict["cmake.configureOnOpen"] = True
        settings_dict["cmake.sourceDirectory"] = modified_recipe.real_srctree

    def vscode_settings(self, modified_recipe, image_recipe):
        """Generate .vscode/settings.json for the modified recipe.

        Excludes bitbake working directories from watching/search and marks
        the sysroots (and rootfs-dbg, if present) read-only in the editor.
        """
        files_excludes = {
            "**/.git/**": True,
            "**/oe-logs/**": True,
            "**/oe-workdir/**": True,
            "**/source-date-epoch/**": True
        }
        python_exclude = [
            "**/.git/**",
            "**/oe-logs/**",
            "**/oe-workdir/**",
            "**/source-date-epoch/**"
        ]
        files_readonly = {
            modified_recipe.recipe_sysroot + '/**': True,
            modified_recipe.recipe_sysroot_native + '/**': True,
        }
        if image_recipe.rootfs_dbg is not None:
            files_readonly[image_recipe.rootfs_dbg + '/**'] = True
        settings_dict = {
            "files.watcherExclude": files_excludes,
            "files.exclude": files_excludes,
            "files.readonlyInclude": files_readonly,
            "python.analysis.exclude": python_exclude
        }
        self.__vscode_settings_cmake(settings_dict, modified_recipe)
        self.__vscode_settings_meson(settings_dict, modified_recipe)

        settings_file = 'settings.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), settings_file, settings_dict)

    def __vscode_extensions_cmake(self, modified_recipe, recommendations):
        # Recommended marketplace extensions for the cmake workflow.
        if modified_recipe.build_tool is not BuildTool.CMAKE:
            return
        recommendations += [
            "twxs.cmake",
            "ms-vscode.cmake-tools",
            "ms-vscode.cpptools",
            "ms-vscode.cpptools-extension-pack",
            "ms-vscode.cpptools-themes"
        ]

    def __vscode_extensions_meson(self, modified_recipe, recommendations):
        # Recommended marketplace extensions for the meson workflow.
        if modified_recipe.build_tool is not BuildTool.MESON:
            return
        recommendations += [
            'mesonbuild.mesonbuild',
            "ms-vscode.cpptools",
            "ms-vscode.cpptools-extension-pack",
            "ms-vscode.cpptools-themes"
        ]

    def vscode_extensions(self, modified_recipe):
        """Generate .vscode/extensions.json with build-tool recommendations."""
        recommendations = []
        self.__vscode_extensions_cmake(modified_recipe, recommendations)
        self.__vscode_extensions_meson(modified_recipe, recommendations)
        extensions_file = 'extensions.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), extensions_file, {"recommendations": recommendations})

    def vscode_c_cpp_properties(self, modified_recipe):
        """Generate .vscode/c_cpp_properties.json for cpptools IntelliSense."""
        properties_dict = {
            "name": modified_recipe.recipe_id_pretty,
        }
        if modified_recipe.build_tool is BuildTool.CMAKE:
            properties_dict["configurationProvider"] = "ms-vscode.cmake-tools"
        elif modified_recipe.build_tool is BuildTool.MESON:
            properties_dict["configurationProvider"] = "mesonbuild.mesonbuild"
            # cpptools needs the cross compiler: first word of CXX resolved
            # in STAGING_BINDIR_TOOLCHAIN.
            properties_dict["compilerPath"] = os.path.join(modified_recipe.staging_bindir_toolchain, modified_recipe.cxx.split()[0])
        else:  # no C/C++ build
            return

        properties_dicts = {
            "configurations": [
                properties_dict
            ],
            "version": 4
        }
        prop_file = 'c_cpp_properties.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), prop_file, properties_dicts)

    def vscode_launch_bin_dbg(self, gdb_cross_config):
        """Build one cppdbg launch.json entry for a debuggable binary."""
        modified_recipe = gdb_cross_config.modified_recipe

        launch_config = {
            "name": gdb_cross_config.id_pretty,
            "type": "cppdbg",
            "request": "launch",
            "program": os.path.join(modified_recipe.d, gdb_cross_config.binary.lstrip('/')),
            "stopAtEntry": True,
            "cwd": "${workspaceFolder}",
            "environment": [],
            "externalConsole": False,
            "MIMode": "gdb",
            "preLaunchTask": gdb_cross_config.id_pretty,
            "miDebuggerPath": modified_recipe.gdb_cross.gdb,
            "miDebuggerServerAddress": "%s:%d" % (modified_recipe.gdb_cross.host, gdb_cross_config.gdbserver_port)
        }

        # Search for header files in recipe-sysroot.
        src_file_map = {
            "/usr/include": os.path.join(modified_recipe.recipe_sysroot, "usr", "include")
        }
        # First of all search for not stripped binaries in the image folder.
        # These binaries are copied (and optionally stripped) by deploy-target
        setup_commands = [
            {
                "description": "sysroot",
                "text": "set sysroot " + modified_recipe.d
            }
        ]

        if gdb_cross_config.image_recipe.rootfs_dbg:
            launch_config['additionalSOLibSearchPath'] = modified_recipe.solib_search_path_str(
                gdb_cross_config.image_recipe)
            # First: Search for sources of this recipe in the workspace folder
            if modified_recipe.pn in modified_recipe.target_dbgsrc_dir:
                src_file_map[modified_recipe.target_dbgsrc_dir] = "${workspaceFolder}"
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must contain the recipe name PN.")
            # Second: Search for sources of other recipes in the rootfs-dbg
            if modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
                src_file_map["/usr/src/debug"] = os.path.join(
                    gdb_cross_config.image_recipe.rootfs_dbg, "usr", "src", "debug")
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
        else:
            logger.warning(
                "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")

        launch_config['sourceFileMap'] = src_file_map
        launch_config['setupCommands'] = setup_commands
        return launch_config

    def vscode_launch(self, modified_recipe):
        """GDB Launch configuration for binaries (elf files)"""

        configurations = []
        for gdb_cross_config in self.gdb_cross_configs:
            if gdb_cross_config.modified_recipe is modified_recipe:
                configurations.append(self.vscode_launch_bin_dbg(gdb_cross_config))
        launch_dict = {
            "version": "0.2.0",
            "configurations": configurations
        }
        launch_file = 'launch.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), launch_file, launch_dict)

    def vscode_tasks_cpp(self, args, modified_recipe):
        """Generate tasks.json for C/C++ recipes.

        One install-and-deploy task, plus one background gdbserver task per
        GDB cross config that depends on it.
        """
        run_install_deploy = modified_recipe.gen_install_deploy_script(args)
        install_task_name = "install && deploy-target %s" % modified_recipe.recipe_id_pretty
        tasks_dict = {
            "version": "2.0.0",
            "tasks": [
                {
                    "label": install_task_name,
                    "type": "shell",
                    "command": run_install_deploy,
                    "problemMatcher": []
                }
            ]
        }
        for gdb_cross_config in self.gdb_cross_configs:
            if gdb_cross_config.modified_recipe is not modified_recipe:
                continue
            # The dummy problemMatcher/background patterns let VSCode treat
            # the long-running gdbserver script as a background task.
            tasks_dict['tasks'].append(
                {
                    "label": gdb_cross_config.id_pretty,
                    "type": "shell",
                    "isBackground": True,
                    "dependsOn": [
                        install_task_name
                    ],
                    "command": gdb_cross_config.gdbserver_script,
                    "problemMatcher": [
                        {
                            "pattern": [
                                {
                                    "regexp": ".",
                                    "file": 1,
                                    "location": 2,
                                    "message": 3
                                }
                            ],
                            "background": {
                                "activeOnStart": True,
                                "beginsPattern": ".",
                                "endsPattern": ".",
                            }
                        }
                    ]
                })
        tasks_file = 'tasks.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)

    def vscode_tasks_fallback(self, args, modified_recipe):
        """Generate tasks.json for non-C/C++ recipes.

        Tasks shell out to devtool build/deploy-target in a login shell that
        first sources oe-init-build-env.
        """
        oe_init_dir = modified_recipe.oe_init_dir
        oe_init = ". %s %s > /dev/null && " % (modified_recipe.oe_init_build_env, modified_recipe.topdir)
        dt_build = "devtool build "
        dt_build_label = dt_build + modified_recipe.recipe_id_pretty
        dt_build_cmd = dt_build + modified_recipe.bpn
        clean_opt = " --clean"
        dt_build_clean_label = dt_build + modified_recipe.recipe_id_pretty + clean_opt
        dt_build_clean_cmd = dt_build + modified_recipe.bpn + clean_opt
        dt_deploy = "devtool deploy-target "
        dt_deploy_label = dt_deploy + modified_recipe.recipe_id_pretty
        dt_deploy_cmd = dt_deploy + modified_recipe.bpn
        dt_build_deploy_label = "devtool build & deploy-target %s" % modified_recipe.recipe_id_pretty
        # NOTE(review): get_devtool_deploy_opts() returns None when
        # args.target is unset; this join assumes a target was provided —
        # confirm callers guarantee that.
        deploy_opts = ' '.join(get_devtool_deploy_opts(args))
        tasks_dict = {
            "version": "2.0.0",
            "tasks": [
                {
                    "label": dt_build_label,
                    "type": "shell",
                    "command": "bash",
                    "linux": {
                        "options": {
                            "cwd": oe_init_dir
                        }
                    },
                    "args": [
                        "--login",
                        "-c",
                        "%s%s" % (oe_init, dt_build_cmd)
                    ],
                    "problemMatcher": []
                },
                {
                    "label": dt_deploy_label,
                    "type": "shell",
                    "command": "bash",
                    "linux": {
                        "options": {
                            "cwd": oe_init_dir
                        }
                    },
                    "args": [
                        "--login",
                        "-c",
                        "%s%s %s" % (
                            oe_init, dt_deploy_cmd, deploy_opts)
                    ],
                    "problemMatcher": []
                },
                {
                    "label": dt_build_deploy_label,
                    "dependsOrder": "sequence",
                    "dependsOn": [
                        dt_build_label,
                        dt_deploy_label
                    ],
                    "problemMatcher": [],
                    "group": {
                        "kind": "build",
                        "isDefault": True
                    }
                },
                {
                    "label": dt_build_clean_label,
                    "type": "shell",
                    "command": "bash",
                    "linux": {
                        "options": {
                            "cwd": oe_init_dir
                        }
                    },
                    "args": [
                        "--login",
                        "-c",
                        "%s%s" % (oe_init, dt_build_clean_cmd)
                    ],
                    "problemMatcher": []
                }
            ]
        }
        if modified_recipe.gdb_cross:
            for gdb_cross_config in self.gdb_cross_configs:
                if gdb_cross_config.modified_recipe is not modified_recipe:
                    continue
                tasks_dict['tasks'].append(
                    {
                        "label": gdb_cross_config.id_pretty,
                        "type": "shell",
                        "isBackground": True,
                        "dependsOn": [
                            dt_build_deploy_label
                        ],
                        "command": gdb_cross_config.gdbserver_script,
                        "problemMatcher": [
                            {
                                "pattern": [
                                    {
                                        "regexp": ".",
                                        "file": 1,
                                        "location": 2,
                                        "message": 3
                                    }
                                ],
                                "background": {
                                    "activeOnStart": True,
                                    "beginsPattern": ".",
                                    "endsPattern": ".",
                                }
                            }
                        ]
                    })
        tasks_file = 'tasks.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)

    def vscode_tasks(self, args, modified_recipe):
        # Pick the task flavor based on whether the recipe builds C/C++.
        if modified_recipe.build_tool.is_c_ccp:
            self.vscode_tasks_cpp(args, modified_recipe)
        else:
            self.vscode_tasks_fallback(args, modified_recipe)

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        """Generate the full .vscode configuration for a modified recipe."""
        self.vscode_settings(modified_recipe, image_recipe)
        self.vscode_extensions(modified_recipe)
        self.vscode_c_cpp_properties(modified_recipe)
        if args.target:
            self.initialize_gdb_cross_configs(
                image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfigVSCode)
            self.vscode_launch(modified_recipe)
        self.vscode_tasks(args, modified_recipe)
460
461
def register_ide_plugin(ide_plugins):
    """Register the VSCode plugin under the 'code' key."""
    ide_plugins.update({'code': IdeVSCode})
diff --git a/scripts/lib/devtool/ide_plugins/ide_none.py b/scripts/lib/devtool/ide_plugins/ide_none.py
deleted file mode 100644
index f106c5a026..0000000000
--- a/scripts/lib/devtool/ide_plugins/ide_none.py
+++ /dev/null
@@ -1,53 +0,0 @@
1#
2# Copyright (C) 2023-2024 Siemens AG
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6"""Devtool ide-sdk generic IDE plugin"""
7
8import os
9import logging
10from devtool.ide_plugins import IdeBase, GdbCrossConfig
11
12logger = logging.getLogger('devtool')
13
14
class IdeNone(IdeBase):
    """Generate generic helper artifacts usable from any IDE.

    Modified recipe mode:
        helper scripts for remote debugging with GDB

    Shared sysroot mode:
        a wrapper for bitbake meta-ide-support and bitbake build-sysroots
    """

    def __init__(self):
        super().__init__()

    def setup_shared_sysroots(self, shared_env):
        """Point the user at the environment-setup script to source."""
        support = shared_env.ide_support
        env_script = os.path.join(
            support.deploy_dir_image,
            'environment-setup-' + support.real_multimach_target_sys)
        logger.info(
            "To use this SDK please source this: %s" % env_script)

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        """Generate helper scripts and config files for a modified recipe.

        - Execute the do_install task
        - Execute devtool deploy-target
        - Generate a gdbinit file per executable
        - Generate the oe-scripts sym-link
        """
        script_path = modified_recipe.gen_install_deploy_script(args)
        logger.info("Created: %s" % script_path)

        self.initialize_gdb_cross_configs(image_recipe, modified_recipe)

        IdeBase.gen_oe_scrtips_sym_link(modified_recipe)
50
51
def register_ide_plugin(ide_plugins):
    """Register the generic (no-IDE) plugin under the 'none' key."""
    ide_plugins.update({'none': IdeNone})
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py
deleted file mode 100755
index f8cf65f4a8..0000000000
--- a/scripts/lib/devtool/ide_sdk.py
+++ /dev/null
@@ -1,1009 +0,0 @@
1# Development tool - ide-sdk command plugin
2#
3# Copyright (C) 2023-2024 Siemens AG
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool ide-sdk plugin"""
8
9import json
10import logging
11import os
12import re
13import shutil
14import stat
15import subprocess
16import sys
17from argparse import RawTextHelpFormatter
18from enum import Enum
19
20import scriptutils
21import bb
22from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError, parse_recipe
23from devtool.standard import get_real_srctree
24from devtool.ide_plugins import BuildTool
25
26
logger = logging.getLogger('devtool')

# Registry of IDE plugin classes derived from IdeBase, keyed by plugin name
# (e.g. 'code', 'none'); filled by each plugin's register_ide_plugin().
ide_plugins = {}
31
32
class DevtoolIdeMode(Enum):
    """Different modes are supported by the ide-sdk plugin.

    The enum might be extended by more advanced modes in the future. Some ideas:
    - auto: modified if all recipes are modified, shared if none of the recipes is modified.
    - mixed: modified mode for modified recipes, shared mode for all other recipes.
    """

    # per-recipe workspace created by devtool modify
    modified = 'modified'
    # cross tool-chain exported via meta-ide-support / build-sysroots
    shared = 'shared'
43
44
class TargetDevice:
    """SSH remote login parameters parsed from the command-line args."""

    def __init__(self, args):
        # Extra ssh options: optional host-check suppression and identity file.
        extra = ''
        if args.no_host_check:
            extra += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
        if args.key:
            extra += ' -i %s' % args.key
        self.extraoptions = extra

        self.ssh_sshexec = args.ssh_exec if args.ssh_exec else 'ssh'
        self.ssh_port = "-p %s" % args.port if args.port else ''

        # Split an optional login name off the target ([user@]host).
        self.target = args.target
        login_host = args.target.split('@')
        if len(login_host) == 1:
            self.login = ""
            self.host = login_host[0]
        elif len(login_host) == 2:
            self.login, self.host = login_host
        else:
            logger.error("Invalid target argument: %s" % args.target)
71
72
class RecipeNative:
    """Base class for calling bitbake to provide a -native recipe."""

    def __init__(self, name, target_arch=None):
        self.name = name
        self.target_arch = target_arch
        self.bootstrap_tasks = [self.name + ':do_addto_recipe_sysroot']
        # Filled in by _initialize() once the recipe has been parsed.
        self.staging_bindir_native = None
        self.target_sys = None
        self.__native_bin = None

    def _initialize(self, config, workspace, tinfoil):
        """Parse the recipe and cache the variables derived classes need."""
        recipe_d = parse_recipe(
            config, tinfoil, self.name, appends=True, filter_workspace=False)
        if not recipe_d:
            raise DevtoolError("Parsing %s recipe failed" % self.name)
        self.staging_bindir_native = os.path.realpath(
            recipe_d.getVar('STAGING_BINDIR_NATIVE'))
        self.target_sys = recipe_d.getVar('TARGET_SYS')
        return recipe_d

    def initialize(self, config, workspace, tinfoil):
        """Basic initialization; derived classes may extend this."""
        self._initialize(config, workspace, tinfoil)

    @property
    def native_bin(self):
        if not self.__native_bin:
            raise DevtoolError("native binary name is not defined.")
        return self.__native_bin
104
105
class RecipeGdbCross(RecipeNative):
    """Handle gdb-cross on the host and the gdbserver on the target device."""

    def __init__(self, args, target_arch, target_device):
        super().__init__('gdb-cross-' + target_arch, target_arch)
        self.target_device = target_device
        self.gdb = None
        self.gdbserver_port_next = int(args.gdbserver_port_start)
        self.config_db = {}

    def __find_gdbserver(self, config, tinfoil):
        """Absolute path of the gdbserver (as installed on the target)."""
        gdb_d = parse_recipe(
            config, tinfoil, 'gdb', appends=True, filter_workspace=False)
        if not gdb_d:
            raise DevtoolError("Parsing gdb recipe failed")
        return os.path.join(gdb_d.getVar('bindir'), 'gdbserver')

    def initialize(self, config, workspace, tinfoil):
        super()._initialize(config, workspace, tinfoil)
        # Cross gdb lives at STAGING_BINDIR_NATIVE/<target_sys>/<target_sys>-gdb
        self.gdb = os.path.join(
            self.staging_bindir_native, self.target_sys, self.target_sys + '-gdb')
        self.gdbserver_path = self.__find_gdbserver(config, tinfoil)

    @property
    def host(self):
        return self.target_device.host
135
136
class RecipeImage:
    """Handle some image recipe related properties

    Most workflows require firmware that runs on the target device.
    This firmware must be consistent with the setup of the host system.
    In particular, the debug symbols must be compatible. For this, the
    rootfs must be created as part of the SDK.
    """

    def __init__(self, name):
        self.name = name
        self.combine_dbg_image = False
        self.gdbserver_missing = False
        self.rootfs = None
        self.__rootfs_dbg = None
        self.bootstrap_tasks = [self.name + ':do_build']

    def initialize(self, config, tinfoil):
        image_d = parse_recipe(
            config, tinfoil, self.name, appends=True, filter_workspace=False)
        if not image_d:
            raise DevtoolError(
                "Parsing image recipe %s failed" % self.name)

        self.combine_dbg_image = bb.data.inherits_class(
            'image-combined-dbg', image_d)

        workdir = image_d.getVar('WORKDIR')
        self.rootfs = os.path.join(workdir, 'rootfs')
        # rootfs-dbg only exists when the image generates a debug filesystem
        if image_d.getVar('IMAGE_GEN_DEBUGFS') == "1":
            self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg')

        has_gdbserver = 'gdbserver' in image_d.getVar('IMAGE_INSTALL')
        has_tools_debug = 'tools-debug' in image_d.getVar('IMAGE_FEATURES')
        self.gdbserver_missing = not has_gdbserver and not has_tools_debug

    @property
    def debug_support(self):
        return bool(self.rootfs_dbg)

    @property
    def rootfs_dbg(self):
        # Only report the debug rootfs once it actually exists on disk.
        if self.__rootfs_dbg and os.path.isdir(self.__rootfs_dbg):
            return self.__rootfs_dbg
        return None
181
182
class RecipeMetaIdeSupport:
    """For the shared sysroots mode meta-ide-support is needed

    For use cases where just a cross tool-chain is required but
    no recipe is used, devtool ide-sdk abstracts calling bitbake meta-ide-support
    and bitbake build-sysroots. This also allows to expose the cross-toolchains
    to IDEs. For example VSCode support different tool-chains with e.g. cmake-kits.
    """

    def __init__(self):
        self.bootstrap_tasks = ['meta-ide-support:do_build']
        # Filled in by initialize()
        self.topdir = None
        self.datadir = None
        self.deploy_dir_image = None
        self.build_sys = None
        # From toolchain-scripts
        self.real_multimach_target_sys = None

    def initialize(self, config, tinfoil):
        """Parse meta-ide-support and cache the variables used later."""
        mis_d = parse_recipe(
            config, tinfoil, 'meta-ide-support', appends=True, filter_workspace=False)
        if not mis_d:
            raise DevtoolError("Parsing meta-ide-support recipe failed")

        self.topdir = mis_d.getVar('TOPDIR')
        self.datadir = mis_d.getVar('datadir')
        self.deploy_dir_image = mis_d.getVar('DEPLOY_DIR_IMAGE')
        self.build_sys = mis_d.getVar('BUILD_SYS')
        self.real_multimach_target_sys = mis_d.getVar('REAL_MULTIMACH_TARGET_SYS')
214
215
class RecipeBuildSysroots:
    """For the shared sysroots mode build-sysroots is needed."""

    def __init__(self):
        # Filled in by initialize()
        self.standalone_sysroot = None
        self.standalone_sysroot_native = None
        self.bootstrap_tasks = [
            'build-sysroots:do_build_target_sysroot',
            'build-sysroots:do_build_native_sysroot'
        ]

    def initialize(self, config, tinfoil):
        """Parse build-sysroots and pick up the standalone sysroot paths."""
        bs_d = parse_recipe(
            config, tinfoil, 'build-sysroots', appends=True, filter_workspace=False)
        if not bs_d:
            raise DevtoolError("Parsing build-sysroots recipe failed")
        self.standalone_sysroot = bs_d.getVar('STANDALONE_SYSROOT')
        self.standalone_sysroot_native = bs_d.getVar('STANDALONE_SYSROOT_NATIVE')
236
237
class SharedSysrootsEnv:
    """Handle the shared sysroots based workflow

    Supports a tool-chain-only workflow without a recipe, basically:
        bitbake some-dependencies
        bitbake meta-ide-support
        bitbake build-sysroots
    and then using the environment-* file found in the deploy folder.
    """

    def __init__(self):
        self.ide_support = None
        self.build_sysroots = None

    def initialize(self, ide_support, build_sysroots):
        self.ide_support = ide_support
        self.build_sysroots = build_sysroots

    def setup_ide(self, ide):
        """Hand this environment over to the IDE plugin."""
        ide.setup(self)
259
260
class RecipeNotModified:
    """Handling of recipes added to the Direct SDK shared sysroots."""

    def __init__(self, name):
        self.name = name
        self.bootstrap_tasks = ['%s:do_populate_sysroot' % name]
267
268
269class RecipeModified:
270 """Handling of recipes in the workspace created by devtool modify"""
271 OE_INIT_BUILD_ENV = 'oe-init-build-env'
272
273 VALID_BASH_ENV_NAME_CHARS = re.compile(r"^[a-zA-Z0-9_]*$")
274
    def __init__(self, name):
        """Set static defaults; most fields are filled in by initialize()."""
        self.name = name
        self.bootstrap_tasks = [name + ':do_install']
        self.gdb_cross = None
        # workspace
        self.real_srctree = None
        self.srctree = None
        self.ide_sdk_dir = None
        self.ide_sdk_scripts_dir = None
        self.bbappend = None
        # recipe variables from d.getVar
        self.b = None
        self.base_libdir = None
        self.bblayers = None
        self.bpn = None
        self.d = None
        self.debug_build = None
        self.fakerootcmd = None
        self.fakerootenv = None
        self.libdir = None
        self.max_process = None
        self.package_arch = None
        self.package_debug_split_style = None
        self.path = None
        self.pn = None
        self.recipe_sysroot = None
        self.recipe_sysroot_native = None
        self.staging_incdir = None
        self.strip_cmd = None
        self.target_arch = None
        self.target_dbgsrc_dir = None
        self.topdir = None
        self.workdir = None
        self.recipe_id = None
        # replicate bitbake build environment
        self.exported_vars = None
        self.cmd_compile = None
        self.__oe_init_dir = None
        # main build tool used by this recipe
        self.build_tool = BuildTool.UNDEFINED
        # build_tool = cmake
        self.oecmake_generator = None
        self.cmake_cache_vars = None
        # build_tool = meson
        self.meson_buildtype = None
        self.meson_wrapper = None
        self.mesonopts = None
        self.extra_oemeson = None
        self.meson_cross_file = None
324
    def initialize(self, config, workspace, tinfoil):
        """Parse the recipe and cache all variables needed later.

        Also verifies the recipe is set up as externalsrc by devtool modify,
        (re-)creates the per-recipe ide-sdk directory in the workspace, and
        detects the build tool (cmake/meson) from the inherited classes.
        """
        recipe_d = parse_recipe(
            config, tinfoil, self.name, appends=True, filter_workspace=False)
        if not recipe_d:
            raise DevtoolError("Parsing %s recipe failed" % self.name)

        # Verify this recipe is built as externalsrc setup by devtool modify
        workspacepn = check_workspace_recipe(
            workspace, self.name, bbclassextend=True)
        self.srctree = workspace[workspacepn]['srctree']
        # Need to grab this here in case the source is within a subdirectory
        self.real_srctree = get_real_srctree(
            self.srctree, recipe_d.getVar('S'), recipe_d.getVar('WORKDIR'))
        self.bbappend = workspace[workspacepn]['bbappend']

        # Start from a clean per-recipe ide-sdk directory on every run.
        self.ide_sdk_dir = os.path.join(
            config.workspace_path, 'ide-sdk', self.name)
        if os.path.exists(self.ide_sdk_dir):
            shutil.rmtree(self.ide_sdk_dir)
        self.ide_sdk_scripts_dir = os.path.join(self.ide_sdk_dir, 'scripts')

        self.b = recipe_d.getVar('B')
        self.base_libdir = recipe_d.getVar('base_libdir')
        self.bblayers = recipe_d.getVar('BBLAYERS').split()
        self.bpn = recipe_d.getVar('BPN')
        self.cxx = recipe_d.getVar('CXX')
        self.d = recipe_d.getVar('D')
        self.debug_build = recipe_d.getVar('DEBUG_BUILD')
        self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD')
        self.fakerootenv = recipe_d.getVar('FAKEROOTENV')
        self.libdir = recipe_d.getVar('libdir')
        self.max_process = int(recipe_d.getVar(
            "BB_NUMBER_THREADS") or os.cpu_count() or 1)
        self.package_arch = recipe_d.getVar('PACKAGE_ARCH')
        self.package_debug_split_style = recipe_d.getVar(
            'PACKAGE_DEBUG_SPLIT_STYLE')
        self.path = recipe_d.getVar('PATH')
        self.pn = recipe_d.getVar('PN')
        self.recipe_sysroot = os.path.realpath(
            recipe_d.getVar('RECIPE_SYSROOT'))
        self.recipe_sysroot_native = os.path.realpath(
            recipe_d.getVar('RECIPE_SYSROOT_NATIVE'))
        self.staging_bindir_toolchain = os.path.realpath(
            recipe_d.getVar('STAGING_BINDIR_TOOLCHAIN'))
        self.staging_incdir = os.path.realpath(
            recipe_d.getVar('STAGING_INCDIR'))
        self.strip_cmd = recipe_d.getVar('STRIP')
        self.target_arch = recipe_d.getVar('TARGET_ARCH')
        self.target_dbgsrc_dir = recipe_d.getVar('TARGET_DBGSRC_DIR')
        self.topdir = recipe_d.getVar('TOPDIR')
        self.workdir = os.path.realpath(recipe_d.getVar('WORKDIR'))

        self.__init_exported_variables(recipe_d)

        # Detect the recipe's build tool from the inherited bbclasses.
        if bb.data.inherits_class('cmake', recipe_d):
            self.oecmake_generator = recipe_d.getVar('OECMAKE_GENERATOR')
            self.__init_cmake_preset_cache(recipe_d)
            self.build_tool = BuildTool.CMAKE
        elif bb.data.inherits_class('meson', recipe_d):
            self.meson_buildtype = recipe_d.getVar('MESON_BUILDTYPE')
            self.mesonopts = recipe_d.getVar('MESONOPTS')
            self.extra_oemeson = recipe_d.getVar('EXTRA_OEMESON')
            self.meson_cross_file = recipe_d.getVar('MESON_CROSS_FILE')
            self.build_tool = BuildTool.MESON

        # Recipe ID is the identifier for IDE config sections
        self.recipe_id = self.bpn + "-" + self.package_arch
        self.recipe_id_pretty = self.bpn + ": " + self.package_arch
393
394 @staticmethod
395 def is_valid_shell_variable(var):
396 """Skip strange shell variables like systemd
397
398 prevent from strange bugs because of strange variables which
399 are not used in this context but break various tools.
400 """
401 if RecipeModified.VALID_BASH_ENV_NAME_CHARS.match(var):
402 bb.debug(1, "ignoring variable: %s" % var)
403 return True
404 return False
405
406 def solib_search_path(self, image):
407 """Search for debug symbols in the rootfs and rootfs-dbg
408
409 The debug symbols of shared libraries which are provided by other packages
410 are grabbed from the -dbg packages in the rootfs-dbg.
411
412 But most cross debugging tools like gdb, perf, and systemtap need to find
413 executable/library first and through it debuglink note find corresponding
414 symbols file. Therefore the library paths from the rootfs are added as well.
415
416 Note: For the devtool modified recipe compiled from the IDE, the debug
417 symbols are taken from the unstripped binaries in the image folder.
418 Also, devtool deploy-target takes the files from the image folder.
419 debug symbols in the image folder refer to the corresponding source files
420 with absolute paths of the build machine. Debug symbols found in the
421 rootfs-dbg are relocated and contain paths which refer to the source files
422 installed on the target device e.g. /usr/src/...
423 """
424 base_libdir = self.base_libdir.lstrip('/')
425 libdir = self.libdir.lstrip('/')
426 so_paths = [
427 # debug symbols for package_debug_split_style: debug-with-srcpkg or .debug
428 os.path.join(image.rootfs_dbg, base_libdir, ".debug"),
429 os.path.join(image.rootfs_dbg, libdir, ".debug"),
430 # debug symbols for package_debug_split_style: debug-file-directory
431 os.path.join(image.rootfs_dbg, "usr", "lib", "debug"),
432
433 # The binaries are required as well, the debug packages are not enough
434 # With image-combined-dbg.bbclass the binaries are copied into rootfs-dbg
435 os.path.join(image.rootfs_dbg, base_libdir),
436 os.path.join(image.rootfs_dbg, libdir),
437 # Without image-combined-dbg.bbclass the binaries are only in rootfs.
438 # Note: Stepping into source files located in rootfs-dbg does not
439 # work without image-combined-dbg.bbclass yet.
440 os.path.join(image.rootfs, base_libdir),
441 os.path.join(image.rootfs, libdir)
442 ]
443 return so_paths
444
445 def solib_search_path_str(self, image):
446 """Return a : separated list of paths usable by GDB's set solib-search-path"""
447 return ':'.join(self.solib_search_path(image))
448
449 def __init_exported_variables(self, d):
450 """Find all variables with export flag set.
451
452 This allows to generate IDE configurations which compile with the same
453 environment as bitbake does. That's at least a reasonable default behavior.
454 """
455 exported_vars = {}
456
457 vars = (key for key in d.keys() if not key.startswith(
458 "__") and not d.getVarFlag(key, "func", False))
459 for var in sorted(vars):
460 func = d.getVarFlag(var, "func", False)
461 if d.getVarFlag(var, 'python', False) and func:
462 continue
463 export = d.getVarFlag(var, "export", False)
464 unexport = d.getVarFlag(var, "unexport", False)
465 if not export and not unexport and not func:
466 continue
467 if unexport:
468 continue
469
470 val = d.getVar(var)
471 if val is None:
472 continue
473 if set(var) & set("-.{}+"):
474 logger.warn(
475 "Warning: Found invalid character in variable name %s", str(var))
476 continue
477 varExpanded = d.expand(var)
478 val = str(val)
479
480 if not RecipeModified.is_valid_shell_variable(varExpanded):
481 continue
482
483 if func:
484 code_line = "line: {0}, file: {1}\n".format(
485 d.getVarFlag(var, "lineno", False),
486 d.getVarFlag(var, "filename", False))
487 val = val.rstrip('\n')
488 logger.warn("Warning: exported shell function %s() is not exported (%s)" %
489 (varExpanded, code_line))
490 continue
491
492 if export:
493 exported_vars[varExpanded] = val.strip()
494 continue
495
496 self.exported_vars = exported_vars
497
498 def __init_cmake_preset_cache(self, d):
499 """Get the arguments passed to cmake
500
501 Replicate the cmake configure arguments with all details to
502 share on build folder between bitbake and SDK.
503 """
504 site_file = os.path.join(self.workdir, 'site-file.cmake')
505 if os.path.exists(site_file):
506 print("Warning: site-file.cmake is not supported")
507
508 cache_vars = {}
509 oecmake_args = d.getVar('OECMAKE_ARGS').split()
510 extra_oecmake = d.getVar('EXTRA_OECMAKE').split()
511 for param in sorted(oecmake_args + extra_oecmake):
512 d_pref = "-D"
513 if param.startswith(d_pref):
514 param = param[len(d_pref):]
515 else:
516 print("Error: expected a -D")
517 param_s = param.split('=', 1)
518 param_nt = param_s[0].split(':', 1)
519
520 def handle_undefined_variable(var):
521 if var.startswith('${') and var.endswith('}'):
522 return ''
523 else:
524 return var
525 # Example: FOO=ON
526 if len(param_nt) == 1:
527 cache_vars[param_s[0]] = handle_undefined_variable(param_s[1])
528 # Example: FOO:PATH=/tmp
529 elif len(param_nt) == 2:
530 cache_vars[param_nt[0]] = {
531 "type": param_nt[1],
532 "value": handle_undefined_variable(param_s[1]),
533 }
534 else:
535 print("Error: cannot parse %s" % param)
536 self.cmake_cache_vars = cache_vars
537
538 def cmake_preset(self):
539 """Create a preset for cmake that mimics how bitbake calls cmake"""
540 toolchain_file = os.path.join(self.workdir, 'toolchain.cmake')
541 cmake_executable = os.path.join(
542 self.recipe_sysroot_native, 'usr', 'bin', 'cmake')
543 self.cmd_compile = cmake_executable + " --build --preset " + self.recipe_id
544
545 preset_dict_configure = {
546 "name": self.recipe_id,
547 "displayName": self.recipe_id_pretty,
548 "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
549 "binaryDir": self.b,
550 "generator": self.oecmake_generator,
551 "toolchainFile": toolchain_file,
552 "cacheVariables": self.cmake_cache_vars,
553 "environment": self.exported_vars,
554 "cmakeExecutable": cmake_executable
555 }
556
557 preset_dict_build = {
558 "name": self.recipe_id,
559 "displayName": self.recipe_id_pretty,
560 "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
561 "configurePreset": self.recipe_id,
562 "inheritConfigureEnvironment": True
563 }
564
565 preset_dict_test = {
566 "name": self.recipe_id,
567 "displayName": self.recipe_id_pretty,
568 "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
569 "configurePreset": self.recipe_id,
570 "inheritConfigureEnvironment": True
571 }
572
573 preset_dict = {
574 "version": 3, # cmake 3.21, backward compatible with kirkstone
575 "configurePresets": [preset_dict_configure],
576 "buildPresets": [preset_dict_build],
577 "testPresets": [preset_dict_test]
578 }
579
580 # Finally write the json file
581 json_file = 'CMakeUserPresets.json'
582 json_path = os.path.join(self.real_srctree, json_file)
583 logger.info("Updating CMake preset: %s (%s)" % (json_file, json_path))
584 if not os.path.exists(self.real_srctree):
585 os.makedirs(self.real_srctree)
586 try:
587 with open(json_path) as f:
588 orig_dict = json.load(f)
589 except json.decoder.JSONDecodeError:
590 logger.info(
591 "Decoding %s failed. Probably because of comments in the json file" % json_path)
592 orig_dict = {}
593 except FileNotFoundError:
594 orig_dict = {}
595
596 # Add or update the presets for the recipe and keep other presets
597 for k, v in preset_dict.items():
598 if isinstance(v, list):
599 update_preset = v[0]
600 preset_added = False
601 if k in orig_dict:
602 for index, orig_preset in enumerate(orig_dict[k]):
603 if 'name' in orig_preset:
604 if orig_preset['name'] == update_preset['name']:
605 logger.debug("Updating preset: %s" %
606 orig_preset['name'])
607 orig_dict[k][index] = update_preset
608 preset_added = True
609 break
610 else:
611 logger.debug("keeping preset: %s" %
612 orig_preset['name'])
613 else:
614 logger.warn("preset without a name found")
615 if not preset_added:
616 if not k in orig_dict:
617 orig_dict[k] = []
618 orig_dict[k].append(update_preset)
619 logger.debug("Added preset: %s" %
620 update_preset['name'])
621 else:
622 orig_dict[k] = v
623
624 with open(json_path, 'w') as f:
625 json.dump(orig_dict, f, indent=4)
626
627 def gen_meson_wrapper(self):
628 """Generate a wrapper script to call meson with the cross environment"""
629 bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
630 meson_wrapper = os.path.join(self.ide_sdk_scripts_dir, 'meson')
631 meson_real = os.path.join(
632 self.recipe_sysroot_native, 'usr', 'bin', 'meson.real')
633 with open(meson_wrapper, 'w') as mwrap:
634 mwrap.write("#!/bin/sh" + os.linesep)
635 for var, val in self.exported_vars.items():
636 mwrap.write('export %s="%s"' % (var, val) + os.linesep)
637 mwrap.write("unset CC CXX CPP LD AR NM STRIP" + os.linesep)
638 private_temp = os.path.join(self.b, "meson-private", "tmp")
639 mwrap.write('mkdir -p "%s"' % private_temp + os.linesep)
640 mwrap.write('export TMPDIR="%s"' % private_temp + os.linesep)
641 mwrap.write('exec "%s" "$@"' % meson_real + os.linesep)
642 st = os.stat(meson_wrapper)
643 os.chmod(meson_wrapper, st.st_mode | stat.S_IEXEC)
644 self.meson_wrapper = meson_wrapper
645 self.cmd_compile = meson_wrapper + " compile -C " + self.b
646
647 def which(self, executable):
648 bin_path = shutil.which(executable, path=self.path)
649 if not bin_path:
650 raise DevtoolError(
651 'Cannot find %s. Probably the recipe %s is not built yet.' % (executable, self.bpn))
652 return bin_path
653
654 @staticmethod
655 def is_elf_file(file_path):
656 with open(file_path, "rb") as f:
657 data = f.read(4)
658 if data == b'\x7fELF':
659 return True
660 return False
661
662 def find_installed_binaries(self):
663 """find all executable elf files in the image directory"""
664 binaries = []
665 d_len = len(self.d)
666 re_so = re.compile(r'.*\.so[.0-9]*$')
667 for root, _, files in os.walk(self.d, followlinks=False):
668 for file in files:
669 if os.path.islink(file):
670 continue
671 if re_so.match(file):
672 continue
673 abs_name = os.path.join(root, file)
674 if os.access(abs_name, os.X_OK) and RecipeModified.is_elf_file(abs_name):
675 binaries.append(abs_name[d_len:])
676 return sorted(binaries)
677
    def gen_deploy_target_script(self, args):
        """Generate a script which does what devtool deploy-target does

        This script is much quicker than devtool target-deploy. Because it
        does not need to start a bitbake server. All information from tinfoil
        is hard-coded in the generated script.

        :param args: parsed devtool command line arguments; only the keys in
            args_filter below are embedded into the generated script
        :return: path of the generated script (see write_script)
        """
        # The generated script re-runs under the same interpreter and replays
        # the current sys.path so the devtool modules can be imported.
        cmd_lines = ['#!%s' % str(sys.executable)]
        cmd_lines.append('import sys')
        cmd_lines.append('devtool_sys_path = %s' % str(sys.path))
        cmd_lines.append('devtool_sys_path.reverse()')
        cmd_lines.append('for p in devtool_sys_path:')
        cmd_lines.append('    if p not in sys.path:')
        cmd_lines.append('        sys.path.insert(0, p)')
        cmd_lines.append('from devtool.deploy import deploy_no_d')
        # Only the deploy-relevant arguments are baked into the script
        args_filter = ['debug', 'dry_run', 'key', 'no_check_space', 'no_host_check',
                       'no_preserve', 'port', 'show_status', 'ssh_exec', 'strip', 'target']
        filtered_args_dict = {key: value for key, value in vars(
            args).items() if key in args_filter}
        cmd_lines.append('filtered_args_dict = %s' % str(filtered_args_dict))
        # Minimal namespace object so deploy_no_d can read args attributes
        cmd_lines.append('class Dict2Class(object):')
        cmd_lines.append('    def __init__(self, my_dict):')
        cmd_lines.append('        for key in my_dict:')
        cmd_lines.append('            setattr(self, key, my_dict[key])')
        cmd_lines.append('filtered_args = Dict2Class(filtered_args_dict)')
        cmd_lines.append(
            'setattr(filtered_args, "recipename", "%s")' % self.bpn)
        # All tinfoil-derived values are hard-coded as string/int literals
        cmd_lines.append('deploy_no_d("%s", "%s", "%s", "%s", "%s", "%s", %d, "%s", "%s", filtered_args)' %
                         (self.d, self.workdir, self.path, self.strip_cmd,
                          self.libdir, self.base_libdir, self.max_process,
                          self.fakerootcmd, self.fakerootenv))
        return self.write_script(cmd_lines, 'deploy_target')
710
711 def gen_install_deploy_script(self, args):
712 """Generate a script which does install and deploy"""
713 cmd_lines = ['#!/bin/bash']
714
715 # . oe-init-build-env $BUILDDIR
716 # Note: Sourcing scripts with arguments requires bash
717 cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % (
718 self.oe_init_dir, self.oe_init_dir))
719 cmd_lines.append('. "%s" "%s" || { echo ". %s %s failed"; exit 1; }' % (
720 self.oe_init_build_env, self.topdir, self.oe_init_build_env, self.topdir))
721
722 # bitbake -c install
723 cmd_lines.append(
724 'bitbake %s -c install --force || { echo "bitbake %s -c install --force failed"; exit 1; }' % (self.bpn, self.bpn))
725
726 # Self contained devtool deploy-target
727 cmd_lines.append(self.gen_deploy_target_script(args))
728
729 return self.write_script(cmd_lines, 'install_and_deploy')
730
731 def write_script(self, cmd_lines, script_name):
732 bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
733 script_name_arch = script_name + '_' + self.recipe_id
734 script_file = os.path.join(self.ide_sdk_scripts_dir, script_name_arch)
735 with open(script_file, 'w') as script_f:
736 script_f.write(os.linesep.join(cmd_lines))
737 st = os.stat(script_file)
738 os.chmod(script_file, st.st_mode | stat.S_IEXEC)
739 return script_file
740
741 @property
742 def oe_init_build_env(self):
743 """Find the oe-init-build-env used for this setup"""
744 oe_init_dir = self.oe_init_dir
745 if oe_init_dir:
746 return os.path.join(oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
747 return None
748
    @property
    def oe_init_dir(self):
        """Find the directory where the oe-init-build-env is located

        Assumption: There might be a layer with higher priority than poky
        which provides to oe-init-build-env in the layer's toplevel folder.

        The result is cached in self.__oe_init_dir; the git lookups only run
        on the first access. Returns None (after logging an error) if no
        layer's git toplevel contains the script.
        """
        if not self.__oe_init_dir:
            # Iterate layers back to front so a later (higher priority) match
            # from self.bblayers wins first — TODO confirm ordering semantics
            for layer in reversed(self.bblayers):
                # The layer's repository toplevel is the candidate directory
                result = subprocess.run(
                    ['git', 'rev-parse', '--show-toplevel'], cwd=layer, capture_output=True)
                if result.returncode == 0:
                    oe_init_dir = result.stdout.decode('utf-8').strip()
                    oe_init_path = os.path.join(
                        oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
                    if os.path.exists(oe_init_path):
                        logger.debug("Using %s from: %s" % (
                            RecipeModified.OE_INIT_BUILD_ENV, oe_init_path))
                        self.__oe_init_dir = oe_init_dir
                        break
            if not self.__oe_init_dir:
                logger.error("Cannot find the bitbake top level folder")
        return self.__oe_init_dir
772
773
def ide_setup(args, config, basepath, workspace):
    """Generate the IDE configuration for the workspace.

    Depending on args.mode this either sets up a shared-sysroots SDK or
    per-recipe workspaces for devtool-modified recipes, runs the required
    bitbake tasks (unless --skip-bitbake) and hands the collected
    information to the selected IDE plugin.
    """

    # Explicitly passing some special recipes does not make sense
    for recipe in args.recipenames:
        if recipe in ['meta-ide-support', 'build-sysroots']:
            raise DevtoolError("Invalid recipe: %s." % recipe)

    # Collect information about tasks which need to be bitbaked
    bootstrap_tasks = []
    bootstrap_tasks_late = []
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        # define mode depending on recipes which need to be processed
        recipes_image_names = []
        recipes_modified_names = []
        recipes_other_names = []
        for recipe in args.recipenames:
            try:
                # Recipes in the workspace are the devtool-modified ones
                check_workspace_recipe(
                    workspace, recipe, bbclassextend=True)
                recipes_modified_names.append(recipe)
            except DevtoolError:
                recipe_d = parse_recipe(
                    config, tinfoil, recipe, appends=True, filter_workspace=False)
                if not recipe_d:
                    raise DevtoolError("Parsing recipe %s failed" % recipe)
                if bb.data.inherits_class('image', recipe_d):
                    recipes_image_names.append(recipe)
                else:
                    recipes_other_names.append(recipe)

        # Validate the combination of recipes against the selected mode
        invalid_params = False
        if args.mode == DevtoolIdeMode.shared:
            if len(recipes_modified_names):
                logger.error("In shared sysroots mode modified recipes %s cannot be handled." % str(
                    recipes_modified_names))
                invalid_params = True
        if args.mode == DevtoolIdeMode.modified:
            if len(recipes_other_names):
                logger.error("Only in shared sysroots mode not modified recipes %s can be handled." % str(
                    recipes_other_names))
                invalid_params = True
            if len(recipes_image_names) != 1:
                logger.error(
                    "One image recipe is required as the rootfs for the remote development.")
                invalid_params = True
            for modified_recipe_name in recipes_modified_names:
                if modified_recipe_name.startswith('nativesdk-') or modified_recipe_name.endswith('-native'):
                    logger.error(
                        "Only cross compiled recipes are support. %s is not cross." % modified_recipe_name)
                    invalid_params = True

        if invalid_params:
            raise DevtoolError("Invalid parameters are passed.")

        # For the shared sysroots mode, add all dependencies of all the images to the sysroots
        # For the modified mode provide one rootfs and the corresponding debug symbols via rootfs-dbg
        recipes_images = []
        for recipes_image_name in recipes_image_names:
            logger.info("Using image: %s" % recipes_image_name)
            recipe_image = RecipeImage(recipes_image_name)
            recipe_image.initialize(config, tinfoil)
            bootstrap_tasks += recipe_image.bootstrap_tasks
            recipes_images.append(recipe_image)

        # Provide a Direct SDK with shared sysroots
        recipes_not_modified = []
        if args.mode == DevtoolIdeMode.shared:
            ide_support = RecipeMetaIdeSupport()
            ide_support.initialize(config, tinfoil)
            bootstrap_tasks += ide_support.bootstrap_tasks

            logger.info("Adding %s to the Direct SDK sysroots." %
                        str(recipes_other_names))
            for recipe_name in recipes_other_names:
                recipe_not_modified = RecipeNotModified(recipe_name)
                bootstrap_tasks += recipe_not_modified.bootstrap_tasks
                recipes_not_modified.append(recipe_not_modified)

            # build-sysroots must run after all other tasks
            build_sysroots = RecipeBuildSysroots()
            build_sysroots.initialize(config, tinfoil)
            bootstrap_tasks_late += build_sysroots.bootstrap_tasks
            shared_env = SharedSysrootsEnv()
            shared_env.initialize(ide_support, build_sysroots)

        recipes_modified = []
        if args.mode == DevtoolIdeMode.modified:
            logger.info("Setting up workspaces for modified recipe: %s" %
                        str(recipes_modified_names))
            # One gdb-cross recipe per target architecture is sufficient
            gdbs_cross = {}
            for recipe_name in recipes_modified_names:
                recipe_modified = RecipeModified(recipe_name)
                recipe_modified.initialize(config, workspace, tinfoil)
                bootstrap_tasks += recipe_modified.bootstrap_tasks
                recipes_modified.append(recipe_modified)

                if recipe_modified.target_arch not in gdbs_cross:
                    target_device = TargetDevice(args)
                    gdb_cross = RecipeGdbCross(
                        args, recipe_modified.target_arch, target_device)
                    gdb_cross.initialize(config, workspace, tinfoil)
                    bootstrap_tasks += gdb_cross.bootstrap_tasks
                    gdbs_cross[recipe_modified.target_arch] = gdb_cross
                recipe_modified.gdb_cross = gdbs_cross[recipe_modified.target_arch]

    finally:
        tinfoil.shutdown()

    if not args.skip_bitbake:
        bb_cmd = 'bitbake '
        if args.bitbake_k:
            bb_cmd += "-k "
        bb_cmd_early = bb_cmd + ' '.join(bootstrap_tasks)
        exec_build_env_command(
            config.init_path, basepath, bb_cmd_early, watch=True)
        if bootstrap_tasks_late:
            bb_cmd_late = bb_cmd + ' '.join(bootstrap_tasks_late)
            exec_build_env_command(
                config.init_path, basepath, bb_cmd_late, watch=True)

    for recipe_image in recipes_images:
        if recipe_image.gdbserver_missing:
            logger.warning(
                "gdbserver not installed in image %s. Remote debugging will not be available" % recipe_image)

        if recipe_image.combine_dbg_image is False:
            logger.warning(
                'IMAGE_CLASSES += "image-combined-dbg" is missing for image %s. Remote debugging will not find debug symbols from rootfs-dbg.' % recipe_image)

    # Instantiate the active IDE plugin
    ide = ide_plugins[args.ide]()
    if args.mode == DevtoolIdeMode.shared:
        ide.setup_shared_sysroots(shared_env)
    elif args.mode == DevtoolIdeMode.modified:
        # Note: exactly one image recipe is enforced above, so recipe_image
        # still refers to that single image here.
        for recipe_modified in recipes_modified:
            if recipe_modified.build_tool is BuildTool.CMAKE:
                recipe_modified.cmake_preset()
            if recipe_modified.build_tool is BuildTool.MESON:
                recipe_modified.gen_meson_wrapper()
            ide.setup_modified_recipe(
                args, recipe_image, recipe_modified)

            if recipe_modified.debug_build != '1':
                # logger.warn is deprecated; use logger.warning
                logger.warning(
                    'Recipe %s is compiled with release build configuration. '
                    'You might want to add DEBUG_BUILD = "1" to %s. '
                    'Note that devtool modify --debug-build can do this automatically.',
                    recipe_modified.name, recipe_modified.bbappend)
    else:
        raise DevtoolError("Must not end up here.")
926
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin

    Adds the 'ide-sdk' subcommand and discovers/sorts the available IDE
    plugins. Does nothing in a fixed (eSDK installer) setup.
    """

    # The ide-sdk command bootstraps the SDK from the bitbake environment before the IDE
    # configuration is generated. In the case of the eSDK, the bootstrapping is performed
    # during the installation of the eSDK installer. Running the ide-sdk plugin from an
    # eSDK installer-based setup would require skipping the bootstrapping and probably
    # taking some other differences into account when generating the IDE configurations.
    # This would be possible. But it is not implemented.
    if context.fixed_setup:
        return

    # ide_plugins is shared with ide_setup, which instantiates the chosen plugin
    global ide_plugins

    # Search for IDE plugins in all sub-folders named ide_plugins where devtool searches for plugins.
    pluginpaths = [os.path.join(path, 'ide_plugins')
                   for path in context.pluginpaths]
    ide_plugin_modules = []
    for pluginpath in pluginpaths:
        scriptutils.load_plugins(logger, ide_plugin_modules, pluginpath)

    for ide_plugin_module in ide_plugin_modules:
        if hasattr(ide_plugin_module, 'register_ide_plugin'):
            ide_plugin_module.register_ide_plugin(ide_plugins)
    # Sort plugins according to their priority. The first entry is the default IDE plugin.
    ide_plugins = dict(sorted(ide_plugins.items(),
                              key=lambda p: p[1].ide_plugin_priority(), reverse=True))

    parser_ide_sdk = subparsers.add_parser('ide-sdk', group='working', order=50, formatter_class=RawTextHelpFormatter,
                                           help='Setup the SDK and configure the IDE')
    parser_ide_sdk.add_argument(
        'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n'
        'Depending on the --mode parameter different types of SDKs and IDE configurations are generated.')
    parser_ide_sdk.add_argument(
        '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified,
        help='Different SDK types are supported:\n'
        '- "' + DevtoolIdeMode.modified.name + '" (default):\n'
        '  devtool modify creates a workspace to work on the source code of a recipe.\n'
        '  devtool ide-sdk builds the SDK and generates the IDE configuration(s) in the workspace directorie(s)\n'
        '  Usage example:\n'
        '  devtool modify cmake-example\n'
        '  devtool ide-sdk cmake-example core-image-minimal\n'
        '  Start the IDE in the workspace folder\n'
        '  At least one devtool modified recipe plus one image recipe are required:\n'
        '  The image recipe is used to generate the target image and the remote debug configuration.\n'
        '- "' + DevtoolIdeMode.shared.name + '":\n'
        '  Usage example:\n'
        '  devtool ide-sdk -m ' + DevtoolIdeMode.shared.name + ' recipe(s)\n'
        '  This command generates a cross-toolchain as well as the corresponding shared sysroot directories.\n'
        '  To use this tool-chain the environment-* file found in the deploy..image folder needs to be sourced into a shell.\n'
        '  In case of VSCode and cmake the tool-chain is also exposed as a cmake-kit')
    # The highest-priority plugin (first after the sort above) is the default
    default_ide = list(ide_plugins.keys())[0]
    parser_ide_sdk.add_argument(
        '-i', '--ide', choices=ide_plugins.keys(), default=default_ide,
        help='Setup the configuration for this IDE (default: %s)' % default_ide)
    parser_ide_sdk.add_argument(
        '-t', '--target', default='root@192.168.7.2',
        help='Live target machine running an ssh server: user@hostname.')
    parser_ide_sdk.add_argument(
        '-G', '--gdbserver-port-start', default="1234", help='port where gdbserver is listening.')
    parser_ide_sdk.add_argument(
        '-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
    parser_ide_sdk.add_argument(
        '-e', '--ssh-exec', help='Executable to use in place of ssh')
    parser_ide_sdk.add_argument(
        '-P', '--port', help='Specify ssh port to use for connection to the target')
    parser_ide_sdk.add_argument(
        '-I', '--key', help='Specify ssh private key for connection to the target')
    parser_ide_sdk.add_argument(
        '--skip-bitbake', help='Generate IDE configuration but skip calling bitbake to update the SDK', action='store_true')
    parser_ide_sdk.add_argument(
        '-k', '--bitbake-k', help='Pass -k parameter to bitbake', action='store_true')
    parser_ide_sdk.add_argument(
        '--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')
    parser_ide_sdk.add_argument(
        '-n', '--dry-run', help='List files to be undeployed only', action='store_true')
    parser_ide_sdk.add_argument(
        '-s', '--show-status', help='Show progress/status output', action='store_true')
    parser_ide_sdk.add_argument(
        '-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
    parser_ide_sdk.add_argument(
        '--no-check-space', help='Do not check for available space before deploying', action='store_true')
    parser_ide_sdk.set_defaults(func=ide_setup)
diff --git a/scripts/lib/devtool/import.py b/scripts/lib/devtool/import.py
deleted file mode 100644
index 6829851669..0000000000
--- a/scripts/lib/devtool/import.py
+++ /dev/null
@@ -1,134 +0,0 @@
1# Development tool - import command plugin
2#
3# Copyright (C) 2014-2017 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool import plugin"""
8
9import os
10import tarfile
11import logging
12import collections
13import json
14import fnmatch
15
16from devtool import standard, setup_tinfoil, replace_from_file, DevtoolError
17from devtool import export
18
19logger = logging.getLogger('devtool')
20
def devimport(args, config, basepath, workspace):
    """Entry point for the devtool 'import' subcommand.

    Imports a tar archive previously created by 'devtool export' into the
    workspace, skipping appends whose base recipe is not available in the
    current metadata, and rewriting exported paths to the local workspace.
    """

    def get_pn(name):
        """ Returns the filename of a workspace recipe/append"""
        metadata = name.split('/')[-1]
        fn, _ = os.path.splitext(metadata)
        return fn

    if not os.path.exists(args.file):
        raise DevtoolError('Tar archive %s does not exist. Export your workspace using "devtool export"' % args.file)

    # NOTE(review): tar.extract() is used without the 'filter' argument; for
    # untrusted archives consider filter='data' (Python 3.12+, see PEP 706).
    with tarfile.open(args.file) as tar:
        # Get exported metadata
        export_workspace_path = export_workspace = None
        try:
            metadata = tar.getmember(export.metadata)
        except KeyError as ke:
            # Chain the original error so the cause is not lost
            raise DevtoolError('The export metadata file created by "devtool export" was not found. "devtool import" can only be used to import tar archives created by "devtool export".') from ke

        tar.extract(metadata)
        with open(metadata.name) as fdm:
            export_workspace_path, export_workspace = json.load(fdm)
        os.unlink(metadata.name)

        members = tar.getmembers()

        # Get appends and recipes from the exported archive, these
        # will be needed to find out those appends without corresponding
        # recipe pair
        append_fns, recipe_fns = set(), set()
        for member in members:
            if member.name.startswith('appends'):
                append_fns.add(get_pn(member.name))
            elif member.name.startswith('recipes'):
                recipe_fns.add(get_pn(member.name))

        # Setup tinfoil, get required data and shutdown
        tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
        try:
            current_fns = [os.path.basename(recipe[0]) for recipe in tinfoil.cooker.recipecaches[''].pkg_fn.items()]
        finally:
            tinfoil.shutdown()

        # Find those appends that do not have recipes in current metadata
        non_importables = []
        for fn in append_fns - recipe_fns:
            # Check on current metadata (covering those layers indicated in bblayers.conf)
            for current_fn in current_fns:
                if fnmatch.fnmatch(current_fn, '*' + fn.replace('%', '') + '*'):
                    break
            else:
                non_importables.append(fn)
                # Typo fix in the user-facing message: 'bbapppend' -> 'bbappend'
                logger.warning('No recipe to append %s.bbappend, skipping' % fn)

        # Extract
        imported = []
        for member in members:
            if member.name == export.metadata:
                continue

            for nonimp in non_importables:
                pn = nonimp.split('_')[0]
                # do not extract data from non-importable recipes or metadata
                if member.name.startswith('appends/%s' % nonimp) or \
                        member.name.startswith('recipes/%s' % nonimp) or \
                        member.name.startswith('sources/%s' % pn):
                    break
            else:
                path = os.path.join(config.workspace_path, member.name)
                if os.path.exists(path):
                    # by default, no file overwrite is done unless -o is given by the user
                    if args.overwrite:
                        try:
                            tar.extract(member, path=config.workspace_path)
                        except PermissionError as pe:
                            logger.warning(pe)
                    else:
                        logger.warning('File already present. Use --overwrite/-o to overwrite it: %s' % member.name)
                        continue
                else:
                    tar.extract(member, path=config.workspace_path)

                # Update EXTERNALSRC and the devtool md5 file
                if member.name.startswith('appends'):
                    if export_workspace_path:
                        # appends created by 'devtool modify' just need to update the workspace
                        replace_from_file(path, export_workspace_path, config.workspace_path)

                    # appends created by 'devtool add' need replacement of exported source tree
                    pn = get_pn(member.name).split('_')[0]
                    exported_srctree = export_workspace[pn]['srctree']
                    if exported_srctree:
                        replace_from_file(path, exported_srctree, os.path.join(config.workspace_path, 'sources', pn))

                    standard._add_md5(config, pn, path)
                    imported.append(pn)

    if imported:
        logger.info('Imported recipes into workspace %s: %s' % (config.workspace_path, ', '.join(imported)))
    else:
        logger.warning('No recipes imported into the workspace')

    return 0
125
def register_commands(subparsers, context):
    """Register devtool import subcommands"""
    import_parser = subparsers.add_parser(
        'import',
        help='Import exported tar archive into workspace',
        description='Import tar archive previously created by "devtool export" into workspace',
        group='advanced')
    import_parser.add_argument('file', metavar='FILE',
                               help='Name of the tar archive to import')
    import_parser.add_argument('--overwrite', '-o', action="store_true",
                               help='Overwrite files when extracting')
    import_parser.set_defaults(func=devimport)
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py
deleted file mode 100644
index 1054960551..0000000000
--- a/scripts/lib/devtool/menuconfig.py
+++ /dev/null
@@ -1,76 +0,0 @@
1# OpenEmbedded Development tool - menuconfig command plugin
2#
3# Copyright (C) 2018 Xilinx
4# Written by: Chandana Kalluri <ckalluri@xilinx.com>
5#
6# SPDX-License-Identifier: MIT
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21"""Devtool menuconfig plugin"""
22
23import os
24import bb
25import logging
26from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command
27from devtool import check_workspace_recipe
28logger = logging.getLogger('devtool')
29
def menuconfig(args, config, basepath, workspace):
    """Entry point for the devtool 'menuconfig' subcommand

    Runs 'bitbake -c menuconfig' for a recipe in the workspace and writes
    the resulting config changes as a fragment (devtool-fragment.cfg) into
    the recipe's oe-local-files directory.

    :return: 0 on success, 1 if the recipe could not be parsed
    """

    rd = ""
    pn_src = ""
    localfilesdir = ""
    workspace_dir = ""
    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.component, appends=True, filter_workspace=False)
        if not rd:
            return 1

        # The recipe must already be in the workspace (devtool modify)
        check_workspace_recipe(workspace, args.component)
        pn = rd.getVar('PN')

        # Only recipes providing a do_menuconfig task are supported
        if not rd.getVarFlag('do_menuconfig','task'):
            raise DevtoolError("This recipe does not support menuconfig option")

        workspace_dir = os.path.join(config.workspace_path,'sources')
        pn_src = os.path.join(workspace_dir,pn)

        # add check to see if oe_local_files exists or not
        localfilesdir = os.path.join(pn_src,'oe-local-files')
        if not os.path.exists(localfilesdir):
            bb.utils.mkdirhier(localfilesdir)
            # Add gitignore to ensure source tree is clean
            gitignorefile = os.path.join(localfilesdir,'.gitignore')
            with open(gitignorefile, 'w') as f:
                f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n')
                f.write('*\n')

    finally:
        tinfoil.shutdown()

    logger.info('Launching menuconfig')
    # Runs interactively in the bitbake build environment
    exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True)
    fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg')
    # Diff the .config against the original to produce the fragment
    standard._create_kconfig_diff(pn_src,rd,fragment)

    return 0
71
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin (menuconfig)"""
    parser_menuconfig = subparsers.add_parser('menuconfig',help='Alter build-time configuration for a recipe', description='Launches the make menuconfig command (for recipes where do_menuconfig is available), allowing users to make changes to the build-time configuration. Creates a config fragment corresponding to changes made.', group='advanced')
    # Typo fix in the user-facing help text: 'compenent' -> 'component'
    parser_menuconfig.add_argument('component', help='component to alter config')
    parser_menuconfig.set_defaults(func=menuconfig,fixed_setup=context.fixed_setup)
diff --git a/scripts/lib/devtool/package.py b/scripts/lib/devtool/package.py
deleted file mode 100644
index c2367342c3..0000000000
--- a/scripts/lib/devtool/package.py
+++ /dev/null
@@ -1,50 +0,0 @@
1# Development tool - package command plugin
2#
3# Copyright (C) 2014-2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool plugin containing the package subcommands"""
8
9import os
10import subprocess
11import logging
12from bb.process import ExecutionError
13from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
14
15logger = logging.getLogger('devtool')
16
def package(args, config, basepath, workspace):
    """Entry point for the devtool 'package' subcommand.

    Runs the appropriate package_write_* task for a workspace recipe and
    reports where the resulting packages were deployed.
    Returns 0 on success, or bitbake's exit code on failure.
    """
    check_workspace_recipe(workspace, args.recipename)

    tinfoil = setup_tinfoil(basepath=basepath, config_only=True)
    try:
        # An explicit devtool config setting wins; otherwise fall back to
        # the build's configured IMAGE_PKGTYPE (rpm/deb/ipk).
        image_pkgtype = config.get('Package', 'image_pkgtype', '')
        if not image_pkgtype:
            image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE')

        deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper())
    finally:
        tinfoil.shutdown()

    package_task = config.get('Package', 'package_task', 'package_write_%s' % image_pkgtype)
    try:
        exec_build_env_command(config.init_path, basepath, 'bitbake -c %s %s' % (package_task, args.recipename), watch=True)
    except ExecutionError as e:
        # BUG FIX: this module only does 'from bb.process import
        # ExecutionError', so the previous 'except bb.process.ExecutionError'
        # raised NameError whenever bitbake actually failed.
        # We've already seen the output since watch=True, so just ensure we
        # return something to the user.
        return e.exitcode

    logger.info('Your packages are in %s' % deploy_dir_pkg)

    return 0
41
def register_commands(subparsers, context):
    """Register devtool subcommands from the package plugin"""
    if not context.fixed_setup:
        # The 'package' command is only offered within the extensible SDK
        return
    pkg_parser = subparsers.add_parser('package',
                                       help='Build packages for a recipe',
                                       description='Builds packages for a recipe\'s output files',
                                       group='testbuild', order=-5)
    pkg_parser.add_argument('recipename', help='Recipe to package')
    pkg_parser.set_defaults(func=package)
diff --git a/scripts/lib/devtool/runqemu.py b/scripts/lib/devtool/runqemu.py
deleted file mode 100644
index ead978aabc..0000000000
--- a/scripts/lib/devtool/runqemu.py
+++ /dev/null
@@ -1,64 +0,0 @@
1# Development tool - runqemu command plugin
2#
3# Copyright (C) 2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8"""Devtool runqemu plugin"""
9
10import os
11import bb
12import logging
13import argparse
14import glob
15from devtool import exec_build_env_command, setup_tinfoil, DevtoolError
16
17logger = logging.getLogger('devtool')
18
def runqemu(args, config, basepath, workspace):
    """Entry point for the devtool 'runqemu' subcommand"""
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        cfg_data = tinfoil.config_data
        machine = cfg_data.getVar('MACHINE')
        bindir_native = os.path.join(
            cfg_data.getVar('STAGING_DIR'),
            cfg_data.getVar('BUILD_ARCH'),
            cfg_data.getVar('bindir_native').lstrip(os.path.sep))
    finally:
        tinfoil.shutdown()

    # Without a qemu-system-* binary in the native sysroot we cannot boot anything
    if not glob.glob(os.path.join(bindir_native, 'qemu-system-*')):
        raise DevtoolError('QEMU is not available within this SDK')

    imagename = args.imagename
    if not imagename:
        # Fall back to the first configured SDK target image, if any
        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
        if sdk_targets:
            imagename = sdk_targets[0]
    if not imagename:
        raise DevtoolError('Unable to determine image name to run, please specify one')

    try:
        # FIXME runqemu assumes that if OECORE_NATIVE_SYSROOT is set then it shouldn't
        # run bitbake to find out the values of various environment variables, which
        # isn't the case for the extensible SDK. Work around it for now.
        newenv = dict(os.environ)
        newenv.pop('OECORE_NATIVE_SYSROOT', '')
        command = 'runqemu %s %s %s' % (machine, imagename, " ".join(args.args))
        exec_build_env_command(config.init_path, basepath, command,
                               watch=True, env=newenv)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode

    return 0
54
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    if not context.fixed_setup:
        # 'runqemu' is only offered within the extensible SDK
        return
    qemu_parser = subparsers.add_parser('runqemu', help='Run QEMU on the specified image',
                                        description='Runs QEMU to boot the specified image',
                                        group='testbuild', order=-20)
    qemu_parser.add_argument('imagename', nargs='?',
                             help='Name of built image to boot within QEMU')
    qemu_parser.add_argument('args', nargs=argparse.REMAINDER,
                             help='Any remaining arguments are passed to the runqemu script (pass --help after imagename to see what these are)')
    qemu_parser.set_defaults(func=runqemu)
diff --git a/scripts/lib/devtool/sdk.py b/scripts/lib/devtool/sdk.py
deleted file mode 100644
index 9aefd7e354..0000000000
--- a/scripts/lib/devtool/sdk.py
+++ /dev/null
@@ -1,330 +0,0 @@
1# Development tool - sdk-update command plugin
2#
3# Copyright (C) 2015-2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import subprocess
10import logging
11import glob
12import shutil
13import errno
14import sys
15import tempfile
16import re
17from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
18
19logger = logging.getLogger('devtool')
20
def parse_locked_sigs(sigfile_path):
    """Return <pn:task>:<hash> dictionary"""
    sig_dict = {}
    with open(sigfile_path) as sigfile:
        # Each signature line looks like '<pn>:<task>:<hash> ...'; anything
        # without a colon (comments, blank lines) is ignored.
        for line in sigfile:
            if ':' not in line:
                continue
            taskkey, _, hashval = line.rpartition(':')
            sig_dict[taskkey.strip()] = hashval.split()[0]
    return sig_dict
31
def generate_update_dict(sigfile_new, sigfile_old):
    """Return a dict containing <pn:task>:<hash> which indicates what need to be updated"""
    sigdict_new = parse_locked_sigs(sigfile_new)
    sigdict_old = parse_locked_sigs(sigfile_old)
    # Keep entries that are absent from the old file or whose hash changed.
    # (Hash values are always non-empty strings, so .get() returning None
    # for a missing key correctly counts as "different".)
    return {taskkey: hashval
            for taskkey, hashval in sigdict_new.items()
            if sigdict_old.get(taskkey) != hashval}
45
def get_sstate_objects(update_dict, sstate_dir):
    """Return a list containing sstate objects which are to be installed"""
    sstate_objects = []
    for hashval in update_dict.values():
        prefix = hashval[:2]
        # sstate archives may live either directly under the two-character
        # hash prefix directory or one level deeper (arch subdirectory)
        candidates = set(glob.glob(sstate_dir + '/' + prefix + '/*' + hashval + '*.tgz'))
        candidates |= set(glob.glob(sstate_dir + '/*/' + prefix + '/*' + hashval + '*.tgz'))
        if len(candidates) == 1:
            sstate_objects.extend(candidates)
        elif len(candidates) > 1:
            logger.error("More than one matching sstate object found for %s" % hashval)

    return sstate_objects
63
def mkdir(d):
    """Create directory *d* (including parents), ignoring it already existing.

    Any other OSError (permissions etc.) propagates to the caller.
    """
    try:
        os.makedirs(d)
    except FileExistsError:
        # Same semantics as the old 'errno != errno.EEXIST' check -
        # FileExistsError is exactly the EEXIST subclass of OSError.
        pass
70
def install_sstate_objects(sstate_objects, src_sdk, dest_sdk):
    """Install sstate objects into destination SDK.

    Copies each sstate archive from the source SDK tree into the
    corresponding location under dest_sdk, creating directories as needed.
    Raises DevtoolError if dest_sdk does not look like an extensible SDK.
    """
    sstate_dir = os.path.join(dest_sdk, 'sstate-cache')
    if not os.path.exists(sstate_dir):
        # BUG FIX: the original code logged the error and then executed a
        # bare 'raise' outside of any except block, which just produces
        # 'RuntimeError: No active exception to re-raise'. Raise a proper
        # DevtoolError carrying the message instead.
        raise DevtoolError("Missing sstate-cache directory in %s, it might not be an extensible SDK." % dest_sdk)
    for sb in sstate_objects:
        # Mirror the object's path from the source SDK into the destination
        dst = sb.replace(src_sdk, dest_sdk)
        destdir = os.path.dirname(dst)
        mkdir(destdir)
        logger.debug("Copying %s to %s" % (sb, dst))
        shutil.copy(sb, dst)
83
def check_manifest(fn, basepath):
    """Return the manifest-relative paths of files whose current sha256
    checksum no longer matches the one recorded in manifest file *fn*."""
    import bb.utils
    changedfiles = []
    with open(fn, 'r') as manifest:
        # Each manifest line is '<sha256> <relative-path>'
        for line in manifest:
            fields = line.split()
            if len(fields) < 2:
                continue
            chksum = fields[0]
            fpath = fields[1]
            curr_chksum = bb.utils.sha256_file(os.path.join(basepath, fpath))
            if chksum != curr_chksum:
                logger.debug('File %s changed: old csum = %s, new = %s' % (os.path.join(basepath, fpath), curr_chksum, chksum))
                changedfiles.append(fpath)
    return changedfiles
98
def sdk_update(args, config, basepath, workspace):
    """Entry point for devtool sdk-update command.

    Fetches the SDK configuration manifest from the update server, updates
    the metadata (layers) via git, refreshes changed conf files and uninative
    tarballs, then optionally re-prepares the build system.
    Returns 0 on success / already up-to-date, non-zero on failure.
    """
    # BUG FIX: this module never imported 'bb', so the
    # 'except bb.process.ExecutionError' below raised NameError; import it
    # locally, matching the style used by sdk_install().
    import bb.process

    updateserver = args.updateserver
    if not updateserver:
        updateserver = config.get('SDK', 'updateserver', '')
    logger.debug("updateserver: %s" % updateserver)

    # Make sure we are using sdk-update from within SDK
    logger.debug("basepath = %s" % basepath)
    old_locked_sig_file_path = os.path.join(basepath, 'conf/locked-sigs.inc')
    if not os.path.exists(old_locked_sig_file_path):
        logger.error("Not using devtool's sdk-update command from within an extensible SDK. Please specify correct basepath via --basepath option")
        return -1
    else:
        logger.debug("Found conf/locked-sigs.inc in %s" % basepath)

    if not '://' in updateserver:
        logger.error("Update server must be a URL")
        return -1

    layers_dir = os.path.join(basepath, 'layers')
    conf_dir = os.path.join(basepath, 'conf')

    # Grab variable values
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR')
        sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS')
        site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION')
    finally:
        tinfoil.shutdown()

    tmpsdk_dir = tempfile.mkdtemp()
    try:
        os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
        new_locked_sig_file_path = os.path.join(tmpsdk_dir, 'conf', 'locked-sigs.inc')
        # Fetch manifest from server
        tmpmanifest = os.path.join(tmpsdk_dir, 'conf', 'sdk-conf-manifest')
        ret = subprocess.call("wget -q -O %s %s/conf/sdk-conf-manifest" % (tmpmanifest, updateserver), shell=True)
        if ret != 0:
            # Fixed typo in user-visible message ('dowload' -> 'download')
            logger.error("Cannot download files from %s" % updateserver)
            return ret
        changedfiles = check_manifest(tmpmanifest, basepath)
        if not changedfiles:
            logger.info("Already up-to-date")
            return 0
        # Update metadata
        logger.debug("Updating metadata via git ...")
        # Check for the status before doing a fetch and reset
        if os.path.exists(os.path.join(basepath, 'layers/.git')):
            out = subprocess.check_output("git status --porcelain", shell=True, cwd=layers_dir)
            if not out:
                ret = subprocess.call("git fetch --all; git reset --hard @{u}", shell=True, cwd=layers_dir)
            else:
                logger.error("Failed to update metadata as there have been changes made to it. Aborting.")
                logger.error("Changed files:\n%s" % out)
                return -1
        else:
            ret = -1
        if ret != 0:
            # No existing git checkout (or fetch/reset failed) - clone fresh
            ret = subprocess.call("git clone %s/layers/.git" % updateserver, shell=True, cwd=tmpsdk_dir)
            if ret != 0:
                logger.error("Updating metadata via git failed")
                return ret
        logger.debug("Updating conf files ...")
        for changedfile in changedfiles:
            ret = subprocess.call("wget -q -O %s %s/%s" % (changedfile, updateserver, changedfile), shell=True, cwd=tmpsdk_dir)
            if ret != 0:
                logger.error("Updating %s failed" % changedfile)
                return ret

        # Check if UNINATIVE_CHECKSUM changed
        uninative = False
        if 'conf/local.conf' in changedfiles:
            def read_uninative_checksums(fn):
                # Parse UNINATIVE_CHECKSUM[<arch>] = "<sum>" assignments
                chksumitems = []
                with open(fn, 'r') as f:
                    for line in f:
                        if line.startswith('UNINATIVE_CHECKSUM'):
                            splitline = re.split(r'[\[\]"\']', line)
                            if len(splitline) > 3:
                                chksumitems.append((splitline[1], splitline[3]))
                return chksumitems

            oldsums = read_uninative_checksums(os.path.join(basepath, 'conf/local.conf'))
            newsums = read_uninative_checksums(os.path.join(tmpsdk_dir, 'conf/local.conf'))
            if oldsums != newsums:
                uninative = True
                for buildarch, chksum in newsums:
                    uninative_file = os.path.join('downloads', 'uninative', chksum, '%s-nativesdk-libc.tar.bz2' % buildarch)
                    mkdir(os.path.join(tmpsdk_dir, os.path.dirname(uninative_file)))
                    ret = subprocess.call("wget -q -O %s %s/%s" % (uninative_file, updateserver, uninative_file), shell=True, cwd=tmpsdk_dir)

        # Ok, all is well at this point - move everything over
        tmplayers_dir = os.path.join(tmpsdk_dir, 'layers')
        if os.path.exists(tmplayers_dir):
            shutil.rmtree(layers_dir)
            shutil.move(tmplayers_dir, layers_dir)
        for changedfile in changedfiles:
            destfile = os.path.join(basepath, changedfile)
            os.remove(destfile)
            shutil.move(os.path.join(tmpsdk_dir, changedfile), destfile)
        os.remove(os.path.join(conf_dir, 'sdk-conf-manifest'))
        shutil.move(tmpmanifest, conf_dir)
        if uninative:
            shutil.rmtree(os.path.join(basepath, 'downloads', 'uninative'))
            shutil.move(os.path.join(tmpsdk_dir, 'downloads', 'uninative'), os.path.join(basepath, 'downloads'))

        if not sstate_mirrors:
            with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
                f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
                f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH"\n' % updateserver)
    finally:
        shutil.rmtree(tmpsdk_dir)

    if not args.skip_prepare:
        # Find all potentially updateable tasks
        sdk_update_targets = []
        tasks = ['do_populate_sysroot', 'do_packagedata']
        for root, _, files in os.walk(stamps_dir):
            for fn in files:
                if not '.sigdata.' in fn:
                    for task in tasks:
                        if '.%s.' % task in fn or '.%s_setscene.' % task in fn:
                            sdk_update_targets.append('%s:%s' % (os.path.basename(root), task))
        # Run bitbake command for the whole SDK
        logger.info("Preparing build system... (This may take some time.)")
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake --setscene-only %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            output, _ = exec_build_env_command(config.init_path, basepath, 'bitbake -n %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            runlines = []
            for line in output.splitlines():
                if 'Running task ' in line:
                    runlines.append(line)
            if runlines:
                logger.error('Unexecuted tasks found in preparation log:\n %s' % '\n '.join(runlines))
                return -1
        except bb.process.ExecutionError as e:
            logger.error('Preparation failed:\n%s' % e.stdout)
            return -1
    return 0
240
def sdk_install(args, config, basepath, workspace):
    """Entry point for the devtool sdk-install command.

    Installs development artifacts (sysroot files and package data) for
    the given recipes into the SDK, preferring prebuilt sstate objects
    unless --allow-build is given, then rebuilds the shared sysroots.
    """

    import oe.recipeutils
    import bb.process

    # Workspace recipes are built from local source, not installed
    for recipe in args.recipename:
        if recipe in workspace:
            raise DevtoolError('recipe %s is a recipe in your workspace' % recipe)

    tasks = ['do_populate_sysroot', 'do_packagedata']
    stampprefixes = {}
    def checkstamp(recipe):
        # A recipe counts as installed if a non-sigdata stamp exists for
        # its do_populate_sysroot task (normal or setscene variant)
        stampprefix = stampprefixes[recipe]
        stamps = glob.glob(stampprefix + '*')
        for stamp in stamps:
            if '.sigdata.' not in stamp and stamp.startswith((stampprefix + '.', stampprefix + '_setscene.')):
                return True
        else:
            # for/else: reached when the loop finishes without returning
            return False

    install_recipes = []
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        for recipe in args.recipename:
            rd = parse_recipe(config, tinfoil, recipe, True)
            if not rd:
                return 1
            stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP'), tasks[0])
            if checkstamp(recipe):
                logger.info('%s is already installed' % recipe)
            else:
                install_recipes.append(recipe)
    finally:
        tinfoil.shutdown()

    if install_recipes:
        logger.info('Installing %s...' % ', '.join(install_recipes))
        install_tasks = []
        for recipe in install_recipes:
            for task in tasks:
                # -native recipes have no package data to install
                if recipe.endswith('-native') and 'package' in task:
                    continue
                install_tasks.append('%s:%s' % (recipe, task))
        options = ''
        if not args.allow_build:
            # Restrict bitbake to installing from sstate only
            options += ' --setscene-only'
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake %s %s' % (options, ' '.join(install_tasks)), watch=True)
        except bb.process.ExecutionError as e:
            raise DevtoolError('Failed to install %s:\n%s' % (recipe, str(e)))
        failed = False
        for recipe in install_recipes:
            if checkstamp(recipe):
                logger.info('Successfully installed %s' % recipe)
            else:
                raise DevtoolError('Failed to install %s - unavailable' % recipe)
                # NOTE(review): this line is unreachable - the raise above
                # exits first, so 'failed' can never become True and the
                # 'return 2' below is dead code. Possibly the intent was to
                # log the failure and continue; confirm before changing.
                failed = True
        if failed:
            return 2

    try:
        # Regenerate the shared native/target sysroots to pick up the
        # newly installed artifacts
        exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_native_sysroot', watch=True)
        exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_target_sysroot', watch=True)
    except bb.process.ExecutionError as e:
        raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e)))
308
def register_commands(subparsers, context):
    """Register devtool subcommands from the sdk plugin"""
    if not context.fixed_setup:
        # sdk-update/sdk-install only make sense inside the extensible SDK
        return

    update_parser = subparsers.add_parser('sdk-update',
                                          help='Update SDK components',
                                          description='Updates installed SDK components from a remote server',
                                          group='sdk')
    default_server = context.config.get('SDK', 'updateserver', '')
    if default_server:
        # A configured default makes the positional argument optional
        update_parser.add_argument('updateserver', nargs='?',
                                   help='The update server to fetch latest SDK components from (default %s)' % default_server)
    else:
        update_parser.add_argument('updateserver',
                                   help='The update server to fetch latest SDK components from')
    update_parser.add_argument('--skip-prepare', action="store_true",
                               help='Skip re-preparing the build system after updating (for debugging only)')
    update_parser.set_defaults(func=sdk_update)

    install_parser = subparsers.add_parser('sdk-install',
                                           help='Install additional SDK components',
                                           description='Installs additional recipe development files into the SDK. (You can use "devtool search" to find available recipes.)',
                                           group='sdk')
    install_parser.add_argument('recipename', nargs='+',
                                help='Name of the recipe to install the development artifacts for')
    install_parser.add_argument('-s', '--allow-build', action='store_true',
                                help='Allow building requested item(s) from source')
    install_parser.set_defaults(func=sdk_install)
diff --git a/scripts/lib/devtool/search.py b/scripts/lib/devtool/search.py
deleted file mode 100644
index 70b81cac5e..0000000000
--- a/scripts/lib/devtool/search.py
+++ /dev/null
@@ -1,109 +0,0 @@
1# Development tool - search command plugin
2#
3# Copyright (C) 2015 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8"""Devtool search plugin"""
9
10import os
11import bb
12import logging
13import argparse
14import re
15from devtool import setup_tinfoil, parse_recipe, DevtoolError
16
17logger = logging.getLogger('devtool')
18
def search(args, config, basepath, workspace):
    """Entry point for the devtool 'search' subcommand.

    Matches args.keyword (a regular expression) against pkgdata file names,
    package names, selected pkgdata fields, recipe names and provides, and
    prints each matching recipe name with its SUMMARY. Returns 0.
    """

    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
        # Unexpanded default SUMMARY, used to suppress boilerplate summaries
        defsummary = tinfoil.config_data.getVar('SUMMARY', False) or ''

        keyword_rc = re.compile(args.keyword)

        def print_match(pn):
            # Print '<pn> <summary>', omitting the summary if it is just
            # the expanded default
            rd = parse_recipe(config, tinfoil, pn, True)
            if not rd:
                return
            summary = rd.getVar('SUMMARY')
            if summary == rd.expand(defsummary):
                summary = ''
            print("%s %s" % (pn.ljust(20), summary))


        matches = []
        if os.path.exists(pkgdata_dir):
            # Pass 1: search built package data (recipe-level files in
            # PKGDATA_DIR; each file is named after the recipe)
            for fn in os.listdir(pkgdata_dir):
                pfn = os.path.join(pkgdata_dir, fn)
                if not os.path.isfile(pfn):
                    continue

                packages = []
                match = False
                if keyword_rc.search(fn):
                    match = True

                if not match:
                    with open(pfn, 'r') as f:
                        for line in f:
                            if line.startswith('PACKAGES:'):
                                packages = line.split(':', 1)[1].strip().split()

                    for pkg in packages:
                        if keyword_rc.search(pkg):
                            match = True
                            break
                        if os.path.exists(os.path.join(pkgdata_dir, 'runtime', pkg + '.packaged')):
                            with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f:
                                for line in f:
                                    if ': ' in line:
                                        splitline = line.split(': ', 1)
                                        key = splitline[0]
                                        value = splitline[1].strip()
                                        # Strip the ':<pkg>' override suffix from the key
                                        key = key.replace(":" + pkg, "")
                                        if key in ['PKG', 'DESCRIPTION', 'FILES_INFO', 'FILERPROVIDES']:
                                            if keyword_rc.search(value):
                                                match = True
                                                # NOTE: this only breaks the
                                                # per-file line loop; the pkg
                                                # loop continues (harmless,
                                                # match stays True)
                                                break
                if match:
                    print_match(fn)
                    matches.append(fn)
        else:
            logger.warning('Package data is not available, results may be limited')

        # Pass 2: search all parsed recipes (covers recipes not yet built),
        # skipping any already printed in pass 1
        for recipe in tinfoil.all_recipes():
            if args.fixed_setup and 'nativesdk' in recipe.inherits():
                continue

            match = False
            if keyword_rc.search(recipe.pn):
                match = True
            else:
                for prov in recipe.provides:
                    if keyword_rc.search(prov):
                        match = True
                        break
                if not match:
                    for rprov in recipe.rprovides:
                        if keyword_rc.search(rprov):
                            match = True
                            break
            if match and not recipe.pn in matches:
                print_match(recipe.pn)
    finally:
        tinfoil.shutdown()

    return 0
102
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    search_parser = subparsers.add_parser(
        'search',
        help='Search available recipes',
        description='Searches for available recipes. Matches on recipe name, package name, description and installed files, and prints the recipe name and summary on match.',
        group='info')
    search_parser.add_argument('keyword', help='Keyword to search for (regular expression syntax allowed, use quotes to avoid shell expansion)')
    search_parser.set_defaults(func=search, no_workspace=True, fixed_setup=context.fixed_setup)
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
deleted file mode 100644
index cdfdba43ee..0000000000
--- a/scripts/lib/devtool/standard.py
+++ /dev/null
@@ -1,2396 +0,0 @@
1# Development tool - standard commands plugin
2#
3# Copyright (C) 2014-2017 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool standard plugins"""
8
9import os
10import sys
11import re
12import shutil
13import subprocess
14import tempfile
15import logging
16import argparse
17import argparse_oe
18import scriptutils
19import errno
20import glob
21from collections import OrderedDict
22
23from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError
24from devtool import parse_recipe
25
26import bb.utils
27
28logger = logging.getLogger('devtool')
29
30override_branch_prefix = 'devtool-override-'
31
32
def add(args, config, basepath, workspace):
    """Entry point for the devtool 'add' subcommand.

    Creates a new recipe (via recipetool) for the given source tree or
    fetch URI, places it in the workspace along with a bbappend pointing
    at the external source tree, and sets up a git repo for fetched
    source. Returns 0 on success, 1 if the generated recipe fails to
    parse; raises DevtoolError on invalid arguments or conflicts.
    """
    import bb.data
    import bb.process
    import oe.recipeutils

    if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri:
        raise argparse_oe.ArgumentUsageError('At least one of recipename, srctree, fetchuri or -f/--fetch must be specified', 'add')

    # These are positional arguments, but because we're nice, allow
    # specifying e.g. source tree without name, or fetch URI without name or
    # source tree (if we can detect that that is what the user meant)
    if scriptutils.is_src_url(args.recipename):
        if not args.fetchuri:
            if args.fetch:
                raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
            args.fetchuri = args.recipename
            args.recipename = ''
    elif scriptutils.is_src_url(args.srctree):
        if not args.fetchuri:
            if args.fetch:
                raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
            args.fetchuri = args.srctree
            args.srctree = ''
    elif args.recipename and not args.srctree:
        # A path-like recipename is probably a source tree
        if os.sep in args.recipename:
            args.srctree = args.recipename
            args.recipename = None
        elif os.path.isdir(args.recipename):
            logger.warning('Ambiguous argument "%s" - assuming you mean it to be the recipe name' % args.recipename)

    if not args.fetchuri:
        if args.srcrev:
            raise DevtoolError('The -S/--srcrev option is only valid when fetching from an SCM repository')
        if args.srcbranch:
            raise DevtoolError('The -B/--srcbranch option is only valid when fetching from an SCM repository')

    # A plain file as "source tree" is treated as a local archive to fetch
    if args.srctree and os.path.isfile(args.srctree):
        args.fetchuri = 'file://' + os.path.abspath(args.srctree)
        args.srctree = ''

    if args.fetch:
        if args.fetchuri:
            raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
        else:
            logger.warning('-f/--fetch option is deprecated - you can now simply specify the URL to fetch as a positional argument instead')
            args.fetchuri = args.fetch

    if args.recipename:
        if args.recipename in workspace:
            raise DevtoolError("recipe %s is already in your workspace" %
                               args.recipename)
        reason = oe.recipeutils.validate_pn(args.recipename)
        if reason:
            raise DevtoolError(reason)

    if args.srctree:
        srctree = os.path.abspath(args.srctree)
        srctreeparent = None
        tmpsrcdir = None
    else:
        # No explicit source tree: fetch into a temp dir under the default
        # sources location, renamed to the recipe name later
        srctree = None
        srctreeparent = get_default_srctree(config)
        bb.utils.mkdirhier(srctreeparent)
        tmpsrcdir = tempfile.mkdtemp(prefix='devtoolsrc', dir=srctreeparent)

    if srctree and os.path.exists(srctree):
        if args.fetchuri:
            if not os.path.isdir(srctree):
                raise DevtoolError("Cannot fetch into source tree path %s as "
                                   "it exists and is not a directory" %
                                   srctree)
            elif os.listdir(srctree):
                raise DevtoolError("Cannot fetch into source tree path %s as "
                                   "it already exists and is non-empty" %
                                   srctree)
    elif not args.fetchuri:
        if args.srctree:
            raise DevtoolError("Specified source tree %s could not be found" %
                               args.srctree)
        elif srctree:
            raise DevtoolError("No source tree exists at default path %s - "
                               "either create and populate this directory, "
                               "or specify a path to a source tree, or a "
                               "URI to fetch source from" % srctree)
        else:
            raise DevtoolError("You must either specify a source tree "
                               "or a URI to fetch source from")

    if args.version:
        # Underscores/spaces would break PV and the recipe file name
        if '_' in args.version or ' ' in args.version:
            raise DevtoolError('Invalid version string "%s"' % args.version)

    if args.color == 'auto' and sys.stdout.isatty():
        color = 'always'
    else:
        color = args.color
    # Translate our arguments into recipetool command-line options
    extracmdopts = ''
    if args.fetchuri:
        source = args.fetchuri
        if srctree:
            extracmdopts += ' -x %s' % srctree
        else:
            extracmdopts += ' -x %s' % tmpsrcdir
    else:
        source = srctree
    if args.recipename:
        extracmdopts += ' -N %s' % args.recipename
    if args.version:
        extracmdopts += ' -V %s' % args.version
    if args.binary:
        extracmdopts += ' -b'
    if args.also_native:
        extracmdopts += ' --also-native'
    if args.src_subdir:
        extracmdopts += ' --src-subdir "%s"' % args.src_subdir
    if args.autorev:
        extracmdopts += ' -a'
    if args.npm_dev:
        extracmdopts += ' --npm-dev'
    if args.no_pypi:
        extracmdopts += ' --no-pypi'
    if args.mirrors:
        extracmdopts += ' --mirrors'
    if args.srcrev:
        extracmdopts += ' --srcrev %s' % args.srcrev
    if args.srcbranch:
        extracmdopts += ' --srcbranch %s' % args.srcbranch
    if args.provides:
        extracmdopts += ' --provides %s' % args.provides

    # Run recipetool to generate the recipe into a temp dir, then move the
    # results into the workspace
    tempdir = tempfile.mkdtemp(prefix='devtool')
    try:
        try:
            stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create --devtool -o %s \'%s\' %s' % (color, tempdir, source, extracmdopts), watch=True)
        except bb.process.ExecutionError as e:
            # Exit code 15 is recipetool's "could not determine name" signal
            if e.exitcode == 15:
                raise DevtoolError('Could not auto-determine recipe name, please specify it on the command line')
            else:
                raise DevtoolError('Command \'%s\' failed' % e.command)

        recipes = glob.glob(os.path.join(tempdir, '*.bb'))
        if recipes:
            # Recipe name is the file name up to the first underscore (PN)
            recipename = os.path.splitext(os.path.basename(recipes[0]))[0].split('_')[0]
            if recipename in workspace:
                raise DevtoolError('A recipe with the same name as the one being created (%s) already exists in your workspace' % recipename)
            recipedir = os.path.join(config.workspace_path, 'recipes', recipename)
            bb.utils.mkdirhier(recipedir)
            recipefile = os.path.join(recipedir, os.path.basename(recipes[0]))
            appendfile = recipe_to_append(recipefile, config)
            if os.path.exists(appendfile):
                # This shouldn't be possible, but just in case
                raise DevtoolError('A recipe with the same name as the one being created already exists in your workspace')
            if os.path.exists(recipefile):
                raise DevtoolError('A recipe file %s already exists in your workspace; this shouldn\'t be there - please delete it before continuing' % recipefile)
            if tmpsrcdir:
                # Rename the temporary fetch dir to the recipe name
                srctree = os.path.join(srctreeparent, recipename)
                if os.path.exists(tmpsrcdir):
                    if os.path.exists(srctree):
                        if os.path.isdir(srctree):
                            try:
                                # Only succeeds (and is only safe) if empty
                                os.rmdir(srctree)
                            except OSError as e:
                                if e.errno == errno.ENOTEMPTY:
                                    raise DevtoolError('Source tree path %s already exists and is not empty' % srctree)
                                else:
                                    raise
                        else:
                            raise DevtoolError('Source tree path %s already exists and is not a directory' % srctree)
                    logger.info('Using default source tree path %s' % srctree)
                    shutil.move(tmpsrcdir, srctree)
                else:
                    raise DevtoolError('Couldn\'t find source tree created by recipetool')
            bb.utils.mkdirhier(recipedir)
            shutil.move(recipes[0], recipefile)
            # Move any additional files created by recipetool
            for fn in os.listdir(tempdir):
                shutil.move(os.path.join(tempdir, fn), recipedir)
        else:
            raise DevtoolError(f'Failed to create a recipe file for source {source}')
        attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile))
        if os.path.exists(attic_recipe):
            logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe)
    finally:
        if tmpsrcdir and os.path.exists(tmpsrcdir):
            shutil.rmtree(tmpsrcdir)
        shutil.rmtree(tempdir)

    # Record checksums so later devtool commands can detect user edits
    for fn in os.listdir(recipedir):
        _add_md5(config, recipename, os.path.join(recipedir, fn))

    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        try:
            rd = tinfoil.parse_recipe_file(recipefile, False)
        except Exception as e:
            logger.error(str(e))
            rd = None
        if not rd:
            # Parsing failed. We just created this recipe and we shouldn't
            # leave it in the workdir or it'll prevent bitbake from starting
            movefn = '%s.parsefailed' % recipefile
            logger.error('Parsing newly created recipe failed, moving recipe to %s for reference. If this looks to be caused by the recipe itself, please report this error.' % movefn)
            shutil.move(recipefile, movefn)
            return 1

        if args.fetchuri and not args.no_git:
            setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data)

        # Record the initial revision of the source tree and all submodules
        # so later 'devtool update-recipe' can generate patches from it
        initial_rev = {}
        if os.path.exists(os.path.join(srctree, '.git')):
            (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
            initial_rev["."] = stdout.rstrip()
            (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse HEAD` $PWD\'', cwd=srctree)
            for line in stdout.splitlines():
                (rev, submodule) = line.split()
                initial_rev[os.path.relpath(submodule, srctree)] = rev

        if args.src_subdir:
            srctree = os.path.join(srctree, args.src_subdir)

        # Write the workspace bbappend that points the recipe at the
        # external source tree
        bb.utils.mkdirhier(os.path.dirname(appendfile))
        with open(appendfile, 'w') as f:
            f.write('inherit externalsrc\n')
            f.write('EXTERNALSRC = "%s"\n' % srctree)

            b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd)
            if b_is_s:
                f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
            if initial_rev:
                for key, value in initial_rev.items():
                    f.write('\n# initial_rev %s: %s\n' % (key, value))

            if args.binary:
                # Stop git metadata / lock files leaking into the package
                f.write('do_install:append() {\n')
                f.write(' rm -rf ${D}/.git\n')
                f.write(' rm -f ${D}/singletask.lock\n')
                f.write('}\n')

            if bb.data.inherits_class('npm', rd):
                f.write('python do_configure:append() {\n')
                f.write(' pkgdir = d.getVar("NPM_PACKAGE")\n')
                f.write(' lockfile = os.path.join(pkgdir, "singletask.lock")\n')
                f.write(' bb.utils.remove(lockfile)\n')
                f.write('}\n')

        # Check if the new layer provides recipes whose priorities have been
        # overriden by PREFERRED_PROVIDER.
        recipe_name = rd.getVar('PN')
        provides = rd.getVar('PROVIDES')
        # Search every item defined in PROVIDES
        for recipe_provided in provides.split():
            preferred_provider = 'PREFERRED_PROVIDER_' + recipe_provided
            current_pprovider = rd.getVar(preferred_provider)
            if current_pprovider and current_pprovider != recipe_name:
                if args.fixed_setup:
                    #if we are inside the eSDK add the new PREFERRED_PROVIDER in the workspace layer.conf
                    layerconf_file = os.path.join(config.workspace_path, "conf", "layer.conf")
                    with open(layerconf_file, 'a') as f:
                        f.write('%s = "%s"\n' % (preferred_provider, recipe_name))
                else:
                    logger.warning('Set \'%s\' in order to use the recipe' % preferred_provider)
                break

        _add_md5(config, recipename, appendfile)

        check_prerelease_version(rd.getVar('PV'), 'devtool add')

        logger.info('Recipe %s has been automatically created; further editing may be required to make it fully functional' % recipefile)

    finally:
        tinfoil.shutdown()

    return 0
307
308
def _check_compatible_recipe(pn, d):
    """Check if the recipe is supported by devtool

    Raises DevtoolError (exit code 4 for genuine incompatibilities) if the
    recipe cannot be worked on with devtool; returns None otherwise.
    """
    import bb.data
    if pn == 'perf':
        raise DevtoolError("The perf recipe does not actually check out "
                           "source and thus cannot be supported by this tool",
                           4)

    if pn in ['kernel-devsrc', 'package-index'] or pn.startswith('gcc-source'):
        raise DevtoolError("The %s recipe is not supported by this tool" % pn, 4)

    # Certain recipe classes produce artefacts rather than patchable source
    for classname, what in (('image', 'an image'),
                            ('populate_sdk', 'an SDK'),
                            ('packagegroup', 'a packagegroup')):
        if bb.data.inherits_class(classname, d):
            raise DevtoolError("The %s recipe is %s, and therefore is not "
                               "supported by this tool" % (pn, what), 4)

    if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'):
        # Not an incompatibility error per se, so we don't pass the error code
        raise DevtoolError("externalsrc is currently enabled for the %s "
                           "recipe. This prevents the normal do_patch task "
                           "from working. You will need to disable this "
                           "first." % pn)
338
def _dry_run_copy(src, dst, dry_run_outdir, base_outdir):
    """Copy a file into the dry-run output tree instead of its real destination"""
    relpath = os.path.relpath(dst, base_outdir)
    if relpath.startswith('..'):
        raise Exception('Incorrect base path %s for path %s' % (base_outdir, dst))
    target = os.path.join(dry_run_outdir, relpath)
    target_dir = os.path.dirname(target)
    if target_dir:
        bb.utils.mkdirhier(target_dir)
    # Never clobber an existing file: during an upgrade the dry-run recipe
    # has already been written out and must not be replaced with the
    # unmodified version
    if not os.path.exists(target):
        shutil.copy(src, target)
353
def _move_file(src, dst, dry_run_outdir=None, base_outdir=None):
    """Move a file. Creates all the directory components of destination path."""
    suffix = ' (dry-run)' if dry_run_outdir else ''
    logger.debug('Moving %s to %s%s' % (src, dst, suffix))
    if dry_run_outdir:
        # A dry run must leave the source tree untouched, so copy instead
        _dry_run_copy(src, dst, dry_run_outdir, base_outdir)
        return
    dst_dir = os.path.dirname(dst)
    if dst_dir:
        bb.utils.mkdirhier(dst_dir)
    shutil.move(src, dst)
366
def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None):
    """Copy a file. Creates all the directory components of destination path."""
    suffix = ' (dry-run)' if dry_run_outdir else ''
    logger.debug('Copying %s to %s%s' % (src, dst, suffix))
    if dry_run_outdir:
        _dry_run_copy(src, dst, dry_run_outdir, base_outdir)
        return
    dst_dir = os.path.dirname(dst)
    if dst_dir:
        bb.utils.mkdirhier(dst_dir)
    shutil.copy(src, dst)
378
def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
    """List contents of a git treeish

    Returns a dict mapping each path in the treeish to a list of its
    [mode, type, sha1] fields.
    """
    import bb.process
    cmd = ['git', 'ls-tree', '-z', treeish]
    if recursive:
        cmd.append('-r')
    out, _ = bb.process.run(cmd, cwd=repodir)
    ret = {}
    if out:
        # With -z, entries are NUL-separated and each looks like:
        #   <mode> SP <type> SP <sha1> TAB <path>
        for line in out.split('\0'):
            if line:
                # Split at most 3 times so that a path containing
                # whitespace is kept intact as the final field
                # (maxsplit=4 would truncate such a path at its first
                # internal space)
                split = line.split(None, 3)
                ret[split[3]] = split[0:3]
    return ret
393
def _git_modified(repodir):
    """List the difference between HEAD and the index"""
    import bb.process
    out, _ = bb.process.run(['git', 'status', '--porcelain'], cwd=repodir)
    if not out:
        return []
    # Porcelain format: two status characters, a space, then the path;
    # '??' entries are untracked files and are ignored here
    return [line[3:] for line in out.split("\n")
            if line and not line.startswith('??')]
405
406
def _git_exclude_path(srctree, path):
    """Return pathspec (list of paths) that excludes certain path"""
    # NOTE: "Filtering out" files/paths in this way is not entirely reliable -
    # we don't catch files that are deleted, for example. A more reliable way
    # to implement this would be to use "negative pathspecs" which were
    # introduced in Git v1.9.0. Revisit this when/if the required Git version
    # becomes greater than that.
    path = os.path.normpath(path)
    # Only need a recursive listing when the path to exclude is nested
    recurse = len(path.split(os.path.sep)) > 1
    git_files = list(_git_ls_tree(srctree, 'HEAD', recurse).keys())
    if path in git_files:
        git_files.remove(path)
        return git_files
    else:
        return ['.']
422
423def _ls_tree(directory):
424 """Recursive listing of files in a directory"""
425 ret = []
426 for root, dirs, files in os.walk(directory):
427 ret.extend([os.path.relpath(os.path.join(root, fname), directory) for
428 fname in files])
429 return ret
430
431
def extract(args, config, basepath, workspace):
    """Entry point for the devtool 'extract' subcommand"""
    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    if not tinfoil:
        # setup_tinfoil() has already reported the problem
        return 1
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        srctree = os.path.abspath(args.srctree)
        initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch,
                                         False, config, basepath, workspace,
                                         args.fixed_setup, rd, tinfoil,
                                         no_overrides=args.no_overrides)
        logger.info('Source tree extracted to %s' % srctree)

        # A missing initial revision means extraction did not complete
        return 0 if initial_rev else 1
    finally:
        tinfoil.shutdown()
453
def sync(args, config, basepath, workspace):
    """Entry point for the devtool 'sync' subcommand"""
    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    if not tinfoil:
        # setup_tinfoil() has already reported the problem
        return 1
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        srctree = os.path.abspath(args.srctree)
        initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch,
                                         True, config, basepath, workspace,
                                         args.fixed_setup, rd, tinfoil,
                                         no_overrides=True)
        logger.info('Source tree %s synchronized' % srctree)

        # A missing initial revision means the sync did not complete
        return 0 if initial_rev else 1
    finally:
        tinfoil.shutdown()
475
def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False):
    """Extract sources of a recipe.

    Runs the recipe's fetch/unpack/patch tasks under a redirected (temporary)
    WORKDIR, with the devtool-source class inherited via a temporary bbappend,
    then either moves the unpacked source into srctree or (if sync is True)
    git-fetches the devbranch into the existing srctree checkout.

    Arguments:
        srctree     -- destination source tree path
        keep_temp   -- if True, preserve the temporary working directory
        devbranch   -- name of the git branch to create/sync for development
        sync        -- True for 'devtool sync' behaviour, False for extraction
        config      -- devtool ConfigHandler
        basepath    -- devtool base path
        workspace   -- workspace dictionary
        fixed_setup -- True when running within the eSDK
        d           -- recipe datastore
        tinfoil     -- tinfoil instance (tracking enabled)
        no_overrides -- skip creating branches for SRC_URI override appends

    Returns a tuple (initial_rev, srcsubdir_rel). When do_patch/do_unpack are
    disabled for a non-kernel recipe, returns (True, True) instead since the
    devtool-source hooks that record those values never run.

    Raises DevtoolError on incompatible recipes or extraction failure.
    """
    import oe.path
    import bb.data
    import bb.process

    pn = d.getVar('PN')

    _check_compatible_recipe(pn, d)

    # Validate the destination: sync requires an existing tree; a fresh
    # extraction requires the path to be absent or an empty directory
    if sync:
        if not os.path.exists(srctree):
            raise DevtoolError("output path %s does not exist" % srctree)
    else:
        if os.path.exists(srctree):
            if not os.path.isdir(srctree):
                raise DevtoolError("output path %s exists and is not a directory" %
                                   srctree)
            elif os.listdir(srctree):
                raise DevtoolError("output path %s already exists and is "
                                   "non-empty" % srctree)

    if 'noexec' in (d.getVarFlags('do_unpack', False) or []):
        raise DevtoolError("The %s recipe has do_unpack disabled, unable to "
                           "extract source" % pn, 4)

    if not sync:
        # Prepare for shutil.move later on
        bb.utils.mkdirhier(srctree)
        os.rmdir(srctree)

    # Collect the overrides used in conditional SRC_URI appends/prepends so
    # that devtool-source can create a branch per override
    extra_overrides = []
    if not no_overrides:
        history = d.varhistory.variable('SRC_URI')
        for event in history:
            if not 'flag' in event:
                if event['op'].startswith((':append[', ':prepend[')):
                    override = event['op'].split('[')[1].split(']')[0]
                    if not override.startswith('pn-'):
                        extra_overrides.append(override)
        # We want to remove duplicate overrides. If a recipe had multiple
        # SRC_URI_override += values it would cause mulitple instances of
        # overrides. This doesn't play nicely with things like creating a
        # branch for every instance of DEVTOOL_EXTRA_OVERRIDES.
        extra_overrides = list(set(extra_overrides))
        if extra_overrides:
            logger.info('SRC_URI contains some conditional appends/prepends - will create branches to represent these')

    initial_rev = None

    recipefile = d.getVar('FILE')
    appendfile = recipe_to_append(recipefile, config)
    is_kernel_yocto = bb.data.inherits_class('kernel-yocto', d)

    # We need to redirect WORKDIR, STAMPS_DIR etc. under a temporary
    # directory so that:
    # (a) we pick up all files that get unpacked to the WORKDIR, and
    # (b) we don't disturb the existing build
    # However, with recipe-specific sysroots the sysroots for the recipe
    # will be prepared under WORKDIR, and if we used the system temporary
    # directory (i.e. usually /tmp) as used by mkdtemp by default, then
    # our attempts to hardlink files into the recipe-specific sysroots
    # will fail on systems where /tmp is a different filesystem, and it
    # would have to fall back to copying the files which is a waste of
    # time. Put the temp directory under the WORKDIR to prevent that from
    # being a problem.
    tempbasedir = d.getVar('WORKDIR')
    bb.utils.mkdirhier(tempbasedir)
    tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir)
    appendbackup = None
    try:
        tinfoil.logger.setLevel(logging.WARNING)

        # FIXME this results in a cache reload under control of tinfoil, which is fine
        # except we don't get the knotty progress bar

        # Preserve any pre-existing workspace bbappend; it is restored in the
        # finally block below
        if os.path.exists(appendfile):
            appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak')
            shutil.copyfile(appendfile, appendbackup)
        else:
            bb.utils.mkdirhier(os.path.dirname(appendfile))
        logger.debug('writing append file %s' % appendfile)
        with open(appendfile, 'a') as f:
            f.write('###--- _extract_source\n')
            f.write('deltask do_recipe_qa\n')
            f.write('deltask do_recipe_qa_setscene\n')
            f.write('ERROR_QA:remove = "patch-fuzz"\n')
            f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir)
            f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch)
            if not is_kernel_yocto:
                f.write('PATCHTOOL = "git"\n')
                f.write('PATCH_COMMIT_FUNCTIONS = "1"\n')
            if extra_overrides:
                f.write('DEVTOOL_EXTRA_OVERRIDES = "%s"\n' % ':'.join(extra_overrides))
            f.write('inherit devtool-source\n')
            f.write('###--- _extract_source\n')

        update_unlockedsigs(basepath, workspace, fixed_setup, [pn])

        # Writing the preserve-stamps file stops the build from invalidating
        # the existing stamps; it is removed again once the build is done
        sstate_manifests = d.getVar('SSTATE_MANIFESTS')
        bb.utils.mkdirhier(sstate_manifests)
        preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps')
        with open(preservestampfile, 'w') as f:
            f.write(d.getVar('STAMP'))
        tinfoil.modified_files()
        try:
            if is_kernel_yocto:
                # We need to generate the kernel config
                task = 'do_configure'
            else:
                task = 'do_patch'

            if 'noexec' in (d.getVarFlags(task, False) or []) or 'task' not in (d.getVarFlags(task, False) or []):
                logger.info('The %s recipe has %s disabled. Running only '
                            'do_configure task dependencies' % (pn, task))

                # Build only what do_configure depends on instead of the
                # disabled task itself
                if 'depends' in d.getVarFlags('do_configure', False):
                    pn = d.getVarFlags('do_configure', False)['depends']
                    pn = pn.replace('${PV}', d.getVar('PV'))
                    pn = pn.replace('${COMPILERDEP}', d.getVar('COMPILERDEP'))
                    task = None

            # Run the fetch + unpack tasks
            res = tinfoil.build_targets(pn,
                                        task,
                                        handle_events=True)
        finally:
            if os.path.exists(preservestampfile):
                os.remove(preservestampfile)

        if not res:
            raise DevtoolError('Extracting source for %s failed' % pn)

        if not is_kernel_yocto and ('noexec' in (d.getVarFlags('do_patch', False) or []) or 'task' not in (d.getVarFlags('do_patch', False) or [])):
            # Recipes with patching disabled (e.g. work-shared users) just get
            # a symlink to the shared source instead of an extracted copy
            workshareddir = d.getVar('S')
            if os.path.islink(srctree):
                os.unlink(srctree)

            os.symlink(workshareddir, srctree)

            # The initial_rev file is created in devtool_post_unpack function that will not be executed if
            # do_unpack/do_patch tasks are disabled so we have to directly say that source extraction was successful
            return True, True

        # The devtool-source class records these two files during unpack
        try:
            with open(os.path.join(tempdir, 'initial_rev'), 'r') as f:
                initial_rev = f.read()

            with open(os.path.join(tempdir, 'srcsubdir'), 'r') as f:
                srcsubdir = f.read()
        except FileNotFoundError as e:
            raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e))
        srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir'))

        # Check if work-shared is empty, if yes
        # find source and copy to work-shared
        if is_kernel_yocto:
            workshareddir = d.getVar('STAGING_KERNEL_DIR')
            staging_kerVer = get_staging_kver(workshareddir)
            kernelVersion = d.getVar('LINUX_VERSION')

            # handle dangling symbolic link in work-shared:
            if os.path.islink(workshareddir):
                os.unlink(workshareddir)

            if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer):
                shutil.rmtree(workshareddir)
                oe.path.copyhardlinktree(srcsubdir, workshareddir)
            elif not os.path.exists(workshareddir):
                oe.path.copyhardlinktree(srcsubdir, workshareddir)

        if sync:
            try:
                logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch))
                bb.process.run('git branch -f ' + devbranch + '.bak', cwd=srctree)

                # Use git fetch to update the source with the current recipe
                # To be able to update the currently checked out branch with
                # possibly new history (no fast-forward) git needs to be told
                # that's ok
                logger.info('Syncing source files including patches to git branch: %s' % devbranch)
                bb.process.run('git fetch --update-head-ok --force file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree)
            except bb.process.ExecutionError as e:
                raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e))

        else:
            shutil.move(srcsubdir, srctree)

            if is_kernel_yocto:
                logger.info('Copying kernel config to srctree')
                shutil.copy2(os.path.join(tempdir, '.config'), srctree)

    finally:
        # Restore (or remove) the temporary bbappend and clean up the tempdir
        if appendbackup:
            shutil.copyfile(appendbackup, appendfile)
        elif os.path.exists(appendfile):
            os.remove(appendfile)
        if keep_temp:
            logger.info('Preserving temporary directory %s' % tempdir)
        else:
            shutil.rmtree(tempdir)
    return initial_rev, srcsubdir_rel
678
def _add_md5(config, recipename, filename):
    """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace"""
    def record(fn):
        md5 = bb.utils.md5_file(fn)
        entry = '%s|%s|%s\n' % (recipename, os.path.relpath(fn, config.workspace_path), md5)
        with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f:
            # Opened in append mode so a missing file is created; rewind to
            # scan existing entries and avoid duplicates
            f.seek(0, os.SEEK_SET)
            if entry not in f.read():
                f.write(entry)

    if not os.path.isdir(filename):
        record(filename)
        return
    for root, _, files in os.walk(filename):
        for fname in files:
            record(os.path.join(root, fname))
695
def _check_preserve(config, recipename):
    """Check if a file was manually changed and needs to be saved in 'attic'
    directory

    Walks the workspace .devtool_md5 file: entries for *recipename* are
    dropped and their files removed, except that files whose current checksum
    no longer matches the recorded one are moved into attic/<recipename>
    instead of being deleted. Entries for other recipes are kept.
    """
    origfile = os.path.join(config.workspace_path, '.devtool_md5')
    newfile = os.path.join(config.workspace_path, '.devtool_md5_new')
    preservepath = os.path.join(config.workspace_path, 'attic', recipename)
    with open(origfile, 'r') as f:
        with open(newfile, 'w') as tf:
            for line in f.readlines():
                splitline = line.rstrip().split('|')
                if splitline[0] == recipename:
                    removefile = os.path.join(config.workspace_path, splitline[1])
                    try:
                        md5 = bb.utils.md5_file(removefile)
                    except FileNotFoundError:
                        # File no longer exists, skip it (other I/O errors
                        # still propagate)
                        continue
                    if splitline[2] != md5:
                        # Modified since recorded - preserve rather than delete
                        bb.utils.mkdirhier(preservepath)
                        preservefile = os.path.basename(removefile)
                        logger.warning('File %s modified since it was written, preserving in %s' % (preservefile, preservepath))
                        shutil.move(removefile, os.path.join(preservepath, preservefile))
                    else:
                        os.remove(removefile)
                else:
                    tf.write(line)
    # Atomically replace the md5 file with the filtered version
    bb.utils.rename(newfile, origfile)
726
def get_staging_kver(srcdir):
    """Return the kernel version from a work-shared source tree.

    Reads VERSION, PATCHLEVEL and SUBLEVEL from the 2nd-4th lines of the
    kernel Makefile and joins them as "V.P.S"; returns "" if the Makefile
    is missing.
    """
    import itertools
    try:
        with open(os.path.join(srcdir, "Makefile")) as f:
            fields = [line.rstrip().split('= ')[1]
                      for line in itertools.islice(f, 1, 4)]
        return ".".join(fields)
    except FileNotFoundError:
        return ""
736
def get_staging_kbranch(srcdir):
    """Return the currently checked-out branch of the work-shared kernel tree.

    Returns "" when the directory does not exist or is empty.
    """
    import bb.process
    if not (os.path.exists(srcdir) and os.listdir(srcdir)):
        return ""
    (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir)
    return branch.split('\n')[0]
744
def get_real_srctree(srctree, s, workdir):
    """Return the effective source tree path for a recipe.

    If S points at a subdirectory nested inside the recipe's workdir (i.e.
    the upstream source unpacks into a subdirectory below the first level),
    append that subdirectory to srctree; otherwise return srctree unchanged.
    """
    # Check that recipe isn't using a shared workdir
    s = os.path.abspath(s)
    workdir = os.path.abspath(workdir)
    # Compare against workdir plus a separator so that a sibling directory
    # such as "<workdir>-foo" is not mistaken for a subdirectory of workdir
    if s.startswith(workdir + os.sep) and os.path.dirname(s) != workdir:
        # Handle if S is set to a subdirectory of the source
        srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
        srctree = os.path.join(srctree, srcsubdir)
    return srctree
754
def modify(args, config, basepath, workspace):
    """Entry point for the devtool 'modify' subcommand.

    Sets up an existing recipe to build from an external source tree in the
    workspace: extracts the source (unless --no-extract), records the initial
    git revision(s) including submodules, and writes a workspace bbappend
    pointing the recipe at the source tree via externalsrc.

    Returns 0 on success, 1 on failure; raises DevtoolError for user errors
    such as the recipe already being in the workspace.
    """
    import bb.data
    import bb.process
    import oe.recipeutils
    import oe.patch
    import oe.path

    if args.recipename in workspace:
        raise DevtoolError("recipe %s is already in your workspace" %
                           args.recipename)

    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        pn = rd.getVar('PN')
        if pn != args.recipename:
            logger.info('Mapping %s to %s' % (args.recipename, pn))
        if pn in workspace:
            raise DevtoolError("recipe %s is already in your workspace" %
                               pn)

        if args.srctree:
            srctree = os.path.abspath(args.srctree)
        else:
            srctree = get_default_srctree(config, pn)

        if args.no_extract and not os.path.isdir(srctree):
            raise DevtoolError("--no-extract specified and source path %s does "
                               "not exist or is not a directory" %
                               srctree)

        recipefile = rd.getVar('FILE')
        appendfile = recipe_to_append(recipefile, config, args.wildcard)
        if os.path.exists(appendfile):
            raise DevtoolError("Another variant of recipe %s is already in your "
                               "workspace (only one variant of a recipe can "
                               "currently be worked on at once)"
                               % pn)

        _check_compatible_recipe(pn, rd)

        # initial_revs/commits map a subtree key ("." for the top level,
        # relative path for each submodule) to a revision / commit list
        initial_revs = {}
        commits = {}
        check_commits = False

        # For kernel recipes, reuse the work-shared kernel source directly
        # when its version and branch match the recipe, instead of
        # re-extracting
        if bb.data.inherits_class('kernel-yocto', rd):
            # Current set kernel version
            kernelVersion = rd.getVar('LINUX_VERSION')
            srcdir = rd.getVar('STAGING_KERNEL_DIR')
            kbranch = rd.getVar('KBRANCH')

            staging_kerVer = get_staging_kver(srcdir)
            staging_kbranch = get_staging_kbranch(srcdir)
            if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch):
                oe.path.copyhardlinktree(srcdir, srctree)
                unpackdir = rd.getVar('UNPACKDIR')
                srcsubdir = rd.getVar('S')

                # Add locally copied files to gitignore as we add back to the metadata directly
                local_files = oe.recipeutils.get_recipe_local_files(rd)
                srcabspath = os.path.abspath(srcsubdir)
                local_files = [fname for fname in local_files if
                               os.path.exists(os.path.join(unpackdir, fname)) and
                               srcabspath == unpackdir]
                if local_files:
                    with open(os.path.join(srctree, '.gitignore'), 'a+') as f:
                        # NOTE(review): the two adjacent string literals below
                        # concatenate without a space ("...linesif you want...")
                        # in the text written to .gitignore
                        f.write('# Ignore local files, by default. Remove following lines'
                                'if you want to commit the directory to Git\n')
                        for fname in local_files:
                            f.write('%s\n' % fname)

                task = 'do_configure'
                # NOTE(review): the build result is not checked here
                res = tinfoil.build_targets(pn, task, handle_events=True)

                # Copy .config to workspace
                kconfpath = rd.getVar('B')
                logger.info('Copying kernel config to workspace')
                shutil.copy2(os.path.join(kconfpath, '.config'), srctree)

                # Set this to true, we still need to get initial_rev
                # by parsing the git repo
                args.no_extract = True

        if not args.no_extract:
            initial_revs["."], _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
            if not initial_revs["."]:
                return 1
            logger.info('Source tree extracted to %s' % srctree)

            if os.path.exists(os.path.join(srctree, '.git')):
                # Get list of commits since this revision
                (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree)
                commits["."] = stdout.split()
                check_commits = True
                # Record the devtool-base revision and commit list for each
                # submodule as well
                try:
                    (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree)
                except bb.process.ExecutionError:
                    stdout = ""
                for line in stdout.splitlines():
                    (rev, submodule_path) = line.split()
                    submodule = os.path.relpath(submodule_path, srctree)
                    initial_revs[submodule] = rev
                    (stdout, _) = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=submodule_path)
                    commits[submodule] = stdout.split()
        else:
            if os.path.exists(os.path.join(srctree, '.git')):
                # Check if it's a tree previously extracted by us. This is done
                # by ensuring that devtool-base and args.branch (devtool) exist.
                # The check_commits logic will cause an exception if either one
                # of these doesn't exist
                try:
                    (stdout, _) = bb.process.run('git branch --contains devtool-base', cwd=srctree)
                    bb.process.run('git rev-parse %s' % args.branch, cwd=srctree)
                except bb.process.ExecutionError:
                    stdout = ''
                if stdout:
                    check_commits = True
                for line in stdout.splitlines():
                    if line.startswith('*'):
                        (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree)
                        initial_revs["."] = stdout.rstrip()
                if "." not in initial_revs:
                    # Otherwise, just grab the head revision
                    (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
                    initial_revs["."] = stdout.rstrip()

        # Map each branch (devtool plus any override branches) to the list of
        # original patch files its commits came from, so that devtool
        # finish/update-recipe can skip duplicates
        branch_patches = {}
        if check_commits:
            # Check if there are override branches
            (stdout, _) = bb.process.run('git branch', cwd=srctree)
            branches = []
            for line in stdout.rstrip().splitlines():
                branchname = line[2:].rstrip()
                if branchname.startswith(override_branch_prefix):
                    branches.append(branchname)
            if branches:
                logger.warning('SRC_URI is conditionally overridden in this recipe, thus several %s* branches have been created, one for each override that makes changes to SRC_URI. It is recommended that you make changes to the %s branch first, then checkout and rebase each %s* branch and update any unique patches there (duplicates on those branches will be ignored by devtool finish/update-recipe)' % (override_branch_prefix, args.branch, override_branch_prefix))
            branches.insert(0, args.branch)
            seen_patches = []
            for branch in branches:
                branch_patches[branch] = []
                (stdout, _) = bb.process.run('git rev-list devtool-base..%s' % branch, cwd=srctree)
                for sha1 in stdout.splitlines():
                    notes = oe.patch.GitApplyTree.getNotes(srctree, sha1.strip())
                    origpatch = notes.get(oe.patch.GitApplyTree.original_patch)
                    if origpatch and origpatch not in seen_patches:
                        seen_patches.append(origpatch)
                        branch_patches[branch].append(origpatch)

        # Need to grab this here in case the source is within a subdirectory
        srctreebase = srctree
        srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))

        # Write the workspace bbappend that redirects the recipe to build from
        # the external source tree
        bb.utils.mkdirhier(os.path.dirname(appendfile))
        with open(appendfile, 'w') as f:
            # if not present, add type=git-dependency to the secondary sources
            # (non local files) so they can be extracted correctly when building a recipe after
            # doing a devtool modify on it
            src_uri = rd.getVar('SRC_URI').split()
            src_uri_append = []
            src_uri_remove = []

            # Assume first entry is main source extracted in ${S} so skip it
            src_uri = src_uri[1::]

            # Add "type=git-dependency" to all non local sources
            for url in src_uri:
                if not url.startswith('file://') and not 'type=' in url:
                    src_uri_remove.append(url)
                    src_uri_append.append('%s;type=git-dependency' % url)

            if src_uri_remove:
                f.write('SRC_URI:remove = "%s"\n' % ' '.join(src_uri_remove))
                f.write('SRC_URI:append = " %s"\n\n' % ' '.join(src_uri_append))

            f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n')
            # Local files can be modified/tracked in separate subdir under srctree
            # Mostly useful for packages with S != WORKDIR
            f.write('FILESPATH:prepend := "%s:"\n' %
                    os.path.join(srctreebase, 'oe-local-files'))
            f.write('# srctreebase: %s\n' % srctreebase)

            f.write('\ninherit externalsrc\n')
            f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n')
            f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))

            b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd)
            if b_is_s:
                f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))

            if bb.data.inherits_class('kernel', rd):
                f.write('\ndo_kernel_configme:prepend() {\n'
                        '    if [ -e ${S}/.config ]; then\n'
                        '        mv ${S}/.config ${S}/.config.old\n'
                        '    fi\n'
                        '}\n')
                if rd.getVarFlag('do_menuconfig', 'task'):
                    f.write('\ndo_configure:append() {\n'
                            '    if [ ${@oe.types.boolean(d.getVar("KCONFIG_CONFIG_ENABLE_MENUCONFIG"))} = True ]; then\n'
                            '        cp ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.baseline\n'
                            '        ln -sfT ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.new\n'
                            '    fi\n'
                            '}\n')
            # Record the initial revisions/commits so finish/update-recipe can
            # later work out which commits are new
            if initial_revs:
                for name, rev in initial_revs.items():
                    f.write('\n# initial_rev %s: %s\n' % (name, rev))
                    if name in commits:
                        for commit in commits[name]:
                            f.write('# commit %s: %s\n' % (name, commit))
            if branch_patches:
                for branch in branch_patches:
                    if branch == args.branch:
                        continue
                    f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch])))
            if args.debug_build:
                f.write('\nDEBUG_BUILD = "1"\n')

        update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])

        _add_md5(config, pn, appendfile)

        logger.info('Recipe %s now set up to build from %s' % (pn, srctree))

    finally:
        tinfoil.shutdown()

    return 0
986
987
988def rename(args, config, basepath, workspace):
989 """Entry point for the devtool 'rename' subcommand"""
990 import bb
991 import oe.recipeutils
992
993 check_workspace_recipe(workspace, args.recipename)
994
995 if not (args.newname or args.version):
996 raise DevtoolError('You must specify a new name, a version with -V/--version, or both')
997
998 recipefile = workspace[args.recipename]['recipefile']
999 if not recipefile:
1000 raise DevtoolError('devtool rename can only be used where the recipe file itself is in the workspace (e.g. after devtool add)')
1001
1002 if args.newname and args.newname != args.recipename:
1003 reason = oe.recipeutils.validate_pn(args.newname)
1004 if reason:
1005 raise DevtoolError(reason)
1006 newname = args.newname
1007 else:
1008 newname = args.recipename
1009
1010 append = workspace[args.recipename]['bbappend']
1011 appendfn = os.path.splitext(os.path.basename(append))[0]
1012 splitfn = appendfn.split('_')
1013 if len(splitfn) > 1:
1014 origfnver = appendfn.split('_')[1]
1015 else:
1016 origfnver = ''
1017
1018 recipefilemd5 = None
1019 newrecipefilemd5 = None
1020 tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
1021 try:
1022 rd = parse_recipe(config, tinfoil, args.recipename, True)
1023 if not rd:
1024 return 1
1025
1026 bp = rd.getVar('BP')
1027 bpn = rd.getVar('BPN')
1028 if newname != args.recipename:
1029 localdata = rd.createCopy()
1030 localdata.setVar('PN', newname)
1031 newbpn = localdata.getVar('BPN')
1032 else:
1033 newbpn = bpn
1034 s = rd.getVar('S', False)
1035 src_uri = rd.getVar('SRC_URI', False)
1036 pv = rd.getVar('PV')
1037
1038 # Correct variable values that refer to the upstream source - these
1039 # values must stay the same, so if the name/version are changing then
1040 # we need to fix them up
1041 new_s = s
1042 new_src_uri = src_uri
1043 if newbpn != bpn:
1044 # ${PN} here is technically almost always incorrect, but people do use it
1045 new_s = new_s.replace('${BPN}', bpn)
1046 new_s = new_s.replace('${PN}', bpn)
1047 new_s = new_s.replace('${BP}', '%s-${PV}' % bpn)
1048 new_src_uri = new_src_uri.replace('${BPN}', bpn)
1049 new_src_uri = new_src_uri.replace('${PN}', bpn)
1050 new_src_uri = new_src_uri.replace('${BP}', '%s-${PV}' % bpn)
1051 if args.version and origfnver == pv:
1052 new_s = new_s.replace('${PV}', pv)
1053 new_s = new_s.replace('${BP}', '${BPN}-%s' % pv)
1054 new_src_uri = new_src_uri.replace('${PV}', pv)
1055 new_src_uri = new_src_uri.replace('${BP}', '${BPN}-%s' % pv)
1056 patchfields = {}
1057 if new_s != s:
1058 patchfields['S'] = new_s
1059 if new_src_uri != src_uri:
1060 patchfields['SRC_URI'] = new_src_uri
1061 if patchfields:
1062 recipefilemd5 = bb.utils.md5_file(recipefile)
1063 oe.recipeutils.patch_recipe(rd, recipefile, patchfields)
1064 newrecipefilemd5 = bb.utils.md5_file(recipefile)
1065 finally:
1066 tinfoil.shutdown()
1067
1068 if args.version:
1069 newver = args.version
1070 else:
1071 newver = origfnver
1072
1073 if newver:
1074 newappend = '%s_%s.bbappend' % (newname, newver)
1075 newfile = '%s_%s.bb' % (newname, newver)
1076 else:
1077 newappend = '%s.bbappend' % newname
1078 newfile = '%s.bb' % newname
1079
1080 oldrecipedir = os.path.dirname(recipefile)
1081 newrecipedir = os.path.join(config.workspace_path, 'recipes', newname)
1082 if oldrecipedir != newrecipedir:
1083 bb.utils.mkdirhier(newrecipedir)
1084
1085 newappend = os.path.join(os.path.dirname(append), newappend)
1086 newfile = os.path.join(newrecipedir, newfile)
1087
1088 # Rename bbappend
1089 logger.info('Renaming %s to %s' % (append, newappend))
1090 bb.utils.rename(append, newappend)
1091 # Rename recipe file
1092 logger.info('Renaming %s to %s' % (recipefile, newfile))
1093 bb.utils.rename(recipefile, newfile)
1094
1095 # Rename source tree if it's the default path
1096 appendmd5 = None
1097 newappendmd5 = None
1098 if not args.no_srctree:
1099 srctree = workspace[args.recipename]['srctree']
1100 if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename):
1101 newsrctree = os.path.join(config.workspace_path, 'sources', newname)
1102 logger.info('Renaming %s to %s' % (srctree, newsrctree))
1103 shutil.move(srctree, newsrctree)
1104 # Correct any references (basically EXTERNALSRC*) in the .bbappend
1105 appendmd5 = bb.utils.md5_file(newappend)
1106 appendlines = []
1107 with open(newappend, 'r') as f:
1108 for line in f:
1109 appendlines.append(line)
1110 with open(newappend, 'w') as f:
1111 for line in appendlines:
1112 if srctree in line:
1113 line = line.replace(srctree, newsrctree)
1114 f.write(line)
1115 newappendmd5 = bb.utils.md5_file(newappend)
1116
1117 bpndir = None
1118 newbpndir = None
1119 if newbpn != bpn:
1120 bpndir = os.path.join(oldrecipedir, bpn)
1121 if os.path.exists(bpndir):
1122 newbpndir = os.path.join(newrecipedir, newbpn)
1123 logger.info('Renaming %s to %s' % (bpndir, newbpndir))
1124 shutil.move(bpndir, newbpndir)
1125
1126 bpdir = None
1127 newbpdir = None
1128 if newver != origfnver or newbpn != bpn:
1129 bpdir = os.path.join(oldrecipedir, bp)
1130 if os.path.exists(bpdir):
1131 newbpdir = os.path.join(newrecipedir, '%s-%s' % (newbpn, newver))
1132 logger.info('Renaming %s to %s' % (bpdir, newbpdir))
1133 shutil.move(bpdir, newbpdir)
1134
1135 if oldrecipedir != newrecipedir:
1136 # Move any stray files and delete the old recipe directory
1137 for entry in os.listdir(oldrecipedir):
1138 oldpath = os.path.join(oldrecipedir, entry)
1139 newpath = os.path.join(newrecipedir, entry)
1140 logger.info('Renaming %s to %s' % (oldpath, newpath))
1141 shutil.move(oldpath, newpath)
1142 os.rmdir(oldrecipedir)
1143
1144 # Now take care of entries in .devtool_md5
1145 md5entries = []
1146 with open(os.path.join(config.workspace_path, '.devtool_md5'), 'r') as f:
1147 for line in f:
1148 md5entries.append(line)
1149
1150 if bpndir and newbpndir:
1151 relbpndir = os.path.relpath(bpndir, config.workspace_path) + '/'
1152 else:
1153 relbpndir = None
1154 if bpdir and newbpdir:
1155 relbpdir = os.path.relpath(bpdir, config.workspace_path) + '/'
1156 else:
1157 relbpdir = None
1158
1159 with open(os.path.join(config.workspace_path, '.devtool_md5'), 'w') as f:
1160 for entry in md5entries:
1161 splitentry = entry.rstrip().split('|')
1162 if len(splitentry) > 2:
1163 if splitentry[0] == args.recipename:
1164 splitentry[0] = newname
1165 if splitentry[1] == os.path.relpath(append, config.workspace_path):
1166 splitentry[1] = os.path.relpath(newappend, config.workspace_path)
1167 if appendmd5 and splitentry[2] == appendmd5:
1168 splitentry[2] = newappendmd5
1169 elif splitentry[1] == os.path.relpath(recipefile, config.workspace_path):
1170 splitentry[1] = os.path.relpath(newfile, config.workspace_path)
1171 if recipefilemd5 and splitentry[2] == recipefilemd5:
1172 splitentry[2] = newrecipefilemd5
1173 elif relbpndir and splitentry[1].startswith(relbpndir):
1174 splitentry[1] = os.path.relpath(os.path.join(newbpndir, splitentry[1][len(relbpndir):]), config.workspace_path)
1175 elif relbpdir and splitentry[1].startswith(relbpdir):
1176 splitentry[1] = os.path.relpath(os.path.join(newbpdir, splitentry[1][len(relbpdir):]), config.workspace_path)
1177 entry = '|'.join(splitentry) + '\n'
1178 f.write(entry)
1179 return 0
1180
1181
def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refresh=False):
    """Get initial and update rev of a recipe. These are the start point of the
    whole patchset and start point for the patches to be re-generated/updated.

    srctree: path to the source tree (a git repository, possibly with
        subtrees for e.g. submodules - see the 'name' keys below)
    recipe_path: recipe/bbappend file to parse for the special
        '# initial_rev', '# commit' and '# patches_<branch>' comment lines
        that devtool records when setting up the workspace
    initial_rev: if set, overrides the recorded initial rev for the top
        level ('.') tree
    force_patch_refresh: ignore recorded per-commit information so that
        all patches get regenerated

    Returns a 4-tuple (initial_revs, update_revs, changed_revs, patches)
    where the first three are dicts keyed by subtree name ('.' for the
    top level) and 'patches' is the list recorded for the current branch.
    """
    import bb.process

    # Get current branch
    stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
                               cwd=srctree)
    branchname = stdout.rstrip()

    # Parse initial rev from recipe if not specified
    # Each matching comment line has the form '# <tag> <name>: <sha1>'
    commits = {}
    patches = []
    initial_revs = {}
    with open(recipe_path, 'r') as f:
        for line in f:
            pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$'
            match = re.search(pattern, line)
            if match:
                name = match.group(1)
                rev = match.group(2)
                if line.startswith('# initial_rev'):
                    # An explicit initial_rev argument wins for the top level
                    if not (name == "." and initial_rev):
                        initial_revs[name] = rev
                elif line.startswith('# commit') and not force_patch_refresh:
                    if name not in commits:
                        commits[name] = [rev]
                    else:
                        commits[name].append(rev)
                elif line.startswith('# patches_%s:' % branchname):
                    patches = line.split(':')[-1].strip().split(',')

    update_revs = dict(initial_revs)
    changed_revs = {}
    for name, rev in initial_revs.items():
        # Find first actually changed revision: advance update_revs past
        # the leading run of commits that are unchanged since the last update
        stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' %
                                   rev, cwd=os.path.join(srctree, name))
        newcommits = stdout.split()
        if name in commits:
            for i in range(min(len(commits[name]), len(newcommits))):
                if newcommits[i] == commits[name][i]:
                    update_revs[name] = commits[name][i]

        # 'git cherry' lists commits not present in the devtool-patched
        # ref; failure (e.g. ref missing) simply disables the filtering below
        try:
            stdout, _ = bb.process.run('git cherry devtool-patched',
                                        cwd=os.path.join(srctree, name))
        except bb.process.ExecutionError as err:
            stdout = None

        if stdout is not None and not force_patch_refresh:
            for line in stdout.splitlines():
                # Lines beginning '+ ' are commits unique to HEAD
                if line.startswith('+ '):
                    rev = line.split()[1]
                    if rev in newcommits:
                        if name not in changed_revs:
                            changed_revs[name] = [rev]
                        else:
                            changed_revs[name].append(rev)

    return initial_revs, update_revs, changed_revs, patches
1244
1245def _remove_file_entries(srcuri, filelist):
1246 """Remove file:// entries from SRC_URI"""
1247 remaining = filelist[:]
1248 entries = []
1249 for fname in filelist:
1250 basename = os.path.basename(fname)
1251 for i in range(len(srcuri)):
1252 if (srcuri[i].startswith('file://') and
1253 os.path.basename(srcuri[i].split(';')[0]) == basename):
1254 entries.append(srcuri[i])
1255 remaining.remove(fname)
1256 srcuri.pop(i)
1257 break
1258 return entries, remaining
1259
1260def _replace_srcuri_entry(srcuri, filename, newentry):
1261 """Replace entry corresponding to specified file with a new entry"""
1262 basename = os.path.basename(filename)
1263 for i in range(len(srcuri)):
1264 if os.path.basename(srcuri[i].split(';')[0]) == basename:
1265 srcuri.pop(i)
1266 srcuri.insert(i, newentry)
1267 break
1268
1269def _remove_source_files(append, files, destpath, no_report_remove=False, dry_run=False):
1270 """Unlink existing patch files"""
1271
1272 dry_run_suffix = ' (dry-run)' if dry_run else ''
1273
1274 for path in files:
1275 if append:
1276 if not destpath:
1277 raise Exception('destpath should be set here')
1278 path = os.path.join(destpath, os.path.basename(path))
1279
1280 if os.path.exists(path):
1281 if not no_report_remove:
1282 logger.info('Removing file %s%s' % (path, dry_run_suffix))
1283 if not dry_run:
1284 # FIXME "git rm" here would be nice if the file in question is
1285 # tracked
1286 # FIXME there's a chance that this file is referred to by
1287 # another recipe, in which case deleting wouldn't be the
1288 # right thing to do
1289 os.remove(path)
1290 # Remove directory if empty
1291 try:
1292 os.rmdir(os.path.dirname(path))
1293 except OSError as ose:
1294 if ose.errno != errno.ENOTEMPTY:
1295 raise
1296
1297
def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None):
    """Export patches from srctree to given location.
    Returns three-tuple of dicts:
    1. updated - patches that already exist in SRCURI
    2. added - new patches that don't exist in SRCURI
    3 removed - patches that exist in SRCURI but not in exported patches
    In each dict the key is the 'basepath' of the URI and value is:
    - for updated and added dicts, a dict with 2 optional keys:
        - 'path': the absolute path to the existing file in recipe space (if any)
        - 'patchdir': the directory in which the patch should be applied (if any)
    - for removed dict, the absolute path to the existing file in recipe space

    start_revs is a dict of subtree name -> revision to start patch
    generation from; changed_revs (optional, same shape) limits updates
    to patches whose commit actually changed.
    """
    import oe.recipeutils
    from oe.patch import GitApplyTree
    import bb.process
    updated = OrderedDict()
    added = OrderedDict()
    # Matches an optional git-format-patch '0001-' style numeric prefix
    seqpatch_re = re.compile('^([0-9]{4}-)?(.+)')

    # basename -> absolute path of patches already referenced by the recipe;
    # entries left over at the end are the 'removed' result
    existing_patches = dict((os.path.basename(path), path) for path in
                            oe.recipeutils.get_recipe_patches(rd))
    logger.debug('Existing patches: %s' % existing_patches)

    # Generate patches from Git, exclude local files directory
    patch_pathspec = _git_exclude_path(srctree, 'oe-local-files')
    GitApplyTree.extractPatches(srctree, start_revs, destdir, patch_pathspec)
    for dirpath, dirnames, filenames in os.walk(destdir):
        new_patches = filenames
        # Subdirectory (relative to destdir) becomes the 'patchdir' value
        reldirpath = os.path.relpath(dirpath, destdir)
        for new_patch in new_patches:
            # Strip numbering from patch names. If it's a git sequence named patch,
            # the numbers might not match up since we are starting from a different
            # revision This does assume that people are using unique shortlog
            # values, but they ought to be anyway...
            new_basename = seqpatch_re.match(new_patch).group(2)
            match_name = None
            old_patch = None
            for old_patch in existing_patches:
                old_basename = seqpatch_re.match(old_patch).group(2)
                old_basename_splitext = os.path.splitext(old_basename)
                # A compressed existing patch matches if its name minus the
                # compression extension equals the newly generated name
                if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename:
                    old_patch_noext = os.path.splitext(old_patch)[0]
                    match_name = old_patch_noext
                    break
                elif new_basename == old_basename:
                    match_name = old_patch
                    break
            if match_name:
                # Rename patch files
                if new_patch != match_name:
                    bb.utils.rename(os.path.join(destdir, new_patch),
                                    os.path.join(destdir, match_name))
                # Need to pop it off the list now before checking changed_revs
                oldpath = existing_patches.pop(old_patch)
                if changed_revs is not None and dirpath in changed_revs:
                    # Avoid updating patches that have not actually changed
                    with open(os.path.join(dirpath, match_name), 'r') as f:
                        firstlineitems = f.readline().split()
                        # Looking for "From <hash>" line
                        if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
                            if not firstlineitems[1] in changed_revs[dirpath]:
                                continue
                # Recompress if necessary, so the exported file matches the
                # compression of the patch already referenced by the recipe
                if oldpath.endswith(('.gz', '.Z')):
                    bb.process.run(['gzip', match_name], cwd=destdir)
                    if oldpath.endswith('.gz'):
                        match_name += '.gz'
                    else:
                        match_name += '.Z'
                elif oldpath.endswith('.bz2'):
                    bb.process.run(['bzip2', match_name], cwd=destdir)
                    match_name += '.bz2'
                updated[match_name] = {'path' : oldpath}
                if reldirpath != ".":
                    updated[match_name]['patchdir'] = reldirpath
            else:
                added[new_patch] = {}
                if reldirpath != ".":
                    added[new_patch]['patchdir'] = reldirpath

    # Whatever was not matched above no longer exists in the source tree
    return (updated, added, existing_patches)
1379
1380
def _create_kconfig_diff(srctree, rd, outfile):
    """Create a kconfig fragment from the baseline/new .config pair.

    Diffs .config.baseline against .config.new in srctree: added or
    changed lines are written to outfile; an empty diff removes any
    stale fragment file instead.

    Returns True if both config files existed (whether or not a fragment
    was written), False otherwise. Raises bb.process.ExecutionError if
    diff fails outright (exit status > 1).
    """
    import bb.process
    # Only update config fragment if both config files exist
    orig_config = os.path.join(srctree, '.config.baseline')
    new_config = os.path.join(srctree, '.config.new')
    if os.path.exists(orig_config) and os.path.exists(new_config):
        cmd = ['diff', '--new-line-format=%L', '--old-line-format=',
               '--unchanged-line-format=', orig_config, new_config]
        # subprocess.run waits and cleans up the pipes for us (the previous
        # bare Popen/communicate pair leaked the pipe objects on error paths)
        result = subprocess.run(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        if result.returncode == 1:
            # diff exits 1 when the files differ: write the fragment
            logger.info("Updating config fragment %s" % outfile)
            with open(outfile, 'wb') as fobj:
                fobj.write(result.stdout)
        elif result.returncode == 0:
            logger.info("Would remove config fragment %s" % outfile)
            if os.path.exists(outfile):
                # Remove fragment file in case of empty diff
                logger.info("Removing config fragment %s" % outfile)
                os.unlink(outfile)
        else:
            # Exit status > 1 means diff itself failed
            raise bb.process.ExecutionError(cmd, result.returncode,
                                            result.stdout, result.stderr)
        return True
    return False
1407
1408
def _export_local_files(srctree, rd, destdir, srctreebase):
    """Copy local files from srctree to given location.
    Returns three-tuple of dicts:
    1. updated - files that already exist in SRCURI
    2. added - new files files that don't exist in SRCURI
    3 removed - files that exist in SRCURI but not in exported files
    In each dict the key is the 'basepath' of the URI and value is:
    - for updated and added dicts, a dict with 1 optional key:
        - 'path': the absolute path to the existing file in recipe space (if any)
    - for removed dict, the absolute path to the existing file in recipe space
    """
    import oe.recipeutils
    import bb.data
    import bb.process

    # Find out local files (SRC_URI files that exist in the "recipe space").
    # Local files that reside in srctree are not included in patch generation.
    # Instead they are directly copied over the original source files (in
    # recipe space).
    existing_files = oe.recipeutils.get_recipe_local_files(rd)

    new_set = None
    updated = OrderedDict()
    added = OrderedDict()
    removed = OrderedDict()

    # Get current branch and return early with empty lists
    # if on one of the override branches
    # (local files are provided only for the main branch and processing
    # them against lists from recipe overrides will result in mismatches
    # and broken modifications to recipes).
    stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
                               cwd=srctree)
    branchname = stdout.rstrip()
    if branchname.startswith(override_branch_prefix):
        return (updated, added, removed)

    # Only files git reports as modified are considered for export
    files = _git_modified(srctree)
    #if not files:
    #    files = _ls_tree(srctree)
    for f in files:
        fullfile = os.path.join(srctree, f)
        if os.path.exists(os.path.join(fullfile, ".git")):
            # submodules handled elsewhere
            continue
        if f not in existing_files:
            # Not referenced by SRC_URI yet -> new file; copy into destdir
            added[f] = {}
            if os.path.isdir(os.path.join(srctree, f)):
                shutil.copytree(fullfile, os.path.join(destdir, f))
            else:
                shutil.copy2(fullfile, os.path.join(destdir, f))
        elif not os.path.exists(fullfile):
            # Referenced by SRC_URI but deleted from the source tree
            removed[f] = existing_files[f]
        elif f in existing_files:
            # Referenced and still present -> candidate for update
            updated[f] = {'path' : existing_files[f]}
            if os.path.isdir(os.path.join(srctree, f)):
                shutil.copytree(fullfile, os.path.join(destdir, f))
            else:
                shutil.copy2(fullfile, os.path.join(destdir, f))

    # Special handling for kernel config: regenerate the defconfig
    # fragment from the .config.baseline/.config.new pair
    if bb.data.inherits_class('kernel-yocto', rd):
        fragment_fn = 'devtool-fragment.cfg'
        fragment_path = os.path.join(destdir, fragment_fn)
        if _create_kconfig_diff(srctree, rd, fragment_path):
            if os.path.exists(fragment_path):
                if fragment_fn in removed:
                    del removed[fragment_fn]
                if fragment_fn not in updated and fragment_fn not in added:
                    added[fragment_fn] = {}
            else:
                # Empty diff: the fragment file was deleted, so an existing
                # recipe-space fragment should be treated as removed
                if fragment_fn in updated:
                    removed[fragment_fn] = updated[fragment_fn]
                    del updated[fragment_fn]

    # Special handling for cml1, ccmake, etc bbclasses that generated
    # configuration fragment files that are consumed as source files
    for frag_class, frag_name in [("cml1", "fragment.cfg"), ("ccmake", "site-file.cmake")]:
        if bb.data.inherits_class(frag_class, rd):
            srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name)
            if os.path.exists(srcpath):
                if frag_name in removed:
                    del removed[frag_name]
                if frag_name not in updated:
                    added[frag_name] = {}
                # copy fragment into destdir
                shutil.copy2(srcpath, destdir)

    return (updated, added, removed)
1498
1499
1500def _determine_files_dir(rd):
1501 """Determine the appropriate files directory for a recipe"""
1502 recipedir = rd.getVar('FILE_DIRNAME')
1503 for entry in rd.getVar('FILESPATH').split(':'):
1504 relpth = os.path.relpath(entry, recipedir)
1505 if not os.sep in relpth:
1506 # One (or zero) levels below only, so we don't put anything in machine-specific directories
1507 if os.path.isdir(entry):
1508 return entry
1509 return os.path.join(recipedir, rd.getVar('BPN'))
1510
1511
def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None):
    """Implement the 'srcrev' mode of update-recipe.

    Sets SRCREV in the recipe (or a bbappend under appendlayerdir, if
    given) to the current HEAD of srctree, and synchronizes any local
    (file://) source files. If dry_run_outdir is set, all output is
    redirected there instead of modifying anything.

    Returns a (updated, appendfile, remove_files) tuple.
    """
    import bb.process
    import oe.recipeutils

    dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''

    recipefile = rd.getVar('FILE')
    recipedir = os.path.basename(recipefile)
    logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix))

    # Get original SRCREV
    old_srcrev = rd.getVar('SRCREV') or ''
    if old_srcrev == "INVALID":
        raise DevtoolError('Update mode srcrev is only valid for recipe fetched from an SCM repository')
    # Map subtree name -> rev, as expected by _export_patches ('.' = top level)
    old_srcrev = {'.': old_srcrev}

    # Get HEAD revision
    try:
        stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
    except bb.process.ExecutionError as err:
        raise DevtoolError('Failed to get HEAD revision in %s: %s' %
                           (srctree, err))
    srcrev = stdout.strip()
    # Sanity-check that git returned a full 40-char SHA-1
    if len(srcrev) != 40:
        raise DevtoolError('Invalid hash returned by git: %s' % stdout)

    destpath = None
    remove_files = []
    patchfields = {}
    patchfields['SRCREV'] = srcrev
    orig_src_uri = rd.getVar('SRC_URI', False) or ''
    srcuri = orig_src_uri.split()
    tempdir = tempfile.mkdtemp(prefix='devtool')
    update_srcuri = False
    appendfile = None
    try:
        local_files_dir = tempfile.mkdtemp(dir=tempdir)
        srctreebase = workspace[recipename]['srctreebase']
        upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
        removedentries = {}
        if not no_remove:
            # Find list of existing patches in recipe file
            patches_dir = tempfile.mkdtemp(dir=tempdir)
            upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev,
                                                  patches_dir)
            logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p)))

            # Remove deleted local files and "overlapping" patches:
            # with SRCREV updated, previously-carried patches are now
            # part of the fetched source and must go away
            remove_files = list(del_f.values()) + [value["path"] for value in upd_p.values() if "path" in value] + [value["path"] for value in del_p.values() if "path" in value]
            if remove_files:
                removedentries = _remove_file_entries(srcuri, remove_files)[0]
                update_srcuri = True

        if appendlayerdir:
            # Write the changes into a bbappend in the specified layer
            files = dict((os.path.join(local_files_dir, key), val) for
                         key, val in list(upd_f.items()) + list(new_f.items()))
            removevalues = {}
            if update_srcuri:
                removevalues = {'SRC_URI': removedentries}
                patchfields['SRC_URI'] = '\\\n '.join(srcuri)
            if dry_run_outdir:
                logger.info('Creating bbappend (dry-run)')
            appendfile, destpath = oe.recipeutils.bbappend_recipe(
                rd, appendlayerdir, files, wildcardver=wildcard_version,
                extralines=patchfields, removevalues=removevalues,
                redirect_output=dry_run_outdir)
        else:
            # Modify the recipe in place, moving/copying local files next to it
            files_dir = _determine_files_dir(rd)
            for basepath, param in upd_f.items():
                path = param['path']
                logger.info('Updating file %s%s' % (basepath, dry_run_suffix))
                if os.path.isabs(basepath):
                    # Original file (probably with subdir pointing inside source tree)
                    # so we do not want to move it, just copy
                    _copy_file(basepath, path, dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
                else:
                    _move_file(os.path.join(local_files_dir, basepath), path,
                               dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
                update_srcuri= True
            for basepath, param in new_f.items():
                path = param['path']
                logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
                _move_file(os.path.join(local_files_dir, basepath),
                           os.path.join(files_dir, basepath),
                           dry_run_outdir=dry_run_outdir,
                           base_outdir=recipedir)
                srcuri.append('file://%s' % basepath)
                update_srcuri = True
            if update_srcuri:
                patchfields['SRC_URI'] = ' '.join(srcuri)
            ret = oe.recipeutils.patch_recipe(rd, recipefile, patchfields, redirect_output=dry_run_outdir)
    finally:
        shutil.rmtree(tempdir)
    if not 'git://' in orig_src_uri:
        logger.info('You will need to update SRC_URI within the recipe to '
                    'point to a git repository where you have pushed your '
                    'changes')

    _remove_source_files(appendlayerdir, remove_files, destpath, no_report_remove, dry_run=dry_run_outdir)
    return True, appendfile, remove_files
1613
def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False):
    """Implement the 'patch' mode of update-recipe.

    Regenerates patches from the commits in srctree and updates the
    recipe's SRC_URI accordingly - either in place, or via a bbappend in
    appendlayerdir if that is given. Local (file://) source files are
    synchronized at the same time. If dry_run_outdir is set, all output
    is redirected there instead of modifying anything.

    Returns a (updated, appendfile, remove_files) tuple.
    """
    import oe.recipeutils

    recipefile = rd.getVar('FILE')
    recipedir = os.path.dirname(recipefile)
    append = workspace[recipename]['bbappend']
    if not os.path.exists(append):
        raise DevtoolError('unable to find workspace bbappend for recipe %s' %
                           recipename)
    srctreebase = workspace[recipename]['srctreebase']
    # If the recipe's sources live in a subdirectory of the tree we need a
    # patchdir= parameter on each file:// entry
    relpatchdir = os.path.relpath(srctreebase, srctree)
    if relpatchdir == '.':
        patchdir_params = {}
    else:
        patchdir_params = {'patchdir': relpatchdir}

    def srcuri_entry(basepath, patchdir_params):
        # Build a file:// SRC_URI entry, appending any patchdir parameters
        if patchdir_params:
            paramstr = ';' + ';'.join('%s=%s' % (k,v) for k,v in patchdir_params.items())
        else:
            paramstr = ''
        return 'file://%s%s' % (basepath, paramstr)

    # Revisions recorded in the workspace bbappend determine where patch
    # generation starts and which patches actually changed
    initial_revs, update_revs, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh)
    if not initial_revs:
        raise DevtoolError('Unable to find initial revision - please specify '
                           'it with --initial-rev')

    appendfile = None
    dl_dir = rd.getVar('DL_DIR')
    if not dl_dir.endswith('/'):
        dl_dir += '/'

    dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''

    tempdir = tempfile.mkdtemp(prefix='devtool')
    try:
        local_files_dir = tempfile.mkdtemp(dir=tempdir)
        upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)

        # Get updated patches from source tree
        patches_dir = tempfile.mkdtemp(dir=tempdir)
        upd_p, new_p, _ = _export_patches(srctree, rd, update_revs,
                                          patches_dir, changed_revs)
        # Get all patches from source tree and check if any should be removed
        all_patches_dir = tempfile.mkdtemp(dir=tempdir)
        _, _, del_p = _export_patches(srctree, rd, initial_revs,
                                      all_patches_dir)
        logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p)))
        if filter_patches:
            # Restrict processing to the patches recorded for this branch
            new_p = OrderedDict()
            upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches)
            del_p = OrderedDict((k,v) for k,v in del_p.items() if k in filter_patches)
        remove_files = []
        if not no_remove:
            # Remove deleted local files and patches
            remove_files = list(del_f.values()) + list(del_p.values())
        updatefiles = False
        updaterecipe = False
        destpath = None
        srcuri = (rd.getVar('SRC_URI', False) or '').split()

        if appendlayerdir:
            # Write the changes into a bbappend in the specified layer
            files = OrderedDict((os.path.join(local_files_dir, key), val) for
                                key, val in list(upd_f.items()) + list(new_f.items()))
            files.update(OrderedDict((os.path.join(patches_dir, key), val) for
                                     key, val in list(upd_p.items()) + list(new_p.items())))

            # Per-file SRC_URI parameters, combining the tree-level patchdir
            # with any per-patch subdirectory
            params = []
            for file, param in files.items():
                patchdir_param = dict(patchdir_params)
                patchdir = param.get('patchdir', ".")
                if patchdir != "." :
                    if patchdir_param:
                        patchdir_param['patchdir'] += patchdir
                    else:
                        patchdir_param['patchdir'] = patchdir
                params.append(patchdir_param)

            if files or remove_files:
                removevalues = None
                if remove_files:
                    removedentries, remaining = _remove_file_entries(
                        srcuri, remove_files)
                    if removedentries or remaining:
                        remaining = [srcuri_entry(os.path.basename(item), patchdir_params) for
                                     item in remaining]
                        removevalues = {'SRC_URI': removedentries + remaining}
                appendfile, destpath = oe.recipeutils.bbappend_recipe(
                    rd, appendlayerdir, files,
                    wildcardver=wildcard_version,
                    removevalues=removevalues,
                    redirect_output=dry_run_outdir,
                    params=params)
            else:
                logger.info('No patches or local source files needed updating')
        else:
            # Update existing files
            files_dir = _determine_files_dir(rd)
            for basepath, param in upd_f.items():
                path = param['path']
                logger.info('Updating file %s' % basepath)
                if os.path.isabs(basepath):
                    # Original file (probably with subdir pointing inside source tree)
                    # so we do not want to move it, just copy
                    _copy_file(basepath, path,
                               dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
                else:
                    _move_file(os.path.join(local_files_dir, basepath), path,
                               dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
                updatefiles = True
            for basepath, param in upd_p.items():
                path = param['path']
                patchdir = param.get('patchdir', ".")
                patchdir_param = {}
                if patchdir != "." :
                    patchdir_param = dict(patchdir_params)
                    if patchdir_param:
                        patchdir_param['patchdir'] += patchdir
                    else:
                        patchdir_param['patchdir'] = patchdir
                patchfn = os.path.join(patches_dir, patchdir, basepath)
                if os.path.dirname(path) + '/' == dl_dir:
                    # This is a a downloaded patch file - we now need to
                    # replace the entry in SRC_URI with our local version
                    logger.info('Replacing remote patch %s with updated local version' % basepath)
                    path = os.path.join(files_dir, basepath)
                    _replace_srcuri_entry(srcuri, basepath, srcuri_entry(basepath, patchdir_param))
                    updaterecipe = True
                else:
                    logger.info('Updating patch %s%s' % (basepath, dry_run_suffix))
                _move_file(patchfn, path,
                           dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
                updatefiles = True
            # Add any new files
            for basepath, param in new_f.items():
                logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
                _move_file(os.path.join(local_files_dir, basepath),
                           os.path.join(files_dir, basepath),
                           dry_run_outdir=dry_run_outdir,
                           base_outdir=recipedir)
                srcuri.append(srcuri_entry(basepath, patchdir_params))
                updaterecipe = True
            for basepath, param in new_p.items():
                patchdir = param.get('patchdir', ".")
                logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix))
                _move_file(os.path.join(patches_dir, patchdir, basepath),
                           os.path.join(files_dir, basepath),
                           dry_run_outdir=dry_run_outdir,
                           base_outdir=recipedir)
                params = dict(patchdir_params)
                if patchdir != "." :
                    if params:
                        params['patchdir'] += patchdir
                    else:
                        params['patchdir'] = patchdir

                srcuri.append(srcuri_entry(basepath, params))
                updaterecipe = True
            # Update recipe, if needed
            if _remove_file_entries(srcuri, remove_files)[0]:
                updaterecipe = True
            if updaterecipe:
                if not dry_run_outdir:
                    logger.info('Updating recipe %s' % os.path.basename(recipefile))
                ret = oe.recipeutils.patch_recipe(rd, recipefile,
                                                  {'SRC_URI': ' '.join(srcuri)},
                                                  redirect_output=dry_run_outdir)
            elif not updatefiles:
                # Neither patches nor recipe were updated
                logger.info('No patches or files need updating')
                return False, None, []
    finally:
        shutil.rmtree(tempdir)

    _remove_source_files(appendlayerdir, remove_files, destpath, no_report_remove, dry_run=dry_run_outdir)
    return True, appendfile, remove_files
1792
def _guess_recipe_update_mode(srctree, rdata):
    """Guess the recipe update mode to use.

    If the recipe fetches from git and the current HEAD of the source
    tree is already present on the upstream remote branch, bumping SRCREV
    is enough ('srcrev'); otherwise patches must be generated ('patch').
    """
    import bb.process
    src_uri = (rdata.getVar('SRC_URI') or '').split()
    git_uris = [entry for entry in src_uri if entry.startswith('git://')]
    if not git_uris:
        # Not fetched from git, so only patch mode is applicable
        return 'patch'

    # Just use the first URI for now
    params = bb.fetch.decodeurl(git_uris[0])[5]
    upstr_branch = params['branch'] if 'branch' in params else 'master'

    # Does any remote branch already contain the current HEAD?
    stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
    head_rev = stdout.rstrip()
    stdout, _ = bb.process.run('git branch -r --contains %s' % head_rev,
                               cwd=srctree)
    remote_branches = set(line.strip() for line in stdout.splitlines())
    if 'origin/' + upstr_branch in remote_branches:
        return 'srcrev'
    return 'patch'
1815
def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False):
    """Update the recipe (or a bbappend) for a recipe in the workspace.

    mode is 'srcrev', 'patch' or 'auto' (guess from the source tree
    state). Unless no_overrides is set, any devtool override branches
    (named with override_branch_prefix) are processed in addition to the
    main branch, each with the corresponding OVERRIDES applied.

    Returns a (anyupdated, appendfile, allremoved) tuple aggregated over
    all processed branches.
    """
    import bb.data
    import bb.process
    srctree = workspace[recipename]['srctree']
    if mode == 'auto':
        mode = _guess_recipe_update_mode(srctree, rd)

    # Discover the checked-out branch and any devtool override branches
    override_branches = []
    mainbranch = None
    startbranch = None
    if not no_overrides:
        stdout, _ = bb.process.run('git branch', cwd=srctree)
        other_branches = []
        for line in stdout.splitlines():
            # 'git branch' output: two-character prefix ('* ' marks current)
            branchname = line[2:]
            if line.startswith('* '):
                if 'HEAD' in line:
                    raise DevtoolError('Detached HEAD - please check out a branch, e.g., "devtool"')
                startbranch = branchname
            if branchname.startswith(override_branch_prefix):
                override_branches.append(branchname)
            else:
                other_branches.append(branchname)

        if override_branches:
            logger.debug('_update_recipe: override branches: %s' % override_branches)
            logger.debug('_update_recipe: other branches: %s' % other_branches)
            if startbranch.startswith(override_branch_prefix):
                # Currently on an override branch: the main branch is only
                # unambiguous when exactly one non-override branch exists.
                # BUGFIX: a one-element list must be indexed with [0] -
                # [1] always raised IndexError here.
                if len(other_branches) == 1:
                    mainbranch = other_branches[0]
                else:
                    raise DevtoolError('Unable to determine main branch - please check out the main branch in source tree first')
            else:
                mainbranch = startbranch

    checkedout = None
    anyupdated = False
    appendfile = None
    allremoved = []
    if override_branches:
        logger.info('Handling main branch (%s)...' % mainbranch)
        if startbranch != mainbranch:
            bb.process.run('git checkout %s' % mainbranch, cwd=srctree)
        checkedout = mainbranch
    try:
        # Process the main branch first, then each override branch with
        # the matching OVERRIDES value appended
        branchlist = [mainbranch] + override_branches
        for branch in branchlist:
            crd = bb.data.createCopy(rd)
            if branch != mainbranch:
                logger.info('Handling branch %s...' % branch)
                override = branch[len(override_branch_prefix):]
                crd.appendVar('OVERRIDES', ':%s' % override)
                bb.process.run('git checkout %s' % branch, cwd=srctree)
                checkedout = branch

            if mode == 'srcrev':
                updated, appendf, removed = _update_recipe_srcrev(recipename, workspace, srctree, crd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir)
            elif mode == 'patch':
                updated, appendf, removed = _update_recipe_patch(recipename, workspace, srctree, crd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir, force_patch_refresh)
            else:
                raise DevtoolError('update_recipe: invalid mode %s' % mode)
            if updated:
                anyupdated = True
                if appendf:
                    appendfile = appendf
                allremoved.extend(removed)
    finally:
        # Always restore the branch the user started on
        if startbranch and checkedout != startbranch:
            bb.process.run('git checkout %s' % startbranch, cwd=srctree)

    return anyupdated, appendfile, allremoved
1887
def update_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'update-recipe' subcommand"""
    check_workspace_recipe(workspace, args.recipename)

    # Validate the bbappend destination layer up front
    if args.append:
        if not os.path.exists(args.append):
            raise DevtoolError('bbappend destination layer directory "%s" does not exist' % args.append)
        layerconf = os.path.join(args.append, 'conf', 'layer.conf')
        if not os.path.exists(layerconf):
            raise DevtoolError('conf/layer.conf not found in bbappend destination layer "%s"' % args.append)

    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        # For --dry-run, redirect all output into a throwaway directory
        dry_run_output = None
        dry_run_outdir = None
        if args.dry_run:
            dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
            dry_run_outdir = dry_run_output.name

        updated, _, _ = _update_recipe(args.recipename, workspace, rd,
                                       args.mode, args.append,
                                       args.wildcard_version,
                                       args.no_remove, args.initial_rev,
                                       dry_run_outdir=dry_run_outdir,
                                       no_overrides=args.no_overrides,
                                       force_patch_refresh=args.force_patch_refresh)

        if updated:
            rf = rd.getVar('FILE')
            if rf.startswith(config.workspace_path):
                logger.warning('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % rf)
    finally:
        tinfoil.shutdown()

    return 0
1922
1923
def status(args, config, basepath, workspace):
    """Entry point for the devtool 'status' subcommand"""
    # Empty workspace: nothing to list, just point the user at next steps.
    if not workspace:
        logger.info('No recipes currently in your workspace - you can use "devtool modify" to work on an existing recipe or "devtool add" to add a new one')
        return 0
    # One line per recipe, sorted by name; the recipe file path is shown
    # in parentheses only when the recipe file itself is in the workspace.
    for recipe, details in sorted(workspace.items()):
        recipefile = details['recipefile']
        recipestr = ' (%s)' % recipefile if recipefile else ''
        print("%s: %s%s" % (recipe, details['srctree'], recipestr))
    return 0
1937
1938
def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
    """Reset one or more recipes.

    For each recipe: optionally runs 'bitbake -c clean', removes the
    workspace bbappend, preserves any workspace-created recipe files and
    appends under the workspace 'attic' directory, deals with the source
    tree (remove / preserve / leave as-is), and strips the recipe's
    PREFERRED_PROVIDER line from the workspace layer.conf.

    recipes: list of recipe names (pn) to reset
    no_clean: skip the 'bitbake -c clean' step
    remove_work: delete the source tree instead of preserving it
    """
    import bb.process
    import oe.path

    def clean_preferred_provider(pn, layerconf_path):
        """Remove any PREFERRED_PROVIDER line for pn from the layer's conf/layer.conf."""
        import re
        layerconf_file = os.path.join(layerconf_path, 'conf', 'layer.conf')
        new_layerconf_file = os.path.join(layerconf_path, 'conf', '.layer.conf')
        pprovider_found = False
        # Copy every line except matching PREFERRED_PROVIDER assignments into
        # a hidden temp file, then swap it in only if something was dropped.
        with open(layerconf_file, 'r') as f:
            lines = f.readlines()
            with open(new_layerconf_file, 'a') as nf:
                for line in lines:
                    pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$'
                    if not re.match(pprovider_exp, line):
                        nf.write(line)
                    else:
                        pprovider_found = True
        if pprovider_found:
            shutil.move(new_layerconf_file, layerconf_file)
        else:
            os.remove(new_layerconf_file)

    if recipes and not no_clean:
        if len(recipes) == 1:
            logger.info('Cleaning sysroot for recipe %s...' % recipes[0])
        else:
            logger.info('Cleaning sysroot for recipes %s...' % ', '.join(recipes))
        # If the recipe file itself was created in the workspace, and
        # it uses BBCLASSEXTEND, then we need to also clean the other
        # variants
        targets = []
        for recipe in recipes:
            targets.append(recipe)
            recipefile = workspace[recipe]['recipefile']
            if recipefile and os.path.exists(recipefile):
                targets.extend(get_bbclassextend_targets(recipefile, recipe))
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake -c clean %s' % ' '.join(targets))
        except bb.process.ExecutionError as e:
            raise DevtoolError('Command \'%s\' failed, output:\n%s\nIf you '
                               'wish, you may specify -n/--no-clean to '
                               'skip running this command when resetting' %
                               (e.command, e.stdout))

    for pn in recipes:
        _check_preserve(config, pn)

        appendfile = workspace[pn]['bbappend']
        if os.path.exists(appendfile):
            # This shouldn't happen, but is possible if devtool errored out prior to
            # writing the md5 file. We need to delete this here or the recipe won't
            # actually be reset
            os.remove(appendfile)

        preservepath = os.path.join(config.workspace_path, 'attic', pn, pn)
        def preservedir(origdir):
            # Recursively move every file under origdir into preservepath
            # (flattened), then remove the now-empty directory tree.
            if os.path.exists(origdir):
                for root, dirs, files in os.walk(origdir):
                    for fn in files:
                        logger.warning('Preserving %s in %s' % (fn, preservepath))
                        _move_file(os.path.join(origdir, fn),
                                   os.path.join(preservepath, fn))
                    for dn in dirs:
                        preservedir(os.path.join(root, dn))
                os.rmdir(origdir)

        recipefile = workspace[pn]['recipefile']
        if recipefile and oe.path.is_path_parent(config.workspace_path, recipefile):
            # This should always be true if recipefile is set, but just in case
            preservedir(os.path.dirname(recipefile))
        # We don't automatically create this dir next to appends, but the user can
        preservedir(os.path.join(config.workspace_path, 'appends', pn))

        srctreebase = workspace[pn]['srctreebase']
        if os.path.isdir(srctreebase):
            if os.listdir(srctreebase):
                if remove_work:
                    logger.info('-r argument used on %s, removing source tree.'
                                ' You will lose any unsaved work' %pn)
                    shutil.rmtree(srctreebase)
                else:
                    # We don't want to risk wiping out any work in progress
                    if srctreebase.startswith(os.path.join(config.workspace_path, 'sources')):
                        # Default source location: move it to a timestamped
                        # attic directory rather than deleting it.
                        from datetime import datetime
                        preservesrc = os.path.join(config.workspace_path, 'attic', 'sources', "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S")))
                        logger.info('Preserving source tree in %s\nIf you no '
                                    'longer need it then please delete it manually.\n'
                                    'It is also possible to reuse it via devtool source tree argument.'
                                    % preservesrc)
                        bb.utils.mkdirhier(os.path.dirname(preservesrc))
                        shutil.move(srctreebase, preservesrc)
                    else:
                        # User-supplied source location: never touch it.
                        logger.info('Leaving source tree %s as-is; if you no '
                                    'longer need it then please delete it manually'
                                    % srctreebase)
            else:
                # This is unlikely, but if it's empty we can just remove it
                os.rmdir(srctreebase)

        clean_preferred_provider(pn, config.workspace_path)
2042
def reset(args, config, basepath, workspace):
    """Entry point for the devtool 'reset' subcommand

    Validates the requested recipe names (mutually exclusive with
    -a/--all), then delegates to _reset() to do the actual work.
    Returns 0 on success; raises DevtoolError on invalid usage.
    """
    if args.recipename:
        if args.all:
            raise DevtoolError("Recipe cannot be specified if -a/--all is used")
        for recipe in args.recipename:
            check_workspace_recipe(workspace, recipe, checksrc=False)
    elif not args.all:
        raise DevtoolError("Recipe must be specified, or specify -a/--all to "
                           "reset all recipes")

    # Fix: 'recipes' was previously initialised to the empty string "" and
    # then rebound to a list - a dead, type-confusing initialisation.
    # Bind it directly to the list we actually use.
    if args.all:
        recipes = list(workspace.keys())
    else:
        recipes = args.recipename

    _reset(recipes, args.no_clean, args.remove_work, config, basepath, workspace)

    return 0
2065
2066
2067def _get_layer(layername, d):
2068 """Determine the base layer path for the specified layer name/path"""
2069 layerdirs = d.getVar('BBLAYERS').split()
2070 layers = {} # {basename: layer_paths}
2071 for p in layerdirs:
2072 bn = os.path.basename(p)
2073 if bn not in layers:
2074 layers[bn] = [p]
2075 else:
2076 layers[bn].append(p)
2077 # Provide some shortcuts
2078 if layername.lower() in ['oe-core', 'openembedded-core']:
2079 layername = 'meta'
2080 layer_paths = layers.get(layername, None)
2081 if not layer_paths:
2082 return os.path.abspath(layername)
2083 elif len(layer_paths) == 1:
2084 return os.path.abspath(layer_paths[0])
2085 else:
2086 # multiple layers having the same base name
2087 logger.warning("Multiple layers have the same base name '%s', use the first one '%s'." % (layername, layer_paths[0]))
2088 logger.warning("Consider using path instead of base name to specify layer:\n\t\t%s" % '\n\t\t'.join(layer_paths))
2089 return os.path.abspath(layer_paths[0])
2090
2091
def finish(args, config, basepath, workspace):
    """Entry point for the devtool 'finish' subcommand

    Pushes the committed changes for a workspace recipe out to the
    destination layer (updating the recipe, moving it, or writing a
    bbappend as appropriate), shows a diff in dry-run mode, and finally
    resets the recipe out of the workspace.
    """
    import bb
    import oe.recipeutils

    check_workspace_recipe(workspace, args.recipename)

    dry_run_suffix = ' (dry-run)' if args.dry_run else ''

    # Grab the equivalent of COREBASE without having to initialise tinfoil
    corebasedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))

    srctree = workspace[args.recipename]['srctree']
    check_git_repo_op(srctree, [corebasedir])
    dirty = check_git_repo_dirty(srctree)
    if dirty:
        if args.force:
            logger.warning('Source tree is not clean, continuing as requested by -f/--force')
        else:
            raise DevtoolError('Source tree is not clean:\n\n%s\nEnsure you have committed your changes or use -f/--force if you are sure there\'s nothing that needs to be committed' % dirty)

    no_clean = args.no_clean
    remove_work=args.remove_work
    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        destlayerdir = _get_layer(args.destination, tinfoil.config_data)
        recipefile = rd.getVar('FILE')
        recipedir = os.path.dirname(recipefile)
        origlayerdir = oe.recipeutils.find_layerdir(recipefile)

        if not os.path.isdir(destlayerdir):
            raise DevtoolError('Unable to find layer or directory matching "%s"' % args.destination)

        if os.path.abspath(destlayerdir) == config.workspace_path:
            raise DevtoolError('"%s" specifies the workspace layer - that is not a valid destination' % args.destination)

        # If it's an upgrade, grab the original path
        # (devtool upgrade records these as comments in the bbappend)
        origpath = None
        origfilelist = None
        append = workspace[args.recipename]['bbappend']
        with open(append, 'r') as f:
            for line in f:
                if line.startswith('# original_path:'):
                    origpath = line.split(':')[1].strip()
                elif line.startswith('# original_files:'):
                    origfilelist = line.split(':')[1].split()

        destlayerbasedir = oe.recipeutils.find_layerdir(destlayerdir)

        if origlayerdir == config.workspace_path:
            # Recipe file itself is in workspace, update it there first
            appendlayerdir = None
            origrelpath = None
            if origpath:
                origlayerpath = oe.recipeutils.find_layerdir(origpath)
                if origlayerpath:
                    origrelpath = os.path.relpath(origpath, origlayerpath)
            destpath = oe.recipeutils.get_bbfile_path(rd, destlayerdir, origrelpath)
            if not destpath:
                raise DevtoolError("Unable to determine destination layer path - check that %s specifies an actual layer and %s/conf/layer.conf specifies BBFILES. You may also need to specify a more complete path." % (args.destination, destlayerdir))
            # Warn if the layer isn't in bblayers.conf (the code to create a bbappend will do this in other cases)
            layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()]
            if not os.path.abspath(destlayerbasedir) in layerdirs:
                bb.warn('Specified destination layer is not currently enabled in bblayers.conf, so the %s recipe will now be unavailable in your current configuration until you add the layer there' % args.recipename)

        elif destlayerdir == origlayerdir:
            # Same layer, update the original recipe
            appendlayerdir = None
            destpath = None
        else:
            # Create/update a bbappend in the specified layer
            appendlayerdir = destlayerdir
            destpath = None

        # Actually update the recipe / bbappend
        removing_original = (origpath and origfilelist and oe.recipeutils.find_layerdir(origpath) == destlayerbasedir)
        dry_run_output = None
        dry_run_outdir = None
        if args.dry_run:
            dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
            dry_run_outdir = dry_run_output.name
        updated, appendfile, removed = _update_recipe(args.recipename, workspace, rd, args.mode, appendlayerdir, wildcard_version=True, no_remove=False, no_report_remove=removing_original, initial_rev=args.initial_rev, dry_run_outdir=dry_run_outdir, no_overrides=args.no_overrides, force_patch_refresh=args.force_patch_refresh)
        removed = [os.path.relpath(pth, recipedir) for pth in removed]

        # Remove any old files in the case of an upgrade
        if removing_original:
            for fn in origfilelist:
                fnp = os.path.join(origpath, fn)
                if fn in removed or not os.path.exists(os.path.join(recipedir, fn)):
                    logger.info('Removing file %s%s' % (fnp, dry_run_suffix))
                    if not args.dry_run:
                        try:
                            os.remove(fnp)
                        except FileNotFoundError:
                            pass

        if origlayerdir == config.workspace_path and destpath:
            # Recipe file itself is in the workspace - need to move it and any
            # associated files to the specified layer
            no_clean = True
            logger.info('Moving recipe file to %s%s' % (destpath, dry_run_suffix))
            for root, _, files in os.walk(recipedir):
                for fn in files:
                    srcpath = os.path.join(root, fn)
                    relpth = os.path.relpath(os.path.dirname(srcpath), recipedir)
                    destdir = os.path.abspath(os.path.join(destpath, relpth))
                    destfp = os.path.join(destdir, fn)
                    _move_file(srcpath, destfp, dry_run_outdir=dry_run_outdir, base_outdir=destpath)

        if dry_run_outdir:
            # Dry-run mode: diff every file written to the temporary output
            # directory against what would have been overwritten, and show
            # the combined unified diff instead of changing anything.
            import difflib
            comparelist = []
            for root, _, files in os.walk(dry_run_outdir):
                for fn in files:
                    outf = os.path.join(root, fn)
                    relf = os.path.relpath(outf, dry_run_outdir)
                    logger.debug('dry-run: output file %s' % relf)
                    if fn.endswith('.bb'):
                        if origfilelist and origpath and destpath:
                            # Need to match this up with the pre-upgrade recipe file
                            for origf in origfilelist:
                                if origf.endswith('.bb'):
                                    comparelist.append((os.path.abspath(os.path.join(origpath, origf)),
                                                        outf,
                                                        os.path.abspath(os.path.join(destpath, relf))))
                                    break
                        else:
                            # Compare to the existing recipe
                            comparelist.append((recipefile, outf, recipefile))
                    elif fn.endswith('.bbappend'):
                        if appendfile:
                            if os.path.exists(appendfile):
                                comparelist.append((appendfile, outf, appendfile))
                            else:
                                comparelist.append((None, outf, appendfile))
                    else:
                        if destpath:
                            recipedest = destpath
                        elif appendfile:
                            recipedest = os.path.dirname(appendfile)
                        else:
                            recipedest = os.path.dirname(recipefile)
                        destfp = os.path.join(recipedest, relf)
                        if os.path.exists(destfp):
                            comparelist.append((destfp, outf, destfp))
            output = ''
            for oldfile, newfile, newfileshow in comparelist:
                if oldfile:
                    with open(oldfile, 'r') as f:
                        oldlines = f.readlines()
                else:
                    # New file - diff against /dev/null
                    oldfile = '/dev/null'
                    oldlines = []
                with open(newfile, 'r') as f:
                    newlines = f.readlines()
                if not newfileshow:
                    newfileshow = newfile
                diff = difflib.unified_diff(oldlines, newlines, oldfile, newfileshow)
                difflines = list(diff)
                if difflines:
                    output += ''.join(difflines)
            if output:
                logger.info('Diff of changed files:\n%s' % output)
    finally:
        tinfoil.shutdown()

    # Everything else has succeeded, we can now reset
    if args.dry_run:
        logger.info('Resetting recipe (dry-run)')
    else:
        _reset([args.recipename], no_clean=no_clean, remove_work=remove_work, config=config, basepath=basepath, workspace=workspace)

    return 0
2269
2270
def get_default_srctree(config, recipename=''):
    """Get the default srctree path"""
    # The parent directory is configurable ([General] default_source_parent_dir);
    # it falls back to the workspace path itself.
    parent = config.get('General', 'default_source_parent_dir', config.workspace_path)
    components = [parent, 'sources']
    if recipename:
        components.append(recipename)
    return os.path.join(*components)
2278
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin

    Wires up the 'add', 'modify', 'extract', 'sync', 'rename',
    'update-recipe', 'status', 'reset' and 'finish' subcommands onto the
    shared devtool argument parser.
    """

    defsrctree = get_default_srctree(context.config)
    # -- add: create a new recipe in the workspace --------------------------
    parser_add = subparsers.add_parser('add', help='Add a new recipe',
                                       description='Adds a new recipe to the workspace to build a specified source tree. Can optionally fetch a remote URI and unpack it to create the source tree.',
                                       group='starting', order=100)
    parser_add.add_argument('recipename', nargs='?', help='Name for new recipe to add (just name - no version, path or extension). If not specified, will attempt to auto-detect it.')
    parser_add.add_argument('srctree', nargs='?', help='Path to external source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
    parser_add.add_argument('fetchuri', nargs='?', help='Fetch the specified URI and extract it to create the source tree')
    group = parser_add.add_mutually_exclusive_group()
    group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI')
    parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true")
    parser_add.add_argument('--no-pypi', help='Do not inherit pypi class', action="store_true")
    parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
    parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
    group = parser_add.add_mutually_exclusive_group()
    group.add_argument('--srcrev', '-S', help='Source revision to fetch if fetching from an SCM such as git (default latest)')
    group.add_argument('--autorev', '-a', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true")
    parser_add.add_argument('--srcbranch', '-B', help='Branch in source repository if fetching from an SCM such as git (default master)')
    parser_add.add_argument('--binary', '-b', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure). Useful with binary packages e.g. RPMs.', action='store_true')
    parser_add.add_argument('--also-native', help='Also add native variant (i.e. support building recipe for the build host as well as the target machine)', action='store_true')
    parser_add.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR')
    parser_add.add_argument('--mirrors', help='Enable PREMIRRORS and MIRRORS for source tree fetching (disable by default).', action="store_true")
    parser_add.add_argument('--provides', '-p', help='Specify an alias for the item provided by the recipe. E.g. virtual/libgl')
    parser_add.set_defaults(func=add, fixed_setup=context.fixed_setup)

    # -- modify: work on the source of an existing recipe -------------------
    parser_modify = subparsers.add_parser('modify', help='Modify the source for an existing recipe',
                                          description='Sets up the build environment to modify the source for an existing recipe. The default behaviour is to extract the source being fetched by the recipe into a git tree so you can work on it; alternatively if you already have your own pre-prepared source tree you can specify -n/--no-extract.',
                                          group='starting', order=90)
    parser_modify.add_argument('recipename', help='Name of existing recipe to edit (just name - no version, path or extension)')
    parser_modify.add_argument('srctree', nargs='?', help='Path to external source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
    parser_modify.add_argument('--wildcard', '-w', action="store_true", help='Use wildcard for unversioned bbappend')
    group = parser_modify.add_mutually_exclusive_group()
    group.add_argument('--extract', '-x', action="store_true", help='Extract source for recipe (default)')
    group.add_argument('--no-extract', '-n', action="store_true", help='Do not extract source, expect it to exist')
    group = parser_modify.add_mutually_exclusive_group()
    group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")')
    parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
    parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true")
    parser_modify.add_argument('--debug-build', action="store_true", help='Add DEBUG_BUILD = "1" to the modified recipe')
    parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup)

    # -- extract / sync: advanced source-tree operations --------------------
    parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe',
                                           description='Extracts the source for an existing recipe',
                                           group='advanced')
    parser_extract.add_argument('recipename', help='Name of recipe to extract the source for')
    parser_extract.add_argument('srctree', help='Path to where to extract the source tree')
    parser_extract.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (default "%(default)s")')
    parser_extract.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
    parser_extract.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser_extract.set_defaults(func=extract, fixed_setup=context.fixed_setup)

    parser_sync = subparsers.add_parser('sync', help='Synchronize the source tree for an existing recipe',
                                        description='Synchronize the previously extracted source tree for an existing recipe',
                                        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
                                        group='advanced')
    parser_sync.add_argument('recipename', help='Name of recipe to sync the source for')
    parser_sync.add_argument('srctree', help='Path to the source tree')
    parser_sync.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout')
    parser_sync.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser_sync.set_defaults(func=sync, fixed_setup=context.fixed_setup)

    # -- rename: rename a workspace-created recipe --------------------------
    parser_rename = subparsers.add_parser('rename', help='Rename a recipe file in the workspace',
                                          description='Renames the recipe file for a recipe in the workspace, changing the name or version part or both, ensuring that all references within the workspace are updated at the same time. Only works when the recipe file itself is in the workspace, e.g. after devtool add. Particularly useful when devtool add did not automatically determine the correct name.',
                                          group='working', order=10)
    parser_rename.add_argument('recipename', help='Current name of recipe to rename')
    parser_rename.add_argument('newname', nargs='?', help='New name for recipe (optional, not needed if you only want to change the version)')
    parser_rename.add_argument('--version', '-V', help='Change the version (NOTE: this does not change the version fetched by the recipe, just the version in the recipe file name)')
    parser_rename.add_argument('--no-srctree', '-s', action='store_true', help='Do not rename the source tree directory (if the default source tree path has been used) - keeping the old name may be desirable if there are internal/other external references to this path')
    parser_rename.set_defaults(func=rename)

    # -- update-recipe: write source changes back to the recipe -------------
    parser_update_recipe = subparsers.add_parser('update-recipe', help='Apply changes from external source tree to recipe',
                                                 description='Applies changes from external source tree to a recipe (updating/adding/removing patches as necessary, or by updating SRCREV). Note that these changes need to have been committed to the git repository in order to be recognised.',
                                                 group='working', order=-90)
    parser_update_recipe.add_argument('recipename', help='Name of recipe to update')
    parser_update_recipe.add_argument('--mode', '-m', choices=['patch', 'srcrev', 'auto'], default='auto', help='Update mode (where %(metavar)s is %(choices)s; default is %(default)s)', metavar='MODE')
    parser_update_recipe.add_argument('--initial-rev', help='Override starting revision for patches')
    parser_update_recipe.add_argument('--append', '-a', help='Write changes to a bbappend in the specified layer instead of the recipe', metavar='LAYERDIR')
    parser_update_recipe.add_argument('--wildcard-version', '-w', help='In conjunction with -a/--append, use a wildcard to make the bbappend apply to any recipe version', action='store_true')
    parser_update_recipe.add_argument('--no-remove', '-n', action="store_true", help='Don\'t remove patches, only add or update')
    parser_update_recipe.add_argument('--no-overrides', '-O', action="store_true", help='Do not handle other override branches (if they exist)')
    parser_update_recipe.add_argument('--dry-run', '-N', action="store_true", help='Dry-run (just report changes instead of writing them)')
    parser_update_recipe.add_argument('--force-patch-refresh', action="store_true", help='Update patches in the layer even if they have not been modified (useful for refreshing patch context)')
    parser_update_recipe.set_defaults(func=update_recipe)

    # -- status: list workspace contents ------------------------------------
    parser_status = subparsers.add_parser('status', help='Show workspace status',
                                          description='Lists recipes currently in your workspace and the paths to their respective external source trees',
                                          group='info', order=100)
    parser_status.set_defaults(func=status)

    # -- reset: drop recipe(s) from the workspace ---------------------------
    parser_reset = subparsers.add_parser('reset', help='Remove a recipe from your workspace',
                                         description='Removes the specified recipe(s) from your workspace (resetting its state back to that defined by the metadata).',
                                         group='working', order=-100)
    parser_reset.add_argument('recipename', nargs='*', help='Recipe to reset')
    parser_reset.add_argument('--all', '-a', action="store_true", help='Reset all recipes (clear workspace)')
    parser_reset.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
    parser_reset.add_argument('--remove-work', '-r', action="store_true", help='Clean the sources directory along with append')
    parser_reset.set_defaults(func=reset)

    # -- finish: push changes to a layer and reset --------------------------
    parser_finish = subparsers.add_parser('finish', help='Finish working on a recipe in your workspace',
                                          description='Pushes any committed changes to the specified recipe to the specified layer and removes it from your workspace. Roughly equivalent to an update-recipe followed by reset, except the update-recipe step will do the "right thing" depending on the recipe and the destination layer specified. Note that your changes must have been committed to the git repository in order to be recognised.',
                                          group='working', order=-100)
    parser_finish.add_argument('recipename', help='Recipe to finish')
    parser_finish.add_argument('destination', help='Layer/path to put recipe into. Can be the name of a layer configured in your bblayers.conf, the path to the base of a layer, or a partial path inside a layer. %(prog)s will attempt to complete the path based on the layer\'s structure.')
    parser_finish.add_argument('--mode', '-m', choices=['patch', 'srcrev', 'auto'], default='auto', help='Update mode (where %(metavar)s is %(choices)s; default is %(default)s)', metavar='MODE')
    parser_finish.add_argument('--initial-rev', help='Override starting revision for patches')
    parser_finish.add_argument('--force', '-f', action="store_true", help='Force continuing even if there are uncommitted changes in the source tree repository')
    parser_finish.add_argument('--remove-work', '-r', action="store_true", help='Clean the sources directory under workspace')
    parser_finish.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
    parser_finish.add_argument('--no-overrides', '-O', action="store_true", help='Do not handle other override branches (if they exist)')
    parser_finish.add_argument('--dry-run', '-N', action="store_true", help='Dry-run (just report changes instead of writing them)')
    parser_finish.add_argument('--force-patch-refresh', action="store_true", help='Update patches in the layer even if they have not been modified (useful for refreshing patch context)')
    parser_finish.set_defaults(func=finish)
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py
deleted file mode 100644
index 0dace1fb24..0000000000
--- a/scripts/lib/devtool/upgrade.py
+++ /dev/null
@@ -1,715 +0,0 @@
1# Development tool - upgrade command plugin
2#
3# Copyright (C) 2014-2017 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7"""Devtool upgrade plugin"""
8
9import os
10import sys
11import re
12import shutil
13import tempfile
14import logging
15import argparse
16import scriptutils
17import errno
18import bb
19
20devtool_path = os.path.dirname(os.path.realpath(__file__)) + '/../../../meta/lib'
21sys.path = sys.path + [devtool_path]
22
23import oe.recipeutils
24from devtool import standard
25from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build, update_unlockedsigs, check_prerelease_version
26
27logger = logging.getLogger('devtool')
28
def _run(cmd, cwd=''):
    """Run a shell command through bitbake's process helper.

    Returns the (stdout, stderr) tuple from bb.process.run; a non-zero
    exit raises bb.process.ExecutionError (callers catch it where needed).
    """
    logger.debug("Running command %s> %s", cwd, cmd)
    return bb.process.run('%s' % cmd, cwd=cwd)
32
33def _get_srctree(tmpdir):
34 srctree = tmpdir
35 dirs = os.listdir(tmpdir)
36 if len(dirs) == 1:
37 srctree = os.path.join(tmpdir, dirs[0])
38 else:
39 raise DevtoolError("Cannot determine where the source tree is after unpacking in {}: {}".format(tmpdir,dirs))
40 return srctree
41
def _copy_source_code(orig, dest):
    """Move every tracked file from *orig* into *dest*, creating any
    intermediate directories as needed."""
    for relpath in standard._ls_tree(orig):
        target = os.path.join(dest, relpath)
        bb.utils.mkdirhier(os.path.join(dest, os.path.dirname(relpath)))
        shutil.move(os.path.join(orig, relpath), target)
48
49def _remove_patch_dirs(recipefolder):
50 for root, dirs, files in os.walk(recipefolder):
51 for d in dirs:
52 shutil.rmtree(os.path.join(root,d))
53
def _recipe_contains(rd, var):
    """Return True if *var* is set within the recipe file itself (or a file
    alongside it), as opposed to a class or global configuration."""
    recipefile = rd.getVar('FILE')
    recipedir = os.path.dirname(recipefile) + os.sep
    varfiles = oe.recipeutils.get_var_files(recipefile, [var], rd)
    return any(fn and fn.startswith(recipedir) for fn in varfiles.values())
61
62def _rename_recipe_dirs(oldpv, newpv, path):
63 for root, dirs, files in os.walk(path):
64 # Rename directories with the version in their name
65 for olddir in dirs:
66 if olddir.find(oldpv) != -1:
67 newdir = olddir.replace(oldpv, newpv)
68 if olddir != newdir:
69 shutil.move(os.path.join(path, olddir), os.path.join(path, newdir))
70 # Rename any inc files with the version in their name (unusual, but possible)
71 for oldfile in files:
72 if oldfile.endswith('.inc'):
73 if oldfile.find(oldpv) != -1:
74 newfile = oldfile.replace(oldpv, newpv)
75 if oldfile != newfile:
76 bb.utils.rename(os.path.join(path, oldfile),
77 os.path.join(path, newfile))
78
79def _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path):
80 oldrecipe = os.path.basename(oldrecipe)
81 if oldrecipe.endswith('_%s.bb' % oldpv):
82 newrecipe = '%s_%s.bb' % (pn, newpv)
83 if oldrecipe != newrecipe:
84 shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe))
85 else:
86 newrecipe = oldrecipe
87 return os.path.join(path, newrecipe)
88
def _rename_recipe_files(oldrecipe, pn, oldpv, newpv, path):
    """Rename version-named directories/.inc files and the recipe file itself
    under *path*; return the full path of the renamed recipe file."""
    _rename_recipe_dirs(oldpv, newpv, path)
    return _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path)
92
def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d):
    """Write a workspace bbappend pointing the recipe at the external srctree.

    rc: recipe file the append is for (must already exist)
    revs: mapping of relative path -> initial revision, recorded as comments
    copied: list of original files copied into the workspace, or falsy
    d: recipe datastore
    Returns the path of the bbappend that was written.
    """
    if not os.path.exists(rc):
        raise DevtoolError("bbappend not created because %s does not exist" % rc)

    appendpath = os.path.join(workspace, 'appends')
    if not os.path.exists(appendpath):
        bb.utils.mkdirhier(appendpath)

    brf = os.path.basename(os.path.splitext(rc)[0]) # rc basename

    srctree = os.path.abspath(srctree)
    pn = d.getVar('PN')
    af = os.path.join(appendpath, '%s.bbappend' % brf)
    with open(af, 'w') as f:
        f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n')
        # Local files can be modified/tracked in separate subdir under srctree
        # Mostly useful for packages with S != WORKDIR
        f.write('FILESPATH:prepend := "%s:"\n' %
                os.path.join(srctreebase, 'oe-local-files'))
        f.write('# srctreebase: %s\n' % srctreebase)
        f.write('inherit externalsrc\n')
        # FIX: the two adjacent literals previously concatenated without a
        # separating space, writing "affectingmultiple" into the bbappend
        f.write(('# NOTE: We use pn- overrides here to avoid affecting '
                 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
        f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))
        b_is_s = use_external_build(same_dir, no_same_dir, d)
        if b_is_s:
            f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
        f.write('\n')
        if revs:
            for name, rev in revs.items():
                f.write('# initial_rev %s: %s\n' % (name, rev))
        if copied:
            f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE')))
            f.write('# original_files: %s\n' % ' '.join(copied))
    return af
129
130def _cleanup_on_error(rd, srctree):
131 if os.path.exists(rd):
132 shutil.rmtree(rd)
133 srctree = os.path.abspath(srctree)
134 if os.path.exists(srctree):
135 shutil.rmtree(srctree)
136
def _upgrade_error(e, rd, srctree, keep_failure=False, extramsg=None):
    """Report a failed upgrade and exit the process with status 1.

    e: the exception (or message) describing the failure
    rd: workspace recipe directory to clean up
    srctree: source tree to clean up
    keep_failure: if True, leave rd/srctree in place for debugging
    extramsg: optional additional error line to log
    """
    # Clean up first so the error messages are the last thing the user sees
    if not keep_failure:
        _cleanup_on_error(rd, srctree)
    logger.error(e)
    if extramsg:
        logger.error(extramsg)
    if keep_failure:
        logger.info('Preserving failed upgrade files (--keep-failure)')
    sys.exit(1)
146
147def _get_uri(rd):
148 srcuris = rd.getVar('SRC_URI').split()
149 if not len(srcuris):
150 raise DevtoolError('SRC_URI not found on recipe')
151 # Get first non-local entry in SRC_URI - usually by convention it's
152 # the first entry, but not always!
153 srcuri = None
154 for entry in srcuris:
155 if not entry.startswith('file://'):
156 srcuri = entry
157 break
158 if not srcuri:
159 raise DevtoolError('Unable to find non-local entry in SRC_URI')
160 srcrev = '${AUTOREV}'
161 if '://' in srcuri:
162 # Fetch a URL
163 rev_re = re.compile(';rev=([^;]+)')
164 res = rev_re.search(srcuri)
165 if res:
166 srcrev = res.group(1)
167 srcuri = rev_re.sub('', srcuri)
168 return srcuri, srcrev
169
def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd):
    """Extract sources of a recipe with a new version.

    For git/gitsm recipes the existing clone in srctree is fetched and the
    new revision checked out in place; for anything else the new version is
    fetched to a temp dir and the tree contents replaced wholesale. Unless
    no_patch is set, the devtool branch (and any devtool-override-* branches)
    are then rebased onto the new base.

    Returns (revs, checksums, srcbranch, srcsubdir_rel) where revs maps a
    path relative to srctree (srctree itself and any submodules) to the new
    HEAD revision.
    """
    import oe.patch

    def __run(cmd):
        """Simple wrapper which calls _run with srctree as cwd"""
        return _run(cmd, srctree)

    crd = rd.createCopy()

    pv = crd.getVar('PV')
    crd.setVar('PV', newpv)

    tmpsrctree = None
    uri, rev = _get_uri(crd)
    if srcrev:
        rev = srcrev
    paths = [srctree]
    if uri.startswith('git://') or uri.startswith('gitsm://'):
        # Git recipe: reuse the existing clone, tag the new base in the
        # main repo and every submodule
        __run('git fetch')
        __run('git checkout %s' % rev)
        __run('git tag -f --no-sign devtool-base-new')
        __run('git submodule update --recursive')
        __run('git submodule foreach \'git tag -f --no-sign devtool-base-new\'')
        (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'')
        paths += [os.path.join(srctree, p) for p in stdout.splitlines()]
        checksums = {}
        _, _, _, _, _, params = bb.fetch2.decodeurl(uri)
        srcsubdir_rel = params.get('destsuffix', 'git')
        if not srcbranch:
            # Work out which branch the requested revision lives on
            check_branch, check_branch_err = __run('git branch -r --contains %s' % srcrev)
            get_branch = [x.strip() for x in check_branch.splitlines()]
            # Remove HEAD reference point and drop remote prefix
            get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
            if len(get_branch) == 1:
                # If srcrev is on only ONE branch, then use that branch
                srcbranch = get_branch[0]
            elif 'main' in get_branch:
                # If srcrev is on multiple branches, then choose 'main' if it is one of them
                srcbranch = 'main'
            elif 'master' in get_branch:
                # Otherwise choose 'master' if it is one of the branches
                srcbranch = 'master'
            else:
                # If get_branch contains more than one objects, then display error and exit.
                mbrch = '\n ' + '\n '.join(get_branch)
                raise DevtoolError('Revision %s was found on multiple branches: %s\nPlease provide the correct branch in the devtool command with "--srcbranch" or "-B" option.' % (srcrev, mbrch))
    else:
        # Non-git recipe (e.g. tarball): branch from the original base and
        # fetch the new version into a temporary directory
        __run('git checkout devtool-base -b devtool-%s' % newpv)

        tmpdir = tempfile.mkdtemp(prefix='devtool')
        try:
            checksums, ftmpdir = scriptutils.fetch_url(tinfoil, uri, rev, tmpdir, logger, preserve_tmp=keep_temp)
        except scriptutils.FetchUrlFailure as e:
            raise DevtoolError(e)

        if ftmpdir and keep_temp:
            logger.info('Fetch temp directory is %s' % ftmpdir)

        tmpsrctree = _get_srctree(tmpdir)
        srctree = os.path.abspath(srctree)
        srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir)

        # Delete all sources so we ensure no stray files are left over
        for item in os.listdir(srctree):
            if item in ['.git', 'oe-local-files']:
                continue
            itempath = os.path.join(srctree, item)
            if os.path.isdir(itempath):
                shutil.rmtree(itempath)
            else:
                os.remove(itempath)

        # Copy in new ones
        _copy_source_code(tmpsrctree, srctree)

        (stdout,_) = __run('git ls-files --modified --others')
        filelist = stdout.splitlines()
        pbar = bb.ui.knotty.BBProgress('Adding changed files', len(filelist))
        pbar.start()
        # Add in batches to avoid exceeding command-line length limits
        batchsize = 100
        for i in range(0, len(filelist), batchsize):
            batch = filelist[i:i+batchsize]
            __run('git add -f -A %s' % ' '.join(['"%s"' % item for item in batch]))
            pbar.update(i)
        pbar.finish()

        useroptions = []
        oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
        __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv))
        __run('git tag -f --no-sign devtool-base-%s' % newpv)

    # Record the new base revision for srctree and each submodule
    revs = {}
    for path in paths:
        (stdout, _) = _run('git rev-parse HEAD', cwd=path)
        revs[os.path.relpath(path, srctree)] = stdout.rstrip()

    if no_patch:
        patches = oe.recipeutils.get_recipe_patches(crd)
        if patches:
            logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches]))
    else:
        for path in paths:
            _run('git checkout devtool-patched -b %s' % branch, cwd=path)
            (stdout, _) = _run('git branch --list devtool-override-*', cwd=path)
            branches_to_rebase = [branch] + stdout.split()
            target_branch = revs[os.path.relpath(path, srctree)]

            # There is a bug (or feature?) in git rebase where if a commit with
            # a note is fully rebased away by being part of an old commit, the
            # note is still attached to the old commit. Avoid this by making
            # sure all old devtool related commits have a note attached to them
            # (this assumes git config notes.rewriteMode is set to ignore).
            # NOTE(review): this rev-list uses __run (cwd=srctree) while the
            # note operations use *path* — looks wrong for submodules; confirm.
            (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
            for rev in stdout.splitlines():
                if not oe.patch.GitApplyTree.getNotes(path, rev):
                    oe.patch.GitApplyTree.addNote(path, rev, "dummy")

            for b in branches_to_rebase:
                logger.info("Rebasing {} onto {}".format(b, target_branch))
                _run('git checkout %s' % b, cwd=path)
                try:
                    _run('git rebase %s' % target_branch, cwd=path)
                except bb.process.ExecutionError as e:
                    if 'conflict' in e.stdout:
                        logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
                        _run('git rebase --abort', cwd=path)
                    else:
                        logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))

            # Remove any dummy notes added above.
            (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
            for rev in stdout.splitlines():
                oe.patch.GitApplyTree.removeNote(path, rev, "dummy")

            _run('git checkout %s' % branch, cwd=path)

    # Clean up the fetch temp dir (tarball case only)
    if tmpsrctree:
        if keep_temp:
            logger.info('Preserving temporary directory %s' % tmpsrctree)
        else:
            shutil.rmtree(tmpsrctree)
            if tmpdir != tmpsrctree:
                shutil.rmtree(tmpdir)

    return (revs, checksums, srcbranch, srcsubdir_rel)
316
317def _add_license_diff_to_recipe(path, diff):
318 notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'.
319# The following is the difference between the old and the new license text.
320# Please update the LICENSE value if needed, and summarize the changes in
321# the commit message via 'License-Update:' tag.
322# (example: 'License-Update: copyright years updated.')
323#
324# The changes:
325#
326"""
327 commented_diff = "\n".join(["# {}".format(l) for l in diff.split('\n')])
328 with open(path, 'rb') as f:
329 orig_content = f.read()
330 with open(path, 'wb') as f:
331 f.write(notice_text.encode())
332 f.write(commented_diff.encode())
333 f.write("\n#\n\n".encode())
334 f.write(orig_content)
335
def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, workspace_srctree_unused_note_srctree, keep_failure):
    """Creates the new recipe under workspace"""

    pn = rd.getVar('PN')
    path = os.path.join(workspace, 'recipes', pn)
    bb.utils.mkdirhier(path)
    copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True)
    if not copied:
        raise DevtoolError('Internal error - no files were copied for recipe %s' % pn)
    logger.debug('Copied %s to %s' % (copied, path))

    oldpv = rd.getVar('PV')
    if not newpv:
        newpv = oldpv
    origpath = rd.getVar('FILE')
    fullpath = _rename_recipe_files(origpath, pn, oldpv, newpv, path)
    logger.debug('Upgraded %s => %s' % (origpath, fullpath))

    # Accumulate variable changes; a value of None means "delete this var"
    newvalues = {}
    if _recipe_contains(rd, 'PV') and newpv != oldpv:
        newvalues['PV'] = newpv

    if srcrev:
        newvalues['SRCREV'] = srcrev

    if srcbranch:
        # Point the first git/gitsm SRC_URI entry at the new branch; if the
        # branch is set via a ${VAR} reference, update that variable instead
        src_uri = oe.recipeutils.split_var_value(rd.getVar('SRC_URI', False) or '')
        changed = False
        replacing = True
        new_src_uri = []
        for entry in src_uri:
            try:
                scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
            except bb.fetch2.MalformedUrl as e:
                raise DevtoolError("Could not decode SRC_URI: {}".format(e))
            if replacing and scheme in ['git', 'gitsm']:
                branch = params.get('branch', 'master')
                if rd.expand(branch) != srcbranch:
                    # Handle case where branch is set through a variable
                    res = re.match(r'\$\{([^}@]+)\}', branch)
                    if res:
                        newvalues[res.group(1)] = srcbranch
                        # We know we won't change SRC_URI now, so break out
                        break
                    else:
                        params['branch'] = srcbranch
                        entry = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
                        changed = True
                replacing = False
            new_src_uri.append(entry)
        if changed:
            newvalues['SRC_URI'] = ' '.join(new_src_uri)

    # PR must be reset on a version bump
    newvalues['PR'] = None

    # Work out which SRC_URI entries have changed in case the entry uses a name
    crd = rd.createCopy()
    crd.setVar('PV', newpv)
    for var, value in newvalues.items():
        crd.setVar(var, value)
    old_src_uri = (rd.getVar('SRC_URI') or '').split()
    new_src_uri = (crd.getVar('SRC_URI') or '').split()
    newnames = []
    addnames = []
    for newentry in new_src_uri:
        _, _, _, _, _, params = bb.fetch2.decodeurl(newentry)
        if 'name' in params:
            newnames.append(params['name'])
            if newentry not in old_src_uri:
                addnames.append(params['name'])
    # Find what's been set in the original recipe
    oldnames = []
    oldsums = []
    noname = False
    for varflag in rd.getVarFlags('SRC_URI'):
        for checksum in checksums:
            if varflag.endswith('.' + checksum):
                name = varflag.rsplit('.', 1)[0]
                if name not in oldnames:
                    oldnames.append(name)
                oldsums.append(checksum)
            elif varflag == checksum:
                noname = True
                oldsums.append(checksum)
    # Even if SRC_URI has named entries it doesn't have to actually use the name
    if noname and addnames and addnames[0] not in oldnames:
        addnames = []
    # Drop any old names (the name actually might include ${PV})
    for name in oldnames:
        if name not in newnames:
            for checksum in oldsums:
                newvalues['SRC_URI[%s.%s]' % (name, checksum)] = None

    nameprefix = '%s.' % addnames[0] if addnames else ''

    # md5sum is deprecated, remove any traces of it. If it was the only old
    # checksum, then replace it with the default checksums.
    if 'md5sum' in oldsums:
        newvalues['SRC_URI[%smd5sum]' % nameprefix] = None
        oldsums.remove('md5sum')
        if not oldsums:
            oldsums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]

    # Write the new checksum values computed during the fetch
    for checksum in oldsums:
        newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum]

    if srcsubdir_new != srcsubdir_old:
        s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR'))
        s_subdir_new = os.path.relpath(os.path.abspath(crd.getVar('S')), crd.getVar('WORKDIR'))
        if srcsubdir_old == s_subdir_old and srcsubdir_new != s_subdir_new:
            # Subdir for old extracted source matches what S points to (it should!)
            # but subdir for new extracted source doesn't match what S will be
            newvalues['S'] = '${WORKDIR}/%s' % srcsubdir_new.replace(newpv, '${PV}')
            if crd.expand(newvalues['S']) == crd.expand('${WORKDIR}/${BP}'):
                # It's the default, drop it
                # FIXME what if S is being set in a .inc?
                newvalues['S'] = None
                logger.info('Source subdirectory has changed, dropping S value since it now matches the default ("${WORKDIR}/${BP}")')
            else:
                logger.info('Source subdirectory has changed, updating S value')

    if license_diff:
        newlicchksum = " ".join(["file://{}".format(l['path']) +
                                 (";beginline={}".format(l['beginline']) if l['beginline'] else "") +
                                 (";endline={}".format(l['endline']) if l['endline'] else "") +
                                 (";md5={}".format(l['actual_md5'])) for l in new_licenses])
        newvalues["LIC_FILES_CHKSUM"] = newlicchksum
        _add_license_diff_to_recipe(fullpath, license_diff)

    # Re-parse the renamed/annotated recipe and apply the accumulated changes
    tinfoil.modified_files()
    try:
        rd = tinfoil.parse_recipe_file(fullpath, False)
    except bb.tinfoil.TinfoilCommandFailed as e:
        _upgrade_error(e, os.path.dirname(fullpath), srctree, keep_failure, 'Parsing of upgraded recipe failed')
    oe.recipeutils.patch_recipe(rd, fullpath, newvalues)

    return fullpath, copied
473
474
def _check_git_config():
    """Verify that git user.name and user.email are configured.

    The rebases performed during an upgrade need a committer identity;
    raise DevtoolError listing whichever settings are missing.
    """
    def getconfig(name):
        # 'git config <key>' exits 1 when the key is simply unset
        try:
            return bb.process.run('git config %s' % name)[0].strip()
        except bb.process.ExecutionError as e:
            if e.exitcode != 1:
                raise
            return None

    problems = []
    if not getconfig('user.name'):
        problems.append('Please set your name using:\n git config --global user.name')
    if not getconfig('user.email'):
        problems.append('Please set your email using:\n git config --global user.email')
    if problems:
        raise DevtoolError('Your git configuration is incomplete which will prevent rebases from working:\n' + '\n'.join(problems))
495
def _extract_licenses(srcpath, recipe_licenses):
    """Read the license files referenced by LIC_FILES_CHKSUM entries.

    Returns a list of dicts per entry with: path, recorded md5,
    beginline/endline (0 = unbounded), the captured text lines, and
    actual_md5 (the md5 of the captured span as found on disk).
    """
    import hashlib
    results = []
    for url in recipe_licenses.split():
        (_scheme, _host, relpath, _user, _pswd, parm) = bb.fetch.decodeurl(url)
        entry = {}
        entry['path'] = relpath
        entry['md5'] = parm.get('md5', '')
        entry['beginline'] = int(parm['beginline']) if 'beginline' in parm else 0
        entry['endline'] = int(parm['endline']) if 'endline' in parm else 0
        entry['text'] = []
        digest = hashlib.md5()
        with open(os.path.join(srcpath, relpath), 'rb') as f:
            for lineno, line in enumerate(f, start=1):
                # Hash/capture only the lines within the begin/end window
                if lineno >= entry['beginline'] and (not entry['endline'] or lineno <= entry['endline']):
                    entry['text'].append(line.decode(errors='ignore'))
                    digest.update(line)
        entry['actual_md5'] = digest.hexdigest()
        results.append(entry)
    return results
521
522def _generate_license_diff(old_licenses, new_licenses):
523 need_diff = False
524 for l in new_licenses:
525 if l['md5'] != l['actual_md5']:
526 need_diff = True
527 break
528 if need_diff == False:
529 return None
530
531 import difflib
532 diff = ''
533 for old, new in zip(old_licenses, new_licenses):
534 for line in difflib.unified_diff(old['text'], new['text'], old['path'], new['path']):
535 diff = diff + line
536 return diff
537
def _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil):
    """Run any tasks listed in the recipe's RECIPE_UPGRADE_EXTRA_TASKS.

    Raises DevtoolError if any of the tasks fails to build.
    (Removed the unused 'tasks = []' local from the original.)
    """
    for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split():
        logger.info('Running extra recipe upgrade task: %s' % task)
        res = tinfoil.build_targets(pn, task, handle_events=True)

        if not res:
            raise DevtoolError('Running extra recipe upgrade task %s for %s failed' % (task, pn))
546
def upgrade(args, config, basepath, workspace):
    """Entry point for the devtool 'upgrade' subcommand

    Extracts the current and upgraded source trees, creates the upgraded
    recipe in the workspace plus its bbappend, and runs any extra upgrade
    tasks. Returns 0 on success; raises DevtoolError on user error.
    """

    if args.recipename in workspace:
        raise DevtoolError("recipe %s is already in your workspace" % args.recipename)
    if args.srcbranch and not args.srcrev:
        # FIX: the message has no format placeholder, so the original
        # '% args.recipename' raised TypeError instead of DevtoolError
        raise DevtoolError("If you specify --srcbranch/-B then you must use --srcrev/-S to specify the revision")

    _check_git_config()

    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        pn = rd.getVar('PN')
        if pn != args.recipename:
            logger.info('Mapping %s to %s' % (args.recipename, pn))
        if pn in workspace:
            raise DevtoolError("recipe %s is already in your workspace" % pn)

        if args.srctree:
            srctree = os.path.abspath(args.srctree)
        else:
            srctree = standard.get_default_srctree(config, pn)

        srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))

        # try to automatically discover latest version and revision if not provided on command line
        if not args.version and not args.srcrev:
            version_info = oe.recipeutils.get_recipe_upstream_version(rd)
            if version_info['version'] and not version_info['version'].endswith("new-commits-available"):
                args.version = version_info['version']
            if version_info['revision']:
                args.srcrev = version_info['revision']
        if not args.version and not args.srcrev:
            raise DevtoolError("Automatic discovery of latest version/revision failed - you must provide a version using the --version/-V option, or for recipes that fetch from an SCM such as git, the --srcrev/-S option.")

        standard._check_compatible_recipe(pn, rd)
        old_srcrev = rd.getVar('SRCREV')
        if old_srcrev == 'INVALID':
            old_srcrev = None
        if old_srcrev and not args.srcrev:
            raise DevtoolError("Recipe specifies a SRCREV value; you must specify a new one when upgrading")
        old_ver = rd.getVar('PV')
        if old_ver == args.version and old_srcrev == args.srcrev:
            raise DevtoolError("Current and upgrade versions are the same version")
        if args.version:
            if bb.utils.vercmp_string(args.version, old_ver) < 0:
                logger.warning('Upgrade version %s compares as less than the current version %s. If you are using a package feed for on-target upgrades or providing this recipe for general consumption, then you should increment PE in the recipe (or if there is no current PE value set, set it to "1")' % (args.version, old_ver))
            check_prerelease_version(args.version, 'devtool upgrade')

        rf = None
        license_diff = None
        try:
            logger.info('Extracting current version source...')
            rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
            old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
            logger.info('Extracting upgraded version source...')
            rev2, checksums, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
                                                    args.srcrev, args.srcbranch, args.branch, args.keep_temp,
                                                    tinfoil, rd)
            new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
            license_diff = _generate_license_diff(old_licenses, new_licenses)
            rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
        except (bb.process.CmdError, DevtoolError) as e:
            # _upgrade_error cleans up and exits the process
            recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('PN'))
            _upgrade_error(e, recipedir, srctree, args.keep_failure)
        standard._add_md5(config, pn, os.path.dirname(rf))

        af = _write_append(rf, srctree, srctree_s, args.same_dir, args.no_same_dir, rev2,
                           copied, config.workspace_path, rd)
        standard._add_md5(config, pn, af)

        _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil)

        update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])

        logger.info('Upgraded source extracted to %s' % srctree)
        logger.info('New recipe is %s' % rf)
        if license_diff:
            logger.info('License checksums have been updated in the new recipe; please refer to it for the difference between the old and the new license texts.')
        preferred_version = rd.getVar('PREFERRED_VERSION_%s' % rd.getVar('PN'))
        if preferred_version:
            logger.warning('Version is pinned to %s via PREFERRED_VERSION; it may need adjustment to match the new version before any further steps are taken' % preferred_version)
    finally:
        tinfoil.shutdown()
    return 0
636
def latest_version(args, config, basepath, workspace):
    """Entry point for the devtool 'latest_version' subcommand

    Queries upstream for the latest version of the specified recipe and
    logs the result. Returns 0 on success, 1 if the recipe can't be parsed.
    """
    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1
        version_info = oe.recipeutils.get_recipe_upstream_version(rd)
        # "new-commits-available" is an indication that upstream never issues version tags
        # NOTE(review): assumes version_info['version'] is always a string;
        # the 'upgrade' code path guards it with a truthiness check first —
        # confirm get_recipe_upstream_version() never returns None here.
        if not version_info['version'].endswith("new-commits-available"):
            logger.info("Current version: {}".format(version_info['current_version']))
            logger.info("Latest version: {}".format(version_info['version']))
            if version_info['revision']:
                logger.info("Latest version's commit: {}".format(version_info['revision']))
        else:
            logger.info("Latest commit: {}".format(version_info['revision']))
    finally:
        tinfoil.shutdown()
    return 0
656
def check_upgrade_status(args, config, basepath, workspace):
    """Entry point for the devtool 'check-upgrade-status' subcommand

    Prints one line per recipe with its current version, upgrade status,
    maintainer, revision and any reason it cannot be upgraded.
    """
    def _print_status(recipe):
        # For 'UPDATE' status, show the target version instead (or a hint
        # that upstream only has new untagged commits)
        if recipe['status'] != 'UPDATE':
            shown_status = recipe['status']
        elif recipe['next_ver'].endswith("new-commits-available"):
            shown_status = "new commits"
        else:
            shown_status = recipe['next_ver']
        revision = recipe['revision'] if recipe['revision'] != 'N/A' else ""
        reason = "cannot be updated due to: %s" %(recipe['no_upgrade_reason']) if recipe['no_upgrade_reason'] else ""
        print("{:25} {:15} {:15} {} {} {}".format(recipe['pn'],
                                                  recipe['cur_ver'],
                                                  shown_status,
                                                  recipe['maintainer'],
                                                  revision,
                                                  reason))

    if not args.recipe:
        logger.info("Checking the upstream status for all recipes may take a few minutes")
    for recipegroup in oe.recipeutils.get_recipe_upgrade_status(args.recipe):
        needs_upgrade = [r for r in recipegroup if r['status'] != 'MATCH']
        up_to_date = [r for r in recipegroup if r['status'] == 'MATCH']
        grouped = len(needs_upgrade) > 1
        if grouped:
            print("These recipes need to be upgraded together {")
        for r in sorted(needs_upgrade, key=lambda r: r['pn']):
            _print_status(r)
        if grouped:
            print("}")
        for r in up_to_date:
            if args.all:
                _print_status(r)
680
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin

    Adds the 'upgrade', 'latest-version' and 'check-upgrade-status'
    subcommands to the devtool argument parser.
    """

    defsrctree = standard.get_default_srctree(context.config)

    # devtool upgrade
    parser_upgrade = subparsers.add_parser('upgrade', help='Upgrade an existing recipe',
                                           description='Upgrades an existing recipe to a new upstream version. Puts the upgraded recipe file into the workspace along with any associated files, and extracts the source tree to a specified location (in case patches need rebasing or adding to as a result of the upgrade).',
                                           group='starting')
    parser_upgrade.add_argument('recipename', help='Name of recipe to upgrade (just name - no version, path or extension)')
    parser_upgrade.add_argument('srctree', nargs='?', help='Path to where to extract the source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
    parser_upgrade.add_argument('--version', '-V', help='Version to upgrade to (PV). If omitted, latest upstream version will be determined and used, if possible.')
    parser_upgrade.add_argument('--srcrev', '-S', help='Source revision to upgrade to (useful when fetching from an SCM such as git)')
    parser_upgrade.add_argument('--srcbranch', '-B', help='Branch in source repository containing the revision to use (if fetching from an SCM such as git)')
    parser_upgrade.add_argument('--branch', '-b', default="devtool", help='Name for new development branch to checkout (default "%(default)s")')
    parser_upgrade.add_argument('--no-patch', action="store_true", help='Do not apply patches from the recipe to the new source code')
    parser_upgrade.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
    # --same-dir and --no-same-dir are mutually exclusive
    group = parser_upgrade.add_mutually_exclusive_group()
    group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser_upgrade.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser_upgrade.add_argument('--keep-failure', action="store_true", help='Keep failed upgrade recipe and associated files (for debugging)')
    parser_upgrade.set_defaults(func=upgrade, fixed_setup=context.fixed_setup)

    # devtool latest-version
    parser_latest_version = subparsers.add_parser('latest-version', help='Report the latest version of an existing recipe',
                                                  description='Queries the upstream server for what the latest upstream release is (for git, tags are checked, for tarballs, a list of them is obtained, and one with the highest version number is reported)',
                                                  group='info')
    parser_latest_version.add_argument('recipename', help='Name of recipe to query (just name - no version, path or extension)')
    parser_latest_version.set_defaults(func=latest_version)

    # devtool check-upgrade-status
    parser_check_upgrade_status = subparsers.add_parser('check-upgrade-status', help="Report upgradability for multiple (or all) recipes",
                                                        description="Prints a table of recipes together with versions currently provided by recipes, and latest upstream versions, when there is a later version available",
                                                        group='info')
    parser_check_upgrade_status.add_argument('recipe', help='Name of the recipe to report (omit to report upgrade info for all recipes)', nargs='*')
    parser_check_upgrade_status.add_argument('--all', '-a', help='Show all recipes, not just recipes needing upgrade', action="store_true")
    parser_check_upgrade_status.set_defaults(func=check_upgrade_status)
diff --git a/scripts/lib/devtool/utilcmds.py b/scripts/lib/devtool/utilcmds.py
deleted file mode 100644
index bf39f71b11..0000000000
--- a/scripts/lib/devtool/utilcmds.py
+++ /dev/null
@@ -1,242 +0,0 @@
1# Development tool - utility commands plugin
2#
3# Copyright (C) 2015-2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8"""Devtool utility plugins"""
9
10import os
11import sys
12import shutil
13import tempfile
14import logging
15import argparse
16import subprocess
17import scriptutils
18from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
19from devtool import parse_recipe
20
# Module-level logger; all devtool plugins share the 'devtool' channel so
# output formatting/verbosity is controlled centrally by the devtool script.
logger = logging.getLogger('devtool')
22
23def _find_recipe_path(args, config, basepath, workspace):
24 if args.any_recipe:
25 logger.warning('-a/--any-recipe option is now always active, and thus the option will be removed in a future release')
26 if args.recipename in workspace:
27 recipefile = workspace[args.recipename]['recipefile']
28 else:
29 recipefile = None
30 if not recipefile:
31 tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
32 try:
33 rd = parse_recipe(config, tinfoil, args.recipename, True)
34 if not rd:
35 raise DevtoolError("Failed to find specified recipe")
36 recipefile = rd.getVar('FILE')
37 finally:
38 tinfoil.shutdown()
39 return recipefile
40
41
def find_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'find-recipe' subcommand

    Prints the path of the recipe file to stdout and returns 0.
    """
    path = _find_recipe_path(args, config, basepath, workspace)
    print(path)
    return 0
47
48
def edit_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'edit-recipe' subcommand

    Opens the recipe file in the user's editor; returns the editor's
    exit status as propagated by scriptutils.run_editor().
    """
    recipefile = _find_recipe_path(args, config, basepath, workspace)
    return scriptutils.run_editor(recipefile, logger)
52
53
def configure_help(args, config, basepath, workspace):
    """Entry point for the devtool 'configure-help' subcommand

    Prints an explanatory header describing how the recipe is configured
    (autotools, cmake or a custom do_configure) followed by the configure
    tool's own help output (./configure --help or cmake -LH), piped
    through PAGER/less when stdout is a terminal.
    """
    import oe.utils

    # The recipe must be in the devtool workspace
    check_workspace_recipe(workspace, args.recipename)
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            return 1
        # Read everything needed from the datastore up front so tinfoil can
        # be shut down before any external commands run below
        b = rd.getVar('B')
        s = rd.getVar('S')
        configurescript = os.path.join(s, 'configure')
        # NOTE(review): 'bb' does not appear in this module's visible imports -
        # verify it is resolvable at runtime before relying on this path
        confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (bb.build.listtasks(rd))
        configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '')
        extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '')
        extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '')
        do_configure = rd.getVar('do_configure') or ''
        # Unexpanded task body, used only to detect a literal ${EXTRA_OECONF}
        do_configure_noexpand = rd.getVar('do_configure', False) or ''
        packageconfig = rd.getVarFlags('PACKAGECONFIG') or []
        # Class inheritance alone is not enough - the task body must actually
        # invoke the class's configure machinery for these to apply
        autotools = bb.data.inherits_class('autotools', rd) and ('oe_runconf' in do_configure or 'autotools_do_configure' in do_configure)
        cmake = bb.data.inherits_class('cmake', rd) and ('cmake_do_configure' in do_configure)
        cmake_do_configure = rd.getVar('cmake_do_configure')
        pn = rd.getVar('PN')
    finally:
        tinfoil.shutdown()

    # 'doc' is a near-universal PACKAGECONFIG flag - not interesting here
    if 'doc' in packageconfig:
        del packageconfig['doc']

    # For autotools the configure script may be generated (autoreconf); try
    # running do_configure to produce it. Failures are deliberately ignored -
    # the checks below handle the script still being absent.
    if autotools and not os.path.exists(configurescript):
        logger.info('Running do_configure to generate configure script')
        try:
            stdout, _ = exec_build_env_command(config.init_path, basepath,
                                               'bitbake -c configure %s' % args.recipename,
                                               stderr=subprocess.STDOUT)
        except bb.process.ExecutionError:
            pass

    if confdisabled or do_configure.strip() in ('', ':'):
        raise DevtoolError("do_configure task has been disabled for this recipe")
    elif args.no_pager and not os.path.exists(configurescript):
        raise DevtoolError("No configure script found and no other information to display")
    else:
        # Build the explanatory header, varying by configure mechanism
        configopttext = ''
        if autotools and configureopts:
            configopttext = '''
Arguments currently passed to the configure script:

%s

Some of those are fixed.''' % (configureopts + ' ' + extra_oeconf)
            if extra_oeconf:
                configopttext += ''' The ones that are specified through EXTRA_OECONF (which you can change or add to easily):

%s''' % extra_oeconf

        elif cmake:
            # Extract the (possibly backslash-continued) cmake invocation
            # from the cmake_do_configure task body
            in_cmake = False
            cmake_cmd = ''
            for line in cmake_do_configure.splitlines():
                if in_cmake:
                    cmake_cmd = cmake_cmd + ' ' + line.strip().rstrip('\\')
                    if not line.endswith('\\'):
                        break
                if line.lstrip().startswith('cmake '):
                    cmake_cmd = line.strip().rstrip('\\')
                    if line.endswith('\\'):
                        in_cmake = True
                    else:
                        break
            if cmake_cmd:
                configopttext = '''
The current cmake command line:

%s

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (oe.utils.squashspaces(cmake_cmd), extra_oecmake)
            else:
                # No recognisable cmake command line - show the whole task
                configopttext = '''
The current implementation of cmake_do_configure:

cmake_do_configure() {
%s
}

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (cmake_do_configure.rstrip(), extra_oecmake)

        elif do_configure:
            # Custom do_configure: show its implementation verbatim
            configopttext = '''
The current implementation of do_configure:

do_configure() {
%s
}''' % do_configure.rstrip()
            if '${EXTRA_OECONF}' in do_configure_noexpand:
                configopttext += '''

Arguments specified through EXTRA_OECONF (which you can change or add to easily):

%s''' % extra_oeconf

        if packageconfig:
            configopttext += '''

Some of these options may be controlled through PACKAGECONFIG; for more details please see the recipe.'''

        # --arg replaces the default help flag entirely
        if args.arg:
            helpargs = ' '.join(args.arg)
        elif cmake:
            helpargs = '-LH'
        else:
            helpargs = '--help'

        msg = '''configure information for %s
------------------------------------------
%s''' % (pn, configopttext)

        if cmake:
            msg += '''

The cmake %s output for %s follows. After "-- Cache values" you should see a list of variables you can add to EXTRA_OECMAKE (prefixed with -D and suffixed with = followed by the desired value, without any spaces).
------------------------------------------''' % (helpargs, pn)
        elif os.path.exists(configurescript):
            msg += '''

The ./configure %s output for %s follows.
------------------------------------------''' % (helpargs, pn)

        olddir = os.getcwd()
        # NOTE(review): tmppath is created but nothing is ever written into
        # it; it exists only to be removed in the finally block - confirm
        # whether it can be dropped
        tmppath = tempfile.mkdtemp()
        # Header is written to a temp file so it can be cat'ed ahead of the
        # configure/cmake output inside a single shell pipeline
        with tempfile.NamedTemporaryFile('w', delete=False) as tf:
            if not args.no_header:
                tf.write(msg + '\n')
        tf.close()
        try:
            try:
                cmd = 'cat %s' % tf.name
                if cmake:
                    cmd += '; cmake %s %s 2>&1' % (helpargs, s)
                    # cmake -LH must run from the build directory
                    os.chdir(b)
                elif os.path.exists(configurescript):
                    cmd += '; %s %s' % (configurescript, helpargs)
                # Page the combined output only on an interactive terminal
                if sys.stdout.isatty() and not args.no_pager:
                    pager = os.environ.get('PAGER', 'less')
                    cmd = '(%s) | %s' % (cmd, pager)
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError as e:
                # Propagate the pipeline's exit code as our own
                return e.returncode
        finally:
            os.chdir(olddir)
            shutil.rmtree(tmppath)
            os.remove(tf.name)
211
212
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    # edit-recipe
    edit_parser = subparsers.add_parser('edit-recipe', help='Edit a recipe file',
                                        description='Runs the default editor (as specified by the EDITOR variable) on the specified recipe. Note that this will be quicker for recipes in the workspace as the cache does not need to be loaded in that case.',
                                        group='working')
    edit_parser.add_argument('recipename', help='Recipe to edit')
    # FIXME drop -a at some point in future
    edit_parser.add_argument('--any-recipe', '-a', action="store_true", help='Does nothing (exists for backwards-compatibility)')
    edit_parser.set_defaults(func=edit_recipe)

    # find-recipe
    find_parser = subparsers.add_parser('find-recipe', help='Find a recipe file',
                                        description='Finds a recipe file. Note that this will be quicker for recipes in the workspace as the cache does not need to be loaded in that case.',
                                        group='working')
    find_parser.add_argument('recipename', help='Recipe to find')
    # FIXME drop -a at some point in future
    find_parser.add_argument('--any-recipe', '-a', action="store_true", help='Does nothing (exists for backwards-compatibility)')
    find_parser.set_defaults(func=find_recipe)

    # configure-help
    # NOTE: Needed to override the usage string here since the default
    # gets the order wrong - recipename must come before --arg
    cfghelp_parser = subparsers.add_parser('configure-help', help='Get help on configure script options',
                                           usage='devtool configure-help [options] recipename [--arg ...]',
                                           description='Displays the help for the configure script for the specified recipe (i.e. runs ./configure --help) prefaced by a header describing the current options being specified. Output is piped through less (or whatever PAGER is set to, if set) for easy browsing.',
                                           group='working')
    cfghelp_parser.add_argument('recipename', help='Recipe to show configure help for')
    cfghelp_parser.add_argument('-p', '--no-pager', help='Disable paged output', action="store_true")
    cfghelp_parser.add_argument('-n', '--no-header', help='Disable explanatory header text', action="store_true")
    cfghelp_parser.add_argument('--arg', help='Pass remaining arguments to the configure script instead of --help (useful if the script has additional help options)', nargs=argparse.REMAINDER)
    cfghelp_parser.set_defaults(func=configure_help)