Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/contrib/image-manifest  523
1 file changed, 523 insertions, 0 deletions
diff --git a/scripts/contrib/image-manifest b/scripts/contrib/image-manifest
new file mode 100755
index 0000000000..3c07a73a4e
--- /dev/null
+++ b/scripts/contrib/image-manifest
@@ -0,0 +1,523 @@
#!/usr/bin/env python3

# Script to extract information from image manifests
#
# Copyright (C) 2018 Intel Corporation
# Copyright (C) 2021 Wind River Systems, Inc.
#
# SPDX-License-Identifier: GPL-2.0-only
#

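# Subcommands (see main() below):
#   recipe-info <package>     - show which recipe (PN) provides a package
#   list-depends <package>    - list build dependencies of the recipe providing a package
#   list-recipes <manifest>   - list recipes producing the packages in an image manifest
#   list-packages <manifest>  - list the packages in an image manifest
#   list-layers               - dump the configured layers (with git info) as JSON
#   dump-config               - write the default manifest-info config to default_config.json
#   manifest-info <manifest>  - export recipe information for a manifest as a tarball
#
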
import sys
import os
import argparse
import logging
import json
import shutil
import tempfile
import tarfile
from collections import OrderedDict

scripts_path = os.path.dirname(__file__)
lib_path = scripts_path + '/../lib'
sys.path = sys.path + [lib_path]

import scriptutils
logger = scriptutils.logger_create(os.path.basename(__file__))

import argparse_oe
import scriptpath
bitbakepath = scriptpath.add_bitbake_lib_path()
if not bitbakepath:
    logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
    sys.exit(1)
logger.debug('Using standard bitbake path %s' % bitbakepath)
scriptpath.add_oe_lib_path()

import bb.tinfoil
import bb.utils
import bb.process
import oe.utils
import oe.recipeutils

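# Return the sorted list of package names from a manifest file
# (three fields per line) or a build dependency file (one field per line).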
def get_pkg_list(manifest):
    pkglist = []
    with open(manifest, 'r') as f:
        for line in f:
            linesplit = line.split()
            if len(linesplit) == 3:
                # manifest file
                pkglist.append(linesplit[0])
            elif len(linesplit) == 1:
                # build dependency file
                pkglist.append(linesplit[0])
    return sorted(pkglist)

def list_packages(args):
    pkglist = get_pkg_list(args.manifest)
    for pkg in pkglist:
        print('%s' % pkg)

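# Map a runtime package name back to the recipe (PN) that produced it, using
# the pkgdata runtime-reverse files; -native packages are skipped.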
def pkg2recipe(tinfoil, pkg):
    if "-native" in pkg:
        logger.info('skipping %s' % pkg)
        return None

    pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
    pkgdatafile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
    logger.debug('pkgdatafile %s' % pkgdatafile)
    try:
        with open(pkgdatafile, 'r') as f:
            for line in f:
                if line.startswith('PN:'):
                    recipe = line.split(':', 1)[1].strip()
                    return recipe
    except Exception:
        logger.warning('%s is missing' % pkgdatafile)
    return None

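# Return the sorted, de-duplicated list of recipes providing the packages in a manifest.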
def get_recipe_list(manifest, tinfoil):
    pkglist = get_pkg_list(manifest)
    recipelist = []
    for pkg in pkglist:
        recipe = pkg2recipe(tinfoil,pkg)
        if recipe:
            if not recipe in recipelist:
                recipelist.append(recipe)

    return sorted(recipelist)

def list_recipes(args):
    import bb.tinfoil
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=True)
        recipelist = get_recipe_list(args.manifest, tinfoil)
        for recipe in sorted(recipelist):
            print('%s' % recipe)

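# Write a JSON description of all configured layers (BBLAYERS), including git
# URL, branch, subdirectory and HEAD commit where they can be determined.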
def list_layers(args):

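    # Helpers to map a layer directory to its containing git repository and to
    # query that repository's upstream branch, HEAD commit and remote URL.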
    def find_git_repo(pth):
        checkpth = pth
        while checkpth != os.sep:
            if os.path.exists(os.path.join(checkpth, '.git')):
                return checkpth
            checkpth = os.path.dirname(checkpth)
        return None

    def get_git_remote_branch(repodir):
        try:
            stdout, _ = bb.process.run(['git', 'rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{u}'], cwd=repodir)
        except bb.process.ExecutionError as e:
            stdout = None
        if stdout:
            return stdout.strip()
        else:
            return None

    def get_git_head_commit(repodir):
        try:
            stdout, _ = bb.process.run(['git', 'rev-parse', 'HEAD'], cwd=repodir)
        except bb.process.ExecutionError as e:
            stdout = None
        if stdout:
            return stdout.strip()
        else:
            return None

    def get_git_repo_url(repodir, remote='origin'):
        import bb.process
        # Try to get upstream repo location from origin remote
        try:
            stdout, _ = bb.process.run(['git', 'remote', '-v'], cwd=repodir)
        except bb.process.ExecutionError as e:
            stdout = None
        if stdout:
            for line in stdout.splitlines():
                splitline = line.split()
                if len(splitline) > 1:
                    if splitline[0] == remote and scriptutils.is_src_url(splitline[1]):
                        return splitline[1]
        return None

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=False)
        layers = OrderedDict()
        for layerdir in tinfoil.config_data.getVar('BBLAYERS').split():
            layerdata = OrderedDict()
            layername = os.path.basename(layerdir)
            logger.debug('layername %s, layerdir %s' % (layername, layerdir))
            if layername in layers:
                logger.warning('layername %s is not unique in configuration' % layername)
                layername = os.path.basename(os.path.dirname(layerdir)) + '_' + os.path.basename(layerdir)
                logger.debug('trying layername %s' % layername)
                if layername in layers:
                    logger.error('Layer name %s is not unique in configuration' % layername)
                    sys.exit(2)
            repodir = find_git_repo(layerdir)
            if repodir:
                remotebranch = get_git_remote_branch(repodir)
                remote = 'origin'
                if remotebranch and '/' in remotebranch:
                    rbsplit = remotebranch.split('/', 1)
                    layerdata['actual_branch'] = rbsplit[1]
                    remote = rbsplit[0]
                layerdata['vcs_url'] = get_git_repo_url(repodir, remote)
                if os.path.abspath(repodir) != os.path.abspath(layerdir):
                    layerdata['vcs_subdir'] = os.path.relpath(layerdir, repodir)
                commit = get_git_head_commit(repodir)
                if commit:
                    layerdata['vcs_commit'] = commit
            layers[layername] = layerdata

    json.dump(layers, args.output, indent=2)

def get_recipe(args):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=True)

        recipe = pkg2recipe(tinfoil, args.package)
        print(' %s package provided by %s' % (args.package, recipe))

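# Print the build-time dependency tree of the recipe providing a package,
# resolved recursively via DEPENDS and filtered against ASSUME_PROVIDED.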
def pkg_dependencies(args):
    def get_recipe_info(tinfoil, recipe):
        try:
            info = tinfoil.get_recipe_info(recipe)
        except Exception:
            logger.error('Failed to get recipe info for: %s' % recipe)
            sys.exit(1)
        if not info:
            logger.warning('No recipe info found for: %s' % recipe)
            sys.exit(1)
        append_files = tinfoil.get_file_appends(info.fn)
        appends = True
        data = tinfoil.parse_recipe_file(info.fn, appends, append_files)
        data.pn = info.pn
        data.pv = info.pv
        return data

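    # Recursively collect DEPENDS for rn into 'packages', parsing each new
    # recipe only once and caching the result in 'recipe_info'.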
    def find_dependencies(tinfoil, assume_provided, recipe_info, packages, rn, order):
        spaces = ' ' * order
        data = recipe_info[rn]
        if args.native:
            logger.debug('%s- %s' % (spaces, data.pn))
        elif "-native" not in data.pn:
            if "cross" not in data.pn:
                logger.debug('%s- %s' % (spaces, data.pn))

        depends = []
        for dep in data.depends:
            if dep not in assume_provided:
                depends.append(dep)

        # First find all dependencies not in package list.
        for dep in depends:
            if dep not in packages:
                packages.append(dep)
                dep_data = get_recipe_info(tinfoil, dep)
                # Do this once now to reduce the number of bitbake calls.
                dep_data.depends = dep_data.getVar('DEPENDS').split()
                recipe_info[dep] = dep_data

        # Then recursively analyze all of the dependencies for the current recipe.
        for dep in depends:
            find_dependencies(tinfoil, assume_provided, recipe_info, packages, dep, order + 1)

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare()

        assume_provided = tinfoil.config_data.getVar('ASSUME_PROVIDED').split()
        logger.debug('assumed provided:')
        for ap in sorted(assume_provided):
            logger.debug(' - %s' % ap)

        recipe = pkg2recipe(tinfoil, args.package)
        data = get_recipe_info(tinfoil, recipe)
        data.depends = []
        depends = data.getVar('DEPENDS').split()
        for dep in depends:
            if dep not in assume_provided:
                data.depends.append(dep)

        recipe_info = dict([(recipe, data)])
        packages = []
        find_dependencies(tinfoil, assume_provided, recipe_info, packages, recipe, order=1)

        print('\nThe following packages are required to build %s' % recipe)
        for p in sorted(packages):
            data = recipe_info[p]
            if "-native" not in data.pn:
                if "cross" not in data.pn:
                    print(" %s (%s)" % (data.pn,p))

        if args.native:
            print('\nThe following native packages are required to build %s' % recipe)
            for p in sorted(packages):
                data = recipe_info[p]
260 if "-native" in data.pn:
261 print(" %s(%s)" % (data.pn,p))
262 if "cross" in data.pn:
263 print(" %s(%s)" % (data.pn,p))
264
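# Default configuration for 'manifest-info': which variables to export per
# recipe ('yes'/'no') and which extra pieces of information to include.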
def default_config():
    vlist = OrderedDict()
    vlist['PV'] = 'yes'
    vlist['SUMMARY'] = 'no'
    vlist['DESCRIPTION'] = 'no'
    vlist['SECTION'] = 'no'
    vlist['LICENSE'] = 'yes'
    vlist['HOMEPAGE'] = 'no'
    vlist['BUGTRACKER'] = 'no'
    vlist['PROVIDES'] = 'no'
    vlist['BBCLASSEXTEND'] = 'no'
    vlist['DEPENDS'] = 'no'
    vlist['PACKAGECONFIG'] = 'no'
    vlist['SRC_URI'] = 'yes'
    vlist['SRCREV'] = 'yes'
    vlist['EXTRA_OECONF'] = 'no'
    vlist['EXTRA_OESCONS'] = 'no'
    vlist['EXTRA_OECMAKE'] = 'no'
    vlist['EXTRA_OEMESON'] = 'no'

    clist = OrderedDict()
    clist['variables'] = vlist
    clist['filepath'] = 'no'
    clist['sha256sum'] = 'no'
    clist['layerdir'] = 'no'
    clist['layer'] = 'no'
    clist['inherits'] = 'no'
    clist['source_urls'] = 'no'
    clist['packageconfig_opts'] = 'no'
    clist['patches'] = 'no'
    clist['packagedir'] = 'no'
    return clist

def dump_config(args):
    config = default_config()
    with open('default_config.json', 'w') as f:
        json.dump(config, f, indent=2)
    logger.info('Default config list dumped to default_config.json')

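# Export information about all recipes in the manifest (per the config) plus
# the manifest, package and recipe lists, packed into a .tar.gz archive.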
def export_manifest_info(args):

    def handle_value(value):
        if value:
            return oe.utils.squashspaces(value)
        else:
            return value

    if args.config:
        logger.debug('config: %s' % args.config)
        f = open(args.config, 'r')
        config = json.load(f, object_pairs_hook=OrderedDict)
    else:
        config = default_config()
    if logger.isEnabledFor(logging.DEBUG):
        print('Configuration:')
        json.dump(config, sys.stdout, indent=2)
        print('')

    tmpoutdir = tempfile.mkdtemp(prefix=os.path.basename(__file__)+'-')
    logger.debug('tmp dir: %s' % tmpoutdir)

    # export manifest
    shutil.copy2(args.manifest,os.path.join(tmpoutdir, "manifest"))

    with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=False)

        pkglist = get_pkg_list(args.manifest)
        # export pkg list
        f = open(os.path.join(tmpoutdir, "pkgs"), 'w')
        for pkg in pkglist:
            f.write('%s\n' % pkg)
        f.close()

        recipelist = []
        for pkg in pkglist:
            recipe = pkg2recipe(tinfoil,pkg)
            if recipe:
                if not recipe in recipelist:
                    recipelist.append(recipe)
        recipelist.sort()
        # export recipe list
        f = open(os.path.join(tmpoutdir, "recipes"), 'w')
        for recipe in recipelist:
            f.write('%s\n' % recipe)
        f.close()

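        # Collect per-recipe information; the temporary directory is removed in
        # the finally block below even if parsing fails.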
        try:
            rvalues = OrderedDict()
            for pn in sorted(recipelist):
                logger.debug('Package: %s' % pn)
                rd = tinfoil.parse_recipe(pn)

                rvalues[pn] = OrderedDict()

                for varname in config['variables']:
                    if config['variables'][varname] == 'yes':
                        rvalues[pn][varname] = handle_value(rd.getVar(varname))

                fpth = rd.getVar('FILE')
                layerdir = oe.recipeutils.find_layerdir(fpth)
                if config['filepath'] == 'yes':
                    rvalues[pn]['filepath'] = os.path.relpath(fpth, layerdir)
                if config['sha256sum'] == 'yes':
                    rvalues[pn]['sha256sum'] = bb.utils.sha256_file(fpth)

                if config['layerdir'] == 'yes':
                    rvalues[pn]['layerdir'] = layerdir

                if config['layer'] == 'yes':
                    rvalues[pn]['layer'] = os.path.basename(layerdir)

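                # Record only the classes this recipe inherits itself, i.e. those in the
                # recipe's __inherit_cache but not in the global configuration's cache.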
                if config['inherits'] == 'yes':
                    gr = set(tinfoil.config_data.getVar("__inherit_cache") or [])
                    lr = set(rd.getVar("__inherit_cache") or [])
                    rvalues[pn]['inherits'] = sorted({os.path.splitext(os.path.basename(r))[0] for r in lr if r not in gr})

                if config['source_urls'] == 'yes':
                    rvalues[pn]['source_urls'] = []
                    for url in (rd.getVar('SRC_URI') or '').split():
                        if not url.startswith('file://'):
                            url = url.split(';')[0]
                            rvalues[pn]['source_urls'].append(url)

                if config['packageconfig_opts'] == 'yes':
                    rvalues[pn]['packageconfig_opts'] = OrderedDict()
                    for key in rd.getVarFlags('PACKAGECONFIG').keys():
                        if key == 'doc':
                            continue
                        rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key, True)

                if config['patches'] == 'yes':
                    patches = oe.recipeutils.get_recipe_patches(rd)
                    rvalues[pn]['patches'] = []
                    if patches:
                        recipeoutdir = os.path.join(tmpoutdir, pn, 'patches')
                        bb.utils.mkdirhier(recipeoutdir)
                        for patch in patches:
                            # Patches may be in other layers too
                            patchlayerdir = oe.recipeutils.find_layerdir(patch)
                            # patchlayerdir will be None for remote patches, which we ignore
                            # (since currently they are considered as part of sources)
                            if patchlayerdir:
                                rvalues[pn]['patches'].append((os.path.basename(patchlayerdir), os.path.relpath(patch, patchlayerdir)))
                                shutil.copy(patch, recipeoutdir)

                if config['packagedir'] == 'yes':
                    pn_dir = os.path.join(tmpoutdir, pn)
                    bb.utils.mkdirhier(pn_dir)
                    f = open(os.path.join(pn_dir, 'recipe.json'), 'w')
                    json.dump(rvalues[pn], f, indent=2)
                    f.close()

            with open(os.path.join(tmpoutdir, 'recipes.json'), 'w') as f:
                json.dump(rvalues, f, indent=2)

            if args.output:
                outname = os.path.basename(args.output)
            else:
                outname = os.path.splitext(os.path.basename(args.manifest))[0]
            if outname.endswith('.tar.gz'):
                outname = outname[:-7]
            elif outname.endswith('.tgz'):
                outname = outname[:-4]

            tarfn = outname
            if tarfn.endswith(os.sep):
                tarfn = tarfn[:-1]
            if not tarfn.endswith(('.tar.gz', '.tgz')):
                tarfn += '.tar.gz'
            with open(tarfn, 'wb') as f:
                with tarfile.open(None, "w:gz", f) as tar:
                    tar.add(tmpoutdir, outname)
        finally:
            shutil.rmtree(tmpoutdir)


def main():
    parser = argparse_oe.ArgumentParser(description="Image manifest utility",
                                        epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
    parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
    subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
    subparsers.required = True

    # get recipe info
    parser_get_recipes = subparsers.add_parser('recipe-info',
                                               help='Get recipe info',
                                               description='Get recipe information for a package')
    parser_get_recipes.add_argument('package', help='Package name')
    parser_get_recipes.set_defaults(func=get_recipe)

    # list build dependencies
    parser_pkg_dep = subparsers.add_parser('list-depends',
                                           help='List dependencies',
                                           description='List dependencies required to build the package')
    parser_pkg_dep.add_argument('--native', help='also print native and cross packages', action='store_true')
    parser_pkg_dep.add_argument('package', help='Package name')
    parser_pkg_dep.set_defaults(func=pkg_dependencies)

    # list recipes
    parser_recipes = subparsers.add_parser('list-recipes',
                                           help='List recipes producing packages within an image',
                                           description='Lists recipes producing the packages that went into an image, using the manifest and pkgdata')
    parser_recipes.add_argument('manifest', help='Manifest file')
    parser_recipes.set_defaults(func=list_recipes)

    # list packages
    parser_packages = subparsers.add_parser('list-packages',
                                            help='List packages within an image',
                                            description='Lists packages that went into an image, using the manifest')
    parser_packages.add_argument('manifest', help='Manifest file')
    parser_packages.set_defaults(func=list_packages)

    # list layers
    parser_layers = subparsers.add_parser('list-layers',
                                          help='List included layers',
                                          description='Lists included layers')
    parser_layers.add_argument('-o', '--output', help='Output file - defaults to stdout if not specified',
                               default=sys.stdout, type=argparse.FileType('w'))
    parser_layers.set_defaults(func=list_layers)

    # dump default configuration file
    parser_dconfig = subparsers.add_parser('dump-config',
                                           help='Dump default config',
                                           description='Dump default config to default_config.json')
    parser_dconfig.set_defaults(func=dump_config)

    # export recipe info for packages in manifest
    parser_export = subparsers.add_parser('manifest-info',
                                          help='Export recipe info for a manifest',
                                          description='Export recipe information using the manifest')
    parser_export.add_argument('-c', '--config', help='load config from json file')
    parser_export.add_argument('-o', '--output', help='Output file (tarball) - defaults to manifest name if not specified')
    parser_export.add_argument('manifest', help='Manifest file')
    parser_export.set_defaults(func=export_manifest_info)

    args = parser.parse_args()

    if args.debug:
        logger.setLevel(logging.DEBUG)
        logger.debug("Debug Enabled")
    elif args.quiet:
        logger.setLevel(logging.ERROR)

    ret = args.func(args)

    return ret


if __name__ == "__main__":
    try:
        ret = main()
    except Exception:
        ret = 1
        import traceback
        traceback.print_exc()
    sys.exit(ret)